repo_name
stringlengths
5
100
path
stringlengths
4
375
copies
stringclasses
991 values
size
stringlengths
4
7
content
stringlengths
666
1M
license
stringclasses
15 values
nhippenmeyer/django
tests/extra_regress/models.py
281
1401
from __future__ import unicode_literals import copy import datetime from django.contrib.auth.models import User from django.db import models from django.utils.encoding import python_2_unicode_compatible @python_2_unicode_compatible class RevisionableModel(models.Model): base = models.ForeignKey('self', models.SET_NULL, null=True) title = models.CharField(blank=True, max_length=255) when = models.DateTimeField(default=datetime.datetime.now) def __str__(self): return "%s (%s, %s)" % (self.title, self.id, self.base.id) def save(self, *args, **kwargs): super(RevisionableModel, self).save(*args, **kwargs) if not self.base: self.base = self kwargs.pop('force_insert', None) kwargs.pop('force_update', None) super(RevisionableModel, self).save(*args, **kwargs) def new_revision(self): new_revision = copy.copy(self) new_revision.pk = None return new_revision class Order(models.Model): created_by = models.ForeignKey(User, models.CASCADE) text = models.TextField() @python_2_unicode_compatible class TestObject(models.Model): first = models.CharField(max_length=20) second = models.CharField(max_length=20) third = models.CharField(max_length=20) def __str__(self): return 'TestObject: %s,%s,%s' % (self.first, self.second, self.third)
bsd-3-clause
0Chencc/CTFCrackTools
Lib/test/test_wave.py
141
1151
from test.test_support import TESTFN, run_unittest import os import wave import unittest nchannels = 2 sampwidth = 2 framerate = 8000 nframes = 100 class TestWave(unittest.TestCase): def setUp(self): self.f = None def tearDown(self): if self.f is not None: self.f.close() try: os.remove(TESTFN) except OSError: pass def test_it(self): self.f = wave.open(TESTFN, 'wb') self.f.setnchannels(nchannels) self.f.setsampwidth(sampwidth) self.f.setframerate(framerate) self.f.setnframes(nframes) output = '\0' * nframes * nchannels * sampwidth self.f.writeframes(output) self.f.close() self.f = wave.open(TESTFN, 'rb') self.assertEqual(nchannels, self.f.getnchannels()) self.assertEqual(sampwidth, self.f.getsampwidth()) self.assertEqual(framerate, self.f.getframerate()) self.assertEqual(nframes, self.f.getnframes()) self.assertEqual(self.f.readframes(nframes), output) def test_main(): run_unittest(TestWave) if __name__ == '__main__': test_main()
gpl-3.0
tsiktsiris/falcon
Documentation/networking/cxacru-cf.py
14668
1626
#!/usr/bin/env python # Copyright 2009 Simon Arlott # # This program is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by the Free # Software Foundation; either version 2 of the License, or (at your option) # any later version. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for # more details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 59 # Temple Place - Suite 330, Boston, MA 02111-1307, USA. # # Usage: cxacru-cf.py < cxacru-cf.bin # Output: values string suitable for the sysfs adsl_config attribute # # Warning: cxacru-cf.bin with MD5 hash cdbac2689969d5ed5d4850f117702110 # contains mis-aligned values which will stop the modem from being able # to make a connection. If the first and last two bytes are removed then # the values become valid, but the modulation will be forced to ANSI # T1.413 only which may not be appropriate. # # The original binary format is a packed list of le32 values. import sys import struct i = 0 while True: buf = sys.stdin.read(4) if len(buf) == 0: break elif len(buf) != 4: sys.stdout.write("\n") sys.stderr.write("Error: read {0} not 4 bytes\n".format(len(buf))) sys.exit(1) if i > 0: sys.stdout.write(" ") sys.stdout.write("{0:x}={1}".format(i, struct.unpack("<I", buf)[0])) i += 1 sys.stdout.write("\n")
gpl-2.0
sursum/buckanjaren
buckanjaren/lib/python3.5/site-packages/django/contrib/auth/context_processors.py
189
1939
# PermWrapper and PermLookupDict proxy the permissions system into objects that # the template system can understand. class PermLookupDict(object): def __init__(self, user, app_label): self.user, self.app_label = user, app_label def __repr__(self): return str(self.user.get_all_permissions()) def __getitem__(self, perm_name): return self.user.has_perm("%s.%s" % (self.app_label, perm_name)) def __iter__(self): # To fix 'item in perms.someapp' and __getitem__ interaction we need to # define __iter__. See #18979 for details. raise TypeError("PermLookupDict is not iterable.") def __bool__(self): return self.user.has_module_perms(self.app_label) def __nonzero__(self): # Python 2 compatibility return type(self).__bool__(self) class PermWrapper(object): def __init__(self, user): self.user = user def __getitem__(self, app_label): return PermLookupDict(self.user, app_label) def __iter__(self): # I am large, I contain multitudes. raise TypeError("PermWrapper is not iterable.") def __contains__(self, perm_name): """ Lookup by "someapp" or "someapp.someperm" in perms. """ if '.' not in perm_name: # The name refers to module. return bool(self[perm_name]) app_label, perm_name = perm_name.split('.', 1) return self[app_label][perm_name] def auth(request): """ Returns context variables required by apps that use Django's authentication system. If there is no 'user' attribute in the request, uses AnonymousUser (from django.contrib.auth). """ if hasattr(request, 'user'): user = request.user else: from django.contrib.auth.models import AnonymousUser user = AnonymousUser() return { 'user': user, 'perms': PermWrapper(user), }
mit
eduNEXT/edunext-platform
openedx/core/djangoapps/content/block_structure/transformer_registry.py
4
2387
""" Block Structure Transformer Registry implemented using the platform's PluginManager. """ from base64 import b64encode from hashlib import sha1 import six from openedx.core.lib.cache_utils import process_cached from openedx.core.lib.plugins import PluginManager class TransformerRegistry(PluginManager): """ Registry for all of the block structure transformers that have been made available. All block structure transformers should implement `BlockStructureTransformer`. """ NAMESPACE = 'openedx.block_structure_transformer' USE_PLUGIN_MANAGER = True @classmethod def get_registered_transformers(cls): """ Returns a set of all registered transformers. Returns: {BlockStructureTransformer} - All transformers that are registered with the platform's PluginManager. """ if cls.USE_PLUGIN_MANAGER: return set(six.itervalues(cls.get_available_plugins())) else: return set() @classmethod @process_cached def get_write_version_hash(cls): """ Returns a deterministic hash value of the WRITE_VERSION of all registered transformers. """ hash_obj = sha1() sorted_transformers = sorted(cls.get_registered_transformers(), key=lambda t: t.name()) for transformer in sorted_transformers: hash_obj.update(six.b(transformer.name())) hash_obj.update(six.b(str(transformer.WRITE_VERSION))) return b64encode(hash_obj.digest()).decode('utf-8') @classmethod def find_unregistered(cls, transformers): """ Find and returns the names of all the transformers from the given list that aren't registered with the platform's PluginManager. Arguments: transformers ([BlockStructureTransformer] - List of transformers to check in the registry. Returns: set([string]) - Set of names of a subset of the given transformers that weren't found in the registry. """ registered_transformer_names = set(reg_trans.name() for reg_trans in cls.get_registered_transformers()) requested_transformer_names = set(transformer.name() for transformer in transformers) return requested_transformer_names - registered_transformer_names
agpl-3.0
UNINETT/nav
python/nav/ipdevpoll/plugins/psuwatch.py
1
4821
# # Copyright (C) 2019 Uninett AS # # This file is part of Network Administration Visualized (NAV). # # NAV is free software: you can redistribute it and/or modify it under # the terms of the GNU General Public License version 3 as published by # the Free Software Foundation. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. You should have received a copy of the GNU General Public License # along with NAV. If not, see <http://www.gnu.org/licenses/>. # """ipdevpoll plugin to monitor the state of known field-replaceable power supply and fan units. """ import datetime from twisted.internet import defer from django.db import transaction from nav.event2 import EventFactory from nav.ipdevpoll import Plugin, db from nav.models.manage import PowerSupplyOrFan from .psu import get_mibretrievers_from_vendor_id CALL_MAP = {"powerSupply": "get_power_supply_status", "fan": "get_fan_status"} PSU_EVENT = EventFactory("ipdevpoll", "eventEngine", "psuState", "psuNotOK", "psuOK") FAN_EVENT = EventFactory("ipdevpoll", "eventEngine", "fanState", "fanNotOK", "fanOK") EVENT_MAP = {"powerSupply": PSU_EVENT, "fan": FAN_EVENT} # Shorthand for database states STATE_UNKNOWN = PowerSupplyOrFan.STATE_UNKNOWN STATE_UP = PowerSupplyOrFan.STATE_UP STATE_DOWN = PowerSupplyOrFan.STATE_DOWN STATE_WARNING = PowerSupplyOrFan.STATE_WARNING class PowerSupplyOrFanStateWatcher(Plugin): """Collects PSU and FAN statues from netboxes""" def __init__(self, *args, **kwargs): super(PowerSupplyOrFanStateWatcher, self).__init__(*args, **kwargs) self.vendor_id = ( self.netbox.type.get_enterprise_id() if self.netbox.type else None ) self.miblist = get_mibretrievers_from_vendor_id(self.vendor_id, self.agent) @defer.inlineCallbacks def handle(self): units = yield db.run_in_thread(self._get_database_unit_list) state_map = {} for unit in units: old_state = unit.up new_state = yield self._retrieve_current_unit_state(unit) state_map[unit] = new_state if old_state != new_state: yield self._handle_state_change(unit, new_state) defer.returnValue(True) @defer.inlineCallbacks def _retrieve_current_unit_state(self, unit): """ :type unit: nav.models.manage.PowerSupplyOrFan """ method_name = CALL_MAP.get(unit.physical_class) assert method_name is not None if unit.internal_id is not None: for mib in self.miblist: method = getattr(mib, method_name, None) if method: state = yield method(unit.internal_id) defer.returnValue(state or STATE_UNKNOWN) else: self._logger.debug("unit has no internal id: %r", unit) defer.returnValue(STATE_UNKNOWN) @defer.inlineCallbacks def _handle_state_change(self, unit, new_state): self._logger.info( "%s state changed from %s to %s", unit.name, unit.up, new_state ) yield db.run_in_thread(self._update_internal_state, unit, new_state) yield db.run_in_thread(self._post_event, unit, new_state) # # Synchronous database access methods # def _get_database_unit_list(self): return list(PowerSupplyOrFan.objects.filter(netbox_id=self.netbox.id)) @staticmethod def _update_internal_state(unit, new_state): old_state = unit.up if old_state in (STATE_UP, STATE_UNKNOWN) and new_state in ( STATE_WARNING, STATE_DOWN, ): unit.downsince = datetime.datetime.now() elif old_state in (STATE_DOWN, STATE_WARNING) and new_state == STATE_UP: unit.downsince = None PowerSupplyOrFan.objects.filter(id=unit.id).update( up=new_state, downsince=unit.downsince ) def _post_event(self, unit, new_state): factory = EVENT_MAP.get(unit.physical_class) assert factory is not None if new_state in (STATE_DOWN, STATE_WARNING): construct = factory.start else: construct = factory.end varmap = { "sysname": unit.netbox.sysname, "unitname": unit.name, "state": new_state, } event = construct( netbox=unit.netbox, device=unit.device if unit.device_id else None, subid=unit.id, varmap=varmap, ) self._logger.debug("posting state change event for %s: %r", unit, event) with transaction.atomic(): event.save()
gpl-2.0
kineticgar/mecanum
avr_controller/scripts/voltage.py
4
1549
#!/usr/bin/env python import sys import serial import time print('Opening port') arduino = serial.Serial('/dev/ttyACM0', 115200, timeout=1, dsrdtr=False) time.sleep(3) print('Flushing the buffer') arduino.readline() def a2s(arr): """ Array of integer byte values --> binary string """ return ''.join(chr(b) for b in arr) def getVoltage(): ''' Ask the arduino for a voltage reading. ''' arduino.write(a2s([3, # Message size 8, # FSM_ANALOGPUBLISHER 11])) # BATTERY_VOLTAGE msgSize = ord(arduino.read()) fsmId = ord(arduino.read()) pin = ord(arduino.read()) highByte = ord(arduino.read()) lowByte = ord(arduino.read()) R1 = 16.05 # kOhm R2 = 9.87 # kOhm offset = 0.98343 voltage = ((highByte << 8) + lowByte) * 5.0 / 1024 * (R1 + R2) / R2 * offset return voltage def main(): if len(sys.argv) > 1: version = sys.argv[1] else: version = str(0) file = open('voltages_' + version + '.csv', 'w') file.write('Seconds,Volts\n') start = time.time() while True: voltage = str(getVoltage()) seconds = int(time.time() - start) file.write(str(seconds) + ',' + voltage + '\n') if seconds >= 60 * 60: # Hours sys.stdout.write(str(seconds / (60 * 60)) + ':') seconds = seconds % (60 * 60) if seconds >= 60: # Minutes sys.stdout.write(str(seconds / 60) + ':') seconds = seconds % 60 # Seconds sys.stdout.write(str(seconds)) print(' - Voltage: ' + voltage) time.sleep(30) file.close() return if __name__ == "__main__": main()
gpl-2.0
marty331/jakesclock
flask/lib/python2.7/site-packages/setuptools/extension.py
192
1821
import sys import re import functools import distutils.core import distutils.errors import distutils.extension from .dist import _get_unpatched from . import msvc9_support _Extension = _get_unpatched(distutils.core.Extension) msvc9_support.patch_for_specialized_compiler() def have_pyrex(): """ Return True if Cython or Pyrex can be imported. """ pyrex_impls = 'Cython.Distutils.build_ext', 'Pyrex.Distutils.build_ext' for pyrex_impl in pyrex_impls: try: # from (pyrex_impl) import build_ext __import__(pyrex_impl, fromlist=['build_ext']).build_ext return True except Exception: pass return False class Extension(_Extension): """Extension that uses '.c' files in place of '.pyx' files""" def __init__(self, *args, **kw): _Extension.__init__(self, *args, **kw) self._convert_pyx_sources_to_lang() def _convert_pyx_sources_to_lang(self): """ Replace sources with .pyx extensions to sources with the target language extension. This mechanism allows language authors to supply pre-converted sources but to prefer the .pyx sources. """ if have_pyrex(): # the build has Cython, so allow it to compile the .pyx files return lang = self.language or '' target_ext = '.cpp' if lang.lower() == 'c++' else '.c' sub = functools.partial(re.sub, '.pyx$', target_ext) self.sources = list(map(sub, self.sources)) class Library(Extension): """Just like a regular Extension, but built as a library instead""" distutils.core.Extension = Extension distutils.extension.Extension = Extension if 'distutils.command.build_ext' in sys.modules: sys.modules['distutils.command.build_ext'].Extension = Extension
gpl-2.0
Vector35/binaryninja-api
python/lineardisassembly.py
2
15064
# Copyright (c) 2015-2021 Vector 35 Inc # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. import ctypes import binaryninja from binaryninja import _binaryninjacore as core from binaryninja import highlight from binaryninja import function from binaryninja import basicblock from binaryninja.enums import LinearViewObjectIdentifierType class LinearDisassemblyLine(object): def __init__(self, line_type, func, block, contents): self.type = line_type self.function = func self.block = block self.contents = contents def __repr__(self): return repr(self.contents) def __str__(self): return str(self.contents) class LinearViewObjectIdentifier(object): def __init__(self, name, start = None, end = None): self._name = name self._start = start self._end = end def __repr__(self): return "<LinearViewObjectIdentifier: " + str(self) + ">" def __str__(self): if not self.has_address: return self._name if self.has_range: return "%s 0x%x-0x%x" % (self._name, self._start, self._end) return "%s 0x%x" % (self._name, self._start) def __eq__(self, other): if not isinstance(other, self.__class__): return NotImplemented return (self._name, self._start, self._end) == (other._name, other._start, other._end) def __ne__(self, other): if not isinstance(other, self.__class__): return NotImplemented return not (self == other) def __hash__(self): return hash((self._name, self._start, self._end)) def _to_api_object(self, obj = None): if obj is None: result = core.BNLinearViewObjectIdentifier() else: result = obj result.name = self.name if self.has_range: result.type = LinearViewObjectIdentifierType.AddressRangeLinearViewObject result.start = self.start result.end = self.end elif self.has_address: result.type = LinearViewObjectIdentifierType.AddressLinearViewObject result.start = self.start result.end = self.start else: result.type = LinearViewObjectIdentifierType.SingleLinearViewObject result.start = 0 result.end = 0 return result @property def name(self): return self._name @property def address(self): return self._start @property def start(self): return self._start @property def end(self): return self._end @property def has_address(self): return self._start is not None @property def has_range(self): return self._start is not None and self._end is not None @classmethod def _from_api_object(cls, obj): if obj.type == LinearViewObjectIdentifierType.AddressLinearViewObject: result = LinearViewObjectIdentifier(obj.name, obj.start) elif obj.type == LinearViewObjectIdentifierType.AddressRangeLinearViewObject: result = LinearViewObjectIdentifier(obj.name, obj.start, obj.end) else: result = LinearViewObjectIdentifier(obj.name) return result class LinearViewObject(object): def __init__(self, handle, parent = None): self.handle = handle self._parent = parent def __del__(self): core.BNFreeLinearViewObject(self.handle) def __repr__(self): return "<LinearViewObject: " + str(self) + ">" def __len__(self): return self.end - self.start def __str__(self): result = str(self.identifier) if self._parent is not None: result = str(self._parent) + "/" + result return result @property def first_child(self): result = core.BNGetFirstLinearViewObjectChild(self.handle) if not result: return None return LinearViewObject(result, self) @property def last_child(self): result = core.BNGetLastLinearViewObjectChild(self.handle) if not result: return None return LinearViewObject(result, self) @property def previous(self): if self._parent is None: return None result = core.BNGetPreviousLinearViewObjectChild(self._parent.handle, self.handle) if not result: return None return LinearViewObject(result, self._parent) @property def next(self): if self._parent is None: return None result = core.BNGetNextLinearViewObjectChild(self._parent.handle, self.handle) if not result: return None return LinearViewObject(result, self._parent) @property def start(self): return core.BNGetLinearViewObjectStart(self.handle) @property def end(self): return core.BNGetLinearViewObjectEnd(self.handle) @property def parent(self): return self._parent @property def identifier(self): ident = core.BNGetLinearViewObjectIdentifier(self.handle) result = LinearViewObjectIdentifier._from_api_object(ident) core.BNFreeLinearViewObjectIdentifier(ident) return result @property def cursor(self): root = self while root.parent is not None: root = root.parent return LinearViewCursor(root) @property def ordering_index(self): if self.parent is None: return 0 return self.parent.ordering_index_for_child(self) @property def ordering_index_total(self): return core.BNGetLinearViewObjectOrderingIndexTotal(self.handle) def child_for_address(self, addr): result = core.BNGetLinearViewObjectChildForAddress(self.handle, addr) if not result: return None return LinearViewObject(result, self) def child_for_identifier(self, ident): ident_obj = ident._to_api_object() result = core.BNGetLinearViewObjectChildForIdentifier(self.handle, ident_obj) if not result: return None return LinearViewObject(result, self) def child_for_ordering_index(self, idx): result = core.BNGetLinearViewObjectChildForOrderingIndex(self.handle, idx) if not result: return None return LinearViewObject(result, self) def compare_children(self, a, b): return core.BNCompareLinearViewObjectChildren(self.handle, a.handle, b.handle) def get_lines(self, prev_obj, next_obj): if prev_obj is not None: prev_obj = prev_obj.handle if next_obj is not None: next_obj = next_obj.handle count = ctypes.c_ulonglong(0) lines = core.BNGetLinearViewObjectLines(self.handle, prev_obj, next_obj, count) result = [] for i in range(0, count.value): func = None block = None if lines[i].function: func = binaryninja.function.Function(self, core.BNNewFunctionReference(lines[i].function)) if lines[i].block: block = binaryninja.basicblock.BasicBlock(core.BNNewBasicBlockReference(lines[i].block), self) color = highlight.HighlightColor._from_core_struct(lines[i].contents.highlight) addr = lines[i].contents.addr tokens = binaryninja.function.InstructionTextToken.get_instruction_lines(lines[i].contents.tokens, lines[i].contents.count) contents = binaryninja.function.DisassemblyTextLine(tokens, addr, color = color) result.append(LinearDisassemblyLine(lines[i].type, func, block, contents)) core.BNFreeLinearDisassemblyLines(lines, count.value) return result def ordering_index_for_child(self, child): return core.BNGetLinearViewObjectOrderingIndexForChild(self.handle, child.handle) @classmethod def disassembly(cls, view, settings = None): if settings is not None: settings = settings.handle return LinearViewObject(core.BNCreateLinearViewDisassembly(view.handle, settings)) @classmethod def lifted_il(cls, view, settings = None): if settings is not None: settings = settings.handle return LinearViewObject(core.BNCreateLinearViewLiftedIL(view.handle, settings)) @classmethod def llil(cls, view, settings = None): if settings is not None: settings = settings.handle return LinearViewObject(core.BNCreateLinearViewLowLevelIL(view.handle, settings)) @classmethod def llil_ssa_form(cls, view, settings = None): if settings is not None: settings = settings.handle return LinearViewObject(core.BNCreateLinearViewLowLevelILSSAForm(view.handle, settings)) @classmethod def mlil(cls, view, settings = None): if settings is not None: settings = settings.handle return LinearViewObject(core.BNCreateLinearViewMediumLevelIL(view.handle, settings)) @classmethod def mlil_ssa_form(cls, view, settings = None): if settings is not None: settings = settings.handle return LinearViewObject(core.BNCreateLinearViewMediumLevelILSSAForm(view.handle, settings)) @classmethod def mmlil(cls, view, settings = None): if settings is not None: settings = settings.handle return LinearViewObject(core.BNCreateLinearViewMappedMediumLevelIL(view.handle, settings)) @classmethod def mmlil_ssa_form(cls, view, settings = None): if settings is not None: settings = settings.handle return LinearViewObject(core.BNCreateLinearViewMappedMediumLevelILSSAForm(view.handle, settings)) @classmethod def hlil(cls, view, settings = None): if settings is not None: settings = settings.handle return LinearViewObject(core.BNCreateLinearViewHighLevelIL(view.handle, settings)) @classmethod def hlil_ssa_form(cls, view, settings = None): if settings is not None: settings = settings.handle return LinearViewObject(core.BNCreateLinearViewHighLevelILSSAForm(view.handle, settings)) class LinearViewCursor(object): def __init__(self, root_object, handle = None): if handle is not None: self.handle = handle else: self.handle = core.BNCreateLinearViewCursor(root_object.handle) def __del__(self): core.BNFreeLinearViewCursor(self.handle) def __repr__(self): return "<LinearViewCursor: " + str(self.current_object) + ">" def __str__(self): return str(self.current_object) def __eq__(self, other): if not isinstance(other, self.__class__): return NotImplemented return LinearViewCursor.compare(self, other) == 0 def __ne__(self, other): if not isinstance(other, self.__class__): return NotImplemented return LinearViewCursor.compare(self, other) != 0 def __lt__(self, other): if not isinstance(other, self.__class__): return NotImplemented return LinearViewCursor.compare(self, other) < 0 def __le__(self, other): if not isinstance(other, self.__class__): return NotImplemented return LinearViewCursor.compare(self, other) <= 0 def __gt__(self, other): if not isinstance(other, self.__class__): return NotImplemented return LinearViewCursor.compare(self, other) > 0 def __ge__(self, other): if not isinstance(other, self.__class__): return NotImplemented return LinearViewCursor.compare(self, other) >= 0 def __cmp__(self, other): if not isinstance(other, self.__class__): return NotImplemented return LinearViewCursor.compare(self, other) @property def before_begin(self): return core.BNIsLinearViewCursorBeforeBegin(self.handle) @property def after_end(self): return core.BNIsLinearViewCursorAfterEnd(self.handle) @property def valid(self): return not (self.before_begin or self.after_end) @property def current_object(self): count = ctypes.c_ulonglong(0) path = core.BNGetLinearViewCursorPathObjects(self.handle, count) result = None for i in range(0, count.value): result = LinearViewObject(core.BNNewLinearViewObjectReference(path[i]), result) core.BNFreeLinearViewCursorPathObjects(path, count.value) return result @property def path(self): count = ctypes.c_ulonglong(0) path = core.BNGetLinearViewCursorPath(self.handle, count) result = [] for i in range(0, count.value): result.append(LinearViewObjectIdentifier._from_api_object(path[i])) core.BNFreeLinearViewCursorPath(path, count.value) return result @property def path_objects(self): count = ctypes.c_ulonglong(0) path = core.BNGetLinearViewCursorPathObjects(self.handle, count) result = [] parent = None for i in range(0, count.value): obj = LinearViewObject(core.BNNewLinearViewObjectReference(path[i]), parent) result.append(obj) parent = obj core.BNFreeLinearViewCursorPathObjects(path, count.value) return result @property def ordering_index(self): return core.BNGetLinearViewCursorOrderingIndex(self.handle) @property def ordering_index_total(self): return core.BNGetLinearViewCursorOrderingIndexTotal(self.handle) def seek_to_begin(self): core.BNSeekLinearViewCursorToBegin(self.handle) def seek_to_end(self): core.BNSeekLinearViewCursorToEnd(self.handle) def seek_to_address(self, addr): core.BNSeekLinearViewCursorToAddress(self.handle, addr) def seek_to_path(self, path, addr = None): if isinstance(path, LinearViewCursor): if addr is None: return core.BNSeekLinearViewCursorToCursorPath(self.handle, path.handle) return core.BNSeekLinearViewCursorToCursorPathAndAddress(self.handle, path.handle, addr) path_objs = (core.BNLinearViewObjectIdentifier * len(path))() for i in range(0, len(path)): path[i]._to_api_object(path_objs[i]) if addr is None: return core.BNSeekLinearViewCursorToPath(self.handle, path_objs, len(path)) return core.BNSeekLinearViewCursorToPathAndAddress(self.handle, path_objs, len(path), addr) def seek_to_ordering_index(self, idx): core.BNSeekLinearViewCursorToOrderingIndex(self.handle, idx) def previous(self): return core.BNLinearViewCursorPrevious(self.handle) def next(self): return core.BNLinearViewCursorNext(self.handle) @property def lines(self): count = ctypes.c_ulonglong(0) lines = core.BNGetLinearViewCursorLines(self.handle, count) result = [] for i in range(0, count.value): func = None block = None if lines[i].function: func = binaryninja.function.Function(self, core.BNNewFunctionReference(lines[i].function)) if lines[i].block: block = binaryninja.basicblock.BasicBlock(core.BNNewBasicBlockReference(lines[i].block), self) color = highlight.HighlightColor._from_core_struct(lines[i].contents.highlight) addr = lines[i].contents.addr tokens = binaryninja.function.InstructionTextToken.get_instruction_lines(lines[i].contents.tokens, lines[i].contents.count) contents = binaryninja.function.DisassemblyTextLine(tokens, addr, color = color) result.append(LinearDisassemblyLine(lines[i].type, func, block, contents)) core.BNFreeLinearDisassemblyLines(lines, count.value) return result def duplicate(self): return LinearViewCursor(None, handle = core.BNDuplicateLinearViewCursor(self.handle)) @classmethod def compare(cls, a, b): return core.BNCompareLinearViewCursors(a.handle, b.handle)
mit
3dfxmadscientist/odoo_vi
addons/mass_mailing/controllers/main.py
21
3953
import werkzeug from openerp import http, SUPERUSER_ID from openerp.http import request class MassMailController(http.Controller): @http.route('/mail/track/<int:mail_id>/blank.gif', type='http', auth='none') def track_mail_open(self, mail_id, **post): """ Email tracking. """ mail_mail_stats = request.registry.get('mail.mail.statistics') mail_mail_stats.set_opened(request.cr, SUPERUSER_ID, mail_mail_ids=[mail_id]) response = werkzeug.wrappers.Response() response.mimetype = 'image/gif' response.data = 'R0lGODlhAQABAIAAANvf7wAAACH5BAEAAAAALAAAAAABAAEAAAICRAEAOw=='.decode('base64') return response @http.route(['/mail/mailing/<int:mailing_id>/unsubscribe'], type='http', auth='none') def mailing(self, mailing_id, email=None, res_id=None, **post): cr, uid, context = request.cr, request.uid, request.context MassMailing = request.registry['mail.mass_mailing'] mailing_ids = MassMailing.exists(cr, SUPERUSER_ID, [mailing_id], context=context) if not mailing_ids: return 'KO' mailing = MassMailing.browse(cr, SUPERUSER_ID, mailing_ids[0], context=context) if mailing.mailing_model == 'mail.mass_mailing.contact': list_ids = [l.id for l in mailing.contact_list_ids] record_ids = request.registry[mailing.mailing_model].search(cr, SUPERUSER_ID, [('list_id', 'in', list_ids), ('id', '=', res_id), ('email', 'ilike', email)], context=context) request.registry[mailing.mailing_model].write(cr, SUPERUSER_ID, record_ids, {'opt_out': True}, context=context) else: email_fname = None if 'email_from' in request.registry[mailing.mailing_model]._all_columns: email_fname = 'email_from' elif 'email' in request.registry[mailing.mailing_model]._all_columns: email_fname = 'email' if email_fname: record_ids = request.registry[mailing.mailing_model].search(cr, SUPERUSER_ID, [('id', '=', res_id), (email_fname, 'ilike', email)], context=context) if 'opt_out' in request.registry[mailing.mailing_model]._all_columns: request.registry[mailing.mailing_model].write(cr, SUPERUSER_ID, record_ids, {'opt_out': True}, context=context) return 'OK' @http.route(['/website_mass_mailing/is_subscriber'], type='json', auth="public", website=True) def is_subscriber(self, list_id, **post): cr, uid, context = request.cr, request.uid, request.context Contacts = request.registry['mail.mass_mailing.contact'] Users = request.registry['res.users'] is_subscriber = False email = None if uid != request.website.user_id.id: email = Users.browse(cr, SUPERUSER_ID, uid, context).email elif request.session.get('mass_mailing_email'): email = request.session['mass_mailing_email'] if email: contact_ids = Contacts.search(cr, SUPERUSER_ID, [('list_id', '=', int(list_id)), ('email', '=', email)], context=context) is_subscriber = len(contact_ids) > 0 return {'is_subscriber': is_subscriber, 'email': email} @http.route(['/website_mass_mailing/subscribe'], type='json', auth="public", website=True) def subscribe(self, list_id, email, **post): cr, uid, context = request.cr, request.uid, request.context Contacts = request.registry['mail.mass_mailing.contact'] contact_ids = Contacts.search(cr, SUPERUSER_ID, [('list_id', '=', int(list_id)), ('email', '=', email)], context=context) if not contact_ids: contact_ng = Contacts.name_create(cr, SUPERUSER_ID, email, context=context) Contacts.write(cr, SUPERUSER_ID, [contact_ng[0]], {'list_id': int(list_id)}, context=context) # add email to session request.session['mass_mailing_email'] = email return True
agpl-3.0
EnviroCentre/jython-upgrade
jython/lib/test/test_unicode_file.py
122
8491
# Test some Unicode file name semantics # We dont test many operations on files other than # that their names can be used with Unicode characters. import os, glob, time, shutil import unicodedata import unittest from test.test_support import run_unittest, TESTFN_UNICODE from test.test_support import TESTFN_ENCODING, TESTFN_UNENCODABLE try: TESTFN_ENCODED = TESTFN_UNICODE.encode(TESTFN_ENCODING) except (UnicodeError, TypeError): # Either the file system encoding is None, or the file name # cannot be encoded in the file system encoding. raise unittest.SkipTest("No Unicode filesystem semantics on this platform.") if TESTFN_ENCODED.decode(TESTFN_ENCODING) != TESTFN_UNICODE: # The file system encoding does not support Latin-1 # (which test_support assumes), so try the file system # encoding instead. import sys try: TESTFN_UNICODE = unicode("@test-\xe0\xf2", sys.getfilesystemencoding()) TESTFN_ENCODED = TESTFN_UNICODE.encode(TESTFN_ENCODING) if '?' in TESTFN_ENCODED: # MBCS will not report the error properly raise UnicodeError, "mbcs encoding problem" except (UnicodeError, TypeError): raise unittest.SkipTest("Cannot find a suiteable filename.") if TESTFN_ENCODED.decode(TESTFN_ENCODING) != TESTFN_UNICODE: raise unittest.SkipTest("Cannot find a suitable filename.") def remove_if_exists(filename): if os.path.exists(filename): os.unlink(filename) class TestUnicodeFiles(unittest.TestCase): # The 'do_' functions are the actual tests. They generally assume the # file already exists etc. # Do all the tests we can given only a single filename. The file should # exist. def _do_single(self, filename): self.assertTrue(os.path.exists(filename)) self.assertTrue(os.path.isfile(filename)) self.assertTrue(os.access(filename, os.R_OK)) self.assertTrue(os.path.exists(os.path.abspath(filename))) self.assertTrue(os.path.isfile(os.path.abspath(filename))) self.assertTrue(os.access(os.path.abspath(filename), os.R_OK)) os.chmod(filename, 0777) os.utime(filename, None) os.utime(filename, (time.time(), time.time())) # Copy/rename etc tests using the same filename self._do_copyish(filename, filename) # Filename should appear in glob output self.assertTrue( os.path.abspath(filename)==os.path.abspath(glob.glob(filename)[0])) # basename should appear in listdir. path, base = os.path.split(os.path.abspath(filename)) if isinstance(base, str): base = base.decode(TESTFN_ENCODING) file_list = os.listdir(path) # listdir() with a unicode arg may or may not return Unicode # objects, depending on the platform. if file_list and isinstance(file_list[0], str): file_list = [f.decode(TESTFN_ENCODING) for f in file_list] # Normalize the unicode strings, as round-tripping the name via the OS # may return a different (but equivalent) value. base = unicodedata.normalize("NFD", base) file_list = [unicodedata.normalize("NFD", f) for f in file_list] self.assertIn(base, file_list) # Do as many "equivalancy' tests as we can - ie, check that although we # have different types for the filename, they refer to the same file. def _do_equivalent(self, filename1, filename2): # Note we only check "filename1 against filename2" - we don't bother # checking "filename2 against 1", as we assume we are called again with # the args reversed. self.assertTrue(type(filename1)!=type(filename2), "No point checking equivalent filenames of the same type") # stat and lstat should return the same results. self.assertEqual(os.stat(filename1), os.stat(filename2)) self.assertEqual(os.lstat(filename1), os.lstat(filename2)) # Copy/rename etc tests using equivalent filename self._do_copyish(filename1, filename2) # Tests that copy, move, etc one file to another. def _do_copyish(self, filename1, filename2): # Should be able to rename the file using either name. self.assertTrue(os.path.isfile(filename1)) # must exist. os.rename(filename1, filename2 + ".new") self.assertTrue(os.path.isfile(filename1+".new")) os.rename(filename1 + ".new", filename2) self.assertTrue(os.path.isfile(filename2)) shutil.copy(filename1, filename2 + ".new") os.unlink(filename1 + ".new") # remove using equiv name. # And a couple of moves, one using each name. shutil.move(filename1, filename2 + ".new") self.assertTrue(not os.path.exists(filename2)) shutil.move(filename1 + ".new", filename2) self.assertTrue(os.path.exists(filename1)) # Note - due to the implementation of shutil.move, # it tries a rename first. This only fails on Windows when on # different file systems - and this test can't ensure that. # So we test the shutil.copy2 function, which is the thing most # likely to fail. shutil.copy2(filename1, filename2 + ".new") os.unlink(filename1 + ".new") def _do_directory(self, make_name, chdir_name, encoded): cwd = os.getcwd() if os.path.isdir(make_name): os.rmdir(make_name) os.mkdir(make_name) try: os.chdir(chdir_name) try: if not encoded: cwd_result = os.getcwdu() name_result = make_name else: cwd_result = os.getcwd().decode(TESTFN_ENCODING) name_result = make_name.decode(TESTFN_ENCODING) cwd_result = unicodedata.normalize("NFD", cwd_result) name_result = unicodedata.normalize("NFD", name_result) self.assertEqual(os.path.basename(cwd_result),name_result) finally: os.chdir(cwd) finally: os.rmdir(make_name) # The '_test' functions 'entry points with params' - ie, what the # top-level 'test' functions would be if they could take params def _test_single(self, filename): remove_if_exists(filename) f = file(filename, "w") f.close() try: self._do_single(filename) finally: os.unlink(filename) self.assertTrue(not os.path.exists(filename)) # and again with os.open. f = os.open(filename, os.O_CREAT) os.close(f) try: self._do_single(filename) finally: os.unlink(filename) def _test_equivalent(self, filename1, filename2): remove_if_exists(filename1) self.assertTrue(not os.path.exists(filename2)) f = file(filename1, "w") f.close() try: self._do_equivalent(filename1, filename2) finally: os.unlink(filename1) # The 'test' functions are unittest entry points, and simply call our # _test functions with each of the filename combinations we wish to test def test_single_files(self): self._test_single(TESTFN_ENCODED) self._test_single(TESTFN_UNICODE) if TESTFN_UNENCODABLE is not None: self._test_single(TESTFN_UNENCODABLE) def test_equivalent_files(self): self._test_equivalent(TESTFN_ENCODED, TESTFN_UNICODE) self._test_equivalent(TESTFN_UNICODE, TESTFN_ENCODED) def test_directories(self): # For all 'equivalent' combinations: # Make dir with encoded, chdir with unicode, checkdir with encoded # (or unicode/encoded/unicode, etc ext = ".dir" self._do_directory(TESTFN_ENCODED+ext, TESTFN_ENCODED+ext, True) self._do_directory(TESTFN_ENCODED+ext, TESTFN_UNICODE+ext, True) self._do_directory(TESTFN_UNICODE+ext, TESTFN_ENCODED+ext, False) self._do_directory(TESTFN_UNICODE+ext, TESTFN_UNICODE+ext, False) # Our directory name that can't use a non-unicode name. if TESTFN_UNENCODABLE is not None: self._do_directory(TESTFN_UNENCODABLE+ext, TESTFN_UNENCODABLE+ext, False) def test_main(): run_unittest(__name__) if __name__ == "__main__": test_main()
mit
AndroidOpenDevelopment/android_external_chromium_org
chrome/browser/metrics/variations/generate_resources_map.py
9
5382
#!/usr/bin/python # Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import collections import hashlib import operator import os import re import sys RESOURCE_EXTRACT_REGEX = re.compile('^#define (\S*) (\d*)$', re.MULTILINE) class Error(Exception): """Base error class for all exceptions in generated_resources_map.""" class HashCollisionError(Error): """Multiple resource names hash to the same value.""" Resource = collections.namedtuple("Resource", ['hash', 'name', 'index']) def _HashName(name): """Returns the hash id for a name. Args: name: The name to hash. Returns: An int that is at most 32 bits. """ md5hash = hashlib.md5() md5hash.update(name) return int(md5hash.hexdigest()[:8], 16) def _GetNameIndexPairsIter(string_to_scan): """Gets an iterator of the resource name and index pairs of the given string. Scans the input string for lines of the form "#define NAME INDEX" and returns an iterator over all matching (NAME, INDEX) pairs. Args: string_to_scan: The input string to scan. Yields: A tuple of name and index. """ for match in RESOURCE_EXTRACT_REGEX.finditer(string_to_scan): yield match.group(1, 2) def _GetResourceListFromString(resources_content): """Produces a list of |Resource| objects from a string. The input string conaints lines of the form "#define NAME INDEX". The returned list is sorted primarily by hash, then name, and then index. Args: resources_content: The input string to process, contains lines of the form "#define NAME INDEX". Returns: A sorted list of |Resource| objects. """ resources = [Resource(_HashName(name), name, index) for name, index in _GetNameIndexPairsIter(resources_content)] # The default |Resource| order makes |resources| sorted by the hash, then # name, then index. resources.sort() return resources def _CheckForHashCollisions(sorted_resource_list): """Checks a sorted list of |Resource| objects for hash collisions. Args: sorted_resource_list: A sorted list of |Resource| objects. Returns: A set of all |Resource| objects with collisions. """ collisions = set() for i in xrange(len(sorted_resource_list) - 1): resource = sorted_resource_list[i] next_resource = sorted_resource_list[i+1] if resource.hash == next_resource.hash: collisions.add(resource) collisions.add(next_resource) return collisions def _GenDataArray( resources, entry_pattern, array_name, array_type, data_getter): """Generates a C++ statement defining a literal array containing the hashes. Args: resources: A sorted list of |Resource| objects. entry_pattern: A pattern to be used to generate each entry in the array. The pattern is expected to have a place for data and one for a comment, in that order. array_name: The name of the array being generated. array_type: The type of the array being generated. data_getter: A function that gets the array data from a |Resource| object. Returns: A string containing a C++ statement defining the an array. """ lines = [entry_pattern % (data_getter(r), r.name) for r in resources] pattern = """const %(type)s %(name)s[] = { %(content)s }; """ return pattern % {'type': array_type, 'name': array_name, 'content': '\n'.join(lines)} def _GenerateFileContent(resources_content): """Generates the .cc content from the given generated_resources.h content. Args: resources_content: The input string to process, contains lines of the form "#define NAME INDEX". Returns: .cc file content defining the kResourceHashes and kResourceIndices arrays. """ hashed_tuples = _GetResourceListFromString(resources_content) collisions = _CheckForHashCollisions(hashed_tuples) if collisions: error_message = "\n".join( ["hash: %i, name: %s" % (i[0], i[1]) for i in sorted(collisions)]) error_message = ("\nThe following names had hash collisions " "(sorted by the hash value):\n%s\n" %(error_message)) raise HashCollisionError(error_message) hashes_array = _GenDataArray( hashed_tuples, " %iU, // %s", 'kResourceHashes', 'uint32_t', operator.attrgetter('hash')) indices_array = _GenDataArray( hashed_tuples, " %s, // %s", 'kResourceIndices', 'int', operator.attrgetter('index')) return ( "// This file was generated by generate_resources_map.py. Do not edit.\n" "\n\n" "#include " "\"chrome/browser/metrics/variations/generated_resources_map.h\"\n\n" "namespace chrome_variations {\n\n" "%s" "\n" "%s" "\n" "} // namespace chrome_variations\n") % (hashes_array, indices_array) def main(resources_file, map_file): generated_resources_h = "" with open(resources_file, "r") as resources: generated_resources_h = resources.read() if len(generated_resources_h) == 0: raise Error("No content loaded for %s." % (resources_file)) file_content = _GenerateFileContent(generated_resources_h) with open(map_file, "w") as generated_file: generated_file.write(file_content) if __name__ == '__main__': sys.exit(main(sys.argv[1], sys.argv[2]))
bsd-3-clause
ialmetwally/android-resource-remover
setup.py
10
1065
import os import sys from setuptools import setup, find_packages version = '0.1.5' def read(f): return open(os.path.join(os.path.dirname(__file__), f)).read().strip() setup(name='android-resource-remover', version=version, description=('Android resource remover'), long_description='\n\n'.join((read('README.md'), read('CHANGELOG'))), keywords=['android'], classifiers=[ 'License :: OSI Approved :: BSD License', 'Intended Audience :: Developers', 'Programming Language :: Python'], author='Keepsafe', author_email='[email protected]', url='https://github.com/KeepSafe/android-resource-remover/', license='Apache', py_modules=['android_clean_app'], namespace_packages=[], install_requires=['lxml >= 3.3.3'], data_files=[('.', ['AUTHORS', 'CHANGELOG', 'LICENSE', 'README.md'])], entry_points={ 'console_scripts': [ 'android-resource-remover = android_clean_app:main'] }, include_package_data = False)
apache-2.0
denim2x/Vintageous
tests/commands/test__vi_dollar.py
6
4202
import unittest from collections import namedtuple from Vintageous.vi.utils import modes from Vintageous.tests import set_text from Vintageous.tests import add_sel from Vintageous.tests import get_sel from Vintageous.tests import first_sel from Vintageous.tests import ViewTest def get_text(test): return test.view.substr(test.R(0, test.view.size())) def first_sel_wrapper(test): return first_sel(test.view) test_data = namedtuple('test_data', 'cmd initial_text regions cmd_params expected actual_func msg') region_data = namedtuple('region_data', 'regions') TESTS_INTERNAL_NORMAL = ( # NORMAL mode test_data(cmd='_vi_dollar', initial_text='abc\nabc\n', regions=[[(0, 0), (0, 0)]], cmd_params={'mode': modes.NORMAL}, expected=region_data([(0, 2), (0, 2)]), actual_func=first_sel_wrapper, msg=''), test_data(cmd='_vi_dollar', initial_text=('abc\n' * 10), regions=[[(0, 0), (0, 0)]], cmd_params={'mode': modes.NORMAL, 'count': 5}, expected=region_data([18, 18]), actual_func=first_sel_wrapper, msg=''), test_data(cmd='_vi_dollar', initial_text=('abc\n\nabc\n'), regions=[[4, 4]], cmd_params={'mode': modes.NORMAL, 'count': 1}, expected=region_data([4, 4]), actual_func=first_sel_wrapper, msg='should not move on empty line'), # VISUAL mode test_data(cmd='_vi_dollar', initial_text='abc\nabc\n', regions=[[0, 1]], cmd_params={'mode': modes.VISUAL}, expected=region_data([0, 4]), actual_func=first_sel_wrapper, msg=''), test_data(cmd='_vi_dollar', initial_text=('abc\n' * 10), regions=[[0, 1]], cmd_params={'mode': modes.VISUAL, 'count': 5}, expected=region_data([0, 20]), actual_func=first_sel_wrapper, msg=''), test_data(cmd='_vi_dollar', initial_text=('abc\n\nabc\n'), regions=[[4, 5]], cmd_params={'mode': modes.VISUAL, 'count': 1}, expected=region_data([4, 5]), actual_func=first_sel_wrapper, msg=''), test_data(cmd='_vi_dollar', initial_text=('abc\nabc\n'), regions=[[6, 1]], cmd_params={'mode': modes.VISUAL, 'count': 1}, expected=region_data([6, 3]), actual_func=first_sel_wrapper, msg='can move in visual mode with reversed sel no cross over'), test_data(cmd='_vi_dollar', initial_text=('abc\nabc\n'), regions=[[3, 2]], cmd_params={'mode': modes.VISUAL, 'count': 1}, expected=region_data([2, 4]), actual_func=first_sel_wrapper, msg='can move in visual mode with reversed sel at eol'), test_data(cmd='_vi_dollar', initial_text=('abc\nabc\n'), regions=[[5, 4]], cmd_params={'mode': modes.VISUAL, 'count': 2}, expected=region_data([4, 9]), actual_func=first_sel_wrapper, msg='can move in visual mode with revesed sel cross over'), test_data(cmd='_vi_dollar', initial_text=('abc\nabc\nabc\n'), regions=[[0, 4]], cmd_params={'mode': modes.VISUAL_LINE, 'count': 1}, expected=region_data([0, 4]), actual_func=first_sel_wrapper, msg='can move in visual mode with revesed sel cross over'), test_data(cmd='_vi_dollar', initial_text='abc\nabc\n', regions=[[0, 0]], cmd_params={'mode': modes.INTERNAL_NORMAL}, expected=region_data([0, 4]), actual_func=first_sel_wrapper, msg=''), test_data(cmd='_vi_dollar', initial_text='abc\nabc\nabc\nabc\n', regions=[[0, 0]], cmd_params={'mode': modes.INTERNAL_NORMAL, 'count': 3}, expected=region_data([0, 12]), actual_func=first_sel_wrapper, msg=''), ) TESTS = TESTS_INTERNAL_NORMAL class Test_vi_dollar(ViewTest): def testAll(self): for (i, data) in enumerate(TESTS): # TODO: Perhaps we should ensure that other state is reset too? self.view.sel().clear() self.write(data.initial_text) for region in data.regions: self.add_sel(self.R(*region)) self.view.run_command(data.cmd, data.cmd_params) msg = "failed at test index {0} {1}".format(i, data.msg) actual = data.actual_func(self) if isinstance(data.expected, region_data): self.assertEqual(self.R(*data.expected.regions), actual, msg) else: self.assertEqual(data.expected, actual, msg)
mit
eadgarchen/tensorflow
tensorflow/contrib/training/python/training/training.py
33
20450
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Contains various routines and helper functions for training models. This script contains various functions for training models. These include manipulating gradients, creating a `train_op` (an operation that computes the loss and applies the gradients) and a training loop function. The training loop allows the user to pass in the `train_op` and runs the optimization according to user-specified arguments. ************************************ * A simple working training script * ************************************ # Load data and create the model: images, labels = LoadData(...) predictions = MyModel(images) # Define the loss: tf.contrib.losses.log_loss(predictions, labels) total_loss = tf.contrib.losses.get_total_loss() # Define the optimizer: optimizer = tf.train.MomentumOptimizer(FLAGS.learning_rate, FLAGS.momentum) # Create the train_op train_op = tf.contrib.training.create_train_op(total_loss, optimizer) # Run training. tf.contrib.training.train(train_op, my_log_dir) ************************* * Creating the train_op * ************************* In order to use the `train` function, one needs a train_op: an `Operation` that (a) computes the loss, (b) applies the gradients to update the weights and (c) returns the value of the loss. tf.contrib.training.create_train_op creates such an `Operation`. This function also provides the ability to manipulate the gradients using a few arguments: # Create the train_op and clip the gradient norms: train_op = tf.contrib.training.create_train_op( total_loss, optimizer, transform_grads_fn=clip_gradient_norms_fn(3)) # Create the train_op and scale the gradients by providing a map from variable # name (or variable) to a scaling coefficient: def transform_grads_fn(grads): gradient_multipliers = { 'conv0/weights': 1.2, 'fc8/weights': 3.4, } return tf.contrib.training.multiply_gradients( grads, gradient_multipliers) train_op = tf.contrib.training.create_train_op( total_loss, optimizer, transform_grads_fn=transform_grads_fn) **************************************************************** * Performing additional (non-gradient) updates during training * **************************************************************** Many networks utilize modules, like BatchNorm, that require performing a series of non-gradient updates during training. tf.contrib.training.create_train_op allows a user to pass in a list of update_ops to call along with the gradient updates. train_op = tf.contrib.training.create_train_op( total_loss, optimizer, update_ops) By default, tf.contrib.training.create_train_op includes all update ops that are part of the `tf.GraphKeys.UPDATE_OPS` collection. Additionally, the tf.contrib.layers.batch_norm function adds the moving mean and moving variance updates to this collection. Consequently, users who want to use tf.contrib.layers.batch_norm will not need to take any additional steps in order to have the moving mean and moving variance updates be computed. However, users with additional, specialized updates can either override the default update ops or simply add additional update ops to the `tf.GraphKeys.UPDATE_OPS` collection: # Force `create_train_op` to NOT use ANY update_ops: train_op = tf.contrib.training.create_train_op( total_loss, optimizer, update_ops=[]) # Use an alternative set of update ops: train_op = tf.contrib.training.create_train_op( total_loss, optimizer, update_ops=my_other_update_ops) # Use a set of update ops in addition to the default updates: tf.add_to_collection(tf.GraphKeys.UPDATE_OPS, my_update0) tf.add_to_collection(tf.GraphKeys.UPDATE_OPS, my_update1) train_op = tf.contrib.training.create_train_op( total_loss, optimizer) # Which is the same as: train_op = tf.contrib.training.create_train_op( total_loss, optimizer, update_ops=tf.get_collection(tf.GraphKeys.UPDATE_OPS)) ****************************************** * Initializing a model from a checkpoint * ****************************************** It is common to want to 'warm-start' a model from a pre-trained checkpoint. One can use a tf.Scaffold and an initializing function to do so. ... # Create the train_op train_op = tf.contrib.training.create_train_op(total_loss, optimizer) # Create the initial assignment op checkpoint_path = '/path/to/old_model_checkpoint' variables_to_restore = tf.contrib.framework.get_model_variables() init_fn = tf.contrib.framework.assign_from_checkpoint_fn( checkpoint_path, variables_to_restore) # Run training. scaffold = tf.Scaffold(init_fn=init_fn) tf.contrib.training.train(train_op, my_log_dir, scaffold=scaffold) *************************************************************************** * Initializing a model from a checkpoint whose variable names don't match * *************************************************************************** At times, a user may want to initialize a new model with values from a checkpoint whose variable names do not match those of the current model. In this case, one needs to create a mapping from the checkpoint variable names to the current model variables. This requires only a small modification of the code above: ... # Creates a model with two variables, var0 and var1 predictions = MyModel(images) ... # Create the train_op train_op = tf.contrib.training.create_train_op(total_loss, optimizer) checkpoint_path = '/path/to/old_model_checkpoint' # Create the mapping: variables_to_restore = { 'name_var_0_in_checkpoint': tf.contrib.framework.get_unique_variable('var0'), 'name_var_1_in_checkpoint': tf.contrib.framework.get_unique_variable('var1') } init_fn = tf.contrib.framework.assign_from_checkpoint_fn( checkpoint_path, variables_to_restore) scaffold = tf.Scaffold(init_fn=init_fn) # Run training. tf.contrib.training.train(train_op, my_log_dir, scaffold=scaffold) ************************************************* * Fine-Tuning Part of a model from a checkpoint * ************************************************* Rather than initializing all of the weights of a given model, we sometimes only want to restore some of the weights from a checkpoint. To do this, one need only filter those variables to initialize as follows: ... # Create the train_op train_op = tf.contrib.training.create_train_op(total_loss, optimizer) checkpoint_path = '/path/to/old_model_checkpoint' # Specify the variables to restore via a list of inclusion or exclusion # patterns: variables_to_restore = tf.contrib.framework.get_variables_to_restore( include=["conv"], exclude=["fc8", "fc9]) # or variables_to_restore = tf.contrib.framework.get_variables_to_restore( exclude=["conv"]) init_fn = tf.contrib.framework.assign_from_checkpoint_fn( checkpoint_path, variables_to_restore) scaffold = tf.Scaffold(init_fn=init_fn) # Run training. tf.contrib.training.train(train_op, my_log_dir, scaffold=scaffold) ****************************************************** * Initializing model variables from values in memory * ****************************************************** One may want to initialize the weights of a model from values coming from an arbitrary source (a text document, matlab file, etc). While this is technically feasible using assign operations, this strategy results in the values of your weights being stored in the graph. For large models, this becomes prohibitively large. However, it's possible to perform this initial assignment without having to store the values of the initial model in the graph itself by using placeholders and a feed dictionary: ... # Create the train_op train_op = tf.contrib.training.create_train_op(total_loss, optimizer) # Create the mapping from variable names to values: var0_initial_value = ReadFromDisk(...) var1_initial_value = ReadFromDisk(...) var_names_to_values = { 'var0': var0_initial_value, 'var1': var1_initial_value, } init_fn = tf.contrib.framework.assign_from_values_fn(var_names_to_values) scaffold = tf.Scaffold(init_fn=init_fn) # Run training. tf.contrib.training.train(train_op, my_log_dir, scaffold=scaffold) """ from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.python.framework import constant_op from tensorflow.python.framework import ops from tensorflow.python.ops import array_ops from tensorflow.python.ops import clip_ops from tensorflow.python.ops import control_flow_ops from tensorflow.python.ops import variables as tf_variables from tensorflow.python.platform import tf_logging as logging from tensorflow.python.summary import summary from tensorflow.python.training import monitored_session from tensorflow.python.training import optimizer as tf_optimizer from tensorflow.python.training import training_util # TODO(nsilberman): move add_gradients_summaries, clip_gradient_norms and # multiply_gradients into contrib/summaries and contrib/optimizers.py __all__ = [ 'add_gradients_summaries', 'clip_gradient_norms', 'clip_gradient_norms_fn', 'create_train_op', 'multiply_gradients', 'train', ] def add_gradients_summaries(grads_and_vars): """Add summaries to gradients. Args: grads_and_vars: A list of gradient to variable pairs (tuples). Returns: The list of created summaries. """ summaries = [] for grad, var in grads_and_vars: if grad is not None: if isinstance(grad, ops.IndexedSlices): grad_values = grad.values else: grad_values = grad summaries.append( summary.histogram(var.op.name + '_gradient', grad_values)) summaries.append( summary.scalar(var.op.name + '_gradient_norm', clip_ops.global_norm([grad_values]))) else: logging.info('Var %s has no gradient', var.op.name) return summaries def clip_gradient_norms(gradients_to_variables, max_norm): """Clips the gradients by the given value. Args: gradients_to_variables: A list of gradient to variable pairs (tuples). max_norm: the maximum norm value. Returns: A list of clipped gradient to variable pairs. """ clipped_grads_and_vars = [] for grad, var in gradients_to_variables: if grad is not None: if isinstance(grad, ops.IndexedSlices): tmp = clip_ops.clip_by_norm(grad.values, max_norm) grad = ops.IndexedSlices(tmp, grad.indices, grad.dense_shape) else: grad = clip_ops.clip_by_norm(grad, max_norm) clipped_grads_and_vars.append((grad, var)) return clipped_grads_and_vars def clip_gradient_norms_fn(max_norm): """Returns a `transform_grads_fn` function for gradient clipping.""" def clip_norms(gradients_to_variables): return clip_gradient_norms(gradients_to_variables, max_norm) return clip_norms def multiply_gradients(grads_and_vars, gradient_multipliers): """Multiply specified gradients. Args: grads_and_vars: A list of gradient to variable pairs (tuples). gradient_multipliers: A map from either `Variables` or `Variable` op names to the coefficient by which the associated gradient should be scaled. Returns: The updated list of gradient to variable pairs. Raises: ValueError: If `grads_and_vars` is not a list or if `gradient_multipliers` is empty or None or if `gradient_multipliers` is not a dictionary. """ if not isinstance(grads_and_vars, list): raise ValueError('`grads_and_vars` must be a list.') if not gradient_multipliers: raise ValueError('`gradient_multipliers` is empty.') if not isinstance(gradient_multipliers, dict): raise ValueError('`gradient_multipliers` must be a dict.') multiplied_grads_and_vars = [] for grad, var in grads_and_vars: if var in gradient_multipliers or var.op.name in gradient_multipliers: key = var if var in gradient_multipliers else var.op.name if grad is None: raise ValueError('Requested multiple of `None` gradient.') if isinstance(grad, ops.IndexedSlices): tmp = grad.values * constant_op.constant( gradient_multipliers[key], dtype=grad.dtype) grad = ops.IndexedSlices(tmp, grad.indices, grad.dense_shape) else: grad *= constant_op.constant( gradient_multipliers[key], dtype=grad.dtype) multiplied_grads_and_vars.append((grad, var)) return multiplied_grads_and_vars _USE_GLOBAL_STEP = 0 def create_train_op(total_loss, optimizer, global_step=_USE_GLOBAL_STEP, update_ops=None, variables_to_train=None, transform_grads_fn=None, summarize_gradients=False, gate_gradients=tf_optimizer.Optimizer.GATE_OP, aggregation_method=None, colocate_gradients_with_ops=False, check_numerics=True): """Creates an `Operation` that evaluates the gradients and returns the loss. Args: total_loss: A `Tensor` representing the total loss. optimizer: A tf.Optimizer to use for computing the gradients. global_step: A `Tensor` representing the global step variable. If left as `_USE_GLOBAL_STEP`, then tf.contrib.framework.global_step() is used. update_ops: An optional list of updates to execute. If `update_ops` is `None`, then the update ops are set to the contents of the `tf.GraphKeys.UPDATE_OPS` collection. If `update_ops` is not `None`, but it doesn't contain all of the update ops in `tf.GraphKeys.UPDATE_OPS`, a warning will be displayed. variables_to_train: an optional list of variables to train. If None, it will default to all tf.trainable_variables(). transform_grads_fn: A function which takes a single argument, a list of gradient to variable pairs (tuples), performs any requested gradient updates, such as gradient clipping or multipliers, and returns the updated list. summarize_gradients: Whether or not add summaries for each gradient. gate_gradients: How to gate the computation of gradients. See tf.Optimizer. aggregation_method: Specifies the method used to combine gradient terms. Valid values are defined in the class `AggregationMethod`. colocate_gradients_with_ops: Whether or not to try colocating the gradients with the ops that generated them. check_numerics: Whether or not we apply check_numerics. Returns: A `Tensor` that when evaluated, computes the gradients and returns the total loss value. """ if global_step is _USE_GLOBAL_STEP: global_step = training_util.get_or_create_global_step() # Update ops use GraphKeys.UPDATE_OPS collection if update_ops is None. global_update_ops = set(ops.get_collection(ops.GraphKeys.UPDATE_OPS)) if update_ops is None: update_ops = global_update_ops else: update_ops = set(update_ops) if not global_update_ops.issubset(update_ops): logging.warning('update_ops in create_train_op does not contain all the ' ' update_ops in GraphKeys.UPDATE_OPS') # Make sure update_ops are computed before total_loss. if update_ops: with ops.control_dependencies(update_ops): barrier = control_flow_ops.no_op(name='update_barrier') total_loss = control_flow_ops.with_dependencies([barrier], total_loss) if variables_to_train is None: # Default to tf.trainable_variables() variables_to_train = tf_variables.trainable_variables() else: # Make sure that variables_to_train are in tf.trainable_variables() for v in variables_to_train: assert v in tf_variables.trainable_variables() assert variables_to_train # Create the gradients. Note that apply_gradients adds the gradient # computation to the current graph. grads = optimizer.compute_gradients( total_loss, variables_to_train, gate_gradients=gate_gradients, aggregation_method=aggregation_method, colocate_gradients_with_ops=colocate_gradients_with_ops) if transform_grads_fn: grads = transform_grads_fn(grads) # Summarize gradients. if summarize_gradients: with ops.name_scope('summarize_grads'): add_gradients_summaries(grads) # Create gradient updates. grad_updates = optimizer.apply_gradients(grads, global_step=global_step) with ops.name_scope('train_op'): # Make sure total_loss is valid. if check_numerics: total_loss = array_ops.check_numerics(total_loss, 'LossTensor is inf or nan') # Ensure the train_tensor computes grad_updates. train_op = control_flow_ops.with_dependencies([grad_updates], total_loss) # Add the operation used for training to the 'train_op' collection train_ops = ops.get_collection_ref(ops.GraphKeys.TRAIN_OP) if train_op not in train_ops: train_ops.append(train_op) return train_op def train(train_op, logdir, master='', is_chief=True, scaffold=None, hooks=None, chief_only_hooks=None, save_checkpoint_secs=600, save_summaries_steps=100, config=None, max_wait_secs=7200): """Runs the training loop. Args: train_op: A `Tensor` that, when executed, will apply the gradients and return the loss value. logdir: The directory where the graph and checkpoints are saved. master: The URL of the master. is_chief: Specifies whether or not the training is being run by the primary replica during replica training. scaffold: An tf.train.Scaffold instance. hooks: List of `tf.train.SessionRunHook` callbacks which are run inside the training loop. chief_only_hooks: List of `tf.train.SessionRunHook` instances which are run inside the training loop for the chief trainer only. save_checkpoint_secs: The frequency, in seconds, that a checkpoint is saved using a default checkpoint saver. If `save_checkpoint_secs` is set to `None`, then the default checkpoint saver isn't used. save_summaries_steps: The frequency, in number of global steps, that the summaries are written to disk using a default summary saver. If `save_summaries_steps` is set to `None`, then the default summary saver isn't used. config: An instance of `tf.ConfigProto`. max_wait_secs: Maximum time workers should wait for the session to become available. This should be kept relatively short to help detect incorrect code, but sometimes may need to be increased if the chief takes a while to start up. Returns: the value of the loss function after training. Raises: ValueError: if `logdir` is `None` and either `save_checkpoint_secs` or `save_summaries_steps` are `None. """ if logdir is None and is_chief: if save_summaries_steps: raise ValueError( 'logdir cannot be None when save_summaries_steps is not None') if save_checkpoint_secs: raise ValueError( 'logdir cannot be None when save_checkpoint_secs is not None') with monitored_session.MonitoredTrainingSession( master=master, is_chief=is_chief, checkpoint_dir=logdir, scaffold=scaffold, hooks=hooks, chief_only_hooks=chief_only_hooks, save_checkpoint_secs=save_checkpoint_secs, save_summaries_steps=save_summaries_steps, config=config, max_wait_secs=max_wait_secs) as session: loss = None while not session.should_stop(): loss = session.run(train_op) return loss
apache-2.0
piratos/ctfbulletin
blog/views.py
1
1846
from django.shortcuts import render, Http404 from blog.models import * from django.contrib.auth.decorators import login_required def user_or_challenger(request): user_id = request.user.id chexist = False uexist = False try: user = User.objects.get(id=user_id) uexist = True try: challenger = Challenger.objects.get(user=user) chexist = True user = challenger except Challenger.DoesNotExist: pass except User.DoesNotExist: user = None return user, chexist, uexist def index(request): articles = Article.objects.all() return render(request, 'blog/index.html', {'articles': articles}) def read(request, blog_url): blog_url_name = blog_url.replace('_', ' ') try: blog = Article.objects.get(title=blog_url_name) except Article.DoesNotExist: raise Http404 try: messages = BlogComment.objects.filter(article=blog).order_by('date_comment') except BlogComment.DoesNotExist: pass messages = None return render(request, 'blog/read.html', {'blog': blog, 'messages': messages, }) @login_required() def add_message(request): user, chexist, uexist = user_or_challenger(request) if 'content' and 'article' in request.POST and chexist: comment = request.POST['content'] article_name = request.POST['article'] try: article = Article.objects.get(title=article_name) except Article.DoesNotExist: raise Http404 comment_blog = BlogComment.objects.get_or_create(comment=comment, commenter=user, article=article) return read(request, article.title.replace(' ', '_')) else: return index(request)
mit
gnuhub/intellij-community
python/helpers/docutils/parsers/rst/directives/tables.py
49
19736
# $Id: tables.py 6107 2009-08-31 02:29:08Z goodger $ # Authors: David Goodger <[email protected]>; David Priest # Copyright: This module has been placed in the public domain. """ Directives for table elements. """ __docformat__ = 'reStructuredText' import sys import os.path import csv from docutils import io, nodes, statemachine, utils from docutils.utils import SystemMessagePropagation from docutils.parsers.rst import Directive from docutils.parsers.rst import directives class Table(Directive): """ Generic table base class. """ required_arguments = 0 optional_arguments = 1 final_argument_whitespace = True option_spec = {'class': directives.class_option} has_content = True def make_title(self): if self.arguments: title_text = self.arguments[0] text_nodes, messages = self.state.inline_text(title_text, self.lineno) title = nodes.title(title_text, '', *text_nodes) else: title = None messages = [] return title, messages def process_header_option(self): source = self.state_machine.get_source(self.lineno - 1) table_head = [] max_header_cols = 0 if 'header' in self.options: # separate table header in option rows, max_header_cols = self.parse_csv_data_into_rows( self.options['header'].split('\n'), self.HeaderDialect(), source) table_head.extend(rows) return table_head, max_header_cols def check_table_dimensions(self, rows, header_rows, stub_columns): if len(rows) < header_rows: error = self.state_machine.reporter.error( '%s header row(s) specified but only %s row(s) of data ' 'supplied ("%s" directive).' % (header_rows, len(rows), self.name), nodes.literal_block( self.block_text, self.block_text), line=self.lineno) raise SystemMessagePropagation(error) if len(rows) == header_rows > 0: error = self.state_machine.reporter.error( 'Insufficient data supplied (%s row(s)); no data remaining ' 'for table body, required by "%s" directive.' % (len(rows), self.name), nodes.literal_block( self.block_text, self.block_text), line=self.lineno) raise SystemMessagePropagation(error) for row in rows: if len(row) < stub_columns: error = self.state_machine.reporter.error( '%s stub column(s) specified but only %s columns(s) of ' 'data supplied ("%s" directive).' % (stub_columns, len(row), self.name), nodes.literal_block( self.block_text, self.block_text), line=self.lineno) raise SystemMessagePropagation(error) if len(row) == stub_columns > 0: error = self.state_machine.reporter.error( 'Insufficient data supplied (%s columns(s)); no data remaining ' 'for table body, required by "%s" directive.' % (len(row), self.name), nodes.literal_block( self.block_text, self.block_text), line=self.lineno) raise SystemMessagePropagation(error) def get_column_widths(self, max_cols): if 'widths' in self.options: col_widths = self.options['widths'] if len(col_widths) != max_cols: error = self.state_machine.reporter.error( '"%s" widths do not match the number of columns in table ' '(%s).' % (self.name, max_cols), nodes.literal_block( self.block_text, self.block_text), line=self.lineno) raise SystemMessagePropagation(error) elif max_cols: col_widths = [100 // max_cols] * max_cols else: error = self.state_machine.reporter.error( 'No table data detected in CSV file.', nodes.literal_block( self.block_text, self.block_text), line=self.lineno) raise SystemMessagePropagation(error) return col_widths def extend_short_rows_with_empty_cells(self, columns, parts): for part in parts: for row in part: if len(row) < columns: row.extend([(0, 0, 0, [])] * (columns - len(row))) class RSTTable(Table): def run(self): if not self.content: warning = self.state_machine.reporter.warning( 'Content block expected for the "%s" directive; none found.' % self.name, nodes.literal_block( self.block_text, self.block_text), line=self.lineno) return [warning] title, messages = self.make_title() node = nodes.Element() # anonymous container for parsing self.state.nested_parse(self.content, self.content_offset, node) if len(node) != 1 or not isinstance(node[0], nodes.table): error = self.state_machine.reporter.error( 'Error parsing content block for the "%s" directive: exactly ' 'one table expected.' % self.name, nodes.literal_block( self.block_text, self.block_text), line=self.lineno) return [error] table_node = node[0] table_node['classes'] += self.options.get('class', []) if title: table_node.insert(0, title) return [table_node] + messages class CSVTable(Table): option_spec = {'header-rows': directives.nonnegative_int, 'stub-columns': directives.nonnegative_int, 'header': directives.unchanged, 'widths': directives.positive_int_list, 'file': directives.path, 'url': directives.uri, 'encoding': directives.encoding, 'class': directives.class_option, # field delimiter char 'delim': directives.single_char_or_whitespace_or_unicode, # treat whitespace after delimiter as significant 'keepspace': directives.flag, # text field quote/unquote char: 'quote': directives.single_char_or_unicode, # char used to escape delim & quote as-needed: 'escape': directives.single_char_or_unicode,} class DocutilsDialect(csv.Dialect): """CSV dialect for `csv_table` directive.""" delimiter = ',' quotechar = '"' doublequote = True skipinitialspace = True lineterminator = '\n' quoting = csv.QUOTE_MINIMAL def __init__(self, options): if 'delim' in options: self.delimiter = str(options['delim']) if 'keepspace' in options: self.skipinitialspace = False if 'quote' in options: self.quotechar = str(options['quote']) if 'escape' in options: self.doublequote = False self.escapechar = str(options['escape']) csv.Dialect.__init__(self) class HeaderDialect(csv.Dialect): """CSV dialect to use for the "header" option data.""" delimiter = ',' quotechar = '"' escapechar = '\\' doublequote = False skipinitialspace = True lineterminator = '\n' quoting = csv.QUOTE_MINIMAL def check_requirements(self): pass def run(self): try: if (not self.state.document.settings.file_insertion_enabled and ('file' in self.options or 'url' in self.options)): warning = self.state_machine.reporter.warning( 'File and URL access deactivated; ignoring "%s" ' 'directive.' % self.name, nodes.literal_block( self.block_text, self.block_text), line=self.lineno) return [warning] self.check_requirements() title, messages = self.make_title() csv_data, source = self.get_csv_data() table_head, max_header_cols = self.process_header_option() rows, max_cols = self.parse_csv_data_into_rows( csv_data, self.DocutilsDialect(self.options), source) max_cols = max(max_cols, max_header_cols) header_rows = self.options.get('header-rows', 0) stub_columns = self.options.get('stub-columns', 0) self.check_table_dimensions(rows, header_rows, stub_columns) table_head.extend(rows[:header_rows]) table_body = rows[header_rows:] col_widths = self.get_column_widths(max_cols) self.extend_short_rows_with_empty_cells(max_cols, (table_head, table_body)) except SystemMessagePropagation, detail: return [detail.args[0]] except csv.Error, detail: error = self.state_machine.reporter.error( 'Error with CSV data in "%s" directive:\n%s' % (self.name, detail), nodes.literal_block( self.block_text, self.block_text), line=self.lineno) return [error] table = (col_widths, table_head, table_body) table_node = self.state.build_table(table, self.content_offset, stub_columns) table_node['classes'] += self.options.get('class', []) if title: table_node.insert(0, title) return [table_node] + messages def get_csv_data(self): """ Get CSV data from the directive content, from an external file, or from a URL reference. """ encoding = self.options.get( 'encoding', self.state.document.settings.input_encoding) if self.content: # CSV data is from directive content. if 'file' in self.options or 'url' in self.options: error = self.state_machine.reporter.error( '"%s" directive may not both specify an external file and' ' have content.' % self.name, nodes.literal_block( self.block_text, self.block_text), line=self.lineno) raise SystemMessagePropagation(error) source = self.content.source(0) csv_data = self.content elif 'file' in self.options: # CSV data is from an external file. if 'url' in self.options: error = self.state_machine.reporter.error( 'The "file" and "url" options may not be simultaneously' ' specified for the "%s" directive.' % self.name, nodes.literal_block(self.block_text, self.block_text), line=self.lineno) raise SystemMessagePropagation(error) source_dir = os.path.dirname( os.path.abspath(self.state.document.current_source)) source = os.path.normpath(os.path.join(source_dir, self.options['file'])) source = utils.relative_path(None, source) try: self.state.document.settings.record_dependencies.add(source) csv_file = io.FileInput( source_path=source, encoding=encoding, error_handler=(self.state.document.settings.\ input_encoding_error_handler), handle_io_errors=None) csv_data = csv_file.read().splitlines() except IOError, error: severe = self.state_machine.reporter.severe( 'Problems with "%s" directive path:\n%s.' % (self.name, error), nodes.literal_block( self.block_text, self.block_text), line=self.lineno) raise SystemMessagePropagation(severe) elif 'url' in self.options: # CSV data is from a URL. # Do not import urllib2 at the top of the module because # it may fail due to broken SSL dependencies, and it takes # about 0.15 seconds to load. import urllib2 source = self.options['url'] try: csv_text = urllib2.urlopen(source).read() except (urllib2.URLError, IOError, OSError, ValueError), error: severe = self.state_machine.reporter.severe( 'Problems with "%s" directive URL "%s":\n%s.' % (self.name, self.options['url'], error), nodes.literal_block(self.block_text, self.block_text), line=self.lineno) raise SystemMessagePropagation(severe) csv_file = io.StringInput( source=csv_text, source_path=source, encoding=encoding, error_handler=(self.state.document.settings.\ input_encoding_error_handler)) csv_data = csv_file.read().splitlines() else: error = self.state_machine.reporter.warning( 'The "%s" directive requires content; none supplied.' % self.name, nodes.literal_block( self.block_text, self.block_text), line=self.lineno) raise SystemMessagePropagation(error) return csv_data, source if sys.version_info < (3,): # 2.x csv module doesn't do Unicode def decode_from_csv(s): return s.decode('utf-8') def encode_for_csv(s): return s.encode('utf-8') else: def decode_from_csv(s): return s def encode_for_csv(s): return s decode_from_csv = staticmethod(decode_from_csv) encode_for_csv = staticmethod(encode_for_csv) def parse_csv_data_into_rows(self, csv_data, dialect, source): # csv.py doesn't do Unicode; encode temporarily as UTF-8 csv_reader = csv.reader([self.encode_for_csv(line + '\n') for line in csv_data], dialect=dialect) rows = [] max_cols = 0 for row in csv_reader: row_data = [] for cell in row: # decode UTF-8 back to Unicode cell_text = self.decode_from_csv(cell) cell_data = (0, 0, 0, statemachine.StringList( cell_text.splitlines(), source=source)) row_data.append(cell_data) rows.append(row_data) max_cols = max(max_cols, len(row)) return rows, max_cols class ListTable(Table): """ Implement tables whose data is encoded as a uniform two-level bullet list. For further ideas, see http://docutils.sf.net/docs/dev/rst/alternatives.html#list-driven-tables """ option_spec = {'header-rows': directives.nonnegative_int, 'stub-columns': directives.nonnegative_int, 'widths': directives.positive_int_list, 'class': directives.class_option} def run(self): if not self.content: error = self.state_machine.reporter.error( 'The "%s" directive is empty; content required.' % self.name, nodes.literal_block(self.block_text, self.block_text), line=self.lineno) return [error] title, messages = self.make_title() node = nodes.Element() # anonymous container for parsing self.state.nested_parse(self.content, self.content_offset, node) try: num_cols, col_widths = self.check_list_content(node) table_data = [[item.children for item in row_list[0]] for row_list in node[0]] header_rows = self.options.get('header-rows', 0) stub_columns = self.options.get('stub-columns', 0) self.check_table_dimensions(table_data, header_rows, stub_columns) except SystemMessagePropagation, detail: return [detail.args[0]] table_node = self.build_table_from_list(table_data, col_widths, header_rows, stub_columns) table_node['classes'] += self.options.get('class', []) if title: table_node.insert(0, title) return [table_node] + messages def check_list_content(self, node): if len(node) != 1 or not isinstance(node[0], nodes.bullet_list): error = self.state_machine.reporter.error( 'Error parsing content block for the "%s" directive: ' 'exactly one bullet list expected.' % self.name, nodes.literal_block(self.block_text, self.block_text), line=self.lineno) raise SystemMessagePropagation(error) list_node = node[0] # Check for a uniform two-level bullet list: for item_index in range(len(list_node)): item = list_node[item_index] if len(item) != 1 or not isinstance(item[0], nodes.bullet_list): error = self.state_machine.reporter.error( 'Error parsing content block for the "%s" directive: ' 'two-level bullet list expected, but row %s does not ' 'contain a second-level bullet list.' % (self.name, item_index + 1), nodes.literal_block( self.block_text, self.block_text), line=self.lineno) raise SystemMessagePropagation(error) elif item_index: # ATTN pychecker users: num_cols is guaranteed to be set in the # "else" clause below for item_index==0, before this branch is # triggered. if len(item[0]) != num_cols: error = self.state_machine.reporter.error( 'Error parsing content block for the "%s" directive: ' 'uniform two-level bullet list expected, but row %s ' 'does not contain the same number of items as row 1 ' '(%s vs %s).' % (self.name, item_index + 1, len(item[0]), num_cols), nodes.literal_block(self.block_text, self.block_text), line=self.lineno) raise SystemMessagePropagation(error) else: num_cols = len(item[0]) col_widths = self.get_column_widths(num_cols) return num_cols, col_widths def build_table_from_list(self, table_data, col_widths, header_rows, stub_columns): table = nodes.table() tgroup = nodes.tgroup(cols=len(col_widths)) table += tgroup for col_width in col_widths: colspec = nodes.colspec(colwidth=col_width) if stub_columns: colspec.attributes['stub'] = 1 stub_columns -= 1 tgroup += colspec rows = [] for row in table_data: row_node = nodes.row() for cell in row: entry = nodes.entry() entry += cell row_node += entry rows.append(row_node) if header_rows: thead = nodes.thead() thead.extend(rows[:header_rows]) tgroup += thead tbody = nodes.tbody() tbody.extend(rows[header_rows:]) tgroup += tbody return table
apache-2.0
rdkls/gps-tracker-server
rest_api/rest_api.py
1
6919
#!/usr/bin/env python import json import config import sys import re from flask_cors import CORS from gevent.pywsgi import WSGIServer from flask import Flask, request from werkzeug.exceptions import NotFound, Unauthorized, BadRequest from models import * app = Flask(__name__) app.debug = True app.config['CORS_HEADERS'] = ['Content-Type', 'X-API-KEY'] cors = CORS(app) def check_auth(): api_key = request.headers.get('X-API-KEY') if not api_key: raise Unauthorized() u = User.check_api_key(api_key) if not u: raise Unauthorized() return u @app.route('/login', methods=['POST']) def login(): try: d = json.loads(request.data) email = d['email'] password = d['password'] except: raise BadRequest() try: u = User.objects.get(email=email) except (User.DoesNotExist, User.MultipleObjectsReturned): raise Unauthorized() if u.check_password(password): return json.dumps({ 'api_key' : u.api_key, 'email' : u.email, 'id' : str(u.id), }) raise Unauthorized() @app.route('/user/register', methods=['POST']) def user_register(): try: d = json.loads(request.data) email = d['email'] password = d['password'] except: raise BadRequest() if not email or not password: raise BadRequest('Please supply both email and password') u = User(email=email, password=password) try: u.save() except mongoengine.NotUniqueError: raise BadRequest('User with that email already exists') except mongoengine.ValidationError: raise BadRequest('Bad email') return json.dumps({ 'api_key' : u.api_key, 'email' : u.email, 'id' : str(u.id), }) @app.route('/device/<device_id>/messages', methods=['GET']) def device_messages(device_id): user = check_auth() try: device = GPSDevice.objects.get(id=device_id) except (GPSDevice.DoesNotExist, mongoengine.ValidationError): raise NotFound() except GPSDevice.MultipleObjectsReturned: raise if device not in user.devices: raise Unauthorized() resp = [] for message in device.messages: resp.append({ 'id' : str(message.id), 'message_type' : message.message_type, 'state' : message.state, 'imei' : message.imei, 'message_datastring': message.message_datastring, 'latitude' : str(message.latitude) if message.latitude else None, 'longitude' : str(message.longitude) if message.longitude else None, 'created' : message.created.isoformat(), }) return json.dumps(resp) @app.route('/device/<device_id>/trackOnce', methods=['POST']) def device_track_once(device_id): user = check_auth() try: device = GPSDevice.objects.get(id=device_id) except (GPSDevice.DoesNotExist, mongoengine.ValidationError): raise NotFound() except GPSDevice.MultipleObjectsReturned: raise if device not in user.devices: raise Unauthorized() print 'IMEI %s requesting to trackOnce' % device.imei m = Message() m.imei = device.imei m.message_type = config.MESSAGE_TYPE_REQ_LOCATION m.save() return 'location request sent' @app.route('/user/<user_id>', methods=['GET']) @app.route('/user', methods=['GET']) def user_list(user_id=None): user = check_auth() if user_id: try: u = User.objects.get(id=user_id) except: raise NotFound() resp = { '_id' : str(u.id), 'email' : str(u.email), 'devices' : [str(d.id) for d in u.devices], } else: resp = [] for u in User.objects.filter(id=user.id): resp.append({ '_id' : str(u.id), 'email' : str(u.email), 'devices' : [str(d.id) for d in u.devices], }) return json.dumps(resp) @app.route('/device/<id>', methods=['DELETE']) def delete_device(id): user = check_auth() try: device = GPSDevice.objects.get(id=id) except GPSDevice.DoesNotExist: raise NotFound() if user != device.user: raise NotFound() user.devices = filter(lambda x:x.id!=id, user.devices) user.save() device.delete() return 'ok' @app.route('/device', methods=['POST']) def add_device(): user = check_auth() try: data = json.loads(request.data) except: raise BadRequest() if not data.get('imei', None): raise BadRequest('imei required') if not re.match('^\d{15}$', data['imei']): raise BadRequest('imei must be 15 digits long') try: device = GPSDevice.objects.get(imei=data['imei']) if device.user != user: raise BadRequest('There was a problem adding that device') except GPSDevice.DoesNotExist: data = { 'imei' : data.get('imei'), 'vehicle_plate' : data.get('vehicle_plate', None), } device = GPSDevice(imei=data['imei']) device.save() user.devices.append(device) user.save() resp = { 'id' : str(device.id), 'imei' : device.imei, } return json.dumps(resp) @app.route('/device/<device_id>', methods=['GET']) @app.route('/device', methods=['GET']) def devices(device_id=None): user = check_auth() if device_id: try: d = filter(lambda d:str(d.id)==device_id, user.devices)[0] resp = { 'id' : str(d.id), 'name' : d.name, 'imei' : d.imei, 'ipaddr' : d.ipaddr, 'vehicle_plate' : d.vehicle_plate, 'is_online' : d.is_online, 'latitude' : str(d.latitude) if d.latitude else None, 'longitude' : str(d.longitude) if d.longitude else None, 'icon' : 'img/car-black.png' } except: raise NotFound() else: resp = [] for device in user.devices: d = { 'id' : str(device.id), 'imei' : device.imei, 'ipaddr' : device.ipaddr, 'is_online' : device.is_online, 'latitude' : str(device.latitude) if device.latitude else None, 'longitude' : str(device.longitude) if device.longitude else None, 'icon' : 'img/car-black.png' } resp.append(d) return json.dumps(resp) if __name__=='__main__': http = WSGIServer(('', 5000), app) http.serve_forever()
gpl-3.0
lesthack/azucar
src/player.py
1
27194
#!/usr/bin/python # -*- coding: utf-8 -*- import gobject import gtk import pango import re import os import keybinder import logging import time import pynotify import ConfigParser import multiprocessing import cover import config UI_FILE = "data/player.ui" class player: def __init__(self, xmms): self.initlogger() self.xmms = xmms self.logger.info('azucar init') self.builder = gtk.Builder() self.builder.add_from_file(UI_FILE) self.window = self.builder.get_object("main") self.__get_config__() self.__properties__() self.__set_signals__() self.__set_hotkeys__() def __properties__(self): self.panel_active = self.builder.get_object("panel_active") self.scrollplaylist = self.builder.get_object("scrollplaylist") self.scrollalbums = self.builder.get_object("scrollalbums") self.list_active = self.builder.get_object("list_active") self.insearch = self.builder.get_object("insearch") self.playerbar = self.builder.get_object("playerbar") self.layout_information = self.builder.get_object("layout_information") self.panel_artists = self.builder.get_object("panel_artists") self.playerbar.modify_bg(gtk.STATE_NORMAL, gtk.gdk.Color(9000, 9000, 9000)) self.layout_information.modify_bg(gtk.STATE_NORMAL, gtk.gdk.Color(0, 0, 0)) self.album = self.builder.get_object("lb_album") self.artist = self.builder.get_object("lb_artist") self.song = self.builder.get_object("lb_song") self.image_cover = self.builder.get_object("cover_image") self.timer = self.builder.get_object("lb_timer") #self.list_covers = self.builder.get_object("list_covers") #self.scrolledcovers = self.builder.get_object("scrolledcovers") attr_timer = pango.AttrList() attr_timer.insert(pango.AttrSize(24000, 0, -1)) #font size attr_timer.insert(pango.AttrWeight(pango.WEIGHT_BOLD, 0, -1)) #font weight attr_timer.insert(pango.AttrForeground(65535, 65535, 65535, 0, -1)) #font color self.timer.set_attributes(attr_timer) attr_song = pango.AttrList() attr_song.insert(pango.AttrSize(14000, 0, -1)) #font size attr_song.insert(pango.AttrWeight(pango.WEIGHT_BOLD, 0, -1)) #font weight attr_song.insert(pango.AttrForeground(65535, 65535, 65535, 0, -1)) self.song.set_attributes(attr_song) attr_others = pango.AttrList() attr_others.insert(pango.AttrForeground(60000, 60000, 60000, 0, -1)) attr_others.insert(pango.AttrSize(10000, 0, -1)) #font size self.album.set_attributes(attr_others) self.artist.set_attributes(attr_others) # buttons self.bt_next = self.builder.get_object("bt_next") self.bt_prev = self.builder.get_object("bt_prev") self.bt_play = self.builder.get_object("bt_play") self.bt_pause = self.builder.get_object("bt_pause") self.bt_stop = self.builder.get_object("bt_stop") self.bt_albums = self.builder.get_object("bt_albums") self.bt_options = self.builder.get_object("bt_options") self.playerbar.set_fraction(0) self.TARGETS = [ ('MY_TREE_MODEL_ROW', gtk.TARGET_SAME_WIDGET, 0), ('text/plain', 0, 1), ('TEXT', 0, 2), ('STRING', 0, 3), ] self.supported = ( 'mp1', 'mp2', 'mp3', 'm4a', 'm4p', 'ogg', 'flac', 'asf', 'wma', 'wav', 'mpg', 'mpeg', 'm4v', 'mp4', 'avi', 'ogm', 'wmv', 'mod', 'ape', 'apl', 'm4b', 'm4v', 'm4r', '3gp', 'aac', 'mpc', 'mp+', 'mpp', 'oga', 'sid') self.current_song = "" self.current_song_id = None self.current_song_duration = 0 self.status = None self.time_playback = 0 self.seek = 3000 # ms for seek self.volume = 100 self.pos_move = -1 self.model_songs = gtk.ListStore (int, str, str, str, 'gboolean') self.list_active.set_model(self.model_songs) #self.list_active.enable_model_drag_source( gtk.gdk.BUTTON1_MASK, self.TARGETS, gtk.gdk.ACTION_DEFAULT|gtk.gdk.ACTION_MOVE) #self.list_active.enable_model_drag_dest(self.TARGETS, gtk.gdk.ACTION_DEFAULT) render = gtk.CellRendererText() render.set_property('cell-background', '#eee') render.set_alignment(0.5, 0.5) columna_one = gtk.TreeViewColumn("Time", render, text=1, cell_background_set=4) columna_one.set_alignment(0.5) columna_one.set_sizing(gtk.TREE_VIEW_COLUMN_FIXED) columna_one.set_fixed_width(50) render_two = gtk.CellRendererText() render_two.set_property('cell-background', '#eee') render_two.set_alignment(0, 0.5) columna_two = gtk.TreeViewColumn("Song", render_two, text=2, cell_background_set=4) columna_two.set_alignment(0.5) columna_two.set_sizing(gtk.TREE_VIEW_COLUMN_FIXED) columna_two.set_fixed_width(250) columna_three = gtk.TreeViewColumn("Album", render_two, text=3, cell_background_set=4) columna_three.set_alignment(0.5) columna_three.set_sizing(gtk.TREE_VIEW_COLUMN_FIXED) #columna_three.set_fixed_width(100) self.list_active.append_column(columna_one) self.list_active.append_column(columna_two) self.list_active.append_column(columna_three) self.cellbackground = True pynotify.init('Azucar') self.notify = pynotify.Notification("Azucar", "Azucar it's Ok") self.notify.set_timeout(1000) # scrobbling #self.lastfm = scrobble("%s/.config/xmms2/bindata" % os.getenv("HOME"), "data/no-cover.jpg") #self.artists = [] # covers #self.table_covers = gtk.Table(5, 1, True) #self.table_covers = cover.cover(5, 1, True) #self.table_covers.show() #self.scrolledcovers.add_with_viewport(self.table_covers) #self.list_covers = gtk.VBox(None, 0) #self.scrolledcovers.add_with_viewport(self.list_covers) #self.list_covers.show() def __set_signals__(self): self.insearch.connect("changed", self.search_song) self.list_active.connect("key-press-event", self.list_active_keypress) self.list_active.connect("row-activated", self.list_active_row_activated) self.list_active.connect("drag_data_get", self.list_active_drag_data_get) self.list_active.connect("drag_data_received", self.list_active_drag_data_received) self.window.connect("key-press-event", self.main_keypress) self.window.connect("destroy", gtk.main_quit) self.bt_next.connect("clicked", self.xmms2_next) self.bt_prev.connect("clicked", self.xmms2_prev) self.bt_play.connect("clicked", self.xmms2_start) self.bt_stop.connect("clicked", self.xmms2_stop) self.bt_pause.connect("clicked", self.xmms2_pause) self.bt_albums.connect("clicked", self.toggle_list) self.bt_options.connect("clicked", self.show_config) self.playerbar.connect("button-press-event", self.testing) try: self.xmms.playback_current_id(self.handler_playback_current_id) self.xmms.signal_playback_playtime(self.handler_set_time_track) self.xmms.playback_status(self.handler_playback_status) self.xmms.broadcast_playlist_changed(self.handler_playlist_change) self.xmms.broadcast_playback_current_id(self.handler_playback_current_id) self.xmms.broadcast_playback_status(self.handler_playback_status) self.xmms.playback_volume_get(self.xmms2_volume) self.xmms.playlist_list_entries('_active', self.get_tracks) self.load_list_songs() except: self.logger.critical("Error in hanlder's to xmms2") def __get_config__(self): if not os.path.isfile( os.path.expanduser( '%s/.config/xmms2/' % os.getenv("HOME") ) ): print "No config created" def __select_row__(self, id): try: miter = self.get_iter(id) if miter and self.model_songs == self.list_active.get_model(): self.list_active.get_selection().select_iter(miter) except: self.logger.error('to get selection: __select_row__') def __set_hotkeys__(self): key_next = "<Ctrl><Alt>V" keybinder.bind(key_next, self.xmms2_next) key_prev = "<Ctrl><Alt>Z" keybinder.bind(key_prev, self.xmms2_prev) key_start = "<Ctrl><Alt>C" keybinder.bind(key_start, self.xmms.playback_start) key_pause = "<Ctrl><Alt>X" keybinder.bind(key_pause, self.xmms.playback_pause) key_clear = "<Ctrl><Alt>B" keybinder.bind(key_clear, self.xmms.playback_stop) key_focus = "<Ctrl><Alt>M" keybinder.bind(key_focus, self.get_focus) def handler_playback_status(self, result): self.status = result.value() def handler_playlist_change(self, result): update = result.value() if update['type']==0: #add self.xmms.medialib_get_info(update['id'], self.add_track) elif update['type']==1: #add in position self.pos_move = update['position'] self.xmms.medialib_get_info(update['id'], self.add_track_pos) elif update['type']==2: #shuffle self.model_songs.clear() self.xmms.playlist_list_entries('_active', self.get_tracks) elif update['type']==3: #remove self.remove_track(update['position']) elif update['type']==4: #clear self.model_songs.clear() self.list_active.set_model(self.model_songs) elif update['type']==5: #change positions if(update['newposition']<update['position']): self.model_songs.move_before(self.model_songs[update['position']].iter, self.model_songs[update['newposition']].iter) elif(update['newposition']>update['position']): self.model_songs.move_after(self.model_songs[update['position']].iter, self.model_songs[update['newposition']].iter) def handler_playback_current_id(self, result): self.current_song_id = result.value() self.xmms.medialib_get_info(result.value(), self.set_track_player) self.__select_row__(result.value()) def handler_set_time_track(self, result): self.time_playback = result.value() min = self.time_playback/(60*1000) sec = (self.time_playback - min*1000*60)/1000 if len(str(min))==1: min = "0%s" % min if len(str(sec))==1: sec = "0%s" % sec self.timer.set_text("%s:%s" % (min, sec) ) progress = 0.0 if self.current_song_duration > 0: progress = (int(self.time_playback)*100/int(self.current_song_duration))/100.0 try: self.playerbar.set_fraction(progress) except Exception: self.logger.error("%s" % Exception) def set_track_player(self, result): track = self.get_taginfo(result.value()) if track: self.current_song = "%s - %s" % (track[1],track[2]) self.current_song_duration = result.value()['duration'] self.artist.set_text("%s" % track[1]) self.song.set_text("%s" % track[2]) self.album.set_text("%s" % track[3]) try: url_cover = "%s/.config/xmms2/bindata/%s" % (os.getenv("HOME"), result.value()['picture_front']) except: url_cover = "data/no-cover.jpg" #url_cover = self.lastfm.get_album_info(track[1], track[3]) self.set_cover_information(url_cover) self.notify.update(track[2], "Album: %s \nArtis: %s" % (track[3], track[1]) ) self.notify.set_icon_from_pixbuf(self.image_cover.get_pixbuf()) self.notify.show() def set_cover_information(self, url_cover): try: pixbuf = gtk.gdk.pixbuf_new_from_file(url_cover) scaled_buf = pixbuf.scale_simple(100,100,gtk.gdk.INTERP_BILINEAR) self.image_cover.set_from_pixbuf(scaled_buf) except: pixbuf = gtk.gdk.pixbuf_new_from_file("data/no-cover.jpg") scaled_buf = pixbuf.scale_simple(100,100,gtk.gdk.INTERP_BILINEAR) self.image_cover.set_from_pixbuf(scaled_buf) def remove_track(self, position): try: self.model_songs.remove(self.model_songs[position].iter) except: self.logger.error("Can't remove position: %s" % position) def get_tracks(self, result): playlist = result.value() for element in playlist: self.xmms.medialib_get_info(element, self.add_track) def add_track(self, result): taginfo = self.get_taginfo(result.value()) try: # taginfo[0] id # taginfo[1] artist # taginfo[2] song # taginfo[3] album # taginfo[4] cover # taginfo[5] time self.model_songs.append([taginfo[0], taginfo[5], taginfo[2], taginfo[3], self.cellbackground]) self.cellbackground = (True, False)[self.cellbackground==True] except: self.logger.error("Can't to append: %s" % result.value()) def add_track_pos(self, result): if self.pos_move == -1: return else: taginfo = self.get_taginfo(result.value()) try: iter = self.model_songs.get_iter(self.pos_move) self.model_songs.insert_before(iter, [taginfo[0], taginfo[5], taginfo[2], taginfo[3], self.cellbackground]) except: iter = self.model_songs.get_iter(self.pos_move-1) self.model_songs.insert_after(iter, [taginfo[0], taginfo[5], taginfo[2], taginfo[3], self.cellbackground]) cellbackground = True modeltemp = gtk.ListStore (int, str, str, str, 'gboolean') for i in self.model_songs: modeltemp.append([self.model_songs.get_value(i.iter, 0), self.model_songs.get_value(i.iter, 1), self.model_songs.get_value(i.iter, 2), self.model_songs.get_value(i.iter, 3), cellbackground]) cellbackground = (True, False)[cellbackground==True] self.model_songs = modeltemp self.list_active.set_model(self.model_songs) self.pos_move = -1 def get_taginfo(self, info): track = [] try: track.append(info['id']) if info.has_key('artist'): track.append(info["artist"]) else: track.append("No artist") if info.has_key('title'): track.append(info["title"]) else: track.append(self.get_filename(info['url'])) if info.has_key('album'): track.append(info["album"]) else: track.append("") if info.has_key("picture_front"): track.append(info["picture_front"]) else: track.append("") if info.has_key("duration"): track.append(self.get_human_time(info["duration"])) else: track.append("") except: pass return track def list_active_keypress(self, widget, event): try: sel = self.list_active.get_selection().get_selected() id = widget.get_model().get_value(sel[1], 0) if event.keyval == 65535: # Delete Item try: pos = self.get_song_position(id) self.xmms.playlist_remove_entry(pos,'_active') self.list_active.set_model(self.model_songs) except: self.logger.error("in remove item to playlist") else: self.app_cover.window.hide() except: return def list_active_row_activated(self, widget, iter, path): modelo = widget.get_model() new_pos = self.get_song_position(int(modelo.get_value(modelo[iter[0]].iter, 0))) self.xmms2_play(new_pos) def list_active_drag_data_get(self, treeview, context, selection, target_id, etime): treeselection = treeview.get_selection() model, iter = treeselection.get_selected() data = model.get_value(iter, 0) def list_active_drag_data_received(self, treeview, context, x, y, selection, info, etime): treeselection = treeview.get_selection() model, iter_sel = treeselection.get_selected() data = selection.data drop_info = treeview.get_dest_row_at_pos(x, y) if drop_info: path, position = drop_info iter_des = model.get_iter(path) pos_des = path[0] id_sel = model.get_value(iter_sel, 0) pos_sel = model.get_path(iter_sel)[0] self.xmms.playlist_remove_entry(pos_sel,'_active') self.xmms.playlist_insert_id(path[0], id_sel) return def main_keypress(self, widget, event): keyval = event.keyval name = gtk.gdk.keyval_name(keyval) mod = gtk.accelerator_get_label(keyval, event.state) mod = mod.replace("+Mod2","") #print mod, name, keyval if mod == "Ctrl+O": self.xmms2_open_files() elif mod == "Ctrl+P": self.xmms2_open_directory() elif mod == "Ctrl+L": self.xmms2_clear() elif mod == "Ctrl+I": self.xmms.playback_current_id(self.handler_playback_current_id) elif mod == "Ctrl+J": if not self.insearch.get_visible(): self.insearch.set_visible(True) self.insearch.grab_focus() else: if len(self.insearch.get_text()) == 0: self.insearch.set_visible(False) else: self.insearch.grab_focus() elif mod == "Ctrl+A": self.jump_item() elif mod == "Ctrl++": self.volume_up() elif mod == "Ctrl+-": self.volume_down() elif mod == "Ctrl+Q": self.exit() elif mod == "Ctrl+Alt+P": self.show_config() elif mod == "Ctrl+T": self.testing() elif keyval == 65363: try: self.xmms.playback_seek_ms_rel(self.seek) except: print "Exced" elif keyval == 65361: try: self.xmms.playback_seek_ms_rel(self.seek*(-1)) except: print "Exced" def volume_up(self): self.volume = (self.volume+10, 100)[self.volume+10>100] self.xmms.playback_volume_set('master', self.volume) #notify = pynotify.Notification("Volume", "%d%s" % (self.volume,'%') ) #notify.set_timeout(500) #notify.show() self.notify.update("Volume", "%d%s" % (self.volume,'%') ) self.notify.set_icon_from_pixbuf(self.image_cover.get_pixbuf()) self.notify.show() def volume_down(self): self.volume = (self.volume-10, 0)[self.volume-10<=0] self.xmms.playback_volume_set('master', self.volume) #notify = pynotify.Notification("Volume", "%d%s" % (self.volume,'%') ) #notify.set_timeout(500) #notify.show() self.notify.update("Volume", "%d%s" % (self.volume,'%') ) self.notify.set_icon_from_pixbuf(self.image_cover.get_pixbuf()) self.notify.show() def xmms2_volume(self, result): try: self.volume = result.value()['master'] except: self.logger.info("No volume master access") def xmms2_open_files(self): dialog = gtk.FileChooserDialog("Add Music Files..", None, gtk.FILE_CHOOSER_ACTION_OPEN, (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OPEN, gtk.RESPONSE_OK)) dialog.set_default_response(gtk.RESPONSE_OK) dialog.set_select_multiple(True) dialog.set_current_folder(os.getenv("HOME")) filter = gtk.FileFilter() filter.set_name("Music files") for format in self.supported: filter.add_pattern("*.%s" % format) dialog.add_filter(filter) response = dialog.run() if response == gtk.RESPONSE_OK: for ifile in dialog.get_filenames(): try: self.xmms.playlist_add_url('file://%s' % ifile) except: self.logger.error("Error to adding file %s" % ifile) elif response == gtk.RESPONSE_CANCEL: self.logger.info("Closed, no files selected") dialog.destroy() def xmms2_open_directory(self): dialog = gtk.FileChooserDialog("Add Music Directory..", None, gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER, (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OPEN, gtk.RESPONSE_OK)) dialog.set_default_response(gtk.RESPONSE_OK) dialog.set_select_multiple(True) dialog.set_current_folder(os.getenv("HOME")) filter = gtk.FileFilter() filter.set_name("Music files") for format in self.supported: filter.add_pattern("*.%s" % format) dialog.add_filter(filter) response = dialog.run() if response == gtk.RESPONSE_OK: for idir in dialog.get_filenames(): try: self.xmms.playlist_radd('file://%s' % idir) except: self.logger.error("Error to adding file %s" % ifile) elif response == gtk.RESPONSE_CANCEL: self.logger.info("Closed, no files selected") dialog.destroy() def xmms2_clear(self): self.xmms.playlist_clear() def xmms2_next(self, widget=None): self.xmms.playlist_set_next_rel(1) self.xmms.playback_tickle() def xmms2_prev(self, widget=None): self.xmms.playlist_set_next_rel(-1) self.xmms.playback_tickle() def xmms2_play(self, pos): self.xmms.playlist_set_next(pos) self.xmms.playback_tickle() if self.status == 0: self.xmms.playback_start() def xmms2_start(self, widget=None): self.xmms.playback_start() def xmms2_stop(self, widget=None): self.xmms.playback_stop() def xmms2_pause(self, widget=None): self.xmms.playback_pause() def get_filename(self, url): n = url.split('/') try: name = n[-1][:-4] except: name = "" return name.replace('+',' ') def get_focus(self): print self.window.props.is_active def get_song_position(self, id): pos = 0 if id > -1: for i in self.model_songs: if i[0] == id: return pos pos+=1 return 0 def get_iter(self, id): for it in self.model_songs: if it[0] == id: return it.iter return None def initlogger(self): self.logger = logging.getLogger('xmms2me') hdlr = logging.FileHandler('%s/.config/xmms2/azucar.log' % os.getenv("HOME")) formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s') hdlr.setFormatter(formatter) self.logger.addHandler(hdlr) self.logger.setLevel(logging.INFO) def search_song(self, widget): if len(widget.get_text())==0: self.list_active.set_reorderable(True) else: self.list_active.set_reorderable(False) modeltemp = gtk.ListStore (int, str, str, str, 'gboolean') cellbackground = True for i in self.model_songs: match = re.search(r'%s' % widget.get_text().lower(), "%s %s" % (self.model_songs.get_value(i.iter, 2).lower(),self.model_songs.get_value(i.iter, 3).lower())) if match: modeltemp.append([self.model_songs.get_value(i.iter, 0), self.model_songs.get_value(i.iter, 1), self.model_songs.get_value(i.iter, 2), self.model_songs.get_value(i.iter, 3),cellbackground]) cellbackground = (True, False)[cellbackground==True] self.list_active.set_model(modeltemp) def jump_item(self): treeselection = self.list_active.get_selection() model, iter = treeselection.get_selected() pos = self.model_songs.get_path(iter)[0] current_pos = self.get_song_position(self.current_song_id) self.xmms.playlist_move(pos, current_pos) def show_config(self, widget=None): self.window_config = config.config(self.xmms) self.window_config.show() def exit(self): gtk.main_quit() def get_human_time(self, time_playback): min = time_playback/(60*1000) sec = (time_playback - min*1000*60)/1000 return "%02d:%02d" % (min, sec) def toggle_list(self, widget=None): if self.list_active.get_visible(): self.list_active.set_visible(False) self.scrolledcovers.set_visible(True) else: self.list_active.set_visible(True) self.scrolledcovers.set_visible(False) def testing(self, widget=None, event=None): #self.table_covers.addCover() #lista_albums = albums() pass width, height = widget.get_size_request() x, y = event.get_coords() percent = x*100/width pos = percent*self.current_song_duration/100 self.xmms.playback_seek_ms(pos) #p = multiprocessing.Process(target=self.lastfm.get_album_info, args=("Delphic", "Acolyte")) #p.start() #print self.lastfm.get_album_info("Delphic", "Acolyte") pass
gpl-3.0
BenediktS/three.js
utils/exporters/blender/addons/io_three/logger.py
176
1423
import os import logging import tempfile from . import constants LOG_FILE = None LOGGER = None LEVELS = { constants.DEBUG: logging.DEBUG, constants.INFO: logging.INFO, constants.WARNING: logging.WARNING, constants.ERROR: logging.ERROR, constants.CRITICAL: logging.CRITICAL } def init(filename, level=constants.DEBUG): """Initialize the logger. :param filename: base name of the log file :param level: logging level (Default value = DEBUG) """ global LOG_FILE LOG_FILE = os.path.join(tempfile.gettempdir(), filename) with open(LOG_FILE, 'w'): pass global LOGGER LOGGER = logging.getLogger('Three.Export') LOGGER.setLevel(LEVELS[level]) if not LOGGER.handlers: stream = logging.StreamHandler() stream.setLevel(LEVELS[level]) format_ = '%(asctime)s - %(name)s - %(levelname)s: %(message)s' formatter = logging.Formatter(format_) stream.setFormatter(formatter) file_handler = logging.FileHandler(LOG_FILE) file_handler.setLevel(LEVELS[level]) file_handler.setFormatter(formatter) LOGGER.addHandler(stream) LOGGER.addHandler(file_handler) def info(*args): LOGGER.info(*args) def debug(*args): LOGGER.debug(*args) def warning(*args): LOGGER.warning(*args) def error(*args): LOGGER.error(*args) def critical(*args): LOGGER.critical(*args)
mit
Sancus/bedrock
bedrock/press/views.py
20
3927
# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. from django.core.mail import EmailMessage from django.template.loader import render_to_string from django.utils.decorators import method_decorator from django.views.decorators.csrf import csrf_protect from django.views.generic.edit import FormView from bedrock.base.urlresolvers import reverse from .forms import (PressInquiryForm, SpeakerRequestForm) from lib import l10n_utils PRESS_INQUIRY_EMAIL_SUBJECT = 'New Press Inquiry' PRESS_INQUIRY_EMAIL_TO = ['[email protected]'] SPEAKER_REQUEST_EMAIL_FROM = PRESS_INQUIRY_EMAIL_FROM = 'Mozilla.com <[email protected]>' SPEAKER_REQUEST_EMAIL_SUBJECT = 'New speaker request form submission' SPEAKER_REQUEST_EMAIL_TO = ['[email protected]'] class PressInquiryView(FormView): form_class = PressInquiryForm template_name = 'press/press-inquiry.html' @method_decorator(csrf_protect) def dispatch(self, request, *args, **kwargs): return super(PressInquiryView, self).dispatch(request, *args, **kwargs) def get_context_data(self, **kwargs): context = super(PressInquiryView, self).get_context_data(**kwargs) context['form_success'] = 'success' in self.request.GET return context def get_success_url(self): return reverse('press.press-inquiry') + '?success=True' def form_valid(self, form): self.send_email(form) return super(PressInquiryView, self).form_valid(form) def send_email(self, form): subject = PRESS_INQUIRY_EMAIL_SUBJECT sender = PRESS_INQUIRY_EMAIL_FROM to = PRESS_INQUIRY_EMAIL_TO msg = render_to_string('press/emails/press-inquiry.txt', form.cleaned_data, request=self.request) email = EmailMessage(subject, msg, sender, to) email.send() def render_to_response(self, context, **response_kwargs): return l10n_utils.render(self.request, self.get_template_names(), context, **response_kwargs) class SpeakerRequestView(FormView): form_class = SpeakerRequestForm template_name = 'press/speaker-request.html' @method_decorator(csrf_protect) def dispatch(self, request, *args, **kwargs): return super(SpeakerRequestView, self).dispatch(request, *args, **kwargs) def get_form_kwargs(self): kwargs = super(SpeakerRequestView, self).get_form_kwargs() kwargs['auto_id'] = '%s' return kwargs def get_context_data(self, **kwargs): context = super(SpeakerRequestView, self).get_context_data(**kwargs) context['form_success'] = 'success' in self.request.GET return context def get_success_url(self): return reverse('press.speaker-request') + '?success=True' def form_valid(self, form): self.send_email(form) return super(SpeakerRequestView, self).form_valid(form) def send_email(self, form): subject = SPEAKER_REQUEST_EMAIL_SUBJECT sender = SPEAKER_REQUEST_EMAIL_FROM to = SPEAKER_REQUEST_EMAIL_TO msg = render_to_string('press/emails/speaker-request.txt', form.cleaned_data, request=self.request) email = EmailMessage(subject, msg, sender, to) attachment = form.cleaned_data['sr_attachment'] if (attachment): email.attach(attachment.name, attachment.read(), attachment.content_type) email.send() def render_to_response(self, context, **response_kwargs): return l10n_utils.render(self.request, self.get_template_names(), context, **response_kwargs)
mpl-2.0
amenonsen/ansible
lib/ansible/plugins/action/dellos6.py
38
4085
# # (c) 2016 Red Hat Inc. # # (c) 2017 Dell EMC. # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # from __future__ import (absolute_import, division, print_function) __metaclass__ = type import sys import copy from ansible import constants as C from ansible.module_utils._text import to_text from ansible.module_utils.connection import Connection from ansible.plugins.action.network import ActionModule as ActionNetworkModule from ansible.module_utils.network.dellos6.dellos6 import dellos6_provider_spec from ansible.module_utils.network.common.utils import load_provider from ansible.utils.display import Display display = Display() class ActionModule(ActionNetworkModule): def run(self, tmp=None, task_vars=None): del tmp # tmp no longer has any effect self._config_module = True if self._task.action == 'dellos6_config' else False socket_path = None if self._play_context.connection == 'network_cli': provider = self._task.args.get('provider', {}) if any(provider.values()): display.warning('provider is unnecessary when using network_cli and will be ignored') del self._task.args['provider'] elif self._play_context.connection == 'local': provider = load_provider(dellos6_provider_spec, self._task.args) pc = copy.deepcopy(self._play_context) pc.connection = 'network_cli' pc.network_os = 'dellos6' pc.remote_addr = provider['host'] or self._play_context.remote_addr pc.port = int(provider['port'] or self._play_context.port or 22) pc.remote_user = provider['username'] or self._play_context.connection_user pc.password = provider['password'] or self._play_context.password pc.private_key_file = provider['ssh_keyfile'] or self._play_context.private_key_file command_timeout = int(provider['timeout'] or C.PERSISTENT_COMMAND_TIMEOUT) pc.become = provider['authorize'] or False if pc.become: pc.become_method = 'enable' pc.become_pass = provider['auth_pass'] display.vvv('using connection plugin %s' % pc.connection, pc.remote_addr) connection = self._shared_loader_obj.connection_loader.get('persistent', pc, sys.stdin) connection.set_options(direct={'persistent_command_timeout': command_timeout}) socket_path = connection.run() display.vvvv('socket_path: %s' % socket_path, pc.remote_addr) if not socket_path: return {'failed': True, 'msg': 'unable to open shell. Please see: ' + 'https://docs.ansible.com/ansible/network_debug_troubleshooting.html#unable-to-open-shell'} task_vars['ansible_socket'] = socket_path # make sure we are in the right cli context which should be # enable mode and not config module if socket_path is None: socket_path = self._connection.socket_path conn = Connection(socket_path) out = conn.get_prompt() while to_text(out, errors='surrogate_then_replace').strip().endswith(')#'): display.vvvv('wrong context, sending exit to device', self._play_context.remote_addr) conn.send_command('exit') out = conn.get_prompt() result = super(ActionModule, self).run(task_vars=task_vars) return result
gpl-3.0
ondrokrc/gramps
gramps/gen/utils/cast.py
1
2885
# # Gramps - a GTK+/GNOME based genealogy program # # Copyright (C) 2000-2007 Donald N. Allingham # Copyright (C) 2009 Gary Burton # Copyright (C) 2011 Tim G L Lyons # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # """ Utility functions to cast types """ #------------------------------------------------------------------------- # # Python modules # #------------------------------------------------------------------------- import os import sys import logging LOG = logging.getLogger(".") #------------------------------------------------------------------------- # # Gramps modules # #------------------------------------------------------------------------- from ..const import GRAMPS_LOCALE as glocale _ = glocale.translation.gettext from ..constfunc import conv_to_unicode #strings in database are utf-8 conv_dbstr_to_unicode = lambda x: conv_to_unicode(x, 'UTF-8') def cast_to_bool(val): return val in ['True', 'true', _('True'), _('true'), '1'] # 3139 def get_type_converter(val): """ Return function that converts strings into the type of val. """ val_type = type(val) if isinstance(val, str): return str elif val_type == int: return int elif val_type == float: return float elif val_type == bool: return cast_to_bool elif val_type in (list, tuple): return list def type_name(val): """ Return the name the type of val. Only numbers and strings are supported. The rest becomes strings (unicode). """ val_type = type(val) if val_type == int: return 'int' elif val_type == float: return 'float' elif val_type == bool: return 'bool' elif isinstance(val, str): return 'unicode' return 'unicode' def get_type_converter_by_name(val_str): """ Return function that converts strings into the type given by val_str. Only numbers and strings are supported. The rest becomes strings (unicode). """ if val_str == 'int': return int elif val_str == 'float': return float elif val_str == 'bool': return cast_to_bool elif val_str in ('str', 'unicode'): return str return str
gpl-2.0
qiankunshe/sky_engine
sky/tools/webkitpy/common/system/crashlogs_unittest.py
58
6058
# Copyright (C) 2011 Google Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY # EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF # THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import unittest from webkitpy.common.system.crashlogs import CrashLogs from webkitpy.common.system.filesystem_mock import MockFileSystem from webkitpy.common.system.systemhost import SystemHost from webkitpy.common.system.systemhost_mock import MockSystemHost def make_mock_crash_report_darwin(process_name, pid): return """Process: {process_name} [{pid}] Path: /Volumes/Data/slave/snowleopard-intel-release-tests/build/WebKitBuild/Release/{process_name} Identifier: {process_name} Version: ??? (???) Code Type: X86-64 (Native) Parent Process: Python [2578] Date/Time: 2011-12-07 13:27:34.816 -0800 OS Version: Mac OS X 10.6.8 (10K549) Report Version: 6 Interval Since Last Report: 1660 sec Crashes Since Last Report: 1 Per-App Crashes Since Last Report: 1 Anonymous UUID: 507D4EEB-9D70-4E2E-B322-2D2F0ABFEDC0 Exception Type: EXC_BREAKPOINT (SIGTRAP) Exception Codes: 0x0000000000000002, 0x0000000000000000 Crashed Thread: 0 Dyld Error Message: Library not loaded: /Volumes/Data/WebKit-BuildSlave/snowleopard-intel-release/build/WebKitBuild/Release/WebCore.framework/Versions/A/WebCore Referenced from: /Volumes/Data/slave/snowleopard-intel-release/build/WebKitBuild/Release/WebKit.framework/Versions/A/WebKit Reason: image not found Binary Images: 0x7fff5fc00000 - 0x7fff5fc3be0f dyld 132.1 (???) <29DECB19-0193-2575-D838-CF743F0400B2> /usr/lib/dyld System Profile: Model: Xserve3,1, BootROM XS31.0081.B04, 8 processors, Quad-Core Intel Xeon, 2.26 GHz, 6 GB, SMC 1.43f4 Graphics: NVIDIA GeForce GT 120, NVIDIA GeForce GT 120, PCIe, 256 MB Memory Module: global_name Network Service: Ethernet 2, Ethernet, en1 PCI Card: NVIDIA GeForce GT 120, sppci_displaycontroller, MXM-Slot Serial ATA Device: OPTIARC DVD RW AD-5670S """.format(process_name=process_name, pid=pid) class CrashLogsTest(unittest.TestCase): def test_find_log_darwin(self): if not SystemHost().platform.is_mac(): return older_mock_crash_report = make_mock_crash_report_darwin('DumpRenderTree', 28528) mock_crash_report = make_mock_crash_report_darwin('DumpRenderTree', 28530) newer_mock_crash_report = make_mock_crash_report_darwin('DumpRenderTree', 28529) other_process_mock_crash_report = make_mock_crash_report_darwin('FooProcess', 28527) misformatted_mock_crash_report = 'Junk that should not appear in a crash report' + make_mock_crash_report_darwin('DumpRenderTree', 28526)[200:] files = {} files['/Users/mock/Library/Logs/DiagnosticReports/DumpRenderTree_2011-06-13-150718_quadzen.crash'] = older_mock_crash_report files['/Users/mock/Library/Logs/DiagnosticReports/DumpRenderTree_2011-06-13-150719_quadzen.crash'] = mock_crash_report files['/Users/mock/Library/Logs/DiagnosticReports/DumpRenderTree_2011-06-13-150720_quadzen.crash'] = newer_mock_crash_report files['/Users/mock/Library/Logs/DiagnosticReports/DumpRenderTree_2011-06-13-150721_quadzen.crash'] = None files['/Users/mock/Library/Logs/DiagnosticReports/DumpRenderTree_2011-06-13-150722_quadzen.crash'] = other_process_mock_crash_report files['/Users/mock/Library/Logs/DiagnosticReports/DumpRenderTree_2011-06-13-150723_quadzen.crash'] = misformatted_mock_crash_report filesystem = MockFileSystem(files) crash_logs = CrashLogs(MockSystemHost(filesystem=filesystem)) log = crash_logs.find_newest_log("DumpRenderTree") self.assertMultiLineEqual(log, newer_mock_crash_report) log = crash_logs.find_newest_log("DumpRenderTree", 28529) self.assertMultiLineEqual(log, newer_mock_crash_report) log = crash_logs.find_newest_log("DumpRenderTree", 28530) self.assertMultiLineEqual(log, mock_crash_report) log = crash_logs.find_newest_log("DumpRenderTree", 28531) self.assertIsNone(log) log = crash_logs.find_newest_log("DumpRenderTree", newer_than=1.0) self.assertIsNone(log) def bad_read(path): raise IOError('IOError: No such file or directory') def bad_mtime(path): raise OSError('OSError: No such file or directory') filesystem.read_text_file = bad_read log = crash_logs.find_newest_log("DumpRenderTree", 28531, include_errors=True) self.assertIn('IOError: No such file or directory', log) filesystem = MockFileSystem(files) crash_logs = CrashLogs(MockSystemHost(filesystem=filesystem)) filesystem.mtime = bad_mtime log = crash_logs.find_newest_log("DumpRenderTree", newer_than=1.0, include_errors=True) self.assertIn('OSError: No such file or directory', log)
bsd-3-clause
centrumholdings/buildbot
buildbot/scripts/logwatcher.py
2
3195
import os from twisted.python.failure import Failure from twisted.internet import defer, reactor, protocol, error from twisted.protocols.basic import LineOnlyReceiver class FakeTransport: disconnecting = False class BuildmasterTimeoutError(Exception): pass class BuildslaveTimeoutError(Exception): pass class ReconfigError(Exception): pass class BuildSlaveDetectedError(Exception): pass class TailProcess(protocol.ProcessProtocol): def outReceived(self, data): self.lw.dataReceived(data) def errReceived(self, data): print "ERR: '%s'" % (data,) class LogWatcher(LineOnlyReceiver): POLL_INTERVAL = 0.1 TIMEOUT_DELAY = 10.0 delimiter = os.linesep def __init__(self, logfile): self.logfile = logfile self.in_reconfig = False self.transport = FakeTransport() self.pp = TailProcess() self.pp.lw = self self.processtype = "buildmaster" self.timer = None def start(self): # If the log file doesn't exist, create it now. if not os.path.exists(self.logfile): open(self.logfile, 'a').close() # return a Deferred that fires when the reconfig process has # finished. It errbacks with TimeoutError if the finish line has not # been seen within 10 seconds, and with ReconfigError if the error # line was seen. If the logfile could not be opened, it errbacks with # an IOError. self.p = reactor.spawnProcess(self.pp, "/usr/bin/tail", ("tail", "-f", "-n", "0", self.logfile), env=os.environ, ) self.running = True d = defer.maybeDeferred(self._start) return d def _start(self): self.d = defer.Deferred() self.timer = reactor.callLater(self.TIMEOUT_DELAY, self.timeout) return self.d def timeout(self): self.timer = None if self.processtype == "buildmaster": e = BuildmasterTimeoutError() else: e = BuildslaveTimeoutError() self.finished(Failure(e)) def finished(self, results): try: self.p.signalProcess("KILL") except error.ProcessExitedAlready: pass if self.timer: self.timer.cancel() self.timer = None self.running = False self.in_reconfig = False self.d.callback(results) def lineReceived(self, line): if not self.running: return if "Log opened." in line: self.in_reconfig = True if "loading configuration from" in line: self.in_reconfig = True if "Creating BuildSlave" in line: self.processtype = "buildslave" if self.in_reconfig: print line if "message from master: attached" in line: return self.finished("buildslave") if "I will keep using the previous config file" in line: return self.finished(Failure(ReconfigError())) if "configuration update complete" in line: return self.finished("buildmaster")
gpl-2.0
DailyActie/Surrogate-Model
01-codes/scikit-learn-master/sklearn/preprocessing/data.py
1
67256
# Authors: Alexandre Gramfort <[email protected]> # Mathieu Blondel <[email protected]> # Olivier Grisel <[email protected]> # Andreas Mueller <[email protected]> # Eric Martin <[email protected]> # Giorgio Patrini <[email protected]> # License: BSD 3 clause import numbers import warnings from itertools import chain, combinations import numpy as np from scipy import sparse from ..utils.sparsefuncs_fast import (inplace_csr_row_normalize_l1, inplace_csr_row_normalize_l2) from ..base import BaseEstimator, TransformerMixin from ..externals import six from ..utils import check_array from ..utils import deprecated from ..utils.extmath import _incremental_mean_and_var from ..utils.extmath import row_norms from ..utils.fixes import combinations_with_replacement as combinations_w_r from ..utils.sparsefuncs import (inplace_column_scale, mean_variance_axis, incr_mean_variance_axis, min_max_axis) from ..utils.validation import check_is_fitted, FLOAT_DTYPES zip = six.moves.zip map = six.moves.map range = six.moves.range __all__ = [ 'Binarizer', 'KernelCenterer', 'MinMaxScaler', 'MaxAbsScaler', 'Normalizer', 'OneHotEncoder', 'RobustScaler', 'StandardScaler', 'add_dummy_feature', 'binarize', 'normalize', 'scale', 'robust_scale', 'maxabs_scale', 'minmax_scale', ] DEPRECATION_MSG_1D = ( "Passing 1d arrays as data is deprecated in 0.17 and will " "raise ValueError in 0.19. Reshape your data either using " "X.reshape(-1, 1) if your data has a single feature or " "X.reshape(1, -1) if it contains a single sample." ) def _handle_zeros_in_scale(scale, copy=True): ''' Makes sure that whenever scale is zero, we handle it correctly. This happens in most scalers when we have constant features.''' # if we are fitting on 1D arrays, scale might be a scalar if np.isscalar(scale): if scale == .0: scale = 1. return scale elif isinstance(scale, np.ndarray): if copy: # New array to avoid side-effects scale = scale.copy() scale[scale == 0.0] = 1.0 return scale def scale(X, axis=0, with_mean=True, with_std=True, copy=True): """Standardize a dataset along any axis Center to the mean and component wise scale to unit variance. Read more in the :ref:`User Guide <preprocessing_scaler>`. Parameters ---------- X : {array-like, sparse matrix} The data to center and scale. axis : int (0 by default) axis used to compute the means and standard deviations along. If 0, independently standardize each feature, otherwise (if 1) standardize each sample. with_mean : boolean, True by default If True, center the data before scaling. with_std : boolean, True by default If True, scale the data to unit variance (or equivalently, unit standard deviation). copy : boolean, optional, default True set to False to perform inplace row normalization and avoid a copy (if the input is already a numpy array or a scipy.sparse CSC matrix and if axis is 1). Notes ----- This implementation will refuse to center scipy.sparse matrices since it would make them non-sparse and would potentially crash the program with memory exhaustion problems. Instead the caller is expected to either set explicitly `with_mean=False` (in that case, only variance scaling will be performed on the features of the CSC matrix) or to call `X.toarray()` if he/she expects the materialized dense array to fit in memory. To avoid memory copy the caller should pass a CSC matrix. See also -------- :class:`sklearn.preprocessing.StandardScaler` to perform centering and scaling using the ``Transformer`` API (e.g. as part of a preprocessing :class:`sklearn.pipeline.Pipeline`) """ X = check_array(X, accept_sparse='csc', copy=copy, ensure_2d=False, warn_on_dtype=True, estimator='the scale function', dtype=FLOAT_DTYPES) if sparse.issparse(X): if with_mean: raise ValueError( "Cannot center sparse matrices: pass `with_mean=False` instead" " See docstring for motivation and alternatives.") if axis != 0: raise ValueError("Can only scale sparse matrix on axis=0, " " got axis=%d" % axis) if with_std: _, var = mean_variance_axis(X, axis=0) var = _handle_zeros_in_scale(var, copy=False) inplace_column_scale(X, 1 / np.sqrt(var)) else: X = np.asarray(X) if with_mean: mean_ = np.mean(X, axis) if with_std: scale_ = np.std(X, axis) # Xr is a view on the original array that enables easy use of # broadcasting on the axis in which we are interested in Xr = np.rollaxis(X, axis) if with_mean: Xr -= mean_ mean_1 = Xr.mean(axis=0) # Verify that mean_1 is 'close to zero'. If X contains very # large values, mean_1 can also be very large, due to a lack of # precision of mean_. In this case, a pre-scaling of the # concerned feature is efficient, for instance by its mean or # maximum. if not np.allclose(mean_1, 0): warnings.warn("Numerical issues were encountered " "when centering the data " "and might not be solved. Dataset may " "contain too large values. You may need " "to prescale your features.") Xr -= mean_1 if with_std: scale_ = _handle_zeros_in_scale(scale_, copy=False) Xr /= scale_ if with_mean: mean_2 = Xr.mean(axis=0) # If mean_2 is not 'close to zero', it comes from the fact that # scale_ is very small so that mean_2 = mean_1/scale_ > 0, even # if mean_1 was close to zero. The problem is thus essentially # due to the lack of precision of mean_. A solution is then to # subtract the mean again: if not np.allclose(mean_2, 0): warnings.warn("Numerical issues were encountered " "when scaling the data " "and might not be solved. The standard " "deviation of the data is probably " "very close to 0. ") Xr -= mean_2 return X class MinMaxScaler(BaseEstimator, TransformerMixin): """Transforms features by scaling each feature to a given range. This estimator scales and translates each feature individually such that it is in the given range on the training set, i.e. between zero and one. The transformation is given by:: X_std = (X - X.min(axis=0)) / (X.max(axis=0) - X.min(axis=0)) X_scaled = X_std * (max - min) + min where min, max = feature_range. This transformation is often used as an alternative to zero mean, unit variance scaling. Read more in the :ref:`User Guide <preprocessing_scaler>`. Parameters ---------- feature_range: tuple (min, max), default=(0, 1) Desired range of transformed data. copy : boolean, optional, default True Set to False to perform inplace row normalization and avoid a copy (if the input is already a numpy array). Attributes ---------- min_ : ndarray, shape (n_features,) Per feature adjustment for minimum. scale_ : ndarray, shape (n_features,) Per feature relative scaling of the data. .. versionadded:: 0.17 *scale_* attribute. data_min_ : ndarray, shape (n_features,) Per feature minimum seen in the data .. versionadded:: 0.17 *data_min_* instead of deprecated *data_min*. data_max_ : ndarray, shape (n_features,) Per feature maximum seen in the data .. versionadded:: 0.17 *data_max_* instead of deprecated *data_max*. data_range_ : ndarray, shape (n_features,) Per feature range ``(data_max_ - data_min_)`` seen in the data .. versionadded:: 0.17 *data_range_* instead of deprecated *data_range*. """ def __init__(self, feature_range=(0, 1), copy=True): self.feature_range = feature_range self.copy = copy @property @deprecated("Attribute data_range will be removed in " "0.19. Use ``data_range_`` instead") def data_range(self): return self.data_range_ @property @deprecated("Attribute data_min will be removed in " "0.19. Use ``data_min_`` instead") def data_min(self): return self.data_min_ def _reset(self): """Reset internal data-dependent state of the scaler, if necessary. __init__ parameters are not touched. """ # Checking one attribute is enough, becase they are all set together # in partial_fit if hasattr(self, 'scale_'): del self.scale_ del self.min_ del self.n_samples_seen_ del self.data_min_ del self.data_max_ del self.data_range_ def fit(self, X, y=None): """Compute the minimum and maximum to be used for later scaling. Parameters ---------- X : array-like, shape [n_samples, n_features] The data used to compute the per-feature minimum and maximum used for later scaling along the features axis. """ # Reset internal state before fitting self._reset() return self.partial_fit(X, y) def partial_fit(self, X, y=None): """Online computation of min and max on X for later scaling. All of X is processed as a single batch. This is intended for cases when `fit` is not feasible due to very large number of `n_samples` or because X is read from a continuous stream. Parameters ---------- X : array-like, shape [n_samples, n_features] The data used to compute the mean and standard deviation used for later scaling along the features axis. y : Passthrough for ``Pipeline`` compatibility. """ feature_range = self.feature_range if feature_range[0] >= feature_range[1]: raise ValueError("Minimum of desired feature range must be smaller" " than maximum. Got %s." % str(feature_range)) if sparse.issparse(X): raise TypeError("MinMaxScaler does no support sparse input. " "You may consider to use MaxAbsScaler instead.") X = check_array(X, copy=self.copy, ensure_2d=False, warn_on_dtype=True, estimator=self, dtype=FLOAT_DTYPES) if X.ndim == 1: warnings.warn(DEPRECATION_MSG_1D, DeprecationWarning) data_min = np.min(X, axis=0) data_max = np.max(X, axis=0) # First pass if not hasattr(self, 'n_samples_seen_'): self.n_samples_seen_ = X.shape[0] # Next steps else: data_min = np.minimum(self.data_min_, data_min) data_max = np.maximum(self.data_max_, data_max) self.n_samples_seen_ += X.shape[0] data_range = data_max - data_min self.scale_ = ((feature_range[1] - feature_range[0]) / _handle_zeros_in_scale(data_range)) self.min_ = feature_range[0] - data_min * self.scale_ self.data_min_ = data_min self.data_max_ = data_max self.data_range_ = data_range return self def transform(self, X): """Scaling features of X according to feature_range. Parameters ---------- X : array-like, shape [n_samples, n_features] Input data that will be transformed. """ check_is_fitted(self, 'scale_') X = check_array(X, copy=self.copy, ensure_2d=False, dtype=FLOAT_DTYPES) if X.ndim == 1: warnings.warn(DEPRECATION_MSG_1D, DeprecationWarning) X *= self.scale_ X += self.min_ return X def inverse_transform(self, X): """Undo the scaling of X according to feature_range. Parameters ---------- X : array-like, shape [n_samples, n_features] Input data that will be transformed. It cannot be sparse. """ check_is_fitted(self, 'scale_') X = check_array(X, copy=self.copy, ensure_2d=False, dtype=FLOAT_DTYPES) if X.ndim == 1: warnings.warn(DEPRECATION_MSG_1D, DeprecationWarning) X -= self.min_ X /= self.scale_ return X def minmax_scale(X, feature_range=(0, 1), axis=0, copy=True): """Transforms features by scaling each feature to a given range. This estimator scales and translates each feature individually such that it is in the given range on the training set, i.e. between zero and one. The transformation is given by:: X_std = (X - X.min(axis=0)) / (X.max(axis=0) - X.min(axis=0)) X_scaled = X_std * (max - min) + min where min, max = feature_range. This transformation is often used as an alternative to zero mean, unit variance scaling. Read more in the :ref:`User Guide <preprocessing_scaler>`. .. versionadded:: 0.17 *minmax_scale* function interface to :class:`sklearn.preprocessing.MinMaxScaler`. Parameters ---------- feature_range: tuple (min, max), default=(0, 1) Desired range of transformed data. axis : int (0 by default) axis used to scale along. If 0, independently scale each feature, otherwise (if 1) scale each sample. copy : boolean, optional, default is True Set to False to perform inplace scaling and avoid a copy (if the input is already a numpy array). """ # To allow retro-compatibility, we handle here the case of 1D-input # From 0.17, 1D-input are deprecated in scaler objects # Although, we want to allow the users to keep calling this function # with 1D-input. # Cast input to array, as we need to check ndim. Prior to 0.17, that was # done inside the scaler object fit_transform. # If copy is required, it will be done inside the scaler object. X = check_array(X, copy=False, ensure_2d=False, warn_on_dtype=True, dtype=FLOAT_DTYPES) original_ndim = X.ndim if original_ndim == 1: X = X.reshape(X.shape[0], 1) s = MinMaxScaler(feature_range=feature_range, copy=copy) if axis == 0: X = s.fit_transform(X) else: X = s.fit_transform(X.T).T if original_ndim == 1: X = X.ravel() return X class StandardScaler(BaseEstimator, TransformerMixin): """Standardize features by removing the mean and scaling to unit variance Centering and scaling happen independently on each feature by computing the relevant statistics on the samples in the training set. Mean and standard deviation are then stored to be used on later data using the `transform` method. Standardization of a dataset is a common requirement for many machine learning estimators: they might behave badly if the individual feature do not more or less look like standard normally distributed data (e.g. Gaussian with 0 mean and unit variance). For instance many elements used in the objective function of a learning algorithm (such as the RBF kernel of Support Vector Machines or the L1 and L2 regularizers of linear models) assume that all features are centered around 0 and have variance in the same order. If a feature has a variance that is orders of magnitude larger that others, it might dominate the objective function and make the estimator unable to learn from other features correctly as expected. This scaler can also be applied to sparse CSR or CSC matrices by passing `with_mean=False` to avoid breaking the sparsity structure of the data. Read more in the :ref:`User Guide <preprocessing_scaler>`. Parameters ---------- with_mean : boolean, True by default If True, center the data before scaling. This does not work (and will raise an exception) when attempted on sparse matrices, because centering them entails building a dense matrix which in common use cases is likely to be too large to fit in memory. with_std : boolean, True by default If True, scale the data to unit variance (or equivalently, unit standard deviation). copy : boolean, optional, default True If False, try to avoid a copy and do inplace scaling instead. This is not guaranteed to always work inplace; e.g. if the data is not a NumPy array or scipy.sparse CSR matrix, a copy may still be returned. Attributes ---------- scale_ : ndarray, shape (n_features,) Per feature relative scaling of the data. .. versionadded:: 0.17 *scale_* is recommended instead of deprecated *std_*. mean_ : array of floats with shape [n_features] The mean value for each feature in the training set. var_ : array of floats with shape [n_features] The variance for each feature in the training set. Used to compute `scale_` n_samples_seen_ : int The number of samples processed by the estimator. Will be reset on new calls to fit, but increments across ``partial_fit`` calls. See also -------- :func:`sklearn.preprocessing.scale` to perform centering and scaling without using the ``Transformer`` object oriented API :class:`sklearn.decomposition.RandomizedPCA` with `whiten=True` to further remove the linear correlation across features. """ def __init__(self, copy=True, with_mean=True, with_std=True): self.with_mean = with_mean self.with_std = with_std self.copy = copy @property @deprecated("Attribute ``std_`` will be removed in 0.19. Use ``scale_`` instead") def std_(self): return self.scale_ def _reset(self): """Reset internal data-dependent state of the scaler, if necessary. __init__ parameters are not touched. """ # Checking one attribute is enough, becase they are all set together # in partial_fit if hasattr(self, 'scale_'): del self.scale_ del self.n_samples_seen_ del self.mean_ del self.var_ def fit(self, X, y=None): """Compute the mean and std to be used for later scaling. Parameters ---------- X : {array-like, sparse matrix}, shape [n_samples, n_features] The data used to compute the mean and standard deviation used for later scaling along the features axis. y: Passthrough for ``Pipeline`` compatibility. """ # Reset internal state before fitting self._reset() return self.partial_fit(X, y) def partial_fit(self, X, y=None): """Online computation of mean and std on X for later scaling. All of X is processed as a single batch. This is intended for cases when `fit` is not feasible due to very large number of `n_samples` or because X is read from a continuous stream. The algorithm for incremental mean and std is given in Equation 1.5a,b in Chan, Tony F., Gene H. Golub, and Randall J. LeVeque. "Algorithms for computing the sample variance: Analysis and recommendations." The American Statistician 37.3 (1983): 242-247: Parameters ---------- X : {array-like, sparse matrix}, shape [n_samples, n_features] The data used to compute the mean and standard deviation used for later scaling along the features axis. y: Passthrough for ``Pipeline`` compatibility. """ X = check_array(X, accept_sparse=('csr', 'csc'), copy=self.copy, ensure_2d=False, warn_on_dtype=True, estimator=self, dtype=FLOAT_DTYPES) if X.ndim == 1: warnings.warn(DEPRECATION_MSG_1D, DeprecationWarning) # Even in the case of `with_mean=False`, we update the mean anyway # This is needed for the incremental computation of the var # See incr_mean_variance_axis and _incremental_mean_variance_axis if sparse.issparse(X): if self.with_mean: raise ValueError( "Cannot center sparse matrices: pass `with_mean=False` " "instead. See docstring for motivation and alternatives.") if self.with_std: # First pass if not hasattr(self, 'n_samples_seen_'): self.mean_, self.var_ = mean_variance_axis(X, axis=0) self.n_samples_seen_ = X.shape[0] # Next passes else: self.mean_, self.var_, self.n_samples_seen_ = \ incr_mean_variance_axis(X, axis=0, last_mean=self.mean_, last_var=self.var_, last_n=self.n_samples_seen_) else: self.mean_ = None self.var_ = None else: # First pass if not hasattr(self, 'n_samples_seen_'): self.mean_ = .0 self.n_samples_seen_ = 0 if self.with_std: self.var_ = .0 else: self.var_ = None self.mean_, self.var_, self.n_samples_seen_ = \ _incremental_mean_and_var(X, self.mean_, self.var_, self.n_samples_seen_) if self.with_std: self.scale_ = _handle_zeros_in_scale(np.sqrt(self.var_)) else: self.scale_ = None return self def transform(self, X, y=None, copy=None): """Perform standardization by centering and scaling Parameters ---------- X : array-like, shape [n_samples, n_features] The data used to scale along the features axis. """ check_is_fitted(self, 'scale_') copy = copy if copy is not None else self.copy X = check_array(X, accept_sparse='csr', copy=copy, ensure_2d=False, warn_on_dtype=True, estimator=self, dtype=FLOAT_DTYPES) if X.ndim == 1: warnings.warn(DEPRECATION_MSG_1D, DeprecationWarning) if sparse.issparse(X): if self.with_mean: raise ValueError( "Cannot center sparse matrices: pass `with_mean=False` " "instead. See docstring for motivation and alternatives.") if self.scale_ is not None: inplace_column_scale(X, 1 / self.scale_) else: if self.with_mean: X -= self.mean_ if self.with_std: X /= self.scale_ return X def inverse_transform(self, X, copy=None): """Scale back the data to the original representation Parameters ---------- X : array-like, shape [n_samples, n_features] The data used to scale along the features axis. """ check_is_fitted(self, 'scale_') copy = copy if copy is not None else self.copy if sparse.issparse(X): if self.with_mean: raise ValueError( "Cannot uncenter sparse matrices: pass `with_mean=False` " "instead See docstring for motivation and alternatives.") if not sparse.isspmatrix_csr(X): X = X.tocsr() copy = False if copy: X = X.copy() if self.scale_ is not None: inplace_column_scale(X, self.scale_) else: X = np.asarray(X) if copy: X = X.copy() if self.with_std: X *= self.scale_ if self.with_mean: X += self.mean_ return X class MaxAbsScaler(BaseEstimator, TransformerMixin): """Scale each feature by its maximum absolute value. This estimator scales and translates each feature individually such that the maximal absolute value of each feature in the training set will be 1.0. It does not shift/center the data, and thus does not destroy any sparsity. This scaler can also be applied to sparse CSR or CSC matrices. .. versionadded:: 0.17 Parameters ---------- copy : boolean, optional, default is True Set to False to perform inplace scaling and avoid a copy (if the input is already a numpy array). Attributes ---------- scale_ : ndarray, shape (n_features,) Per feature relative scaling of the data. .. versionadded:: 0.17 *scale_* attribute. max_abs_ : ndarray, shape (n_features,) Per feature maximum absolute value. n_samples_seen_ : int The number of samples processed by the estimator. Will be reset on new calls to fit, but increments across ``partial_fit`` calls. """ def __init__(self, copy=True): self.copy = copy def _reset(self): """Reset internal data-dependent state of the scaler, if necessary. __init__ parameters are not touched. """ # Checking one attribute is enough, becase they are all set together # in partial_fit if hasattr(self, 'scale_'): del self.scale_ del self.n_samples_seen_ del self.max_abs_ def fit(self, X, y=None): """Compute the maximum absolute value to be used for later scaling. Parameters ---------- X : {array-like, sparse matrix}, shape [n_samples, n_features] The data used to compute the per-feature minimum and maximum used for later scaling along the features axis. """ # Reset internal state before fitting self._reset() return self.partial_fit(X, y) def partial_fit(self, X, y=None): """Online computation of max absolute value of X for later scaling. All of X is processed as a single batch. This is intended for cases when `fit` is not feasible due to very large number of `n_samples` or because X is read from a continuous stream. Parameters ---------- X : {array-like, sparse matrix}, shape [n_samples, n_features] The data used to compute the mean and standard deviation used for later scaling along the features axis. y: Passthrough for ``Pipeline`` compatibility. """ X = check_array(X, accept_sparse=('csr', 'csc'), copy=self.copy, ensure_2d=False, estimator=self, dtype=FLOAT_DTYPES) if X.ndim == 1: warnings.warn(DEPRECATION_MSG_1D, DeprecationWarning) if sparse.issparse(X): mins, maxs = min_max_axis(X, axis=0) max_abs = np.maximum(np.abs(mins), np.abs(maxs)) else: max_abs = np.abs(X).max(axis=0) # First pass if not hasattr(self, 'n_samples_seen_'): self.n_samples_seen_ = X.shape[0] # Next passes else: max_abs = np.maximum(self.max_abs_, max_abs) self.n_samples_seen_ += X.shape[0] self.max_abs_ = max_abs self.scale_ = _handle_zeros_in_scale(max_abs) return self def transform(self, X, y=None): """Scale the data Parameters ---------- X : {array-like, sparse matrix} The data that should be scaled. """ check_is_fitted(self, 'scale_') X = check_array(X, accept_sparse=('csr', 'csc'), copy=self.copy, ensure_2d=False, estimator=self, dtype=FLOAT_DTYPES) if X.ndim == 1: warnings.warn(DEPRECATION_MSG_1D, DeprecationWarning) if sparse.issparse(X): inplace_column_scale(X, 1.0 / self.scale_) else: X /= self.scale_ return X def inverse_transform(self, X): """Scale back the data to the original representation Parameters ---------- X : {array-like, sparse matrix} The data that should be transformed back. """ check_is_fitted(self, 'scale_') X = check_array(X, accept_sparse=('csr', 'csc'), copy=self.copy, ensure_2d=False, estimator=self, dtype=FLOAT_DTYPES) if X.ndim == 1: warnings.warn(DEPRECATION_MSG_1D, DeprecationWarning) if sparse.issparse(X): inplace_column_scale(X, self.scale_) else: X *= self.scale_ return X def maxabs_scale(X, axis=0, copy=True): """Scale each feature to the [-1, 1] range without breaking the sparsity. This estimator scales each feature individually such that the maximal absolute value of each feature in the training set will be 1.0. This scaler can also be applied to sparse CSR or CSC matrices. Parameters ---------- axis : int (0 by default) axis used to scale along. If 0, independently scale each feature, otherwise (if 1) scale each sample. copy : boolean, optional, default is True Set to False to perform inplace scaling and avoid a copy (if the input is already a numpy array). """ # To allow retro-compatibility, we handle here the case of 1D-input # From 0.17, 1D-input are deprecated in scaler objects # Although, we want to allow the users to keep calling this function # with 1D-input. # Cast input to array, as we need to check ndim. Prior to 0.17, that was # done inside the scaler object fit_transform. # If copy is required, it will be done inside the scaler object. X = check_array(X, accept_sparse=('csr', 'csc'), copy=False, ensure_2d=False, dtype=FLOAT_DTYPES) original_ndim = X.ndim if original_ndim == 1: X = X.reshape(X.shape[0], 1) s = MaxAbsScaler(copy=copy) if axis == 0: X = s.fit_transform(X) else: X = s.fit_transform(X.T).T if original_ndim == 1: X = X.ravel() return X class RobustScaler(BaseEstimator, TransformerMixin): """Scale features using statistics that are robust to outliers. This Scaler removes the median and scales the data according to the Interquartile Range (IQR). The IQR is the range between the 1st quartile (25th quantile) and the 3rd quartile (75th quantile). Centering and scaling happen independently on each feature (or each sample, depending on the `axis` argument) by computing the relevant statistics on the samples in the training set. Median and interquartile range are then stored to be used on later data using the `transform` method. Standardization of a dataset is a common requirement for many machine learning estimators. Typically this is done by removing the mean and scaling to unit variance. However, outliers can often influence the sample mean / variance in a negative way. In such cases, the median and the interquartile range often give better results. .. versionadded:: 0.17 Read more in the :ref:`User Guide <preprocessing_scaler>`. Parameters ---------- with_centering : boolean, True by default If True, center the data before scaling. This does not work (and will raise an exception) when attempted on sparse matrices, because centering them entails building a dense matrix which in common use cases is likely to be too large to fit in memory. with_scaling : boolean, True by default If True, scale the data to interquartile range. copy : boolean, optional, default is True If False, try to avoid a copy and do inplace scaling instead. This is not guaranteed to always work inplace; e.g. if the data is not a NumPy array or scipy.sparse CSR matrix, a copy may still be returned. Attributes ---------- center_ : array of floats The median value for each feature in the training set. scale_ : array of floats The (scaled) interquartile range for each feature in the training set. .. versionadded:: 0.17 *scale_* attribute. See also -------- :class:`sklearn.preprocessing.StandardScaler` to perform centering and scaling using mean and variance. :class:`sklearn.decomposition.RandomizedPCA` with `whiten=True` to further remove the linear correlation across features. Notes ----- See examples/preprocessing/plot_robust_scaling.py for an example. http://en.wikipedia.org/wiki/Median_(statistics) http://en.wikipedia.org/wiki/Interquartile_range """ def __init__(self, with_centering=True, with_scaling=True, copy=True): self.with_centering = with_centering self.with_scaling = with_scaling self.copy = copy def _check_array(self, X, copy): """Makes sure centering is not enabled for sparse matrices.""" X = check_array(X, accept_sparse=('csr', 'csc'), copy=self.copy, ensure_2d=False, estimator=self, dtype=FLOAT_DTYPES) if X.ndim == 1: warnings.warn(DEPRECATION_MSG_1D, DeprecationWarning) if sparse.issparse(X): if self.with_centering: raise ValueError( "Cannot center sparse matrices: use `with_centering=False`" " instead. See docstring for motivation and alternatives.") return X def fit(self, X, y=None): """Compute the median and quantiles to be used for scaling. Parameters ---------- X : array-like, shape [n_samples, n_features] The data used to compute the median and quantiles used for later scaling along the features axis. """ if sparse.issparse(X): raise TypeError("RobustScaler cannot be fitted on sparse inputs") X = self._check_array(X, self.copy) if X.ndim == 1: warnings.warn(DEPRECATION_MSG_1D, DeprecationWarning) if self.with_centering: self.center_ = np.median(X, axis=0) if self.with_scaling: q = np.percentile(X, (25, 75), axis=0) self.scale_ = (q[1] - q[0]) self.scale_ = _handle_zeros_in_scale(self.scale_, copy=False) return self def transform(self, X, y=None): """Center and scale the data Parameters ---------- X : array-like The data used to scale along the specified axis. """ if self.with_centering: check_is_fitted(self, 'center_') if self.with_scaling: check_is_fitted(self, 'scale_') X = self._check_array(X, self.copy) if X.ndim == 1: warnings.warn(DEPRECATION_MSG_1D, DeprecationWarning) if sparse.issparse(X): if self.with_scaling: inplace_column_scale(X, 1.0 / self.scale_) else: if self.with_centering: X -= self.center_ if self.with_scaling: X /= self.scale_ return X def inverse_transform(self, X): """Scale back the data to the original representation Parameters ---------- X : array-like The data used to scale along the specified axis. """ if self.with_centering: check_is_fitted(self, 'center_') if self.with_scaling: check_is_fitted(self, 'scale_') X = self._check_array(X, self.copy) if X.ndim == 1: warnings.warn(DEPRECATION_MSG_1D, DeprecationWarning) if sparse.issparse(X): if self.with_scaling: inplace_column_scale(X, self.scale_) else: if self.with_scaling: X *= self.scale_ if self.with_centering: X += self.center_ return X def robust_scale(X, axis=0, with_centering=True, with_scaling=True, copy=True): """Standardize a dataset along any axis Center to the median and component wise scale according to the interquartile range. Read more in the :ref:`User Guide <preprocessing_scaler>`. Parameters ---------- X : array-like The data to center and scale. axis : int (0 by default) axis used to compute the medians and IQR along. If 0, independently scale each feature, otherwise (if 1) scale each sample. with_centering : boolean, True by default If True, center the data before scaling. with_scaling : boolean, True by default If True, scale the data to unit variance (or equivalently, unit standard deviation). copy : boolean, optional, default is True set to False to perform inplace row normalization and avoid a copy (if the input is already a numpy array or a scipy.sparse CSR matrix and if axis is 1). Notes ----- This implementation will refuse to center scipy.sparse matrices since it would make them non-sparse and would potentially crash the program with memory exhaustion problems. Instead the caller is expected to either set explicitly `with_centering=False` (in that case, only variance scaling will be performed on the features of the CSR matrix) or to call `X.toarray()` if he/she expects the materialized dense array to fit in memory. To avoid memory copy the caller should pass a CSR matrix. See also -------- :class:`sklearn.preprocessing.RobustScaler` to perform centering and scaling using the ``Transformer`` API (e.g. as part of a preprocessing :class:`sklearn.pipeline.Pipeline`) """ s = RobustScaler(with_centering=with_centering, with_scaling=with_scaling, copy=copy) if axis == 0: return s.fit_transform(X) else: return s.fit_transform(X.T).T class PolynomialFeatures(BaseEstimator, TransformerMixin): """Generate polynomial and interaction features. Generate a new feature matrix consisting of all polynomial combinations of the features with degree less than or equal to the specified degree. For example, if an input sample is two dimensional and of the form [a, b], the degree-2 polynomial features are [1, a, b, a^2, ab, b^2]. Parameters ---------- degree : integer The degree of the polynomial features. Default = 2. interaction_only : boolean, default = False If true, only interaction features are produced: features that are products of at most ``degree`` *distinct* input features (so not ``x[1] ** 2``, ``x[0] * x[2] ** 3``, etc.). include_bias : boolean If True (default), then include a bias column, the feature in which all polynomial powers are zero (i.e. a column of ones - acts as an intercept term in a linear model). Examples -------- >>> X = np.arange(6).reshape(3, 2) >>> X array([[0, 1], [2, 3], [4, 5]]) >>> poly = PolynomialFeatures(2) >>> poly.fit_transform(X) array([[ 1., 0., 1., 0., 0., 1.], [ 1., 2., 3., 4., 6., 9.], [ 1., 4., 5., 16., 20., 25.]]) >>> poly = PolynomialFeatures(interaction_only=True) >>> poly.fit_transform(X) array([[ 1., 0., 1., 0.], [ 1., 2., 3., 6.], [ 1., 4., 5., 20.]]) Attributes ---------- powers_ : array, shape (n_input_features, n_output_features) powers_[i, j] is the exponent of the jth input in the ith output. n_input_features_ : int The total number of input features. n_output_features_ : int The total number of polynomial output features. The number of output features is computed by iterating over all suitably sized combinations of input features. Notes ----- Be aware that the number of features in the output array scales polynomially in the number of features of the input array, and exponentially in the degree. High degrees can cause overfitting. See :ref:`examples/linear_model/plot_polynomial_interpolation.py <example_linear_model_plot_polynomial_interpolation.py>` """ def __init__(self, degree=2, interaction_only=False, include_bias=True): self.degree = degree self.interaction_only = interaction_only self.include_bias = include_bias @staticmethod def _combinations(n_features, degree, interaction_only, include_bias): comb = (combinations if interaction_only else combinations_w_r) start = int(not include_bias) return chain.from_iterable(comb(range(n_features), i) for i in range(start, degree + 1)) @property def powers_(self): check_is_fitted(self, 'n_input_features_') combinations = self._combinations(self.n_input_features_, self.degree, self.interaction_only, self.include_bias) return np.vstack(np.bincount(c, minlength=self.n_input_features_) for c in combinations) def fit(self, X, y=None): """ Compute number of output features. """ n_samples, n_features = check_array(X).shape combinations = self._combinations(n_features, self.degree, self.interaction_only, self.include_bias) self.n_input_features_ = n_features self.n_output_features_ = sum(1 for _ in combinations) return self def transform(self, X, y=None): """Transform data to polynomial features Parameters ---------- X : array-like, shape [n_samples, n_features] The data to transform, row by row. Returns ------- XP : np.ndarray shape [n_samples, NP] The matrix of features, where NP is the number of polynomial features generated from the combination of inputs. """ check_is_fitted(self, ['n_input_features_', 'n_output_features_']) X = check_array(X, dtype=FLOAT_DTYPES) n_samples, n_features = X.shape if n_features != self.n_input_features_: raise ValueError("X shape does not match training shape") # allocate output data XP = np.empty((n_samples, self.n_output_features_), dtype=X.dtype) combinations = self._combinations(n_features, self.degree, self.interaction_only, self.include_bias) for i, c in enumerate(combinations): XP[:, i] = X[:, c].prod(1) return XP def normalize(X, norm='l2', axis=1, copy=True, return_norm=False): """Scale input vectors individually to unit norm (vector length). Read more in the :ref:`User Guide <preprocessing_normalization>`. Parameters ---------- X : {array-like, sparse matrix}, shape [n_samples, n_features] The data to normalize, element by element. scipy.sparse matrices should be in CSR format to avoid an un-necessary copy. norm : 'l1', 'l2', or 'max', optional ('l2' by default) The norm to use to normalize each non zero sample (or each non-zero feature if axis is 0). axis : 0 or 1, optional (1 by default) axis used to normalize the data along. If 1, independently normalize each sample, otherwise (if 0) normalize each feature. copy : boolean, optional, default True set to False to perform inplace row normalization and avoid a copy (if the input is already a numpy array or a scipy.sparse CSR matrix and if axis is 1). return_norm : boolean, default False whether to return the computed norms See also -------- :class:`sklearn.preprocessing.Normalizer` to perform normalization using the ``Transformer`` API (e.g. as part of a preprocessing :class:`sklearn.pipeline.Pipeline`) """ if norm not in ('l1', 'l2', 'max'): raise ValueError("'%s' is not a supported norm" % norm) if axis == 0: sparse_format = 'csc' elif axis == 1: sparse_format = 'csr' else: raise ValueError("'%d' is not a supported axis" % axis) X = check_array(X, sparse_format, copy=copy, warn_on_dtype=True, estimator='the normalize function', dtype=FLOAT_DTYPES) if axis == 0: X = X.T if sparse.issparse(X): if norm == 'l1': inplace_csr_row_normalize_l1(X) elif norm == 'l2': inplace_csr_row_normalize_l2(X) elif norm == 'max': _, norms = min_max_axis(X, 1) norms = norms.repeat(np.diff(X.indptr)) mask = norms != 0 X.data[mask] /= norms[mask] else: if norm == 'l1': norms = np.abs(X).sum(axis=1) elif norm == 'l2': norms = row_norms(X) elif norm == 'max': norms = np.max(X, axis=1) norms = _handle_zeros_in_scale(norms, copy=False) X /= norms[:, np.newaxis] if axis == 0: X = X.T if return_norm: return X, norms else: return X class Normalizer(BaseEstimator, TransformerMixin): """Normalize samples individually to unit norm. Each sample (i.e. each row of the data matrix) with at least one non zero component is rescaled independently of other samples so that its norm (l1 or l2) equals one. This transformer is able to work both with dense numpy arrays and scipy.sparse matrix (use CSR format if you want to avoid the burden of a copy / conversion). Scaling inputs to unit norms is a common operation for text classification or clustering for instance. For instance the dot product of two l2-normalized TF-IDF vectors is the cosine similarity of the vectors and is the base similarity metric for the Vector Space Model commonly used by the Information Retrieval community. Read more in the :ref:`User Guide <preprocessing_normalization>`. Parameters ---------- norm : 'l1', 'l2', or 'max', optional ('l2' by default) The norm to use to normalize each non zero sample. copy : boolean, optional, default True set to False to perform inplace row normalization and avoid a copy (if the input is already a numpy array or a scipy.sparse CSR matrix). Notes ----- This estimator is stateless (besides constructor parameters), the fit method does nothing but is useful when used in a pipeline. See also -------- :func:`sklearn.preprocessing.normalize` equivalent function without the object oriented API """ def __init__(self, norm='l2', copy=True): self.norm = norm self.copy = copy def fit(self, X, y=None): """Do nothing and return the estimator unchanged This method is just there to implement the usual API and hence work in pipelines. """ X = check_array(X, accept_sparse='csr') return self def transform(self, X, y=None, copy=None): """Scale each non zero row of X to unit norm Parameters ---------- X : {array-like, sparse matrix}, shape [n_samples, n_features] The data to normalize, row by row. scipy.sparse matrices should be in CSR format to avoid an un-necessary copy. """ copy = copy if copy is not None else self.copy X = check_array(X, accept_sparse='csr') return normalize(X, norm=self.norm, axis=1, copy=copy) def binarize(X, threshold=0.0, copy=True): """Boolean thresholding of array-like or scipy.sparse matrix Read more in the :ref:`User Guide <preprocessing_binarization>`. Parameters ---------- X : {array-like, sparse matrix}, shape [n_samples, n_features] The data to binarize, element by element. scipy.sparse matrices should be in CSR or CSC format to avoid an un-necessary copy. threshold : float, optional (0.0 by default) Feature values below or equal to this are replaced by 0, above it by 1. Threshold may not be less than 0 for operations on sparse matrices. copy : boolean, optional, default True set to False to perform inplace binarization and avoid a copy (if the input is already a numpy array or a scipy.sparse CSR / CSC matrix and if axis is 1). See also -------- :class:`sklearn.preprocessing.Binarizer` to perform binarization using the ``Transformer`` API (e.g. as part of a preprocessing :class:`sklearn.pipeline.Pipeline`) """ X = check_array(X, accept_sparse=['csr', 'csc'], copy=copy) if sparse.issparse(X): if threshold < 0: raise ValueError('Cannot binarize a sparse matrix with threshold ' '< 0') cond = X.data > threshold not_cond = np.logical_not(cond) X.data[cond] = 1 X.data[not_cond] = 0 X.eliminate_zeros() else: cond = X > threshold not_cond = np.logical_not(cond) X[cond] = 1 X[not_cond] = 0 return X class Binarizer(BaseEstimator, TransformerMixin): """Binarize data (set feature values to 0 or 1) according to a threshold Values greater than the threshold map to 1, while values less than or equal to the threshold map to 0. With the default threshold of 0, only positive values map to 1. Binarization is a common operation on text count data where the analyst can decide to only consider the presence or absence of a feature rather than a quantified number of occurrences for instance. It can also be used as a pre-processing step for estimators that consider boolean random variables (e.g. modelled using the Bernoulli distribution in a Bayesian setting). Read more in the :ref:`User Guide <preprocessing_binarization>`. Parameters ---------- threshold : float, optional (0.0 by default) Feature values below or equal to this are replaced by 0, above it by 1. Threshold may not be less than 0 for operations on sparse matrices. copy : boolean, optional, default True set to False to perform inplace binarization and avoid a copy (if the input is already a numpy array or a scipy.sparse CSR matrix). Notes ----- If the input is a sparse matrix, only the non-zero values are subject to update by the Binarizer class. This estimator is stateless (besides constructor parameters), the fit method does nothing but is useful when used in a pipeline. """ def __init__(self, threshold=0.0, copy=True): self.threshold = threshold self.copy = copy def fit(self, X, y=None): """Do nothing and return the estimator unchanged This method is just there to implement the usual API and hence work in pipelines. """ check_array(X, accept_sparse='csr') return self def transform(self, X, y=None, copy=None): """Binarize each element of X Parameters ---------- X : {array-like, sparse matrix}, shape [n_samples, n_features] The data to binarize, element by element. scipy.sparse matrices should be in CSR format to avoid an un-necessary copy. """ copy = copy if copy is not None else self.copy return binarize(X, threshold=self.threshold, copy=copy) class KernelCenterer(BaseEstimator, TransformerMixin): """Center a kernel matrix Let K(x, z) be a kernel defined by phi(x)^T phi(z), where phi is a function mapping x to a Hilbert space. KernelCenterer centers (i.e., normalize to have zero mean) the data without explicitly computing phi(x). It is equivalent to centering phi(x) with sklearn.preprocessing.StandardScaler(with_std=False). Read more in the :ref:`User Guide <kernel_centering>`. """ def fit(self, K, y=None): """Fit KernelCenterer Parameters ---------- K : numpy array of shape [n_samples, n_samples] Kernel matrix. Returns ------- self : returns an instance of self. """ K = check_array(K, dtype=FLOAT_DTYPES) n_samples = K.shape[0] self.K_fit_rows_ = np.sum(K, axis=0) / n_samples self.K_fit_all_ = self.K_fit_rows_.sum() / n_samples return self def transform(self, K, y=None, copy=True): """Center kernel matrix. Parameters ---------- K : numpy array of shape [n_samples1, n_samples2] Kernel matrix. copy : boolean, optional, default True Set to False to perform inplace computation. Returns ------- K_new : numpy array of shape [n_samples1, n_samples2] """ check_is_fitted(self, 'K_fit_all_') K = check_array(K, copy=copy, dtype=FLOAT_DTYPES) K_pred_cols = (np.sum(K, axis=1) / self.K_fit_rows_.shape[0])[:, np.newaxis] K -= self.K_fit_rows_ K -= K_pred_cols K += self.K_fit_all_ return K def add_dummy_feature(X, value=1.0): """Augment dataset with an additional dummy feature. This is useful for fitting an intercept term with implementations which cannot otherwise fit it directly. Parameters ---------- X : {array-like, sparse matrix}, shape [n_samples, n_features] Data. value : float Value to use for the dummy feature. Returns ------- X : {array, sparse matrix}, shape [n_samples, n_features + 1] Same data with dummy feature added as first column. Examples -------- >>> from sklearn.preprocessing import add_dummy_feature >>> add_dummy_feature([[0, 1], [1, 0]]) array([[ 1., 0., 1.], [ 1., 1., 0.]]) """ X = check_array(X, accept_sparse=['csc', 'csr', 'coo'], dtype=FLOAT_DTYPES) n_samples, n_features = X.shape shape = (n_samples, n_features + 1) if sparse.issparse(X): if sparse.isspmatrix_coo(X): # Shift columns to the right. col = X.col + 1 # Column indices of dummy feature are 0 everywhere. col = np.concatenate((np.zeros(n_samples), col)) # Row indices of dummy feature are 0, ..., n_samples-1. row = np.concatenate((np.arange(n_samples), X.row)) # Prepend the dummy feature n_samples times. data = np.concatenate((np.ones(n_samples) * value, X.data)) return sparse.coo_matrix((data, (row, col)), shape) elif sparse.isspmatrix_csc(X): # Shift index pointers since we need to add n_samples elements. indptr = X.indptr + n_samples # indptr[0] must be 0. indptr = np.concatenate((np.array([0]), indptr)) # Row indices of dummy feature are 0, ..., n_samples-1. indices = np.concatenate((np.arange(n_samples), X.indices)) # Prepend the dummy feature n_samples times. data = np.concatenate((np.ones(n_samples) * value, X.data)) return sparse.csc_matrix((data, indices, indptr), shape) else: klass = X.__class__ return klass(add_dummy_feature(X.tocoo(), value)) else: return np.hstack((np.ones((n_samples, 1)) * value, X)) def _transform_selected(X, transform, selected="all", copy=True): """Apply a transform function to portion of selected features Parameters ---------- X : {array-like, sparse matrix}, shape [n_samples, n_features] Dense array or sparse matrix. transform : callable A callable transform(X) -> X_transformed copy : boolean, optional Copy X even if it could be avoided. selected: "all" or array of indices or mask Specify which features to apply the transform to. Returns ------- X : array or sparse matrix, shape=(n_samples, n_features_new) """ if isinstance(selected, six.string_types) and selected == "all": return transform(X) X = check_array(X, accept_sparse='csc', copy=copy, dtype=FLOAT_DTYPES) if len(selected) == 0: return X n_features = X.shape[1] ind = np.arange(n_features) sel = np.zeros(n_features, dtype=bool) sel[np.asarray(selected)] = True not_sel = np.logical_not(sel) n_selected = np.sum(sel) if n_selected == 0: # No features selected. return X elif n_selected == n_features: # All features selected. return transform(X) else: X_sel = transform(X[:, ind[sel]]) X_not_sel = X[:, ind[not_sel]] if sparse.issparse(X_sel) or sparse.issparse(X_not_sel): return sparse.hstack((X_sel, X_not_sel)) else: return np.hstack((X_sel, X_not_sel)) class OneHotEncoder(BaseEstimator, TransformerMixin): """Encode categorical integer features using a one-hot aka one-of-K scheme. The input to this transformer should be a matrix of integers, denoting the values taken on by categorical (discrete) features. The output will be a sparse matrix where each column corresponds to one possible value of one feature. It is assumed that input features take on values in the range [0, n_values). This encoding is needed for feeding categorical data to many scikit-learn estimators, notably linear models and SVMs with the standard kernels. Read more in the :ref:`User Guide <preprocessing_categorical_features>`. Parameters ---------- n_values : 'auto', int or array of ints Number of values per feature. - 'auto' : determine value range from training data. - int : number of categorical values per feature. Each feature value should be in ``range(n_values)`` - array : ``n_values[i]`` is the number of categorical values in ``X[:, i]``. Each feature value should be in ``range(n_values[i])`` categorical_features: "all" or array of indices or mask Specify what features are treated as categorical. - 'all' (default): All features are treated as categorical. - array of indices: Array of categorical feature indices. - mask: Array of length n_features and with dtype=bool. Non-categorical features are always stacked to the right of the matrix. dtype : number type, default=np.float Desired dtype of output. sparse : boolean, default=True Will return sparse matrix if set True else will return an array. handle_unknown : str, 'error' or 'ignore' Whether to raise an error or ignore if a unknown categorical feature is present during transform. Attributes ---------- active_features_ : array Indices for active features, meaning values that actually occur in the training set. Only available when n_values is ``'auto'``. feature_indices_ : array of shape (n_features,) Indices to feature ranges. Feature ``i`` in the original data is mapped to features from ``feature_indices_[i]`` to ``feature_indices_[i+1]`` (and then potentially masked by `active_features_` afterwards) n_values_ : array of shape (n_features,) Maximum number of values per feature. Examples -------- Given a dataset with three features and two samples, we let the encoder find the maximum value per feature and transform the data to a binary one-hot encoding. >>> from sklearn.preprocessing import OneHotEncoder >>> enc = OneHotEncoder() >>> enc.fit([[0, 0, 3], [1, 1, 0], [0, 2, 1], \ [1, 0, 2]]) # doctest: +ELLIPSIS OneHotEncoder(categorical_features='all', dtype=<... 'numpy.float64'>, handle_unknown='error', n_values='auto', sparse=True) >>> enc.n_values_ array([2, 3, 4]) >>> enc.feature_indices_ array([0, 2, 5, 9]) >>> enc.transform([[0, 1, 1]]).toarray() array([[ 1., 0., 0., 1., 0., 0., 1., 0., 0.]]) See also -------- sklearn.feature_extraction.DictVectorizer : performs a one-hot encoding of dictionary items (also handles string-valued features). sklearn.feature_extraction.FeatureHasher : performs an approximate one-hot encoding of dictionary items or strings. """ def __init__(self, n_values="auto", categorical_features="all", dtype=np.float64, sparse=True, handle_unknown='error'): self.n_values = n_values self.categorical_features = categorical_features self.dtype = dtype self.sparse = sparse self.handle_unknown = handle_unknown def fit(self, X, y=None): """Fit OneHotEncoder to X. Parameters ---------- X : array-like, shape [n_samples, n_feature] Input array of type int. Returns ------- self """ self.fit_transform(X) return self def _fit_transform(self, X): """Assumes X contains only categorical features.""" X = check_array(X, dtype=np.int) if np.any(X < 0): raise ValueError("X needs to contain only non-negative integers.") n_samples, n_features = X.shape if self.n_values == 'auto': n_values = np.max(X, axis=0) + 1 elif isinstance(self.n_values, numbers.Integral): if (np.max(X, axis=0) >= self.n_values).any(): raise ValueError("Feature out of bounds for n_values=%d" % self.n_values) n_values = np.empty(n_features, dtype=np.int) n_values.fill(self.n_values) else: try: n_values = np.asarray(self.n_values, dtype=int) except (ValueError, TypeError): raise TypeError("Wrong type for parameter `n_values`. Expected" " 'auto', int or array of ints, got %r" % type(X)) if n_values.ndim < 1 or n_values.shape[0] != X.shape[1]: raise ValueError("Shape mismatch: if n_values is an array," " it has to be of shape (n_features,).") self.n_values_ = n_values n_values = np.hstack([[0], n_values]) indices = np.cumsum(n_values) self.feature_indices_ = indices column_indices = (X + indices[:-1]).ravel() row_indices = np.repeat(np.arange(n_samples, dtype=np.int32), n_features) data = np.ones(n_samples * n_features) out = sparse.coo_matrix((data, (row_indices, column_indices)), shape=(n_samples, indices[-1]), dtype=self.dtype).tocsr() if self.n_values == 'auto': mask = np.array(out.sum(axis=0)).ravel() != 0 active_features = np.where(mask)[0] out = out[:, active_features] self.active_features_ = active_features return out if self.sparse else out.toarray() def fit_transform(self, X, y=None): """Fit OneHotEncoder to X, then transform X. Equivalent to self.fit(X).transform(X), but more convenient and more efficient. See fit for the parameters, transform for the return value. """ return _transform_selected(X, self._fit_transform, self.categorical_features, copy=True) def _transform(self, X): """Assumes X contains only categorical features.""" X = check_array(X, dtype=np.int) if np.any(X < 0): raise ValueError("X needs to contain only non-negative integers.") n_samples, n_features = X.shape indices = self.feature_indices_ if n_features != indices.shape[0] - 1: raise ValueError("X has different shape than during fitting." " Expected %d, got %d." % (indices.shape[0] - 1, n_features)) # We use only those categorical features of X that are known using fit. # i.e lesser than n_values_ using mask. # This means, if self.handle_unknown is "ignore", the row_indices and # col_indices corresponding to the unknown categorical feature are # ignored. mask = (X < self.n_values_).ravel() if np.any(~mask): if self.handle_unknown not in ['error', 'ignore']: raise ValueError("handle_unknown should be either error or " "unknown got %s" % self.handle_unknown) if self.handle_unknown == 'error': raise ValueError("unknown categorical feature present %s " "during transform." % X.ravel()[~mask]) column_indices = (X + indices[:-1]).ravel()[mask] row_indices = np.repeat(np.arange(n_samples, dtype=np.int32), n_features)[mask] data = np.ones(np.sum(mask)) out = sparse.coo_matrix((data, (row_indices, column_indices)), shape=(n_samples, indices[-1]), dtype=self.dtype).tocsr() if self.n_values == 'auto': out = out[:, self.active_features_] return out if self.sparse else out.toarray() def transform(self, X): """Transform X using one-hot encoding. Parameters ---------- X : array-like, shape [n_samples, n_features] Input array of type int. Returns ------- X_out : sparse matrix if sparse=True else a 2-d array, dtype=int Transformed input. """ return _transform_selected(X, self._transform, self.categorical_features, copy=True)
mit
b-deng/mailinabox
tools/mail.py
8
4562
#!/usr/bin/python3 import sys, getpass, urllib.request, urllib.error, json, re def mgmt(cmd, data=None, is_json=False): # The base URL for the management daemon. (Listens on IPv4 only.) mgmt_uri = 'http://127.0.0.1:10222' setup_key_auth(mgmt_uri) req = urllib.request.Request(mgmt_uri + cmd, urllib.parse.urlencode(data).encode("utf8") if data else None) try: response = urllib.request.urlopen(req) except urllib.error.HTTPError as e: if e.code == 401: try: print(e.read().decode("utf8")) except: pass print("The management daemon refused access. The API key file may be out of sync. Try 'service mailinabox restart'.", file=sys.stderr) elif hasattr(e, 'read'): print(e.read().decode('utf8'), file=sys.stderr) else: print(e, file=sys.stderr) sys.exit(1) resp = response.read().decode('utf8') if is_json: resp = json.loads(resp) return resp def read_password(): while True: first = getpass.getpass('password: ') if len(first) < 4: print("Passwords must be at least four characters.") continue if re.search(r'[\s]', first): print("Passwords cannot contain spaces.") continue second = getpass.getpass(' (again): ') if first != second: print("Passwords not the same. Try again.") continue break return first def setup_key_auth(mgmt_uri): key = open('/var/lib/mailinabox/api.key').read().strip() auth_handler = urllib.request.HTTPBasicAuthHandler() auth_handler.add_password( realm='Mail-in-a-Box Management Server', uri=mgmt_uri, user=key, passwd='') opener = urllib.request.build_opener(auth_handler) urllib.request.install_opener(opener) if len(sys.argv) < 2: print("Usage: ") print(" tools/mail.py user (lists users)") print(" tools/mail.py user add [email protected] [password]") print(" tools/mail.py user password [email protected] [password]") print(" tools/mail.py user remove [email protected]") print(" tools/mail.py user make-admin [email protected]") print(" tools/mail.py user remove-admin [email protected]") print(" tools/mail.py user admins (lists admins)") print(" tools/mail.py alias (lists aliases)") print(" tools/mail.py alias add [email protected] [email protected]") print(" tools/mail.py alias add [email protected] '[email protected], [email protected]'") print(" tools/mail.py alias remove [email protected]") print() print("Removing a mail user does not delete their mail folders on disk. It only prevents IMAP/SMTP login.") print() elif sys.argv[1] == "user" and len(sys.argv) == 2: # Dump a list of users, one per line. Mark admins with an asterisk. users = mgmt("/mail/users?format=json", is_json=True) for domain in users: for user in domain["users"]: if user['status'] == 'inactive': continue print(user['email'], end='') if "admin" in user['privileges']: print("*", end='') print() elif sys.argv[1] == "user" and sys.argv[2] in ("add", "password"): if len(sys.argv) < 5: if len(sys.argv) < 4: email = input("email: ") else: email = sys.argv[3] pw = read_password() else: email, pw = sys.argv[3:5] if sys.argv[2] == "add": print(mgmt("/mail/users/add", { "email": email, "password": pw })) elif sys.argv[2] == "password": print(mgmt("/mail/users/password", { "email": email, "password": pw })) elif sys.argv[1] == "user" and sys.argv[2] == "remove" and len(sys.argv) == 4: print(mgmt("/mail/users/remove", { "email": sys.argv[3] })) elif sys.argv[1] == "user" and sys.argv[2] in ("make-admin", "remove-admin") and len(sys.argv) == 4: if sys.argv[2] == "make-admin": action = "add" else: action = "remove" print(mgmt("/mail/users/privileges/" + action, { "email": sys.argv[3], "privilege": "admin" })) elif sys.argv[1] == "user" and sys.argv[2] == "admins": # Dump a list of admin users. users = mgmt("/mail/users?format=json", is_json=True) for domain in users: for user in domain["users"]: if "admin" in user['privileges']: print(user['email']) elif sys.argv[1] == "alias" and len(sys.argv) == 2: print(mgmt("/mail/aliases")) elif sys.argv[1] == "alias" and sys.argv[2] == "add" and len(sys.argv) == 5: print(mgmt("/mail/aliases/add", { "address": sys.argv[3], "forwards_to": sys.argv[4] })) elif sys.argv[1] == "alias" and sys.argv[2] == "remove" and len(sys.argv) == 4: print(mgmt("/mail/aliases/remove", { "address": sys.argv[3] })) else: print("Invalid command-line arguments.") sys.exit(1)
cc0-1.0
MarcJoan/django
django/utils/translation/trans_null.py
467
1408
# These are versions of the functions in django.utils.translation.trans_real # that don't actually do anything. This is purely for performance, so that # settings.USE_I18N = False can use this module rather than trans_real.py. from django.conf import settings from django.utils.encoding import force_text def ngettext(singular, plural, number): if number == 1: return singular return plural ngettext_lazy = ngettext def ungettext(singular, plural, number): return force_text(ngettext(singular, plural, number)) def pgettext(context, message): return ugettext(message) def npgettext(context, singular, plural, number): return ungettext(singular, plural, number) activate = lambda x: None deactivate = deactivate_all = lambda: None get_language = lambda: settings.LANGUAGE_CODE get_language_bidi = lambda: settings.LANGUAGE_CODE in settings.LANGUAGES_BIDI check_for_language = lambda x: True def gettext(message): return message def ugettext(message): return force_text(gettext(message)) gettext_noop = gettext_lazy = _ = gettext def to_locale(language): p = language.find('-') if p >= 0: return language[:p].lower() + '_' + language[p + 1:].upper() else: return language.lower() def get_language_from_request(request, check_path=False): return settings.LANGUAGE_CODE def get_language_from_path(request): return None
bsd-3-clause
devs1991/test_edx_docmode
venv/lib/python2.7/site-packages/networkx/algorithms/link_analysis/tests/test_hits.py
10
2367
#!/usr/bin/env python from nose.tools import * from nose import SkipTest from nose.plugins.attrib import attr import networkx # Example from # A. Langville and C. Meyer, "A survey of eigenvector methods of web # information retrieval." http://citeseer.ist.psu.edu/713792.html class TestHITS: def setUp(self): G=networkx.DiGraph() edges=[(1,3),(1,5),\ (2,1),\ (3,5),\ (5,4),(5,3),\ (6,5)] G.add_edges_from(edges,weight=1) self.G=G self.G.a=dict(zip(G,[0.000000, 0.000000, 0.366025, 0.133975, 0.500000, 0.000000])) self.G.h=dict(zip(G,[ 0.366025, 0.000000, 0.211325, 0.000000, 0.211325, 0.211325])) def test_hits(self): G=self.G h,a=networkx.hits(G,tol=1.e-08) for n in G: assert_almost_equal(h[n],G.h[n],places=4) for n in G: assert_almost_equal(a[n],G.a[n],places=4) def test_hits_nstart(self): G = self.G nstart = dict([(i, 1./2) for i in G]) h, a = networkx.hits(G, nstart = nstart) @attr('numpy') def test_hits_numpy(self): try: import numpy as np except ImportError: raise SkipTest('NumPy not available.') G=self.G h,a=networkx.hits_numpy(G) for n in G: assert_almost_equal(h[n],G.h[n],places=4) for n in G: assert_almost_equal(a[n],G.a[n],places=4) def test_hits_scipy(self): try: import scipy as sp except ImportError: raise SkipTest('SciPy not available.') G=self.G h,a=networkx.hits_scipy(G,tol=1.e-08) for n in G: assert_almost_equal(h[n],G.h[n],places=4) for n in G: assert_almost_equal(a[n],G.a[n],places=4) @attr('numpy') def test_empty(self): try: import numpy except ImportError: raise SkipTest('numpy not available.') G=networkx.Graph() assert_equal(networkx.hits(G),({},{})) assert_equal(networkx.hits_numpy(G),({},{})) assert_equal(networkx.hits_scipy(G),({},{})) assert_equal(networkx.authority_matrix(G).shape,(0,0)) assert_equal(networkx.hub_matrix(G).shape,(0,0))
agpl-3.0
DiMartinoX/plugin.video.kinopoisk.ru
script.myshows/kinopoisk/LOGGER.py
1
1115
# -*- coding: utf-8 -*- # try: import xbmcaddon __settings__ = xbmcaddon.Addon("script.myshows") __myshows__ = xbmcaddon.Addon("plugin.video.myshows") try: debug = __myshows__.getSetting("debug") except: debug = __settings__.getSetting("debug") except: debug='true' def Log(msg, force = False): try: print "[myshows log] " + msg except UnicodeEncodeError: print "[myshows log] " + msg.encode( "utf-8", "ignore" ) def Debug(msg, force = False): if debug=='true' or force: try: print "[myshows] " + msg except UnicodeEncodeError: print "[myshows] " + msg.encode( "utf-8", "ignore" ) def Info(msg, force = False): if debug=='true' or force: try: print "[myshows] " + msg except UnicodeEncodeError: print "[myshows] " + msg.encode( "utf-8", "ignore" ) def Warn(msg, force = False): if debug=='true' or force: try: print "[myshows] " + msg except UnicodeEncodeError: print "[myshows] " + msg.encode( "utf-8", "ignore" )
gpl-3.0
sathnaga/virt-test
qemu/tests/timedrift_with_stop.py
2
5153
import logging, time, os, signal from autotest.client.shared import error from virttest import utils_test def run_timedrift_with_stop(test, params, env): """ Time drift test with stop/continue the guest: 1) Log into a guest. 2) Take a time reading from the guest and host. 3) Stop the running of the guest 4) Sleep for a while 5) Continue the guest running 6) Take a second time reading. 7) If the drift (in seconds) is higher than a user specified value, fail. @param test: QEMU test object. @param params: Dictionary with test parameters. @param env: Dictionary with the test environment. """ login_timeout = int(params.get("login_timeout", 360)) sleep_time = int(params.get("sleep_time", 30)) vm = env.get_vm(params["main_vm"]) vm.verify_alive() boot_option_added = params.get("boot_option_added") boot_option_removed = params.get("boot_option_removed") if boot_option_added or boot_option_removed: utils_test.update_boot_option(vm, args_removed=boot_option_removed, args_added=boot_option_added) session = vm.wait_for_login(timeout=login_timeout) # Collect test parameters: # Command to run to get the current time time_command = params["time_command"] # Filter which should match a string to be passed to time.strptime() time_filter_re = params["time_filter_re"] # Time format for time.strptime() time_format = params["time_format"] drift_threshold = float(params.get("drift_threshold", "10")) drift_threshold_single = float(params.get("drift_threshold_single", "3")) stop_iterations = int(params.get("stop_iterations", 1)) stop_time = int(params.get("stop_time", 60)) stop_with_signal = params.get("stop_with_signal") == "yes" # Get guest's pid. pid = vm.get_pid() try: # Get initial time # (ht stands for host time, gt stands for guest time) (ht0, gt0) = utils_test.get_time(session, time_command, time_filter_re, time_format) # Stop the guest for i in range(stop_iterations): # Get time before current iteration (ht0_, gt0_) = utils_test.get_time(session, time_command, time_filter_re, time_format) # Run current iteration logging.info("Stop %s second: iteration %d of %d...", stop_time, (i + 1), stop_iterations) if stop_with_signal: logging.debug("Stop guest") os.kill(pid, signal.SIGSTOP) time.sleep(stop_time) logging.debug("Continue guest") os.kill(pid, signal.SIGCONT) else: vm.pause() time.sleep(stop_time) vm.resume() # Sleep for a while to wait the interrupt to be reinjected logging.info("Waiting for the interrupt to be reinjected ...") time.sleep(sleep_time) # Get time after current iteration (ht1_, gt1_) = utils_test.get_time(session, time_command, time_filter_re, time_format) # Report iteration results host_delta = ht1_ - ht0_ guest_delta = gt1_ - gt0_ drift = abs(host_delta - guest_delta) logging.info("Host duration (iteration %d): %.2f", (i + 1), host_delta) logging.info("Guest duration (iteration %d): %.2f", (i + 1), guest_delta) logging.info("Drift at iteration %d: %.2f seconds", (i + 1), drift) # Fail if necessary if drift > drift_threshold_single: raise error.TestFail("Time drift too large at iteration %d: " "%.2f seconds" % (i + 1, drift)) # Get final time (ht1, gt1) = utils_test.get_time(session, time_command, time_filter_re, time_format) finally: if session: session.close() # remove flags add for this test. if boot_option_added or boot_option_removed: utils_test.update_boot_option(vm, args_removed=boot_option_added, args_added=boot_option_removed) # Report results host_delta = ht1 - ht0 guest_delta = gt1 - gt0 drift = abs(host_delta - guest_delta) logging.info("Host duration (%d stops): %.2f", stop_iterations, host_delta) logging.info("Guest duration (%d stops): %.2f", stop_iterations, guest_delta) logging.info("Drift after %d stops: %.2f seconds", stop_iterations, drift) # Fail if necessary if drift > drift_threshold: raise error.TestFail("Time drift too large after %d stops: " "%.2f seconds" % (stop_iterations, drift))
gpl-2.0
RJRandell79/RJRStudios
node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
542
45270
# Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # Notes: # # This generates makefiles suitable for inclusion into the Android build system # via an Android.mk file. It is based on make.py, the standard makefile # generator. # # The code below generates a separate .mk file for each target, but # all are sourced by the top-level GypAndroid.mk. This means that all # variables in .mk-files clobber one another, and furthermore that any # variables set potentially clash with other Android build system variables. # Try to avoid setting global variables where possible. import gyp import gyp.common import gyp.generator.make as make # Reuse global functions from make backend. import os import re import subprocess generator_default_variables = { 'OS': 'android', 'EXECUTABLE_PREFIX': '', 'EXECUTABLE_SUFFIX': '', 'STATIC_LIB_PREFIX': 'lib', 'SHARED_LIB_PREFIX': 'lib', 'STATIC_LIB_SUFFIX': '.a', 'SHARED_LIB_SUFFIX': '.so', 'INTERMEDIATE_DIR': '$(gyp_intermediate_dir)', 'SHARED_INTERMEDIATE_DIR': '$(gyp_shared_intermediate_dir)', 'PRODUCT_DIR': '$(gyp_shared_intermediate_dir)', 'SHARED_LIB_DIR': '$(builddir)/lib.$(TOOLSET)', 'LIB_DIR': '$(obj).$(TOOLSET)', 'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python. 'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s', # This gets expanded by Python. 'RULE_INPUT_PATH': '$(RULE_SOURCES)', 'RULE_INPUT_EXT': '$(suffix $<)', 'RULE_INPUT_NAME': '$(notdir $<)', 'CONFIGURATION_NAME': '$(GYP_CONFIGURATION)', } # Make supports multiple toolsets generator_supports_multiple_toolsets = True # Generator-specific gyp specs. generator_additional_non_configuration_keys = [ # Boolean to declare that this target does not want its name mangled. 'android_unmangled_name', # Map of android build system variables to set. 'aosp_build_settings', ] generator_additional_path_sections = [] generator_extra_sources_for_rules = [] ALL_MODULES_FOOTER = """\ # "gyp_all_modules" is a concatenation of the "gyp_all_modules" targets from # all the included sub-makefiles. This is just here to clarify. gyp_all_modules: """ header = """\ # This file is generated by gyp; do not edit. """ # Map gyp target types to Android module classes. MODULE_CLASSES = { 'static_library': 'STATIC_LIBRARIES', 'shared_library': 'SHARED_LIBRARIES', 'executable': 'EXECUTABLES', } def IsCPPExtension(ext): return make.COMPILABLE_EXTENSIONS.get(ext) == 'cxx' def Sourceify(path): """Convert a path to its source directory form. The Android backend does not support options.generator_output, so this function is a noop.""" return path # Map from qualified target to path to output. # For Android, the target of these maps is a tuple ('static', 'modulename'), # ('dynamic', 'modulename'), or ('path', 'some/path') instead of a string, # since we link by module. target_outputs = {} # Map from qualified target to any linkable output. A subset # of target_outputs. E.g. when mybinary depends on liba, we want to # include liba in the linker line; when otherbinary depends on # mybinary, we just want to build mybinary first. target_link_deps = {} class AndroidMkWriter(object): """AndroidMkWriter packages up the writing of one target-specific Android.mk. Its only real entry point is Write(), and is mostly used for namespacing. """ def __init__(self, android_top_dir): self.android_top_dir = android_top_dir def Write(self, qualified_target, relative_target, base_path, output_filename, spec, configs, part_of_all, write_alias_target, sdk_version): """The main entry point: writes a .mk file for a single target. Arguments: qualified_target: target we're generating relative_target: qualified target name relative to the root base_path: path relative to source root we're building in, used to resolve target-relative paths output_filename: output .mk file name to write spec, configs: gyp info part_of_all: flag indicating this target is part of 'all' write_alias_target: flag indicating whether to create short aliases for this target sdk_version: what to emit for LOCAL_SDK_VERSION in output """ gyp.common.EnsureDirExists(output_filename) self.fp = open(output_filename, 'w') self.fp.write(header) self.qualified_target = qualified_target self.relative_target = relative_target self.path = base_path self.target = spec['target_name'] self.type = spec['type'] self.toolset = spec['toolset'] deps, link_deps = self.ComputeDeps(spec) # Some of the generation below can add extra output, sources, or # link dependencies. All of the out params of the functions that # follow use names like extra_foo. extra_outputs = [] extra_sources = [] self.android_class = MODULE_CLASSES.get(self.type, 'GYP') self.android_module = self.ComputeAndroidModule(spec) (self.android_stem, self.android_suffix) = self.ComputeOutputParts(spec) self.output = self.output_binary = self.ComputeOutput(spec) # Standard header. self.WriteLn('include $(CLEAR_VARS)\n') # Module class and name. self.WriteLn('LOCAL_MODULE_CLASS := ' + self.android_class) self.WriteLn('LOCAL_MODULE := ' + self.android_module) # Only emit LOCAL_MODULE_STEM if it's different to LOCAL_MODULE. # The library module classes fail if the stem is set. ComputeOutputParts # makes sure that stem == modulename in these cases. if self.android_stem != self.android_module: self.WriteLn('LOCAL_MODULE_STEM := ' + self.android_stem) self.WriteLn('LOCAL_MODULE_SUFFIX := ' + self.android_suffix) if self.toolset == 'host': self.WriteLn('LOCAL_IS_HOST_MODULE := true') self.WriteLn('LOCAL_MULTILIB := $(GYP_HOST_MULTILIB)') else: self.WriteLn('LOCAL_MODULE_TARGET_ARCH := ' '$(TARGET_$(GYP_VAR_PREFIX)ARCH)') self.WriteLn('LOCAL_SDK_VERSION := %s' % sdk_version) # Grab output directories; needed for Actions and Rules. if self.toolset == 'host': self.WriteLn('gyp_intermediate_dir := ' '$(call local-intermediates-dir,,$(GYP_HOST_VAR_PREFIX))') else: self.WriteLn('gyp_intermediate_dir := ' '$(call local-intermediates-dir,,$(GYP_VAR_PREFIX))') self.WriteLn('gyp_shared_intermediate_dir := ' '$(call intermediates-dir-for,GYP,shared,,,$(GYP_VAR_PREFIX))') self.WriteLn() # List files this target depends on so that actions/rules/copies/sources # can depend on the list. # TODO: doesn't pull in things through transitive link deps; needed? target_dependencies = [x[1] for x in deps if x[0] == 'path'] self.WriteLn('# Make sure our deps are built first.') self.WriteList(target_dependencies, 'GYP_TARGET_DEPENDENCIES', local_pathify=True) # Actions must come first, since they can generate more OBJs for use below. if 'actions' in spec: self.WriteActions(spec['actions'], extra_sources, extra_outputs) # Rules must be early like actions. if 'rules' in spec: self.WriteRules(spec['rules'], extra_sources, extra_outputs) if 'copies' in spec: self.WriteCopies(spec['copies'], extra_outputs) # GYP generated outputs. self.WriteList(extra_outputs, 'GYP_GENERATED_OUTPUTS', local_pathify=True) # Set LOCAL_ADDITIONAL_DEPENDENCIES so that Android's build rules depend # on both our dependency targets and our generated files. self.WriteLn('# Make sure our deps and generated files are built first.') self.WriteLn('LOCAL_ADDITIONAL_DEPENDENCIES := $(GYP_TARGET_DEPENDENCIES) ' '$(GYP_GENERATED_OUTPUTS)') self.WriteLn() # Sources. if spec.get('sources', []) or extra_sources: self.WriteSources(spec, configs, extra_sources) self.WriteTarget(spec, configs, deps, link_deps, part_of_all, write_alias_target) # Update global list of target outputs, used in dependency tracking. target_outputs[qualified_target] = ('path', self.output_binary) # Update global list of link dependencies. if self.type == 'static_library': target_link_deps[qualified_target] = ('static', self.android_module) elif self.type == 'shared_library': target_link_deps[qualified_target] = ('shared', self.android_module) self.fp.close() return self.android_module def WriteActions(self, actions, extra_sources, extra_outputs): """Write Makefile code for any 'actions' from the gyp input. extra_sources: a list that will be filled in with newly generated source files, if any extra_outputs: a list that will be filled in with any outputs of these actions (used to make other pieces dependent on these actions) """ for action in actions: name = make.StringToMakefileVariable('%s_%s' % (self.relative_target, action['action_name'])) self.WriteLn('### Rules for action "%s":' % action['action_name']) inputs = action['inputs'] outputs = action['outputs'] # Build up a list of outputs. # Collect the output dirs we'll need. dirs = set() for out in outputs: if not out.startswith('$'): print ('WARNING: Action for target "%s" writes output to local path ' '"%s".' % (self.target, out)) dir = os.path.split(out)[0] if dir: dirs.add(dir) if int(action.get('process_outputs_as_sources', False)): extra_sources += outputs # Prepare the actual command. command = gyp.common.EncodePOSIXShellList(action['action']) if 'message' in action: quiet_cmd = 'Gyp action: %s ($@)' % action['message'] else: quiet_cmd = 'Gyp action: %s ($@)' % name if len(dirs) > 0: command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command cd_action = 'cd $(gyp_local_path)/%s; ' % self.path command = cd_action + command # The makefile rules are all relative to the top dir, but the gyp actions # are defined relative to their containing dir. This replaces the gyp_* # variables for the action rule with an absolute version so that the # output goes in the right place. # Only write the gyp_* rules for the "primary" output (:1); # it's superfluous for the "extra outputs", and this avoids accidentally # writing duplicate dummy rules for those outputs. main_output = make.QuoteSpaces(self.LocalPathify(outputs[0])) self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output) self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output) self.WriteLn('%s: gyp_intermediate_dir := ' '$(abspath $(gyp_intermediate_dir))' % main_output) self.WriteLn('%s: gyp_shared_intermediate_dir := ' '$(abspath $(gyp_shared_intermediate_dir))' % main_output) # Android's envsetup.sh adds a number of directories to the path including # the built host binary directory. This causes actions/rules invoked by # gyp to sometimes use these instead of system versions, e.g. bison. # The built host binaries may not be suitable, and can cause errors. # So, we remove them from the PATH using the ANDROID_BUILD_PATHS variable # set by envsetup. self.WriteLn('%s: export PATH := $(subst $(ANDROID_BUILD_PATHS),,$(PATH))' % main_output) # Don't allow spaces in input/output filenames, but make an exception for # filenames which start with '$(' since it's okay for there to be spaces # inside of make function/macro invocations. for input in inputs: if not input.startswith('$(') and ' ' in input: raise gyp.common.GypError( 'Action input filename "%s" in target %s contains a space' % (input, self.target)) for output in outputs: if not output.startswith('$(') and ' ' in output: raise gyp.common.GypError( 'Action output filename "%s" in target %s contains a space' % (output, self.target)) self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' % (main_output, ' '.join(map(self.LocalPathify, inputs)))) self.WriteLn('\t@echo "%s"' % quiet_cmd) self.WriteLn('\t$(hide)%s\n' % command) for output in outputs[1:]: # Make each output depend on the main output, with an empty command # to force make to notice that the mtime has changed. self.WriteLn('%s: %s ;' % (self.LocalPathify(output), main_output)) extra_outputs += outputs self.WriteLn() self.WriteLn() def WriteRules(self, rules, extra_sources, extra_outputs): """Write Makefile code for any 'rules' from the gyp input. extra_sources: a list that will be filled in with newly generated source files, if any extra_outputs: a list that will be filled in with any outputs of these rules (used to make other pieces dependent on these rules) """ if len(rules) == 0: return for rule in rules: if len(rule.get('rule_sources', [])) == 0: continue name = make.StringToMakefileVariable('%s_%s' % (self.relative_target, rule['rule_name'])) self.WriteLn('\n### Generated for rule "%s":' % name) self.WriteLn('# "%s":' % rule) inputs = rule.get('inputs') for rule_source in rule.get('rule_sources', []): (rule_source_dirname, rule_source_basename) = os.path.split(rule_source) (rule_source_root, rule_source_ext) = \ os.path.splitext(rule_source_basename) outputs = [self.ExpandInputRoot(out, rule_source_root, rule_source_dirname) for out in rule['outputs']] dirs = set() for out in outputs: if not out.startswith('$'): print ('WARNING: Rule for target %s writes output to local path %s' % (self.target, out)) dir = os.path.dirname(out) if dir: dirs.add(dir) extra_outputs += outputs if int(rule.get('process_outputs_as_sources', False)): extra_sources.extend(outputs) components = [] for component in rule['action']: component = self.ExpandInputRoot(component, rule_source_root, rule_source_dirname) if '$(RULE_SOURCES)' in component: component = component.replace('$(RULE_SOURCES)', rule_source) components.append(component) command = gyp.common.EncodePOSIXShellList(components) cd_action = 'cd $(gyp_local_path)/%s; ' % self.path command = cd_action + command if dirs: command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command # We set up a rule to build the first output, and then set up # a rule for each additional output to depend on the first. outputs = map(self.LocalPathify, outputs) main_output = outputs[0] self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output) self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output) self.WriteLn('%s: gyp_intermediate_dir := ' '$(abspath $(gyp_intermediate_dir))' % main_output) self.WriteLn('%s: gyp_shared_intermediate_dir := ' '$(abspath $(gyp_shared_intermediate_dir))' % main_output) # See explanation in WriteActions. self.WriteLn('%s: export PATH := ' '$(subst $(ANDROID_BUILD_PATHS),,$(PATH))' % main_output) main_output_deps = self.LocalPathify(rule_source) if inputs: main_output_deps += ' ' main_output_deps += ' '.join([self.LocalPathify(f) for f in inputs]) self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' % (main_output, main_output_deps)) self.WriteLn('\t%s\n' % command) for output in outputs[1:]: # Make each output depend on the main output, with an empty command # to force make to notice that the mtime has changed. self.WriteLn('%s: %s ;' % (output, main_output)) self.WriteLn() self.WriteLn() def WriteCopies(self, copies, extra_outputs): """Write Makefile code for any 'copies' from the gyp input. extra_outputs: a list that will be filled in with any outputs of this action (used to make other pieces dependent on this action) """ self.WriteLn('### Generated for copy rule.') variable = make.StringToMakefileVariable(self.relative_target + '_copies') outputs = [] for copy in copies: for path in copy['files']: # The Android build system does not allow generation of files into the # source tree. The destination should start with a variable, which will # typically be $(gyp_intermediate_dir) or # $(gyp_shared_intermediate_dir). Note that we can't use an assertion # because some of the gyp tests depend on this. if not copy['destination'].startswith('$'): print ('WARNING: Copy rule for target %s writes output to ' 'local path %s' % (self.target, copy['destination'])) # LocalPathify() calls normpath, stripping trailing slashes. path = Sourceify(self.LocalPathify(path)) filename = os.path.split(path)[1] output = Sourceify(self.LocalPathify(os.path.join(copy['destination'], filename))) self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES) | $(ACP)' % (output, path)) self.WriteLn('\t@echo Copying: $@') self.WriteLn('\t$(hide) mkdir -p $(dir $@)') self.WriteLn('\t$(hide) $(ACP) -rpf $< $@') self.WriteLn() outputs.append(output) self.WriteLn('%s = %s' % (variable, ' '.join(map(make.QuoteSpaces, outputs)))) extra_outputs.append('$(%s)' % variable) self.WriteLn() def WriteSourceFlags(self, spec, configs): """Write out the flags and include paths used to compile source files for the current target. Args: spec, configs: input from gyp. """ for configname, config in sorted(configs.iteritems()): extracted_includes = [] self.WriteLn('\n# Flags passed to both C and C++ files.') cflags, includes_from_cflags = self.ExtractIncludesFromCFlags( config.get('cflags', []) + config.get('cflags_c', [])) extracted_includes.extend(includes_from_cflags) self.WriteList(cflags, 'MY_CFLAGS_%s' % configname) self.WriteList(config.get('defines'), 'MY_DEFS_%s' % configname, prefix='-D', quoter=make.EscapeCppDefine) self.WriteLn('\n# Include paths placed before CFLAGS/CPPFLAGS') includes = list(config.get('include_dirs', [])) includes.extend(extracted_includes) includes = map(Sourceify, map(self.LocalPathify, includes)) includes = self.NormalizeIncludePaths(includes) self.WriteList(includes, 'LOCAL_C_INCLUDES_%s' % configname) self.WriteLn('\n# Flags passed to only C++ (and not C) files.') self.WriteList(config.get('cflags_cc'), 'LOCAL_CPPFLAGS_%s' % configname) self.WriteLn('\nLOCAL_CFLAGS := $(MY_CFLAGS_$(GYP_CONFIGURATION)) ' '$(MY_DEFS_$(GYP_CONFIGURATION))') # Undefine ANDROID for host modules # TODO: the source code should not use macro ANDROID to tell if it's host # or target module. if self.toolset == 'host': self.WriteLn('# Undefine ANDROID for host modules') self.WriteLn('LOCAL_CFLAGS += -UANDROID') self.WriteLn('LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) ' '$(LOCAL_C_INCLUDES_$(GYP_CONFIGURATION))') self.WriteLn('LOCAL_CPPFLAGS := $(LOCAL_CPPFLAGS_$(GYP_CONFIGURATION))') # Android uses separate flags for assembly file invocations, but gyp expects # the same CFLAGS to be applied: self.WriteLn('LOCAL_ASFLAGS := $(LOCAL_CFLAGS)') def WriteSources(self, spec, configs, extra_sources): """Write Makefile code for any 'sources' from the gyp input. These are source files necessary to build the current target. We need to handle shared_intermediate directory source files as a special case by copying them to the intermediate directory and treating them as a genereated sources. Otherwise the Android build rules won't pick them up. Args: spec, configs: input from gyp. extra_sources: Sources generated from Actions or Rules. """ sources = filter(make.Compilable, spec.get('sources', [])) generated_not_sources = [x for x in extra_sources if not make.Compilable(x)] extra_sources = filter(make.Compilable, extra_sources) # Determine and output the C++ extension used by these sources. # We simply find the first C++ file and use that extension. all_sources = sources + extra_sources local_cpp_extension = '.cpp' for source in all_sources: (root, ext) = os.path.splitext(source) if IsCPPExtension(ext): local_cpp_extension = ext break if local_cpp_extension != '.cpp': self.WriteLn('LOCAL_CPP_EXTENSION := %s' % local_cpp_extension) # We need to move any non-generated sources that are coming from the # shared intermediate directory out of LOCAL_SRC_FILES and put them # into LOCAL_GENERATED_SOURCES. We also need to move over any C++ files # that don't match our local_cpp_extension, since Android will only # generate Makefile rules for a single LOCAL_CPP_EXTENSION. local_files = [] for source in sources: (root, ext) = os.path.splitext(source) if '$(gyp_shared_intermediate_dir)' in source: extra_sources.append(source) elif '$(gyp_intermediate_dir)' in source: extra_sources.append(source) elif IsCPPExtension(ext) and ext != local_cpp_extension: extra_sources.append(source) else: local_files.append(os.path.normpath(os.path.join(self.path, source))) # For any generated source, if it is coming from the shared intermediate # directory then we add a Make rule to copy them to the local intermediate # directory first. This is because the Android LOCAL_GENERATED_SOURCES # must be in the local module intermediate directory for the compile rules # to work properly. If the file has the wrong C++ extension, then we add # a rule to copy that to intermediates and use the new version. final_generated_sources = [] # If a source file gets copied, we still need to add the orginal source # directory as header search path, for GCC searches headers in the # directory that contains the source file by default. origin_src_dirs = [] for source in extra_sources: local_file = source if not '$(gyp_intermediate_dir)/' in local_file: basename = os.path.basename(local_file) local_file = '$(gyp_intermediate_dir)/' + basename (root, ext) = os.path.splitext(local_file) if IsCPPExtension(ext) and ext != local_cpp_extension: local_file = root + local_cpp_extension if local_file != source: self.WriteLn('%s: %s' % (local_file, self.LocalPathify(source))) self.WriteLn('\tmkdir -p $(@D); cp $< $@') origin_src_dirs.append(os.path.dirname(source)) final_generated_sources.append(local_file) # We add back in all of the non-compilable stuff to make sure that the # make rules have dependencies on them. final_generated_sources.extend(generated_not_sources) self.WriteList(final_generated_sources, 'LOCAL_GENERATED_SOURCES') origin_src_dirs = gyp.common.uniquer(origin_src_dirs) origin_src_dirs = map(Sourceify, map(self.LocalPathify, origin_src_dirs)) self.WriteList(origin_src_dirs, 'GYP_COPIED_SOURCE_ORIGIN_DIRS') self.WriteList(local_files, 'LOCAL_SRC_FILES') # Write out the flags used to compile the source; this must be done last # so that GYP_COPIED_SOURCE_ORIGIN_DIRS can be used as an include path. self.WriteSourceFlags(spec, configs) def ComputeAndroidModule(self, spec): """Return the Android module name used for a gyp spec. We use the complete qualified target name to avoid collisions between duplicate targets in different directories. We also add a suffix to distinguish gyp-generated module names. """ if int(spec.get('android_unmangled_name', 0)): assert self.type != 'shared_library' or self.target.startswith('lib') return self.target if self.type == 'shared_library': # For reasons of convention, the Android build system requires that all # shared library modules are named 'libfoo' when generating -l flags. prefix = 'lib_' else: prefix = '' if spec['toolset'] == 'host': suffix = '_$(TARGET_$(GYP_VAR_PREFIX)ARCH)_host_gyp' else: suffix = '_gyp' if self.path: middle = make.StringToMakefileVariable('%s_%s' % (self.path, self.target)) else: middle = make.StringToMakefileVariable(self.target) return ''.join([prefix, middle, suffix]) def ComputeOutputParts(self, spec): """Return the 'output basename' of a gyp spec, split into filename + ext. Android libraries must be named the same thing as their module name, otherwise the linker can't find them, so product_name and so on must be ignored if we are building a library, and the "lib" prepending is not done for Android. """ assert self.type != 'loadable_module' # TODO: not supported? target = spec['target_name'] target_prefix = '' target_ext = '' if self.type == 'static_library': target = self.ComputeAndroidModule(spec) target_ext = '.a' elif self.type == 'shared_library': target = self.ComputeAndroidModule(spec) target_ext = '.so' elif self.type == 'none': target_ext = '.stamp' elif self.type != 'executable': print ("ERROR: What output file should be generated?", "type", self.type, "target", target) if self.type != 'static_library' and self.type != 'shared_library': target_prefix = spec.get('product_prefix', target_prefix) target = spec.get('product_name', target) product_ext = spec.get('product_extension') if product_ext: target_ext = '.' + product_ext target_stem = target_prefix + target return (target_stem, target_ext) def ComputeOutputBasename(self, spec): """Return the 'output basename' of a gyp spec. E.g., the loadable module 'foobar' in directory 'baz' will produce 'libfoobar.so' """ return ''.join(self.ComputeOutputParts(spec)) def ComputeOutput(self, spec): """Return the 'output' (full output path) of a gyp spec. E.g., the loadable module 'foobar' in directory 'baz' will produce '$(obj)/baz/libfoobar.so' """ if self.type == 'executable': # We install host executables into shared_intermediate_dir so they can be # run by gyp rules that refer to PRODUCT_DIR. path = '$(gyp_shared_intermediate_dir)' elif self.type == 'shared_library': if self.toolset == 'host': path = '$($(GYP_HOST_VAR_PREFIX)HOST_OUT_INTERMEDIATE_LIBRARIES)' else: path = '$($(GYP_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)' else: # Other targets just get built into their intermediate dir. if self.toolset == 'host': path = ('$(call intermediates-dir-for,%s,%s,true,,' '$(GYP_HOST_VAR_PREFIX))' % (self.android_class, self.android_module)) else: path = ('$(call intermediates-dir-for,%s,%s,,,$(GYP_VAR_PREFIX))' % (self.android_class, self.android_module)) assert spec.get('product_dir') is None # TODO: not supported? return os.path.join(path, self.ComputeOutputBasename(spec)) def NormalizeIncludePaths(self, include_paths): """ Normalize include_paths. Convert absolute paths to relative to the Android top directory. Args: include_paths: A list of unprocessed include paths. Returns: A list of normalized include paths. """ normalized = [] for path in include_paths: if path[0] == '/': path = gyp.common.RelativePath(path, self.android_top_dir) normalized.append(path) return normalized def ExtractIncludesFromCFlags(self, cflags): """Extract includes "-I..." out from cflags Args: cflags: A list of compiler flags, which may be mixed with "-I.." Returns: A tuple of lists: (clean_clfags, include_paths). "-I.." is trimmed. """ clean_cflags = [] include_paths = [] for flag in cflags: if flag.startswith('-I'): include_paths.append(flag[2:]) else: clean_cflags.append(flag) return (clean_cflags, include_paths) def FilterLibraries(self, libraries): """Filter the 'libraries' key to separate things that shouldn't be ldflags. Library entries that look like filenames should be converted to android module names instead of being passed to the linker as flags. Args: libraries: the value of spec.get('libraries') Returns: A tuple (static_lib_modules, dynamic_lib_modules, ldflags) """ static_lib_modules = [] dynamic_lib_modules = [] ldflags = [] for libs in libraries: # Libs can have multiple words. for lib in libs.split(): # Filter the system libraries, which are added by default by the Android # build system. if (lib == '-lc' or lib == '-lstdc++' or lib == '-lm' or lib.endswith('libgcc.a')): continue match = re.search(r'([^/]+)\.a$', lib) if match: static_lib_modules.append(match.group(1)) continue match = re.search(r'([^/]+)\.so$', lib) if match: dynamic_lib_modules.append(match.group(1)) continue if lib.startswith('-l'): ldflags.append(lib) return (static_lib_modules, dynamic_lib_modules, ldflags) def ComputeDeps(self, spec): """Compute the dependencies of a gyp spec. Returns a tuple (deps, link_deps), where each is a list of filenames that will need to be put in front of make for either building (deps) or linking (link_deps). """ deps = [] link_deps = [] if 'dependencies' in spec: deps.extend([target_outputs[dep] for dep in spec['dependencies'] if target_outputs[dep]]) for dep in spec['dependencies']: if dep in target_link_deps: link_deps.append(target_link_deps[dep]) deps.extend(link_deps) return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps)) def WriteTargetFlags(self, spec, configs, link_deps): """Write Makefile code to specify the link flags and library dependencies. spec, configs: input from gyp. link_deps: link dependency list; see ComputeDeps() """ # Libraries (i.e. -lfoo) # These must be included even for static libraries as some of them provide # implicit include paths through the build system. libraries = gyp.common.uniquer(spec.get('libraries', [])) static_libs, dynamic_libs, ldflags_libs = self.FilterLibraries(libraries) if self.type != 'static_library': for configname, config in sorted(configs.iteritems()): ldflags = list(config.get('ldflags', [])) self.WriteLn('') self.WriteList(ldflags, 'LOCAL_LDFLAGS_%s' % configname) self.WriteList(ldflags_libs, 'LOCAL_GYP_LIBS') self.WriteLn('LOCAL_LDFLAGS := $(LOCAL_LDFLAGS_$(GYP_CONFIGURATION)) ' '$(LOCAL_GYP_LIBS)') # Link dependencies (i.e. other gyp targets this target depends on) # These need not be included for static libraries as within the gyp build # we do not use the implicit include path mechanism. if self.type != 'static_library': static_link_deps = [x[1] for x in link_deps if x[0] == 'static'] shared_link_deps = [x[1] for x in link_deps if x[0] == 'shared'] else: static_link_deps = [] shared_link_deps = [] # Only write the lists if they are non-empty. if static_libs or static_link_deps: self.WriteLn('') self.WriteList(static_libs + static_link_deps, 'LOCAL_STATIC_LIBRARIES') self.WriteLn('# Enable grouping to fix circular references') self.WriteLn('LOCAL_GROUP_STATIC_LIBRARIES := true') if dynamic_libs or shared_link_deps: self.WriteLn('') self.WriteList(dynamic_libs + shared_link_deps, 'LOCAL_SHARED_LIBRARIES') def WriteTarget(self, spec, configs, deps, link_deps, part_of_all, write_alias_target): """Write Makefile code to produce the final target of the gyp spec. spec, configs: input from gyp. deps, link_deps: dependency lists; see ComputeDeps() part_of_all: flag indicating this target is part of 'all' write_alias_target: flag indicating whether to create short aliases for this target """ self.WriteLn('### Rules for final target.') if self.type != 'none': self.WriteTargetFlags(spec, configs, link_deps) settings = spec.get('aosp_build_settings', {}) if settings: self.WriteLn('### Set directly by aosp_build_settings.') for k, v in settings.iteritems(): if isinstance(v, list): self.WriteList(v, k) else: self.WriteLn('%s := %s' % (k, make.QuoteIfNecessary(v))) self.WriteLn('') # Add to the set of targets which represent the gyp 'all' target. We use the # name 'gyp_all_modules' as the Android build system doesn't allow the use # of the Make target 'all' and because 'all_modules' is the equivalent of # the Make target 'all' on Android. if part_of_all and write_alias_target: self.WriteLn('# Add target alias to "gyp_all_modules" target.') self.WriteLn('.PHONY: gyp_all_modules') self.WriteLn('gyp_all_modules: %s' % self.android_module) self.WriteLn('') # Add an alias from the gyp target name to the Android module name. This # simplifies manual builds of the target, and is required by the test # framework. if self.target != self.android_module and write_alias_target: self.WriteLn('# Alias gyp target name.') self.WriteLn('.PHONY: %s' % self.target) self.WriteLn('%s: %s' % (self.target, self.android_module)) self.WriteLn('') # Add the command to trigger build of the target type depending # on the toolset. Ex: BUILD_STATIC_LIBRARY vs. BUILD_HOST_STATIC_LIBRARY # NOTE: This has to come last! modifier = '' if self.toolset == 'host': modifier = 'HOST_' if self.type == 'static_library': self.WriteLn('include $(BUILD_%sSTATIC_LIBRARY)' % modifier) elif self.type == 'shared_library': self.WriteLn('LOCAL_PRELINK_MODULE := false') self.WriteLn('include $(BUILD_%sSHARED_LIBRARY)' % modifier) elif self.type == 'executable': # Executables are for build and test purposes only, so they're installed # to a directory that doesn't get included in the system image. self.WriteLn('LOCAL_MODULE_PATH := $(gyp_shared_intermediate_dir)') self.WriteLn('include $(BUILD_%sEXECUTABLE)' % modifier) else: self.WriteLn('LOCAL_MODULE_PATH := $(PRODUCT_OUT)/gyp_stamp') self.WriteLn('LOCAL_UNINSTALLABLE_MODULE := true') if self.toolset == 'target': self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_VAR_PREFIX)') else: self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_HOST_VAR_PREFIX)') self.WriteLn() self.WriteLn('include $(BUILD_SYSTEM)/base_rules.mk') self.WriteLn() self.WriteLn('$(LOCAL_BUILT_MODULE): $(LOCAL_ADDITIONAL_DEPENDENCIES)') self.WriteLn('\t$(hide) echo "Gyp timestamp: $@"') self.WriteLn('\t$(hide) mkdir -p $(dir $@)') self.WriteLn('\t$(hide) touch $@') self.WriteLn() self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX :=') def WriteList(self, value_list, variable=None, prefix='', quoter=make.QuoteIfNecessary, local_pathify=False): """Write a variable definition that is a list of values. E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out foo = blaha blahb but in a pretty-printed style. """ values = '' if value_list: value_list = [quoter(prefix + l) for l in value_list] if local_pathify: value_list = [self.LocalPathify(l) for l in value_list] values = ' \\\n\t' + ' \\\n\t'.join(value_list) self.fp.write('%s :=%s\n\n' % (variable, values)) def WriteLn(self, text=''): self.fp.write(text + '\n') def LocalPathify(self, path): """Convert a subdirectory-relative path into a normalized path which starts with the make variable $(LOCAL_PATH) (i.e. the top of the project tree). Absolute paths, or paths that contain variables, are just normalized.""" if '$(' in path or os.path.isabs(path): # path is not a file in the project tree in this case, but calling # normpath is still important for trimming trailing slashes. return os.path.normpath(path) local_path = os.path.join('$(LOCAL_PATH)', self.path, path) local_path = os.path.normpath(local_path) # Check that normalizing the path didn't ../ itself out of $(LOCAL_PATH) # - i.e. that the resulting path is still inside the project tree. The # path may legitimately have ended up containing just $(LOCAL_PATH), though, # so we don't look for a slash. assert local_path.startswith('$(LOCAL_PATH)'), ( 'Path %s attempts to escape from gyp path %s !)' % (path, self.path)) return local_path def ExpandInputRoot(self, template, expansion, dirname): if '%(INPUT_ROOT)s' not in template and '%(INPUT_DIRNAME)s' not in template: return template path = template % { 'INPUT_ROOT': expansion, 'INPUT_DIRNAME': dirname, } return os.path.normpath(path) def PerformBuild(data, configurations, params): # The android backend only supports the default configuration. options = params['options'] makefile = os.path.abspath(os.path.join(options.toplevel_dir, 'GypAndroid.mk')) env = dict(os.environ) env['ONE_SHOT_MAKEFILE'] = makefile arguments = ['make', '-C', os.environ['ANDROID_BUILD_TOP'], 'gyp_all_modules'] print 'Building: %s' % arguments subprocess.check_call(arguments, env=env) def GenerateOutput(target_list, target_dicts, data, params): options = params['options'] generator_flags = params.get('generator_flags', {}) builddir_name = generator_flags.get('output_dir', 'out') limit_to_target_all = generator_flags.get('limit_to_target_all', False) write_alias_targets = generator_flags.get('write_alias_targets', True) sdk_version = generator_flags.get('aosp_sdk_version', 19) android_top_dir = os.environ.get('ANDROID_BUILD_TOP') assert android_top_dir, '$ANDROID_BUILD_TOP not set; you need to run lunch.' def CalculateMakefilePath(build_file, base_name): """Determine where to write a Makefile for a given gyp file.""" # Paths in gyp files are relative to the .gyp file, but we want # paths relative to the source root for the master makefile. Grab # the path of the .gyp file as the base to relativize against. # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp". base_path = gyp.common.RelativePath(os.path.dirname(build_file), options.depth) # We write the file in the base_path directory. output_file = os.path.join(options.depth, base_path, base_name) assert not options.generator_output, ( 'The Android backend does not support options.generator_output.') base_path = gyp.common.RelativePath(os.path.dirname(build_file), options.toplevel_dir) return base_path, output_file # TODO: search for the first non-'Default' target. This can go # away when we add verification that all targets have the # necessary configurations. default_configuration = None toolsets = set([target_dicts[target]['toolset'] for target in target_list]) for target in target_list: spec = target_dicts[target] if spec['default_configuration'] != 'Default': default_configuration = spec['default_configuration'] break if not default_configuration: default_configuration = 'Default' srcdir = '.' makefile_name = 'GypAndroid' + options.suffix + '.mk' makefile_path = os.path.join(options.toplevel_dir, makefile_name) assert not options.generator_output, ( 'The Android backend does not support options.generator_output.') gyp.common.EnsureDirExists(makefile_path) root_makefile = open(makefile_path, 'w') root_makefile.write(header) # We set LOCAL_PATH just once, here, to the top of the project tree. This # allows all the other paths we use to be relative to the Android.mk file, # as the Android build system expects. root_makefile.write('\nLOCAL_PATH := $(call my-dir)\n') # Find the list of targets that derive from the gyp file(s) being built. needed_targets = set() for build_file in params['build_files']: for target in gyp.common.AllTargets(target_list, target_dicts, build_file): needed_targets.add(target) build_files = set() include_list = set() android_modules = {} for qualified_target in target_list: build_file, target, toolset = gyp.common.ParseQualifiedTarget( qualified_target) relative_build_file = gyp.common.RelativePath(build_file, options.toplevel_dir) build_files.add(relative_build_file) included_files = data[build_file]['included_files'] for included_file in included_files: # The included_files entries are relative to the dir of the build file # that included them, so we have to undo that and then make them relative # to the root dir. relative_include_file = gyp.common.RelativePath( gyp.common.UnrelativePath(included_file, build_file), options.toplevel_dir) abs_include_file = os.path.abspath(relative_include_file) # If the include file is from the ~/.gyp dir, we should use absolute path # so that relocating the src dir doesn't break the path. if (params['home_dot_gyp'] and abs_include_file.startswith(params['home_dot_gyp'])): build_files.add(abs_include_file) else: build_files.add(relative_include_file) base_path, output_file = CalculateMakefilePath(build_file, target + '.' + toolset + options.suffix + '.mk') spec = target_dicts[qualified_target] configs = spec['configurations'] part_of_all = qualified_target in needed_targets if limit_to_target_all and not part_of_all: continue relative_target = gyp.common.QualifiedTarget(relative_build_file, target, toolset) writer = AndroidMkWriter(android_top_dir) android_module = writer.Write(qualified_target, relative_target, base_path, output_file, spec, configs, part_of_all=part_of_all, write_alias_target=write_alias_targets, sdk_version=sdk_version) if android_module in android_modules: print ('ERROR: Android module names must be unique. The following ' 'targets both generate Android module name %s.\n %s\n %s' % (android_module, android_modules[android_module], qualified_target)) return android_modules[android_module] = qualified_target # Our root_makefile lives at the source root. Compute the relative path # from there to the output_file for including. mkfile_rel_path = gyp.common.RelativePath(output_file, os.path.dirname(makefile_path)) include_list.add(mkfile_rel_path) root_makefile.write('GYP_CONFIGURATION ?= %s\n' % default_configuration) root_makefile.write('GYP_VAR_PREFIX ?=\n') root_makefile.write('GYP_HOST_VAR_PREFIX ?=\n') root_makefile.write('GYP_HOST_MULTILIB ?=\n') # Write out the sorted list of includes. root_makefile.write('\n') for include_file in sorted(include_list): root_makefile.write('include $(LOCAL_PATH)/' + include_file + '\n') root_makefile.write('\n') if write_alias_targets: root_makefile.write(ALL_MODULES_FOOTER) root_makefile.close()
gpl-2.0
appsembler/edx-platform
common/test/acceptance/pages/studio/settings_advanced.py
8
8274
""" Course Advanced Settings page """ from bok_choy.promise import EmptyPromise from common.test.acceptance.pages.studio.course_page import CoursePage from common.test.acceptance.pages.studio.utils import ( get_codemirror_value, press_the_notification_button, type_in_codemirror ) KEY_CSS = '.key h3.title' UNDO_BUTTON_SELECTOR = ".action-item .action-undo" MANUAL_BUTTON_SELECTOR = ".action-item .action-cancel" MODAL_SELECTOR = ".validation-error-modal-content" ERROR_ITEM_NAME_SELECTOR = ".error-item-title strong" ERROR_ITEM_CONTENT_SELECTOR = ".error-item-message" SETTINGS_NAME_SELECTOR = ".is-not-editable" CONFIRMATION_MESSAGE_SELECTOR = "#alert-confirmation-title" DEPRECATED_SETTINGS_SELECTOR = ".field-group.course-advanced-policy-list-item.is-deprecated" DEPRECATED_SETTINGS_BUTTON_SELECTOR = ".deprecated-settings-label" class AdvancedSettingsPage(CoursePage): """ Course Advanced Settings page. """ url_path = "settings/advanced" def is_browser_on_page(self): def _is_finished_loading(): return len(self.q(css='.course-advanced-policy-list-item')) > 0 EmptyPromise(_is_finished_loading, 'Finished rendering the advanced policy items.').fulfill() return self.q(css='body.advanced').present @property def key_names(self): """ Returns a list of key names of all settings. """ return self.q(css=KEY_CSS).text @property def deprecated_settings_button_text(self): """ Returns text for deprecated settings button """ return self.q(css=DEPRECATED_SETTINGS_BUTTON_SELECTOR).text[0] def wait_for_modal_load(self): """ Wait for validation response from the server, and make sure that the validation error modal pops up. This method should only be called when it is guaranteed that there're validation errors in the settings changes. """ self.wait_for_ajax() self.wait_for_element_presence(MODAL_SELECTOR, 'Validation Modal is present') def refresh_and_wait_for_load(self): """ Refresh the page and wait for all resources to load. """ self.browser.refresh() self.wait_for_page() def coordinates_for_scrolling(self, coordinates_for): """ Get the x and y coordinates of elements """ cordinates_dict = self.browser.find_element_by_css_selector(coordinates_for) location = cordinates_dict.location for key, val in location.iteritems(): if key == 'x': x_axis = val elif key == 'y': y_axis = val return x_axis, y_axis def undo_changes_via_modal(self): """ Trigger clicking event of the undo changes button in the modal. Wait for the undoing process to load via ajax call. Before that Scroll so the button is clickable on all browsers """ self.browser.execute_script("window.scrollTo" + str(self.coordinates_for_scrolling(UNDO_BUTTON_SELECTOR))) self.q(css=UNDO_BUTTON_SELECTOR).click() self.wait_for_ajax() def trigger_manual_changes(self): """ Trigger click event of the manual changes button in the modal. No need to wait for any ajax. Before that Scroll so the button is clickable on all browsers """ self.browser.execute_script("window.scrollTo" + str(self.coordinates_for_scrolling(MANUAL_BUTTON_SELECTOR))) self.q(css=MANUAL_BUTTON_SELECTOR).click() def is_validation_modal_present(self): """ Checks if the validation modal is present. """ return self.q(css=MODAL_SELECTOR).present def get_error_item_names(self): """ Returns a list of display names of all invalid settings. """ return self.q(css=ERROR_ITEM_NAME_SELECTOR).text def get_error_item_messages(self): """ Returns a list of error messages of all invalid settings. """ return self.q(css=ERROR_ITEM_CONTENT_SELECTOR).text def _get_index_of(self, expected_key): for i, element in enumerate(self.q(css=KEY_CSS)): # Sometimes get stale reference if I hold on to the array of elements key = self.q(css=KEY_CSS).nth(i).text[0] if key == expected_key: return i return -1 def save(self): press_the_notification_button(self, "Save") def cancel(self): press_the_notification_button(self, "Cancel") def set(self, key, new_value): index = self._get_index_of(key) type_in_codemirror(self, index, new_value) self.save() def get(self, key): index = self._get_index_of(key) return get_codemirror_value(self, index) def set_values(self, key_value_map): """ Make multiple settings changes and save them. """ for key, value in key_value_map.iteritems(): index = self._get_index_of(key) type_in_codemirror(self, index, value) self.save() def get_values(self, key_list): """ Get a key-value dictionary of all keys in the given list. """ result_map = {} for key in key_list: index = self._get_index_of(key) val = get_codemirror_value(self, index) result_map[key] = val return result_map @property def displayed_settings_names(self): """ Returns all settings displayed on the advanced settings page/screen/modal/whatever We call it 'name', but it's really whatever is embedded in the 'id' element for each field """ query = self.q(css=SETTINGS_NAME_SELECTOR) return query.attrs('id') @property def expected_settings_names(self): """ Returns a list of settings expected to be displayed on the Advanced Settings screen Should match the list of settings found in cms/djangoapps/models/settings/course_metadata.py If a new setting is added to the metadata list, this test will fail and you must update it. Basically this guards against accidental exposure of a field on the Advanced Settings screen """ return [ 'advanced_modules', 'allow_anonymous', 'allow_anonymous_to_peers', 'allow_public_wiki_access', 'cert_html_view_overrides', 'cert_name_long', 'cert_name_short', 'certificates_display_behavior', 'course_image', 'banner_image', 'video_thumbnail_image', 'cosmetic_display_price', 'advertised_start', 'announcement', 'display_name', 'is_new', 'issue_badges', 'max_student_enrollments_allowed', 'no_grade', 'display_coursenumber', 'display_organization', 'catalog_visibility', 'days_early_for_beta', 'disable_progress_graph', 'discussion_blackouts', 'discussion_sort_alpha', 'discussion_topics', 'due', 'due_date_display_format', 'edxnotes', 'use_latex_compiler', 'video_speed_optimizations', 'enrollment_domain', 'html_textbooks', 'invitation_only', 'lti_passports', 'matlab_api_key', 'max_attempts', 'mobile_available', 'rerandomize', 'remote_gradebook', 'annotation_token_secret', 'showanswer', 'show_calculator', 'show_reset_button', 'static_asset_path', 'teams_configuration', 'annotation_storage_url', 'social_sharing_url', 'video_bumper', 'enable_proctored_exams', 'allow_proctoring_opt_out', 'enable_timed_exams', 'enable_subsection_gating', 'learning_info', 'instructor_info', 'create_zendesk_tickets', 'ccx_connector', 'enable_ccx', ]
agpl-3.0
BioXiao/chimerascan
chimerascan/deprecated/old_nominate_chimeras.py
6
8393
''' Created on Jun 4, 2011 @author: mkiyer chimerascan: chimeric transcript discovery using RNA-seq Copyright (C) 2011 Matthew Iyer This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ''' import logging import os import collections import itertools import operator from chimerascan import pysam from chimerascan.lib import config from chimerascan.lib.chimera import Chimera, \ DiscordantTags, DISCORDANT_TAG_NAME, \ OrientationTags, ORIENTATION_TAG_NAME, \ DiscordantRead, ChimeraTypes, ChimeraPartner from chimerascan.lib.gene_to_genome import build_tid_tx_maps def parse_pairs(bamfh): bam_iter = iter(bamfh) try: while True: r1 = bam_iter.next() r2 = bam_iter.next() yield r1,r2 except StopIteration: pass def parse_gene_chimeric_reads(bamfh): # create a dictionary structure to hold read pairs chimera_dict = collections.defaultdict(lambda: []) for r1,r2 in parse_pairs(bamfh): # # TODO: # for now we are only going to deal with gene-gene # chimeras and leave other chimeras for study at a # later time # dr1 = r1.opt(DISCORDANT_TAG_NAME) dr2 = r2.opt(DISCORDANT_TAG_NAME) if (dr1 != DiscordantTags.DISCORDANT_GENE or dr2 != DiscordantTags.DISCORDANT_GENE): continue # organize key in 5' to 3' order or1 = r1.opt(ORIENTATION_TAG_NAME) or2 = r2.opt(ORIENTATION_TAG_NAME) assert or1 != or2 if or1 == OrientationTags.FIVEPRIME: pair = (r1,r2) else: pair = (r2,r1) # store pertinent information in lightweight structure # convert to DiscordantRead objects r5p = DiscordantRead.from_read(pair[0]) r3p = DiscordantRead.from_read(pair[1]) # keep list of discordant chimeric reads chimera_dict[(r5p.tid, r3p.tid)].append((r5p,r3p)) for key,pairs in chimera_dict.iteritems(): rname1,rname2 = key yield rname1, rname2, pairs def get_chimera_type(fiveprime_gene, threeprime_gene, gene_trees): """ return tuple containing ChimeraType and distance between 5' and 3' genes """ # get gene information chrom5p, start5p, end5p, strand1 = fiveprime_gene.chrom, fiveprime_gene.tx_start, fiveprime_gene.tx_end, fiveprime_gene.strand chrom3p, start3p, end3p, strand2 = threeprime_gene.chrom, threeprime_gene.tx_start, threeprime_gene.tx_end, threeprime_gene.strand # interchromosomal if chrom5p != chrom3p: return ChimeraTypes.INTERCHROMOSOMAL, None # orientation same_strand = strand1 == strand2 # genes on same chromosome so check overlap is_overlapping = (start5p < end3p) and (start3p < end5p) if is_overlapping: if not same_strand: if ((start5p <= start3p and strand1 == "+") or (start5p > start3p and strand1 == "-")): return (ChimeraTypes.OVERLAP_CONVERGE, 0) else: return (ChimeraTypes.OVERLAP_DIVERGE, 0) else: if ((start5p <= start3p and strand1 == "+") or (end5p >= end3p and strand1 == "-")): return (ChimeraTypes.OVERLAP_SAME, 0) else: return (ChimeraTypes.OVERLAP_COMPLEX, 0) # if code gets here then the genes are on the same chromosome but do not # overlap. first calculate distance (minimum distance between genes) if start5p <= start3p: distance = start3p - end5p between_start,between_end = end5p,start3p else: distance = end3p - start5p between_start,between_end = end3p,start5p # check whether there are genes intervening between the # chimera candidates genes_between = [] genes_between_same_strand = [] for hit in gene_trees[chrom5p].find(between_start, between_end): if (hit.start > between_start and hit.end < between_end): if hit.strand == strand1: genes_between_same_strand.append(hit) genes_between.append(hit) if same_strand: if len(genes_between_same_strand) == 0: return ChimeraTypes.READTHROUGH, distance else: return ChimeraTypes.INTRACHROMOSOMAL, distance else: # check for reads between neighboring genes if len(genes_between) == 0: if ((start5p <= start3p and strand1 == "+") or (start5p > start3p and strand1 == "-")): return (ChimeraTypes.ADJ_CONVERGE, distance) elif ((start5p >= start3p and strand1 == "+") or (start5p < start3p and strand1 == "-")): return (ChimeraTypes.ADJ_DIVERGE, distance) elif ((start5p <= start3p and strand1 == "+") or (start5p > start3p and strand1 == "-")): return (ChimeraTypes.ADJ_SAME, distance) elif ((start5p >= start3p and strand1 == "+") or (start5p < start3p and strand1 == '-')): return (ChimeraTypes.ADJ_COMPLEX, distance) else: return ChimeraTypes.INTRA_COMPLEX, distance return ChimeraTypes.UNKNOWN, distance def read_pairs_to_chimera(chimera_name, tid5p, tid3p, readpairs, tid_tx_map, genome_tx_trees, trim_bp): # get gene information tx5p = tid_tx_map[tid5p] tx3p = tid_tx_map[tid3p] # categorize chimera type chimera_type, distance = get_chimera_type(tx5p, tx3p, genome_tx_trees) # create chimera object c = Chimera() iter5p = itertools.imap(operator.itemgetter(0), readpairs) iter3p = itertools.imap(operator.itemgetter(1), readpairs) c.partner5p = ChimeraPartner.from_discordant_reads(iter5p, tx5p, trim_bp) c.partner3p = ChimeraPartner.from_discordant_reads(iter3p, tx3p, trim_bp) c.name = chimera_name c.chimera_type = chimera_type c.distance = distance # raw reads c.encomp_read_pairs = readpairs return c def nominate_chimeras(index_dir, input_bam_file, output_file, trim_bp): logging.debug("Reading gene information") gene_file = os.path.join(index_dir, config.GENE_FEATURE_FILE) bamfh = pysam.Samfile(input_bam_file, "rb") # build a lookup table to get genomic intervals from transcripts tid_tx_map, genome_tx_trees = build_tid_tx_maps(bamfh, gene_file, rname_prefix=config.GENE_REF_PREFIX) # group discordant read pairs by gene chimera_num = 0 outfh = open(output_file, "w") logging.debug("Parsing discordant reads") for tid5p,tid3p,readpairs in parse_gene_chimeric_reads(bamfh): c = read_pairs_to_chimera("C%07d" % (chimera_num), tid5p, tid3p, readpairs, tid_tx_map, genome_tx_trees, trim_bp) fields = c.to_list() chimera_num += 1 print >>outfh, '\t'.join(map(str, fields)) outfh.close() bamfh.close() def main(): from optparse import OptionParser logging.basicConfig(level=logging.DEBUG, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s") parser = OptionParser("usage: %prog [options] <index> " "<discordant_reads.srt.bedpe> <chimeras.txt>") parser.add_option("--trim", dest="trim", type="int", default=config.EXON_JUNCTION_TRIM_BP) options, args = parser.parse_args() index_dir = args[0] input_file = args[1] output_file = args[2] nominate_chimeras(index_dir, input_file, output_file, options.trim) if __name__ == '__main__': main()
gpl-3.0
inclement/vispy
vispy/visuals/collections/collection.py
20
8684
# -*- coding: utf-8 -*- # ----------------------------------------------------------------------------- # Copyright (c) 2014, Nicolas P. Rougier # Distributed under the (new) BSD License. See LICENSE.txt for more info. # ----------------------------------------------------------------------------- """ A collection is a container for several items having the same data structure (dtype). Each data type can be declared as local (it specific to a vertex), shared (it is shared among an item vertices) or global (it is shared by all vertices). It is based on the BaseCollection but offers a more intuitive interface. """ import numpy as np from ... import gloo from . util import fetchcode from . base_collection import BaseCollection from ..shaders import ModularProgram from ...util.event import EventEmitter class Collection(BaseCollection): """ A collection is a container for several items having the same data structure (dtype). Each data type can be declared as local (it is specific to a vertex), shared (it is shared among item vertices) or global (it is shared by all items). It is based on the BaseCollection but offers a more intuitive interface. Parameters ---------- dtype: list Data individual types as (name, dtype, scope, default) itype: np.dtype or None Indices data type mode : GL_ENUM GL_POINTS, GL_LINES, GL_LINE_STRIP, GL_LINE_LOOP, GL_TRIANGLES, GL_TRIANGLE_STRIP, GL_TRIANGLE_FAN vertex: str or tuple of str Vertex shader to use to draw this collection fragment: str or tuple of str Fragment shader to use to draw this collection kwargs: str Scope can also be specified using keyword argument, where parameter name must be one of the dtype. """ _gtypes = {('float32', 1): "float", ('float32', 2): "vec2", ('float32', 3): "vec3", ('float32', 4): "vec4", ('int32', 1): "int", ('int32', 2): "ivec2", ('int32', 3): "ivec3", ('int32', 4): "ivec4"} def __init__(self, dtype, itype, mode, vertex, fragment, program=None, **kwargs): """ """ self._uniforms = {} self._attributes = {} self._varyings = {} self._mode = mode vtype = [] utype = [] self.update = EventEmitter(source=self, type='collection_update') # Build vtype and utype according to parameters declarations = {"uniforms": "", "attributes": "", "varyings": ""} defaults = {} for item in dtype: name, (basetype, count), scope, default = item basetype = np.dtype(basetype).name if scope[0] == "!": scope = scope[1:] else: scope = kwargs.pop(name, scope) defaults[name] = default gtype = Collection._gtypes[(basetype, count)] if scope == "local": vtype.append((name, basetype, count)) declarations[ "attributes"] += "attribute %s %s;\n" % (gtype, name) elif scope == "shared": utype.append((name, basetype, count)) declarations["varyings"] += "varying %s %s;\n" % (gtype, name) else: declarations["uniforms"] += "uniform %s %s;\n" % (gtype, name) self._uniforms[name] = None if len(kwargs) > 0: raise NameError("Invalid keyword argument(s): %s" % list(kwargs.keys())) vtype = np.dtype(vtype) itype = np.dtype(itype) if itype else None utype = np.dtype(utype) if utype else None BaseCollection.__init__(self, vtype=vtype, utype=utype, itype=itype) self._declarations = declarations self._defaults = defaults # Build program (once base collection is built) saved = vertex vertex = "" if self.utype is not None: vertex += fetchcode(self.utype) + vertex else: vertex += "void fetch_uniforms(void) { }\n" + vertex vertex += self._declarations["uniforms"] vertex += self._declarations["attributes"] vertex += saved self._vertex = vertex self._fragment = fragment if program is None: program = ModularProgram(vertex, fragment) else: program.vert = vertex program.frag = fragment if hasattr(program, 'changed'): program.changed.connect(self.update) self._programs.append(program) # Initialize uniforms for name in self._uniforms.keys(): self._uniforms[name] = self._defaults.get(name) program[name] = self._uniforms[name] def view(self, transform, viewport=None): """ Return a view on the collection using provided transform """ return CollectionView(self, transform, viewport) # program = gloo.Program(self._vertex, self._fragment) # if "transform" in program.hooks: # program["transform"] = transform # if "viewport" in program.hooks: # if viewport is not None: # program["viewport"] = viewport # else: # program["viewport"] = Viewport() # self._programs.append(program) # program.bind(self._vertices_buffer) # for name in self._uniforms.keys(): # program[name] = self._uniforms[name] # #if self._uniforms_list is not None: # # program["uniforms"] = self._uniforms_texture # # program["uniforms_shape"] = self._ushape # # Piggy backing # def draw(): # if self._need_update: # self._update() # program.bind(self._vertices_buffer) # if self._uniforms_list is not None: # program["uniforms"] = self._uniforms_texture # program["uniforms_shape"] = self._ushape # if self._indices_list is not None: # Program.draw(program, self._mode, self._indices_buffer) # else: # Program.draw(program, self._mode) # program.draw = draw # return program def __getitem__(self, key): program = self._programs[0] for name, (storage, _, _) in program._code_variables.items(): if name == key and storage == 'uniform': return program[key] return BaseCollection.__getitem__(self, key) def __setitem__(self, key, value): try: BaseCollection.__setitem__(self, key, value) except IndexError: for program in self._programs: program[key] = value def draw(self, mode=None): """ Draw collection """ if self._need_update: self._update() program = self._programs[0] mode = mode or self._mode if self._indices_list is not None: program.draw(mode, self._indices_buffer) else: program.draw(mode) class CollectionView(object): def __init__(self, collection, transform=None, viewport=None): vertex = collection._vertex fragment = collection._fragment program = gloo.Program(vertex, fragment) # if "transform" in program.hooks and transform is not None: # program["transform"] = transform # if "viewport" in program.hooks and viewport is not None: # program["viewport"] = viewport program.bind(collection._vertices_buffer) for name in collection._uniforms.keys(): program[name] = collection._uniforms[name] collection._programs.append(program) self._program = program self._collection = collection def __getitem__(self, key): return self._program[key] def __setitem__(self, key, value): self._program[key] = value def draw(self): program = self._program collection = self._collection mode = collection._mode if collection._need_update: collection._update() # self._program.bind(self._vertices_buffer) if collection._uniforms_list is not None: program["uniforms"] = collection._uniforms_texture program["uniforms_shape"] = collection._ushape if collection._indices_list is not None: program.draw(mode, collection._indices_buffer) else: program.draw(mode)
bsd-3-clause
HuanglabPurdue/NCS
cuda/pyCUDANCS/profile.py
1
2785
#!/usr/bin/env python # # Used for quickly measuring how long the solver takes to run. # # Hazen 08/19 # import numpy import pycuda.autoinit import pycuda.driver as drv from pycuda.compiler import SourceModule import time # python3 and C NCS reference version. import pyCNCS.ncs_c as ncsC # OpenCL version (for the OTF mask). import pyOpenCLNCS.py_ref as pyRef # CUDA version import pyCUDANCS # # CUDA setup. # kernel_code = pyCUDANCS.loadNCSKernel() mod = SourceModule(kernel_code, **pyCUDANCS.src_module_args) ncsReduceNoise = mod.get_function("ncsReduceNoise") alpha = 0.1 n_pts = 16 def profile(n_reps): """ Report how long it takes to reduce the noise in X sub-regions. """ # Setup numpy.random.seed(1) data = numpy.random.uniform(low = 10.0, high = 20.0, size = (n_reps, n_pts, n_pts)).astype(dtype = numpy.float32) gamma = numpy.random.uniform(low = 2.0, high = 4.0, size = (n_pts, n_pts)).astype(dtype = numpy.float32) otf_mask_shift = pyRef.createOTFMask() u = numpy.zeros((n_reps, n_pts, n_pts), dtype = numpy.float32) iters = numpy.zeros(n_reps, dtype = numpy.int32) status = numpy.zeros(n_reps, dtype = numpy.int32) # CUDA start_time = time.time() ncsReduceNoise(drv.In(data), drv.In(gamma), drv.In(otf_mask_shift), drv.Out(u), drv.Out(iters), drv.Out(status), numpy.float32(alpha), block = (16,1,1), grid = (n_reps,1)) e_time = time.time() - start_time print("CUDA {0:.6f} seconds".format(e_time)) def profileNCSC(n_reps): """ The C reference version for comparison. """ numpy.random.seed(1) data = numpy.random.uniform(low = 10.0, high = 20.0, size = (n_reps, n_pts, n_pts)).astype(dtype = numpy.float32) gamma = numpy.random.uniform(low = 2.0, high = 4.0, size = (n_pts, n_pts)).astype(dtype = numpy.float32) otf_mask = numpy.fft.fftshift(pyRef.createOTFMask().reshape(16, 16)) ref_u = numpy.zeros_like(data) ncs_sr = ncsC.NCSCSubRegion(r_size = n_pts) start_time = time.time() for i in range(n_reps): ncs_sr.newRegion(data[i,:,:], gamma) ncs_sr.setOTFMask(otf_mask) ref_u[i,:,:] = ncs_sr.cSolve(alpha, verbose = False) e_time = time.time() - start_time ncs_sr.cleanup() print("CNSC {0:.6f} seconds".format(e_time)) if (__name__ == "__main__"): import argparse parser = argparse.ArgumentParser(description = 'NCS in OpenCL') parser.add_argument('--reps', dest='reps', type=int, required=False, default = 1000, help = "Number sub-regions to process in profiling.") args = parser.parse_args() profile(args.reps) profileNCSC(args.reps)
gpl-3.0
pokemon4ik2008/py-airfoil
scons-local-2.2.0/SCons/Script/SConsOptions.py
14
37624
# # Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __revision__ = "src/engine/SCons/Script/SConsOptions.py issue-2856:2676:d23b7a2f45e8 2012/08/05 15:38:28 garyo" import optparse import re import sys import textwrap no_hyphen_re = re.compile(r'(\s+|(?<=[\w\!\"\'\&\.\,\?])-{2,}(?=\w))') try: from gettext import gettext except ImportError: def gettext(message): return message _ = gettext import SCons.Node.FS import SCons.Warnings OptionValueError = optparse.OptionValueError SUPPRESS_HELP = optparse.SUPPRESS_HELP diskcheck_all = SCons.Node.FS.diskcheck_types() def diskcheck_convert(value): if value is None: return [] if not SCons.Util.is_List(value): value = value.split(',') result = [] for v in value: v = v.lower() if v == 'all': result = diskcheck_all elif v == 'none': result = [] elif v in diskcheck_all: result.append(v) else: raise ValueError(v) return result class SConsValues(optparse.Values): """ Holder class for uniform access to SCons options, regardless of whether or not they can be set on the command line or in the SConscript files (using the SetOption() function). A SCons option value can originate three different ways: 1) set on the command line; 2) set in an SConscript file; 3) the default setting (from the the op.add_option() calls in the Parser() function, below). The command line always overrides a value set in a SConscript file, which in turn always overrides default settings. Because we want to support user-specified options in the SConscript file itself, though, we may not know about all of the options when the command line is first parsed, so we can't make all the necessary precedence decisions at the time the option is configured. The solution implemented in this class is to keep these different sets of settings separate (command line, SConscript file, and default) and to override the __getattr__() method to check them in turn. This should allow the rest of the code to just fetch values as attributes of an instance of this class, without having to worry about where they came from. Note that not all command line options are settable from SConscript files, and the ones that are must be explicitly added to the "settable" list in this class, and optionally validated and coerced in the set_option() method. """ def __init__(self, defaults): self.__dict__['__defaults__'] = defaults self.__dict__['__SConscript_settings__'] = {} def __getattr__(self, attr): """ Fetches an options value, checking first for explicit settings from the command line (which are direct attributes), then the SConscript file settings, then the default values. """ try: return self.__dict__[attr] except KeyError: try: return self.__dict__['__SConscript_settings__'][attr] except KeyError: return getattr(self.__dict__['__defaults__'], attr) settable = [ 'clean', 'diskcheck', 'duplicate', 'help', 'implicit_cache', 'max_drift', 'md5_chunksize', 'no_exec', 'num_jobs', 'random', 'stack_size', 'warn', ] def set_option(self, name, value): """ Sets an option from an SConscript file. """ if not name in self.settable: raise SCons.Errors.UserError("This option is not settable from a SConscript file: %s"%name) if name == 'num_jobs': try: value = int(value) if value < 1: raise ValueError except ValueError: raise SCons.Errors.UserError("A positive integer is required: %s"%repr(value)) elif name == 'max_drift': try: value = int(value) except ValueError: raise SCons.Errors.UserError("An integer is required: %s"%repr(value)) elif name == 'duplicate': try: value = str(value) except ValueError: raise SCons.Errors.UserError("A string is required: %s"%repr(value)) if not value in SCons.Node.FS.Valid_Duplicates: raise SCons.Errors.UserError("Not a valid duplication style: %s" % value) # Set the duplicate style right away so it can affect linking # of SConscript files. SCons.Node.FS.set_duplicate(value) elif name == 'diskcheck': try: value = diskcheck_convert(value) except ValueError, v: raise SCons.Errors.UserError("Not a valid diskcheck value: %s"%v) if 'diskcheck' not in self.__dict__: # No --diskcheck= option was specified on the command line. # Set this right away so it can affect the rest of the # file/Node lookups while processing the SConscript files. SCons.Node.FS.set_diskcheck(value) elif name == 'stack_size': try: value = int(value) except ValueError: raise SCons.Errors.UserError("An integer is required: %s"%repr(value)) elif name == 'md5_chunksize': try: value = int(value) except ValueError: raise SCons.Errors.UserError("An integer is required: %s"%repr(value)) elif name == 'warn': if SCons.Util.is_String(value): value = [value] value = self.__SConscript_settings__.get(name, []) + value SCons.Warnings.process_warn_strings(value) self.__SConscript_settings__[name] = value class SConsOption(optparse.Option): def convert_value(self, opt, value): if value is not None: if self.nargs in (1, '?'): return self.check_value(opt, value) else: return tuple([self.check_value(opt, v) for v in value]) def process(self, opt, value, values, parser): # First, convert the value(s) to the right type. Howl if any # value(s) are bogus. value = self.convert_value(opt, value) # And then take whatever action is expected of us. # This is a separate method to make life easier for # subclasses to add new actions. return self.take_action( self.action, self.dest, opt, value, values, parser) def _check_nargs_optional(self): if self.nargs == '?' and self._short_opts: fmt = "option %s: nargs='?' is incompatible with short options" raise SCons.Errors.UserError(fmt % self._short_opts[0]) try: _orig_CONST_ACTIONS = optparse.Option.CONST_ACTIONS _orig_CHECK_METHODS = optparse.Option.CHECK_METHODS except AttributeError: # optparse.Option had no CONST_ACTIONS before Python 2.5. _orig_CONST_ACTIONS = ("store_const",) def _check_const(self): if self.action not in self.CONST_ACTIONS and self.const is not None: raise OptionError( "'const' must not be supplied for action %r" % self.action, self) # optparse.Option collects its list of unbound check functions # up front. This sucks because it means we can't just override # the _check_const() function like a normal method, we have to # actually replace it in the list. This seems to be the most # straightforward way to do that. _orig_CHECK_METHODS = [optparse.Option._check_action, optparse.Option._check_type, optparse.Option._check_choice, optparse.Option._check_dest, _check_const, optparse.Option._check_nargs, optparse.Option._check_callback] CHECK_METHODS = _orig_CHECK_METHODS + [_check_nargs_optional] CONST_ACTIONS = _orig_CONST_ACTIONS + optparse.Option.TYPED_ACTIONS class SConsOptionGroup(optparse.OptionGroup): """ A subclass for SCons-specific option groups. The only difference between this and the base class is that we print the group's help text flush left, underneath their own title but lined up with the normal "SCons Options". """ def format_help(self, formatter): """ Format an option group's help text, outdenting the title so it's flush with the "SCons Options" title we print at the top. """ formatter.dedent() result = formatter.format_heading(self.title) formatter.indent() result = result + optparse.OptionContainer.format_help(self, formatter) return result class SConsOptionParser(optparse.OptionParser): preserve_unknown_options = False def error(self, msg): self.print_usage(sys.stderr) sys.stderr.write("SCons error: %s\n" % msg) sys.exit(2) def _process_long_opt(self, rargs, values): """ SCons-specific processing of long options. This is copied directly from the normal optparse._process_long_opt() method, except that, if configured to do so, we catch the exception thrown when an unknown option is encountered and just stick it back on the "leftover" arguments for later (re-)processing. """ arg = rargs.pop(0) # Value explicitly attached to arg? Pretend it's the next # argument. if "=" in arg: (opt, next_arg) = arg.split("=", 1) rargs.insert(0, next_arg) had_explicit_value = True else: opt = arg had_explicit_value = False try: opt = self._match_long_opt(opt) except optparse.BadOptionError: if self.preserve_unknown_options: # SCons-specific: if requested, add unknown options to # the "leftover arguments" list for later processing. self.largs.append(arg) if had_explicit_value: # The unknown option will be re-processed later, # so undo the insertion of the explicit value. rargs.pop(0) return raise option = self._long_opt[opt] if option.takes_value(): nargs = option.nargs if nargs == '?': if had_explicit_value: value = rargs.pop(0) else: value = option.const elif len(rargs) < nargs: if nargs == 1: self.error(_("%s option requires an argument") % opt) else: self.error(_("%s option requires %d arguments") % (opt, nargs)) elif nargs == 1: value = rargs.pop(0) else: value = tuple(rargs[0:nargs]) del rargs[0:nargs] elif had_explicit_value: self.error(_("%s option does not take a value") % opt) else: value = None option.process(opt, value, values, self) def add_local_option(self, *args, **kw): """ Adds a local option to the parser. This is initiated by a SetOption() call to add a user-defined command-line option. We add the option to a separate option group for the local options, creating the group if necessary. """ try: group = self.local_option_group except AttributeError: group = SConsOptionGroup(self, 'Local Options') group = self.add_option_group(group) self.local_option_group = group result = group.add_option(*args, **kw) if result: # The option was added succesfully. We now have to add the # default value to our object that holds the default values # (so that an attempt to fetch the option's attribute will # yield the default value when not overridden) and then # we re-parse the leftover command-line options, so that # any value overridden on the command line is immediately # available if the user turns around and does a GetOption() # right away. setattr(self.values.__defaults__, result.dest, result.default) self.parse_args(self.largs, self.values) return result class SConsIndentedHelpFormatter(optparse.IndentedHelpFormatter): def format_usage(self, usage): return "usage: %s\n" % usage def format_heading(self, heading): """ This translates any heading of "options" or "Options" into "SCons Options." Unfortunately, we have to do this here, because those titles are hard-coded in the optparse calls. """ if heading == 'options': # The versions of optparse.py shipped with Pythons 2.3 and # 2.4 pass this in uncapitalized; override that so we get # consistent output on all versions. heading = "Options" if heading == 'Options': heading = "SCons Options" return optparse.IndentedHelpFormatter.format_heading(self, heading) def format_option(self, option): """ A copy of the normal optparse.IndentedHelpFormatter.format_option() method. This has been snarfed so we can modify text wrapping to out liking: -- add our own regular expression that doesn't break on hyphens (so things like --no-print-directory don't get broken); -- wrap the list of options themselves when it's too long (the wrapper.fill(opts) call below); -- set the subsequent_indent when wrapping the help_text. """ # The help for each option consists of two parts: # * the opt strings and metavars # eg. ("-x", or "-fFILENAME, --file=FILENAME") # * the user-supplied help string # eg. ("turn on expert mode", "read data from FILENAME") # # If possible, we write both of these on the same line: # -x turn on expert mode # # But if the opt string list is too long, we put the help # string on a second line, indented to the same column it would # start in if it fit on the first line. # -fFILENAME, --file=FILENAME # read data from FILENAME result = [] try: opts = self.option_strings[option] except AttributeError: # The Python 2.3 version of optparse attaches this to # to the option argument, not to this object. opts = option.option_strings opt_width = self.help_position - self.current_indent - 2 if len(opts) > opt_width: wrapper = textwrap.TextWrapper(width=self.width, initial_indent = ' ', subsequent_indent = ' ') wrapper.wordsep_re = no_hyphen_re opts = wrapper.fill(opts) + '\n' indent_first = self.help_position else: # start help on same line as opts opts = "%*s%-*s " % (self.current_indent, "", opt_width, opts) indent_first = 0 result.append(opts) if option.help: try: expand_default = self.expand_default except AttributeError: # The HelpFormatter base class in the Python 2.3 version # of optparse has no expand_default() method. help_text = option.help else: help_text = expand_default(option) # SCons: indent every line of the help text but the first. wrapper = textwrap.TextWrapper(width=self.help_width, subsequent_indent = ' ') wrapper.wordsep_re = no_hyphen_re help_lines = wrapper.wrap(help_text) result.append("%*s%s\n" % (indent_first, "", help_lines[0])) for line in help_lines[1:]: result.append("%*s%s\n" % (self.help_position, "", line)) elif opts[-1] != "\n": result.append("\n") return "".join(result) # For consistent help output across Python versions, we provide a # subclass copy of format_option_strings() and these two variables. # This is necessary (?) for Python2.3, which otherwise concatenates # a short option with its metavar. _short_opt_fmt = "%s %s" _long_opt_fmt = "%s=%s" def format_option_strings(self, option): """Return a comma-separated list of option strings & metavariables.""" if option.takes_value(): metavar = option.metavar or option.dest.upper() short_opts = [] for sopt in option._short_opts: short_opts.append(self._short_opt_fmt % (sopt, metavar)) long_opts = [] for lopt in option._long_opts: long_opts.append(self._long_opt_fmt % (lopt, metavar)) else: short_opts = option._short_opts long_opts = option._long_opts if self.short_first: opts = short_opts + long_opts else: opts = long_opts + short_opts return ", ".join(opts) def Parser(version): """ Returns an options parser object initialized with the standard SCons options. """ formatter = SConsIndentedHelpFormatter(max_help_position=30) op = SConsOptionParser(option_class=SConsOption, add_help_option=False, formatter=formatter, usage="usage: scons [OPTION] [TARGET] ...",) op.preserve_unknown_options = True op.version = version # Add the options to the parser we just created. # # These are in the order we want them to show up in the -H help # text, basically alphabetical. Each op.add_option() call below # should have a consistent format: # # op.add_option("-L", "--long-option-name", # nargs=1, type="string", # dest="long_option_name", default='foo', # action="callback", callback=opt_long_option, # help="help text goes here", # metavar="VAR") # # Even though the optparse module constructs reasonable default # destination names from the long option names, we're going to be # explicit about each one for easier readability and so this code # will at least show up when grepping the source for option attribute # names, or otherwise browsing the source code. # options ignored for compatibility def opt_ignore(option, opt, value, parser): sys.stderr.write("Warning: ignoring %s option\n" % opt) op.add_option("-b", "-d", "-e", "-m", "-S", "-t", "-w", "--environment-overrides", "--no-keep-going", "--no-print-directory", "--print-directory", "--stop", "--touch", action="callback", callback=opt_ignore, help="Ignored for compatibility.") op.add_option('-c', '--clean', '--remove', dest="clean", default=False, action="store_true", help="Remove specified targets and dependencies.") op.add_option('-C', '--directory', nargs=1, type="string", dest="directory", default=[], action="append", help="Change to DIR before doing anything.", metavar="DIR") op.add_option('--cache-debug', nargs=1, dest="cache_debug", default=None, action="store", help="Print CacheDir debug info to FILE.", metavar="FILE") op.add_option('--cache-disable', '--no-cache', dest='cache_disable', default=False, action="store_true", help="Do not retrieve built targets from CacheDir.") op.add_option('--cache-force', '--cache-populate', dest='cache_force', default=False, action="store_true", help="Copy already-built targets into the CacheDir.") op.add_option('--cache-show', dest='cache_show', default=False, action="store_true", help="Print build actions for files from CacheDir.") config_options = ["auto", "force" ,"cache"] def opt_config(option, opt, value, parser, c_options=config_options): if not value in c_options: raise OptionValueError("Warning: %s is not a valid config type" % value) setattr(parser.values, option.dest, value) opt_config_help = "Controls Configure subsystem: %s." \ % ", ".join(config_options) op.add_option('--config', nargs=1, type="string", dest="config", default="auto", action="callback", callback=opt_config, help = opt_config_help, metavar="MODE") op.add_option('-D', dest="climb_up", default=None, action="store_const", const=2, help="Search up directory tree for SConstruct, " "build all Default() targets.") deprecated_debug_options = { "dtree" : '; please use --tree=derived instead', "nomemoizer" : ' and has no effect', "stree" : '; please use --tree=all,status instead', "tree" : '; please use --tree=all instead', } debug_options = ["count", "duplicate", "explain", "findlibs", "includes", "memoizer", "memory", "objects", "pdb", "prepare", "presub", "stacktrace", "time"] + list(deprecated_debug_options.keys()) def opt_debug(option, opt, value, parser, debug_options=debug_options, deprecated_debug_options=deprecated_debug_options): if value in debug_options: parser.values.debug.append(value) if value in deprecated_debug_options.keys(): try: parser.values.delayed_warnings except AttributeError: parser.values.delayed_warnings = [] msg = deprecated_debug_options[value] w = "The --debug=%s option is deprecated%s." % (value, msg) t = (SCons.Warnings.DeprecatedDebugOptionsWarning, w) parser.values.delayed_warnings.append(t) else: raise OptionValueError("Warning: %s is not a valid debug type" % value) opt_debug_help = "Print various types of debugging information: %s." \ % ", ".join(debug_options) op.add_option('--debug', nargs=1, type="string", dest="debug", default=[], action="callback", callback=opt_debug, help=opt_debug_help, metavar="TYPE") def opt_diskcheck(option, opt, value, parser): try: diskcheck_value = diskcheck_convert(value) except ValueError, e: raise OptionValueError("Warning: `%s' is not a valid diskcheck type" % e) setattr(parser.values, option.dest, diskcheck_value) op.add_option('--diskcheck', nargs=1, type="string", dest='diskcheck', default=None, action="callback", callback=opt_diskcheck, help="Enable specific on-disk checks.", metavar="TYPE") def opt_duplicate(option, opt, value, parser): if not value in SCons.Node.FS.Valid_Duplicates: raise OptionValueError("`%s' is not a valid duplication style." % value) setattr(parser.values, option.dest, value) # Set the duplicate style right away so it can affect linking # of SConscript files. SCons.Node.FS.set_duplicate(value) opt_duplicate_help = "Set the preferred duplication methods. Must be one of " \ + ", ".join(SCons.Node.FS.Valid_Duplicates) op.add_option('--duplicate', nargs=1, type="string", dest="duplicate", default='hard-soft-copy', action="callback", callback=opt_duplicate, help=opt_duplicate_help) op.add_option('-f', '--file', '--makefile', '--sconstruct', nargs=1, type="string", dest="file", default=[], action="append", help="Read FILE as the top-level SConstruct file.") op.add_option('-h', '--help', dest="help", default=False, action="store_true", help="Print defined help message, or this one.") op.add_option("-H", "--help-options", action="help", help="Print this message and exit.") op.add_option('-i', '--ignore-errors', dest='ignore_errors', default=False, action="store_true", help="Ignore errors from build actions.") op.add_option('-I', '--include-dir', nargs=1, dest='include_dir', default=[], action="append", help="Search DIR for imported Python modules.", metavar="DIR") op.add_option('--implicit-cache', dest='implicit_cache', default=False, action="store_true", help="Cache implicit dependencies") def opt_implicit_deps(option, opt, value, parser): setattr(parser.values, 'implicit_cache', True) setattr(parser.values, option.dest, True) op.add_option('--implicit-deps-changed', dest="implicit_deps_changed", default=False, action="callback", callback=opt_implicit_deps, help="Ignore cached implicit dependencies.") op.add_option('--implicit-deps-unchanged', dest="implicit_deps_unchanged", default=False, action="callback", callback=opt_implicit_deps, help="Ignore changes in implicit dependencies.") op.add_option('--interact', '--interactive', dest='interactive', default=False, action="store_true", help="Run in interactive mode.") op.add_option('-j', '--jobs', nargs=1, type="int", dest="num_jobs", default=1, action="store", help="Allow N jobs at once.", metavar="N") op.add_option('-k', '--keep-going', dest='keep_going', default=False, action="store_true", help="Keep going when a target can't be made.") op.add_option('--max-drift', nargs=1, type="int", dest='max_drift', default=SCons.Node.FS.default_max_drift, action="store", help="Set maximum system clock drift to N seconds.", metavar="N") op.add_option('--md5-chunksize', nargs=1, type="int", dest='md5_chunksize', default=SCons.Node.FS.File.md5_chunksize, action="store", help="Set chunk-size for MD5 signature computation to N kilobytes.", metavar="N") op.add_option('-n', '--no-exec', '--just-print', '--dry-run', '--recon', dest='no_exec', default=False, action="store_true", help="Don't build; just print commands.") op.add_option('--no-site-dir', dest='no_site_dir', default=False, action="store_true", help="Don't search or use the usual site_scons dir.") op.add_option('--profile', nargs=1, dest="profile_file", default=None, action="store", help="Profile SCons and put results in FILE.", metavar="FILE") op.add_option('-q', '--question', dest="question", default=False, action="store_true", help="Don't build; exit status says if up to date.") op.add_option('-Q', dest='no_progress', default=False, action="store_true", help="Suppress \"Reading/Building\" progress messages.") op.add_option('--random', dest="random", default=False, action="store_true", help="Build dependencies in random order.") op.add_option('-s', '--silent', '--quiet', dest="silent", default=False, action="store_true", help="Don't print commands.") op.add_option('--site-dir', nargs=1, dest='site_dir', default=None, action="store", help="Use DIR instead of the usual site_scons dir.", metavar="DIR") op.add_option('--stack-size', nargs=1, type="int", dest='stack_size', action="store", help="Set the stack size of the threads used to run jobs to N kilobytes.", metavar="N") op.add_option('--taskmastertrace', nargs=1, dest="taskmastertrace_file", default=None, action="store", help="Trace Node evaluation to FILE.", metavar="FILE") tree_options = ["all", "derived", "prune", "status"] def opt_tree(option, opt, value, parser, tree_options=tree_options): import Main tp = Main.TreePrinter() for o in value.split(','): if o == 'all': tp.derived = False elif o == 'derived': tp.derived = True elif o == 'prune': tp.prune = True elif o == 'status': tp.status = True else: raise OptionValueError("Warning: %s is not a valid --tree option" % o) parser.values.tree_printers.append(tp) opt_tree_help = "Print a dependency tree in various formats: %s." \ % ", ".join(tree_options) op.add_option('--tree', nargs=1, type="string", dest="tree_printers", default=[], action="callback", callback=opt_tree, help=opt_tree_help, metavar="OPTIONS") op.add_option('-u', '--up', '--search-up', dest="climb_up", default=0, action="store_const", const=1, help="Search up directory tree for SConstruct, " "build targets at or below current directory.") op.add_option('-U', dest="climb_up", default=0, action="store_const", const=3, help="Search up directory tree for SConstruct, " "build Default() targets from local SConscript.") def opt_version(option, opt, value, parser): sys.stdout.write(parser.version + '\n') sys.exit(0) op.add_option("-v", "--version", action="callback", callback=opt_version, help="Print the SCons version number and exit.") def opt_warn(option, opt, value, parser, tree_options=tree_options): if SCons.Util.is_String(value): value = value.split(',') parser.values.warn.extend(value) op.add_option('--warn', '--warning', nargs=1, type="string", dest="warn", default=[], action="callback", callback=opt_warn, help="Enable or disable warnings.", metavar="WARNING-SPEC") op.add_option('-Y', '--repository', '--srcdir', nargs=1, dest="repository", default=[], action="append", help="Search REPOSITORY for source and target files.") # Options from Make and Cons classic that we do not yet support, # but which we may support someday and whose (potential) meanings # we don't want to change. These all get a "the -X option is not # yet implemented" message and don't show up in the help output. def opt_not_yet(option, opt, value, parser): msg = "Warning: the %s option is not yet implemented\n" % opt sys.stderr.write(msg) op.add_option('-l', '--load-average', '--max-load', nargs=1, type="float", dest="load_average", default=0, action="callback", callback=opt_not_yet, # action="store", # help="Don't start multiple jobs unless load is below " # "LOAD-AVERAGE." help=SUPPRESS_HELP) op.add_option('--list-actions', dest="list_actions", action="callback", callback=opt_not_yet, # help="Don't build; list files and build actions." help=SUPPRESS_HELP) op.add_option('--list-derived', dest="list_derived", action="callback", callback=opt_not_yet, # help="Don't build; list files that would be built." help=SUPPRESS_HELP) op.add_option('--list-where', dest="list_where", action="callback", callback=opt_not_yet, # help="Don't build; list files and where defined." help=SUPPRESS_HELP) op.add_option('-o', '--old-file', '--assume-old', nargs=1, type="string", dest="old_file", default=[], action="callback", callback=opt_not_yet, # action="append", # help = "Consider FILE to be old; don't rebuild it." help=SUPPRESS_HELP) op.add_option('--override', nargs=1, type="string", action="callback", callback=opt_not_yet, dest="override", # help="Override variables as specified in FILE." help=SUPPRESS_HELP) op.add_option('-p', action="callback", callback=opt_not_yet, dest="p", # help="Print internal environments/objects." help=SUPPRESS_HELP) op.add_option('-r', '-R', '--no-builtin-rules', '--no-builtin-variables', action="callback", callback=opt_not_yet, dest="no_builtin_rules", # help="Clear default environments and variables." help=SUPPRESS_HELP) op.add_option('--write-filenames', nargs=1, type="string", dest="write_filenames", action="callback", callback=opt_not_yet, # help="Write all filenames examined into FILE." help=SUPPRESS_HELP) op.add_option('-W', '--new-file', '--assume-new', '--what-if', nargs=1, type="string", dest="new_file", action="callback", callback=opt_not_yet, # help="Consider FILE to be changed." help=SUPPRESS_HELP) op.add_option('--warn-undefined-variables', dest="warn_undefined_variables", action="callback", callback=opt_not_yet, # help="Warn when an undefined variable is referenced." help=SUPPRESS_HELP) return op # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:
gpl-3.0
lasote/conan
conans/test/functional/disk_search_test.py
4
3498
import os import unittest from conans.paths import (BUILD_FOLDER, PACKAGES_FOLDER, EXPORT_FOLDER, SimplePaths, CONANINFO) from conans.model.ref import ConanFileReference from conans.test.utils.test_files import temp_folder from conans.search.search import DiskSearchManager, DiskSearchAdapter from conans.util.files import save from conans.model.info import ConanInfo class SearchTest(unittest.TestCase): def setUp(self): folder = temp_folder() paths = SimplePaths(folder) search_adapter = DiskSearchAdapter() self.search_manager = DiskSearchManager(paths, search_adapter) os.chdir(paths.store) self.paths = paths def basic_test2(self): conan_ref1 = ConanFileReference.loads("opencv/2.4.10@lasote/testing") root_folder = str(conan_ref1).replace("@", "/") artifacts = ["a", "b", "c"] reg1 = "%s/%s" % (root_folder, EXPORT_FOLDER) os.makedirs(reg1) for artif_id in artifacts: build1 = "%s/%s/%s" % (root_folder, BUILD_FOLDER, artif_id) artif1 = "%s/%s/%s" % (root_folder, PACKAGES_FOLDER, artif_id) os.makedirs(build1) info = ConanInfo().loads("[settings]\n[options]") save(os.path.join(artif1, CONANINFO), info.dumps()) packages = self.search_manager.search_packages(conan_ref1, "") all_artif = [_artif for _artif in sorted(packages)] self.assertEqual(all_artif, artifacts) def pattern_test(self): refs = ["opencv/2.4.%s@lasote/testing" % ref for ref in ("1", "2", "3")] refs = [ConanFileReference.loads(ref) for ref in refs] for ref in refs: root_folder = str(ref).replace("@", "/") reg1 = "%s/%s" % (root_folder, EXPORT_FOLDER) os.makedirs(reg1) recipes = self.search_manager.search("opencv/*@lasote/testing") self.assertEqual(recipes, refs) def case_insensitive_test(self): root_folder2 = "sdl/1.5/lasote/stable" conan_ref2 = ConanFileReference.loads("sdl/1.5@lasote/stable") os.makedirs("%s/%s" % (root_folder2, EXPORT_FOLDER)) root_folder3 = "assimp/0.14/phil/testing" conan_ref3 = ConanFileReference.loads("assimp/0.14@phil/testing") os.makedirs("%s/%s" % (root_folder3, EXPORT_FOLDER)) root_folder4 = "sdl/2.10/lasote/stable" conan_ref4 = ConanFileReference.loads("sdl/2.10@lasote/stable") os.makedirs("%s/%s" % (root_folder4, EXPORT_FOLDER)) root_folder5 = "SDL_fake/1.10/lasote/testing" conan_ref5 = ConanFileReference.loads("SDL_fake/1.10@lasote/testing") os.makedirs("%s/%s" % (root_folder5, EXPORT_FOLDER)) # Case insensitive searches search_adapter = DiskSearchAdapter() search_manager = DiskSearchManager(self.paths, search_adapter) reg_conans = sorted([str(_reg) for _reg in search_manager.search("*")]) self.assertEqual(reg_conans, [str(conan_ref5), str(conan_ref3), str(conan_ref2), str(conan_ref4)]) reg_conans = sorted([str(_reg) for _reg in search_manager.search(pattern="sdl*")]) self.assertEqual(reg_conans, [str(conan_ref5), str(conan_ref2), str(conan_ref4)]) # Case sensitive search self.assertEqual(str(search_manager.search(pattern="SDL*", ignorecase=False)[0]), str(conan_ref5))
mit
HiSPARC/station-software
user/python/Lib/test/test_ordered_dict.py
4
13429
import copy import pickle from random import shuffle import unittest from collections import OrderedDict from collections import MutableMapping from test import mapping_tests, test_support class TestOrderedDict(unittest.TestCase): def test_init(self): with self.assertRaises(TypeError): OrderedDict([('a', 1), ('b', 2)], None) # too many args pairs = [('a', 1), ('b', 2), ('c', 3), ('d', 4), ('e', 5)] self.assertEqual(sorted(OrderedDict(dict(pairs)).items()), pairs) # dict input self.assertEqual(sorted(OrderedDict(**dict(pairs)).items()), pairs) # kwds input self.assertEqual(list(OrderedDict(pairs).items()), pairs) # pairs input self.assertEqual(list(OrderedDict([('a', 1), ('b', 2), ('c', 9), ('d', 4)], c=3, e=5).items()), pairs) # mixed input # make sure no positional args conflict with possible kwdargs self.assertEqual(list(OrderedDict(self=42).items()), [('self', 42)]) self.assertEqual(list(OrderedDict(other=42).items()), [('other', 42)]) self.assertRaises(TypeError, OrderedDict, 42) self.assertRaises(TypeError, OrderedDict, (), ()) self.assertRaises(TypeError, OrderedDict.__init__) # Make sure that direct calls to __init__ do not clear previous contents d = OrderedDict([('a', 1), ('b', 2), ('c', 3), ('d', 44), ('e', 55)]) d.__init__([('e', 5), ('f', 6)], g=7, d=4) self.assertEqual(list(d.items()), [('a', 1), ('b', 2), ('c', 3), ('d', 4), ('e', 5), ('f', 6), ('g', 7)]) def test_update(self): with self.assertRaises(TypeError): OrderedDict().update([('a', 1), ('b', 2)], None) # too many args pairs = [('a', 1), ('b', 2), ('c', 3), ('d', 4), ('e', 5)] od = OrderedDict() od.update(dict(pairs)) self.assertEqual(sorted(od.items()), pairs) # dict input od = OrderedDict() od.update(**dict(pairs)) self.assertEqual(sorted(od.items()), pairs) # kwds input od = OrderedDict() od.update(pairs) self.assertEqual(list(od.items()), pairs) # pairs input od = OrderedDict() od.update([('a', 1), ('b', 2), ('c', 9), ('d', 4)], c=3, e=5) self.assertEqual(list(od.items()), pairs) # mixed input # Issue 9137: Named argument called 'other' or 'self' # shouldn't be treated specially. od = OrderedDict() od.update(self=23) self.assertEqual(list(od.items()), [('self', 23)]) od = OrderedDict() od.update(other={}) self.assertEqual(list(od.items()), [('other', {})]) od = OrderedDict() od.update(red=5, blue=6, other=7, self=8) self.assertEqual(sorted(list(od.items())), [('blue', 6), ('other', 7), ('red', 5), ('self', 8)]) # Make sure that direct calls to update do not clear previous contents # add that updates items are not moved to the end d = OrderedDict([('a', 1), ('b', 2), ('c', 3), ('d', 44), ('e', 55)]) d.update([('e', 5), ('f', 6)], g=7, d=4) self.assertEqual(list(d.items()), [('a', 1), ('b', 2), ('c', 3), ('d', 4), ('e', 5), ('f', 6), ('g', 7)]) self.assertRaises(TypeError, OrderedDict().update, 42) self.assertRaises(TypeError, OrderedDict().update, (), ()) self.assertRaises(TypeError, OrderedDict.update) def test_abc(self): self.assertIsInstance(OrderedDict(), MutableMapping) self.assertTrue(issubclass(OrderedDict, MutableMapping)) def test_clear(self): pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)] shuffle(pairs) od = OrderedDict(pairs) self.assertEqual(len(od), len(pairs)) od.clear() self.assertEqual(len(od), 0) def test_delitem(self): pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)] od = OrderedDict(pairs) del od['a'] self.assertNotIn('a', od) with self.assertRaises(KeyError): del od['a'] self.assertEqual(list(od.items()), pairs[:2] + pairs[3:]) def test_setitem(self): od = OrderedDict([('d', 1), ('b', 2), ('c', 3), ('a', 4), ('e', 5)]) od['c'] = 10 # existing element od['f'] = 20 # new element self.assertEqual(list(od.items()), [('d', 1), ('b', 2), ('c', 10), ('a', 4), ('e', 5), ('f', 20)]) def test_iterators(self): pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)] shuffle(pairs) od = OrderedDict(pairs) self.assertEqual(list(od), [t[0] for t in pairs]) self.assertEqual(od.keys()[:], [t[0] for t in pairs]) self.assertEqual(od.values()[:], [t[1] for t in pairs]) self.assertEqual(od.items()[:], pairs) self.assertEqual(list(od.iterkeys()), [t[0] for t in pairs]) self.assertEqual(list(od.itervalues()), [t[1] for t in pairs]) self.assertEqual(list(od.iteritems()), pairs) self.assertEqual(list(reversed(od)), [t[0] for t in reversed(pairs)]) def test_popitem(self): pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)] shuffle(pairs) od = OrderedDict(pairs) while pairs: self.assertEqual(od.popitem(), pairs.pop()) with self.assertRaises(KeyError): od.popitem() self.assertEqual(len(od), 0) def test_pop(self): pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)] shuffle(pairs) od = OrderedDict(pairs) shuffle(pairs) while pairs: k, v = pairs.pop() self.assertEqual(od.pop(k), v) with self.assertRaises(KeyError): od.pop('xyz') self.assertEqual(len(od), 0) self.assertEqual(od.pop(k, 12345), 12345) # make sure pop still works when __missing__ is defined class Missing(OrderedDict): def __missing__(self, key): return 0 m = Missing(a=1) self.assertEqual(m.pop('b', 5), 5) self.assertEqual(m.pop('a', 6), 1) self.assertEqual(m.pop('a', 6), 6) with self.assertRaises(KeyError): m.pop('a') def test_equality(self): pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)] shuffle(pairs) od1 = OrderedDict(pairs) od2 = OrderedDict(pairs) self.assertEqual(od1, od2) # same order implies equality pairs = pairs[2:] + pairs[:2] od2 = OrderedDict(pairs) self.assertNotEqual(od1, od2) # different order implies inequality # comparison to regular dict is not order sensitive self.assertEqual(od1, dict(od2)) self.assertEqual(dict(od2), od1) # different length implied inequality self.assertNotEqual(od1, OrderedDict(pairs[:-1])) def test_copying(self): # Check that ordered dicts are copyable, deepcopyable, picklable, # and have a repr/eval round-trip pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)] od = OrderedDict(pairs) update_test = OrderedDict() update_test.update(od) for i, dup in enumerate([ od.copy(), copy.copy(od), copy.deepcopy(od), pickle.loads(pickle.dumps(od, 0)), pickle.loads(pickle.dumps(od, 1)), pickle.loads(pickle.dumps(od, 2)), pickle.loads(pickle.dumps(od, -1)), eval(repr(od)), update_test, OrderedDict(od), ]): self.assertTrue(dup is not od) self.assertEqual(dup, od) self.assertEqual(list(dup.items()), list(od.items())) self.assertEqual(len(dup), len(od)) self.assertEqual(type(dup), type(od)) def test_yaml_linkage(self): # Verify that __reduce__ is setup in a way that supports PyYAML's dump() feature. # In yaml, lists are native but tuples are not. pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)] od = OrderedDict(pairs) # yaml.dump(od) --> # '!!python/object/apply:__main__.OrderedDict\n- - [a, 1]\n - [b, 2]\n' self.assertTrue(all(type(pair)==list for pair in od.__reduce__()[1])) def test_reduce_not_too_fat(self): # do not save instance dictionary if not needed pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)] od = OrderedDict(pairs) self.assertEqual(len(od.__reduce__()), 2) od.x = 10 self.assertEqual(len(od.__reduce__()), 3) def test_repr(self): od = OrderedDict([('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]) self.assertEqual(repr(od), "OrderedDict([('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)])") self.assertEqual(eval(repr(od)), od) self.assertEqual(repr(OrderedDict()), "OrderedDict()") def test_repr_recursive(self): # See issue #9826 od = OrderedDict.fromkeys('abc') od['x'] = od self.assertEqual(repr(od), "OrderedDict([('a', None), ('b', None), ('c', None), ('x', ...)])") def test_repr_recursive_values(self): od = OrderedDict() od[42] = od.viewvalues() r = repr(od) # Cannot perform a stronger test, as the contents of the repr # are implementation-dependent. All we can say is that we # want a str result, not an exception of any sort. self.assertIsInstance(r, str) od[42] = od.viewitems() r = repr(od) # Again. self.assertIsInstance(r, str) def test_setdefault(self): pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)] shuffle(pairs) od = OrderedDict(pairs) pair_order = list(od.items()) self.assertEqual(od.setdefault('a', 10), 3) # make sure order didn't change self.assertEqual(list(od.items()), pair_order) self.assertEqual(od.setdefault('x', 10), 10) # make sure 'x' is added to the end self.assertEqual(list(od.items())[-1], ('x', 10)) # make sure setdefault still works when __missing__ is defined class Missing(OrderedDict): def __missing__(self, key): return 0 self.assertEqual(Missing().setdefault(5, 9), 9) def test_reinsert(self): # Given insert a, insert b, delete a, re-insert a, # verify that a is now later than b. od = OrderedDict() od['a'] = 1 od['b'] = 2 del od['a'] od['a'] = 1 self.assertEqual(list(od.items()), [('b', 2), ('a', 1)]) def test_views(self): s = 'the quick brown fox jumped over a lazy dog yesterday before dawn'.split() od = OrderedDict.fromkeys(s) self.assertEqual(list(od.viewkeys()), s) self.assertEqual(list(od.viewvalues()), [None for k in s]) self.assertEqual(list(od.viewitems()), [(k, None) for k in s]) # See http://bugs.python.org/issue24286 self.assertEqual(od.viewkeys(), dict(od).viewkeys()) self.assertEqual(od.viewitems(), dict(od).viewitems()) def test_override_update(self): # Verify that subclasses can override update() without breaking __init__() class MyOD(OrderedDict): def update(self, *args, **kwds): raise Exception() items = [('a', 1), ('c', 3), ('b', 2)] self.assertEqual(list(MyOD(items).items()), items) def test_free_after_iterating(self): test_support.check_free_after_iterating(self, iter, OrderedDict) test_support.check_free_after_iterating(self, lambda d: d.iterkeys(), OrderedDict) test_support.check_free_after_iterating(self, lambda d: d.itervalues(), OrderedDict) test_support.check_free_after_iterating(self, lambda d: d.iteritems(), OrderedDict) test_support.check_free_after_iterating(self, lambda d: iter(d.viewkeys()), OrderedDict) test_support.check_free_after_iterating(self, lambda d: iter(d.viewvalues()), OrderedDict) test_support.check_free_after_iterating(self, lambda d: iter(d.viewitems()), OrderedDict) class GeneralMappingTests(mapping_tests.BasicTestMappingProtocol): type2test = OrderedDict def test_popitem(self): d = self._empty_mapping() self.assertRaises(KeyError, d.popitem) class MyOrderedDict(OrderedDict): pass class SubclassMappingTests(mapping_tests.BasicTestMappingProtocol): type2test = MyOrderedDict def test_popitem(self): d = self._empty_mapping() self.assertRaises(KeyError, d.popitem) def test_main(verbose=None): test_classes = [TestOrderedDict, GeneralMappingTests, SubclassMappingTests] test_support.run_unittest(*test_classes) if __name__ == "__main__": test_main(verbose=True)
gpl-3.0
wagjo/closure-library
closure/bin/build/depstree.py
455
6375
# Copyright 2009 The Closure Library Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS-IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Class to represent a full Closure Library dependency tree. Offers a queryable tree of dependencies of a given set of sources. The tree will also do logical validation to prevent duplicate provides and circular dependencies. """ __author__ = '[email protected] (Nathan Naze)' class DepsTree(object): """Represents the set of dependencies between source files.""" def __init__(self, sources): """Initializes the tree with a set of sources. Args: sources: A set of JavaScript sources. Raises: MultipleProvideError: A namespace is provided by muplitple sources. NamespaceNotFoundError: A namespace is required but never provided. """ self._sources = sources self._provides_map = dict() # Ensure nothing was provided twice. for source in sources: for provide in source.provides: if provide in self._provides_map: raise MultipleProvideError( provide, [self._provides_map[provide], source]) self._provides_map[provide] = source # Check that all required namespaces are provided. for source in sources: for require in source.requires: if require not in self._provides_map: raise NamespaceNotFoundError(require, source) def GetDependencies(self, required_namespaces): """Get source dependencies, in order, for the given namespaces. Args: required_namespaces: A string (for one) or list (for one or more) of namespaces. Returns: A list of source objects that provide those namespaces and all requirements, in dependency order. Raises: NamespaceNotFoundError: A namespace is requested but doesn't exist. CircularDependencyError: A cycle is detected in the dependency tree. """ if isinstance(required_namespaces, str): required_namespaces = [required_namespaces] deps_sources = [] for namespace in required_namespaces: for source in DepsTree._ResolveDependencies( namespace, [], self._provides_map, []): if source not in deps_sources: deps_sources.append(source) return deps_sources @staticmethod def _ResolveDependencies(required_namespace, deps_list, provides_map, traversal_path): """Resolve dependencies for Closure source files. Follows the dependency tree down and builds a list of sources in dependency order. This function will recursively call itself to fill all dependencies below the requested namespaces, and then append its sources at the end of the list. Args: required_namespace: String of required namespace. deps_list: List of sources in dependency order. This function will append the required source once all of its dependencies are satisfied. provides_map: Map from namespace to source that provides it. traversal_path: List of namespaces of our path from the root down the dependency/recursion tree. Used to identify cyclical dependencies. This is a list used as a stack -- when the function is entered, the current namespace is pushed and popped right before returning. Each recursive call will check that the current namespace does not appear in the list, throwing a CircularDependencyError if it does. Returns: The given deps_list object filled with sources in dependency order. Raises: NamespaceNotFoundError: A namespace is requested but doesn't exist. CircularDependencyError: A cycle is detected in the dependency tree. """ source = provides_map.get(required_namespace) if not source: raise NamespaceNotFoundError(required_namespace) if required_namespace in traversal_path: traversal_path.append(required_namespace) # do this *after* the test # This must be a cycle. raise CircularDependencyError(traversal_path) # If we don't have the source yet, we'll have to visit this namespace and # add the required dependencies to deps_list. if source not in deps_list: traversal_path.append(required_namespace) for require in source.requires: # Append all other dependencies before we append our own. DepsTree._ResolveDependencies(require, deps_list, provides_map, traversal_path) deps_list.append(source) traversal_path.pop() return deps_list class BaseDepsTreeError(Exception): """Base DepsTree error.""" def __init__(self): Exception.__init__(self) class CircularDependencyError(BaseDepsTreeError): """Raised when a dependency cycle is encountered.""" def __init__(self, dependency_list): BaseDepsTreeError.__init__(self) self._dependency_list = dependency_list def __str__(self): return ('Encountered circular dependency:\n%s\n' % '\n'.join(self._dependency_list)) class MultipleProvideError(BaseDepsTreeError): """Raised when a namespace is provided more than once.""" def __init__(self, namespace, sources): BaseDepsTreeError.__init__(self) self._namespace = namespace self._sources = sources def __str__(self): source_strs = map(str, self._sources) return ('Namespace "%s" provided more than once in sources:\n%s\n' % (self._namespace, '\n'.join(source_strs))) class NamespaceNotFoundError(BaseDepsTreeError): """Raised when a namespace is requested but not provided.""" def __init__(self, namespace, source=None): BaseDepsTreeError.__init__(self) self._namespace = namespace self._source = source def __str__(self): msg = 'Namespace "%s" never provided.' % self._namespace if self._source: msg += ' Required in %s' % self._source return msg
apache-2.0
safwanrahman/readthedocs.org
readthedocs/restapi/permissions.py
2
2876
"""Defines access permissions for the API.""" from __future__ import absolute_import from rest_framework import permissions from readthedocs.core.permissions import AdminPermission class IsOwner(permissions.BasePermission): """Custom permission to only allow owners of an object to edit it.""" def has_object_permission(self, request, view, obj): # Write permissions are only allowed to the owner of the snippet return request.user in obj.users.all() class CommentModeratorOrReadOnly(permissions.BasePermission): def has_object_permission(self, request, view, obj): if request.method in permissions.SAFE_METHODS: return True # TODO: Similar logic to #1084 return AdminPermission.is_admin(request.user, obj.node.project) class RelatedProjectIsOwner(permissions.BasePermission): """Custom permission to only allow owners of an object to edit it.""" def has_permission(self, request, view): return (request.method in permissions.SAFE_METHODS) def has_object_permission(self, request, view, obj): # Write permissions are only allowed to the owner of the snippet return ( request.method in permissions.SAFE_METHODS or (request.user in obj.project.users.all()) ) class APIPermission(permissions.IsAuthenticatedOrReadOnly): """ Control users access to the API. This permission should allow authenticated users readonly access to the API, and allow admin users write access. This should be used on API resources that need to implement write operations to resources that were based on the ReadOnlyViewSet """ def has_permission(self, request, view): has_perm = super(APIPermission, self).has_permission(request, view) return has_perm or (request.user and request.user.is_staff) def has_object_permission(self, request, view, obj): has_perm = super(APIPermission, self).has_object_permission( request, view, obj) return has_perm or (request.user and request.user.is_staff) class APIRestrictedPermission(permissions.BasePermission): """ Allow admin write, authenticated and anonymous read only. This differs from :py:class:`APIPermission` by not allowing for authenticated POSTs. This permission is endpoints like ``/api/v2/build/``, which are used by admin users to coordinate build instance creation, but only should be readable by end users. """ def has_permission(self, request, view): return ( request.method in permissions.SAFE_METHODS or (request.user and request.user.is_staff) ) def has_object_permission(self, request, view, obj): return ( request.method in permissions.SAFE_METHODS or (request.user and request.user.is_staff) )
mit
igemsoftware/SYSU-Software2013
project/Python27/Tools/scripts/xxci.py
94
2798
#! /usr/bin/env python # xxci # # check in files for which rcsdiff returns nonzero exit status import sys import os from stat import * import fnmatch EXECMAGIC = '\001\140\000\010' MAXSIZE = 200*1024 # Files this big must be binaries and are skipped. def getargs(): args = sys.argv[1:] if args: return args print 'No arguments, checking almost *, in "ls -t" order' list = [] for file in os.listdir(os.curdir): if not skipfile(file): list.append((getmtime(file), file)) list.sort() if not list: print 'Nothing to do -- exit 1' sys.exit(1) list.sort() list.reverse() for mtime, file in list: args.append(file) return args def getmtime(file): try: st = os.stat(file) return st[ST_MTIME] except os.error: return -1 badnames = ['tags', 'TAGS', 'xyzzy', 'nohup.out', 'core'] badprefixes = ['.', ',', '@', '#', 'o.'] badsuffixes = \ ['~', '.a', '.o', '.old', '.bak', '.orig', '.new', '.prev', '.not', \ '.pyc', '.fdc', '.rgb', '.elc', ',v'] ignore = [] def setup(): ignore[:] = badnames for p in badprefixes: ignore.append(p + '*') for p in badsuffixes: ignore.append('*' + p) try: f = open('.xxcign', 'r') except IOError: return ignore[:] = ignore + f.read().split() def skipfile(file): for p in ignore: if fnmatch.fnmatch(file, p): return 1 try: st = os.lstat(file) except os.error: return 1 # Doesn't exist -- skip it # Skip non-plain files. if not S_ISREG(st[ST_MODE]): return 1 # Skip huge files -- probably binaries. if st[ST_SIZE] >= MAXSIZE: return 1 # Skip executables try: data = open(file, 'r').read(len(EXECMAGIC)) if data == EXECMAGIC: return 1 except: pass return 0 def badprefix(file): for bad in badprefixes: if file[:len(bad)] == bad: return 1 return 0 def badsuffix(file): for bad in badsuffixes: if file[-len(bad):] == bad: return 1 return 0 def go(args): for file in args: print file + ':' if differing(file): showdiffs(file) if askyesno('Check in ' + file + ' ? '): sts = os.system('rcs -l ' + file) # ignored sts = os.system('ci -l ' + file) def differing(file): cmd = 'co -p ' + file + ' 2>/dev/null | cmp -s - ' + file sts = os.system(cmd) return sts != 0 def showdiffs(file): cmd = 'rcsdiff ' + file + ' 2>&1 | ${PAGER-more}' sts = os.system(cmd) def askyesno(prompt): s = raw_input(prompt) return s in ['y', 'yes'] if __name__ == '__main__': try: setup() go(getargs()) except KeyboardInterrupt: print '[Intr]'
mit
smasala/phantomjs
src/qt/qtwebkit/Tools/Scripts/webkitpy/common/net/buildbot/buildbot_unittest.py
124
21459
# Copyright (C) 2009 Google Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import unittest2 as unittest from webkitpy.common.net.layouttestresults import LayoutTestResults from webkitpy.common.net.buildbot import BuildBot, Builder, Build from webkitpy.layout_tests.models import test_results from webkitpy.layout_tests.models import test_failures from webkitpy.thirdparty.BeautifulSoup import BeautifulSoup class BuilderTest(unittest.TestCase): def _mock_test_result(self, testname): return test_results.TestResult(testname, [test_failures.FailureTextMismatch()]) def _install_fetch_build(self, failure): def _mock_fetch_build(build_number): build = Build( builder=self.builder, build_number=build_number, revision=build_number + 1000, is_green=build_number < 4 ) results = [self._mock_test_result(testname) for testname in failure(build_number)] layout_test_results = LayoutTestResults(results) def mock_layout_test_results(): return layout_test_results build.layout_test_results = mock_layout_test_results return build self.builder._fetch_build = _mock_fetch_build def setUp(self): self.buildbot = BuildBot() self.builder = Builder(u"Test Builder \u2661", self.buildbot) self._install_fetch_build(lambda build_number: ["test1", "test2"]) def test_latest_layout_test_results(self): self.builder.fetch_layout_test_results = lambda results_url: LayoutTestResults([self._mock_test_result(testname) for testname in ["test1", "test2"]]) self.builder.accumulated_results_url = lambda: "http://dummy_url.org" self.assertTrue(self.builder.latest_layout_test_results()) def test_find_regression_window(self): regression_window = self.builder.find_regression_window(self.builder.build(10)) self.assertEqual(regression_window.build_before_failure().revision(), 1003) self.assertEqual(regression_window.failing_build().revision(), 1004) regression_window = self.builder.find_regression_window(self.builder.build(10), look_back_limit=2) self.assertIsNone(regression_window.build_before_failure()) self.assertEqual(regression_window.failing_build().revision(), 1008) def test_none_build(self): self.builder._fetch_build = lambda build_number: None regression_window = self.builder.find_regression_window(self.builder.build(10)) self.assertIsNone(regression_window.build_before_failure()) self.assertIsNone(regression_window.failing_build()) def test_flaky_tests(self): self._install_fetch_build(lambda build_number: ["test1"] if build_number % 2 else ["test2"]) regression_window = self.builder.find_regression_window(self.builder.build(10)) self.assertEqual(regression_window.build_before_failure().revision(), 1009) self.assertEqual(regression_window.failing_build().revision(), 1010) def test_failure_and_flaky(self): self._install_fetch_build(lambda build_number: ["test1", "test2"] if build_number % 2 else ["test2"]) regression_window = self.builder.find_regression_window(self.builder.build(10)) self.assertEqual(regression_window.build_before_failure().revision(), 1003) self.assertEqual(regression_window.failing_build().revision(), 1004) def test_no_results(self): self._install_fetch_build(lambda build_number: ["test1", "test2"] if build_number % 2 else ["test2"]) regression_window = self.builder.find_regression_window(self.builder.build(10)) self.assertEqual(regression_window.build_before_failure().revision(), 1003) self.assertEqual(regression_window.failing_build().revision(), 1004) def test_failure_after_flaky(self): self._install_fetch_build(lambda build_number: ["test1", "test2"] if build_number > 6 else ["test3"]) regression_window = self.builder.find_regression_window(self.builder.build(10)) self.assertEqual(regression_window.build_before_failure().revision(), 1006) self.assertEqual(regression_window.failing_build().revision(), 1007) def test_find_blameworthy_regression_window(self): self.assertEqual(self.builder.find_blameworthy_regression_window(10).revisions(), [1004]) self.assertIsNone(self.builder.find_blameworthy_regression_window(10, look_back_limit=2)) # Flakey test avoidance requires at least 2 red builds: self.assertIsNone(self.builder.find_blameworthy_regression_window(4)) self.assertEqual(self.builder.find_blameworthy_regression_window(4, avoid_flakey_tests=False).revisions(), [1004]) # Green builder: self.assertIsNone(self.builder.find_blameworthy_regression_window(3)) def test_build_caching(self): self.assertEqual(self.builder.build(10), self.builder.build(10)) def test_build_and_revision_for_filename(self): expectations = { "r47483 (1)/" : (47483, 1), "r47483 (1).zip" : (47483, 1), "random junk": None, } for filename, revision_and_build in expectations.items(): self.assertEqual(self.builder._revision_and_build_for_filename(filename), revision_and_build) def test_file_info_list_to_revision_to_build_list(self): file_info_list = [ {"filename": "r47483 (1)/"}, {"filename": "r47483 (1).zip"}, {"filename": "random junk"}, ] builds_and_revisions_list = [(47483, 1), (47483, 1)] self.assertEqual(self.builder._file_info_list_to_revision_to_build_list(file_info_list), builds_and_revisions_list) def test_fetch_build(self): buildbot = BuildBot() builder = Builder(u"Test Builder \u2661", buildbot) def mock_fetch_build_dictionary(self, build_number): build_dictionary = { "sourceStamp": { "revision": None, # revision=None means a trunk build started from the force-build button on the builder page. }, "number": int(build_number), # Intentionally missing the 'results' key, meaning it's a "pass" build. } return build_dictionary buildbot._fetch_build_dictionary = mock_fetch_build_dictionary self.assertIsNotNone(builder._fetch_build(1)) class BuildTest(unittest.TestCase): def test_layout_test_results(self): buildbot = BuildBot() builder = Builder(u"Foo Builder (test)", buildbot) builder._fetch_file_from_results = lambda results_url, file_name: None build = Build(builder, None, None, None) # Test that layout_test_results() returns None if the fetch fails. self.assertIsNone(build.layout_test_results()) class BuildBotTest(unittest.TestCase): _example_one_box_status = ''' <table> <tr> <td class="box"><a href="builders/Windows%20Debug%20%28Tests%29">Windows Debug (Tests)</a></td> <td align="center" class="LastBuild box success"><a href="builders/Windows%20Debug%20%28Tests%29/builds/3693">47380</a><br />build<br />successful</td> <td align="center" class="Activity building">building<br />ETA in<br />~ 14 mins<br />at 13:40</td> <tr> <td class="box"><a href="builders/SnowLeopard%20Intel%20Release">SnowLeopard Intel Release</a></td> <td class="LastBuild box" >no build</td> <td align="center" class="Activity building">building<br />< 1 min</td> <tr> <td class="box"><a href="builders/Qt%20Linux%20Release">Qt Linux Release</a></td> <td align="center" class="LastBuild box failure"><a href="builders/Qt%20Linux%20Release/builds/654">47383</a><br />failed<br />compile-webkit</td> <td align="center" class="Activity idle">idle<br />3 pending</td> <tr> <td class="box"><a href="builders/Qt%20Windows%2032-bit%20Debug">Qt Windows 32-bit Debug</a></td> <td align="center" class="LastBuild box failure"><a href="builders/Qt%20Windows%2032-bit%20Debug/builds/2090">60563</a><br />failed<br />failed<br />slave<br />lost</td> <td align="center" class="Activity building">building<br />ETA in<br />~ 5 mins<br />at 08:25</td> </table> ''' _expected_example_one_box_parsings = [ { 'is_green': True, 'build_number' : 3693, 'name': u'Windows Debug (Tests)', 'built_revision': 47380, 'activity': 'building', 'pending_builds': 0, }, { 'is_green': False, 'build_number' : None, 'name': u'SnowLeopard Intel Release', 'built_revision': None, 'activity': 'building', 'pending_builds': 0, }, { 'is_green': False, 'build_number' : 654, 'name': u'Qt Linux Release', 'built_revision': 47383, 'activity': 'idle', 'pending_builds': 3, }, { 'is_green': True, 'build_number' : 2090, 'name': u'Qt Windows 32-bit Debug', 'built_revision': 60563, 'activity': 'building', 'pending_builds': 0, }, ] def test_status_parsing(self): buildbot = BuildBot() soup = BeautifulSoup(self._example_one_box_status) status_table = soup.find("table") input_rows = status_table.findAll('tr') for x in range(len(input_rows)): status_row = input_rows[x] expected_parsing = self._expected_example_one_box_parsings[x] builder = buildbot._parse_builder_status_from_row(status_row) # Make sure we aren't parsing more or less than we expect self.assertEqual(builder.keys(), expected_parsing.keys()) for key, expected_value in expected_parsing.items(): self.assertEqual(builder[key], expected_value, ("Builder %d parse failure for key: %s: Actual='%s' Expected='%s'" % (x, key, builder[key], expected_value))) def test_builder_with_name(self): buildbot = BuildBot() builder = buildbot.builder_with_name("Test Builder") self.assertEqual(builder.name(), "Test Builder") self.assertEqual(builder.url(), "http://build.webkit.org/builders/Test%20Builder") self.assertEqual(builder.url_encoded_name(), "Test%20Builder") self.assertEqual(builder.results_url(), "http://build.webkit.org/results/Test%20Builder") # Override _fetch_build_dictionary function to not touch the network. def mock_fetch_build_dictionary(self, build_number): build_dictionary = { "sourceStamp": { "revision" : 2 * build_number, }, "number" : int(build_number), "results" : build_number % 2, # 0 means pass } return build_dictionary buildbot._fetch_build_dictionary = mock_fetch_build_dictionary build = builder.build(10) self.assertEqual(build.builder(), builder) self.assertEqual(build.url(), "http://build.webkit.org/builders/Test%20Builder/builds/10") self.assertEqual(build.results_url(), "http://build.webkit.org/results/Test%20Builder/r20%20%2810%29") self.assertEqual(build.revision(), 20) self.assertTrue(build.is_green()) build = build.previous_build() self.assertEqual(build.builder(), builder) self.assertEqual(build.url(), "http://build.webkit.org/builders/Test%20Builder/builds/9") self.assertEqual(build.results_url(), "http://build.webkit.org/results/Test%20Builder/r18%20%289%29") self.assertEqual(build.revision(), 18) self.assertFalse(build.is_green()) self.assertIsNone(builder.build(None)) _example_directory_listing = ''' <h1>Directory listing for /results/SnowLeopard Intel Leaks/</h1> <table> <tr class="alt"> <th>Filename</th> <th>Size</th> <th>Content type</th> <th>Content encoding</th> </tr> <tr class="directory "> <td><a href="r47483%20%281%29/"><b>r47483 (1)/</b></a></td> <td><b></b></td> <td><b>[Directory]</b></td> <td><b></b></td> </tr> <tr class="file alt"> <td><a href="r47484%20%282%29.zip">r47484 (2).zip</a></td> <td>89K</td> <td>[application/zip]</td> <td></td> </tr> ''' _expected_files = [ { "filename" : "r47483 (1)/", "size" : "", "type" : "[Directory]", "encoding" : "", }, { "filename" : "r47484 (2).zip", "size" : "89K", "type" : "[application/zip]", "encoding" : "", }, ] def test_parse_build_to_revision_map(self): buildbot = BuildBot() files = buildbot._parse_twisted_directory_listing(self._example_directory_listing) self.assertEqual(self._expected_files, files) _fake_builder_page = ''' <body> <div class="content"> <h1>Some Builder</h1> <p>(<a href="../waterfall?show=Some Builder">view in waterfall</a>)</p> <div class="column"> <h2>Recent Builds:</h2> <table class="info"> <tr> <th>Time</th> <th>Revision</th> <th>Result</th> <th>Build #</th> <th>Info</th> </tr> <tr class="alt"> <td>Jan 10 15:49</td> <td><span class="revision" title="Revision 104643"><a href="http://trac.webkit.org/changeset/104643">104643</a></span></td> <td class="success">failure</td> <td><a href=".../37604">#37604</a></td> <td class="left">Build successful</td> </tr> <tr class=""> <td>Jan 10 15:32</td> <td><span class="revision" title="Revision 104636"><a href="http://trac.webkit.org/changeset/104636">104636</a></span></td> <td class="success">failure</td> <td><a href=".../37603">#37603</a></td> <td class="left">Build successful</td> </tr> <tr class="alt"> <td>Jan 10 15:18</td> <td><span class="revision" title="Revision 104635"><a href="http://trac.webkit.org/changeset/104635">104635</a></span></td> <td class="success">success</td> <td><a href=".../37602">#37602</a></td> <td class="left">Build successful</td> </tr> <tr class=""> <td>Jan 10 14:51</td> <td><span class="revision" title="Revision 104633"><a href="http://trac.webkit.org/changeset/104633">104633</a></span></td> <td class="failure">failure</td> <td><a href=".../37601">#37601</a></td> <td class="left">Failed compile-webkit</td> </tr> </table> </body>''' _fake_builder_page_without_success = ''' <body> <table> <tr class="alt"> <td>Jan 10 15:49</td> <td><span class="revision" title="Revision 104643"><a href="http://trac.webkit.org/changeset/104643">104643</a></span></td> <td class="success">failure</td> </tr> <tr class=""> <td>Jan 10 15:32</td> <td><span class="revision" title="Revision 104636"><a href="http://trac.webkit.org/changeset/104636">104636</a></span></td> <td class="success">failure</td> </tr> <tr class="alt"> <td>Jan 10 15:18</td> <td><span class="revision" title="Revision 104635"><a href="http://trac.webkit.org/changeset/104635">104635</a></span></td> <td class="success">failure</td> </tr> <tr class=""> <td>Jan 10 11:58</td> <td><span class="revision" title="Revision ??"><a href="http://trac.webkit.org/changeset/%3F%3F">??</a></span></td> <td class="retry">retry</td> </tr> <tr class=""> <td>Jan 10 14:51</td> <td><span class="revision" title="Revision 104633"><a href="http://trac.webkit.org/changeset/104633">104633</a></span></td> <td class="failure">failure</td> </tr> </table> </body>''' def test_revisions_for_builder(self): buildbot = BuildBot() buildbot._fetch_builder_page = lambda builder: builder.page builder_with_success = Builder('Some builder', None) builder_with_success.page = self._fake_builder_page self.assertEqual(buildbot._revisions_for_builder(builder_with_success), [(104643, False), (104636, False), (104635, True), (104633, False)]) builder_without_success = Builder('Some builder', None) builder_without_success.page = self._fake_builder_page_without_success self.assertEqual(buildbot._revisions_for_builder(builder_without_success), [(104643, False), (104636, False), (104635, False), (104633, False)]) def test_find_green_revision(self): buildbot = BuildBot() self.assertEqual(buildbot._find_green_revision({ 'Builder 1': [(1, True), (3, True)], 'Builder 2': [(1, True), (3, False)], 'Builder 3': [(1, True), (3, True)], }), 1) self.assertEqual(buildbot._find_green_revision({ 'Builder 1': [(1, False), (3, True)], 'Builder 2': [(1, True), (3, True)], 'Builder 3': [(1, True), (3, True)], }), 3) self.assertEqual(buildbot._find_green_revision({ 'Builder 1': [(1, True), (2, True)], 'Builder 2': [(1, False), (2, True), (3, True)], 'Builder 3': [(1, True), (3, True)], }), None) self.assertEqual(buildbot._find_green_revision({ 'Builder 1': [(1, True), (2, True)], 'Builder 2': [(1, True), (2, True), (3, True)], 'Builder 3': [(1, True), (3, True)], }), 2) self.assertEqual(buildbot._find_green_revision({ 'Builder 1': [(1, False), (2, True)], 'Builder 2': [(1, True), (3, True)], 'Builder 3': [(1, True), (3, True)], }), None) self.assertEqual(buildbot._find_green_revision({ 'Builder 1': [(1, True), (3, True)], 'Builder 2': [(1, False), (2, True), (3, True), (4, True)], 'Builder 3': [(2, True), (4, True)], }), 3) self.assertEqual(buildbot._find_green_revision({ 'Builder 1': [(1, True), (3, True)], 'Builder 2': [(1, False), (2, True), (3, True), (4, False)], 'Builder 3': [(2, True), (4, True)], }), None) self.assertEqual(buildbot._find_green_revision({ 'Builder 1': [(1, True), (3, True)], 'Builder 2': [(1, False), (2, True), (3, True), (4, False)], 'Builder 3': [(2, True), (3, True), (4, True)], }), 3) self.assertEqual(buildbot._find_green_revision({ 'Builder 1': [(1, True), (2, True)], 'Builder 2': [], 'Builder 3': [(1, True), (2, True)], }), None) self.assertEqual(buildbot._find_green_revision({ 'Builder 1': [(1, True), (3, False), (5, True), (10, True), (12, False)], 'Builder 2': [(1, True), (3, False), (7, True), (9, True), (12, False)], 'Builder 3': [(1, True), (3, True), (7, True), (11, False), (12, True)], }), 7) def _fetch_build(self, build_number): if build_number == 5: return "correct build" return "wrong build" def _fetch_revision_to_build_map(self): return {'r5': 5, 'r2': 2, 'r3': 3} def test_latest_cached_build(self): b = Builder('builder', BuildBot()) b._fetch_build = self._fetch_build b._fetch_revision_to_build_map = self._fetch_revision_to_build_map self.assertEqual("correct build", b.latest_cached_build()) def results_url(self): return "some-url" def test_results_zip_url(self): b = Build(None, 123, 123, False) b.results_url = self.results_url self.assertEqual("some-url.zip", b.results_zip_url())
bsd-3-clause
zqzhang/crosswalk-test-suite
webapi/tct-csp-w3c-tests/csp-py/csp_img-src_asterisk_allowed_int-manual.py
30
2469
def main(request, response): import simplejson as json f = file('config.json') source = f.read() s = json.JSONDecoder().decode(source) url1 = "http://" + s['host'] + ":" + str(s['ports']['http'][1]) url2 = "http://" + s['host'] + ":" + str(s['ports']['http'][0]) response.headers.set("Content-Security-Policy", "img-src *") response.headers.set("X-Content-Security-Policy", "img-src *") response.headers.set("X-WebKit-CSP", "img-src *") return """<!DOCTYPE html> <!-- Copyright (c) 2013 Intel Corporation. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of works must retain the original copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the original copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Intel Corporation nor the names of its contributors may be used to endorse or promote products derived from this work without specific prior written permission. THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Authors: Hao, Yunfei <[email protected]> --> <html> <head> <title>CSP Test: csp_img-src_asterisk_allowed_int</title> <link rel="author" title="Intel" href="http://www.intel.com"/> <link rel="help" href="http://www.w3.org/TR/2012/CR-CSP-20121115/#img-src"/> <meta name="flags" content=""/> <meta name="assert" content="img-src *"/> <meta charset="utf-8"/> </head> <body> <p>Test passes if there is a filled blue square.</p> <img src="support/blue-100x100.png"/> </body> </html> """
bsd-3-clause
2014cdag10/2014cdag10
wsgi/programs/cdag1/remsub6.py
9
18749
import cherrypy # 這是 MAN 類別的定義 ''' # 在 application 中導入子模組 import programs.cdag1.remsub6 as cdag1_remsub6 # 加入 cdag1 模組下的 remsub6.py 且以子模組 remsub6 對應其 remsub6() 類別 root.cdag1.remsub6 = cdag1_remsub6.remsub6() # 完成設定後, 可以利用 /cdag1/remsub6 # 呼叫 man.py 中 MAN 類別的 assembly 方法 ''' class remsub6(object): # 各組利用 index 引導隨後的程式執行 @cherrypy.expose def index(self, *args, **kwargs): outstring = ''' 這是 2014CDA 協同專案下的 cdag30 模組下的 MAN 類別.<br /><br /> <!-- 這裡採用相對連結, 而非網址的絕對連結 (這一段為 html 註解) --> <a href="assembly">執行 MAN 類別中的 assembly 方法</a><br /><br /> 請確定下列零件於 V:/home/lego/man 目錄中, 且開啟空白 Creo 組立檔案.<br /> <a href="/static/lego_man.7z">lego_man.7z</a>(滑鼠右鍵存成 .7z 檔案)<br /> ''' return outstring @cherrypy.expose def assembly(self, *args, **kwargs): outstring = ''' <!DOCTYPE html> <html> <head> <meta http-equiv="content-type" content="text/html;charset=utf-8"> <script type="text/javascript" src="/static/weblink/examples/jscript/pfcUtils.js"></script> </head> <body> </script><script language="JavaScript"> /*設計一個零件組立函示 get 組立 get part 抓取約束元素 in part and asm 選擇約束型式 應用在 part 上 */ /* 軸面接 axis_plane_assembly(session, assembly, transf, featID, constrain_way, part2, axis1, plane1, axis2, plane2) ==================== assembly 組立檔案 transf 座標矩陣 feadID 要組裝的父 part2 要組裝的子 constrain_way 參數 1 對齊 對齊 2 對齊 貼合 else 按照 1 plane1~plane2 要組裝的父 參考面 plane3~plane4 要組裝的子 參考面 */ function axis_plane_assembly(file_location, session, assembly, transf, featID, constrain_way, axis1, plane1, axis2, plane2) { //設定part2 路徑 var descr = pfcCreate("pfcModelDescriptor").CreateFromFileName(file_location); //嘗試從 session 中取得 part2 var componentModel = session.GetModelFromDescr(descr); //取得失敗 status null if (componentModel == null) { document.write("在session 取得不到零件" + file_location); //從路逕取得 part2 componentModel = session.RetrieveModel(descr); //仍然取得失敗 表示無此零件 if (componentModel == null) { // 此發錯誤 throw new Error(0, "Current componentModel is not loaded."); } } //假如 part2 有取得到 if (componentModel != void null) { //將part2 放入 組立檔案, part2 在組立檔案裡面為 組立 component var asmcomp = assembly.AssembleComponent(componentModel, transf); } //組立父 featID list 形態, 為整數型態 list var ids = pfcCreate("intseq"); //當有提供 要組裝的父 if (featID != -1) { //將要組裝的父 加入 list ids.Append(featID); //取得組裝路徑 //建立路徑變數,CreateComponentPath:回傳組件的路徑物件,把組立模型和的ID路徑給所需的組件。 var subPath = pfcCreate("MpfcAssembly").CreateComponentPath(assembly, ids); var subassembly = subPath.Leaf; } else { // 假如沒有提供 要組裝的父 // asm 基本 就當作父零件 var subassembly = assembly; //取得組裝路徑 var subPath = pfcCreate("MpfcAssembly").CreateComponentPath(assembly, ids); } //父參考 element var asmDatums = new Array(axis1, plane1); //子參考 element var compDatums = new Array(axis2, plane2); //約數型態 if (constrain_way == 1) { var relation = new Array(pfcCreate("pfcComponentConstraintType").ASM_CONSTRAINT_ALIGN, pfcCreate("pfcComponentConstraintType").ASM_CONSTRAINT_MATE); } else if (constrain_way == 2) { var relation = new Array(pfcCreate("pfcComponentConstraintType").ASM_CONSTRAINT_ALIGN, pfcCreate("pfcComponentConstraintType").ASM_CONSTRAINT_ALIGN); } else { var relation = new Array(pfcCreate("pfcComponentConstraintType").ASM_CONSTRAINT_ALIGN, pfcCreate("pfcComponentConstraintType").ASM_CONSTRAINT_MATE); } //選擇元素 形態 (ITEM_AXIS) 軸 (ITEM_SURFACE) 面 var relationItem = new Array(pfcCreate("pfcModelItemType").ITEM_AXIS, pfcCreate("pfcModelItemType").ITEM_SURFACE); //約束 list 等下要應用於 子 var constrs = pfcCreate("pfcComponentConstraints"); for (var i = 0; i < 2; i++) { //選擇 父元素 var asmItem = subassembly.GetItemByName(relationItem[i], asmDatums[i]); if (asmItem == void null) { interactFlag = true; continue; } //選擇 子元素 var compItem = componentModel.GetItemByName(relationItem[i], compDatums[i]); if (compItem == void null) { interactFlag = true; continue; } //採用互動式設定相關的變數 var MpfcSelect = pfcCreate("MpfcSelect"); //互動式設定 選擇元素 父 var asmSel = MpfcSelect.CreateModelItemSelection(asmItem, subPath); //互動式設定 選擇元素 子 var compSel = MpfcSelect.CreateModelItemSelection(compItem, void null); //選擇約束形態 var constr = pfcCreate("pfcComponentConstraint").Create(relation[i]); //約束選擇 剛剛得父元素 constr.AssemblyReference = asmSel; //約束選擇 剛剛得子元素 constr.ComponentReference = compSel; //設定約束屬性 constr.Attributes = pfcCreate("pfcConstraintAttributes").Create(true, false); //加入此約束 至 約束 list constrs.Append(constr); } //約束 list應用至 子 asmcomp.SetConstraints(constrs, void null); //回傳 component id return asmcomp.Id; } // 以上為 axis_plane_assembly() 函式 /* 三面接 three_plane_assembly(session, assembly, transf, featID, constrain_way, part2, plane1, plane2, plane3, plane4, plane5, plane6) ===================== assembly 組立檔案 transf 座標矩陣 feadID 要組裝的父 part2 要組裝的子 constrain_way 參數 1 對齊 2 貼合 else 按照 1 plane1~plane3 要組裝的父 參考面 plane4~plane6 要組裝的子 參考面 */ function three_plane_assembly(file_location, session, assembly, transf, featID, constrain_way, plane1, plane2, plane3, plane4, plane5, plane6) { var descr = pfcCreate("pfcModelDescriptor").CreateFromFileName(file_location); var componentModel = session.GetModelFromDescr(descr); if (componentModel == null) { document.write("在session 取得不到零件" + file_location); componentModel = session.RetrieveModel(descr); if (componentModel == null) { throw new Error(0, "Current componentModel is not loaded."); } } if (componentModel != void null) { var asmcomp = assembly.AssembleComponent(componentModel, transf); } var ids = pfcCreate("intseq"); //假如 asm 有零件時候 if (featID != -1) { ids.Append(featID); var subPath = pfcCreate("MpfcAssembly").CreateComponentPath(assembly, ids); var subassembly = subPath.Leaf; } // 假如是第一個零件 asm 就當作父零件 else { var subassembly = assembly; var subPath = pfcCreate("MpfcAssembly").CreateComponentPath(assembly, ids); } var constrs = pfcCreate("pfcComponentConstraints"); var asmDatums = new Array(plane1, plane2, plane3); var compDatums = new Array(plane4, plane5, plane6); var MpfcSelect = pfcCreate("MpfcSelect"); for (var i = 0; i < 3; i++) { var asmItem = subassembly.GetItemByName(pfcCreate("pfcModelItemType").ITEM_SURFACE, asmDatums[i]); if (asmItem == void null) { interactFlag = true; continue; } var compItem = componentModel.GetItemByName(pfcCreate("pfcModelItemType").ITEM_SURFACE, compDatums[i]); if (compItem == void null) { interactFlag = true; continue; } var asmSel = MpfcSelect.CreateModelItemSelection(asmItem, subPath); var compSel = MpfcSelect.CreateModelItemSelection(compItem, void null); if (constrain_way == 1) { var constr = pfcCreate("pfcComponentConstraint").Create(pfcCreate("pfcComponentConstraintType").ASM_CONSTRAINT_ALIGN); } else if (constrain_way == 2) { var constr = pfcCreate("pfcComponentConstraint").Create(pfcCreate("pfcComponentConstraintType").ASM_CONSTRAINT_MATE); } else { var constr = pfcCreate("pfcComponentConstraint").Create(pfcCreate("pfcComponentConstraintType").ASM_CONSTRAINT_ALIGN); } constr.AssemblyReference = asmSel; constr.ComponentReference = compSel; constr.Attributes = pfcCreate("pfcConstraintAttributes").Create(false, false); constrs.Append(constr); } asmcomp.SetConstraints(constrs, void null); return asmcomp.Id; } // 以上為 three_plane_assembly() 函式 //兩軸一面 function one_axis_two_plane_assembly(file_location, session, assembly, transf, featID, constrain_way, axis1, plane1_1, plane1_2, axis2, plane2_1, plane2_2) { //設定part2 路徑 var descr = pfcCreate("pfcModelDescriptor").CreateFromFileName(file_location); //嘗試從 session 中取得 part2 var componentModel = session.GetModelFromDescr(descr); //取得失敗 status null if (componentModel == null) { document.write("在session 取得不到零件" + file_location); //從路逕取得 part2 componentModel = session.RetrieveModel(descr); //仍然取得失敗 表示無此零件 if (componentModel == null) { // 此發錯誤 throw new Error(0, "Current componentModel is not loaded."); } } //假如 part2 有取得到 if (componentModel != void null) { //將part2 放入 組立檔案, part2 在組立檔案裡面為 組立 component var asmcomp = assembly.AssembleComponent(componentModel, transf); } //組立父 featID list 形態, 為整數型態 list var ids = pfcCreate("intseq"); //當有提供 要組裝的父 if (featID != -1) { //將要組裝的父 加入 list ids.Append(featID); //取得組裝路徑 //建立路徑變數,CreateComponentPath:回傳組件的路徑物件,把組立模型和的ID路徑給所需的組件。 var subPath = pfcCreate("MpfcAssembly").CreateComponentPath(assembly, ids); var subassembly = subPath.Leaf; } else { // 假如沒有提供 要組裝的父 // asm 基本 就當作父零件 var subassembly = assembly; //取得組裝路徑 var subPath = pfcCreate("MpfcAssembly").CreateComponentPath(assembly, ids); } //父參考 element var asmDatums = new Array(axis1, plane1_1, plane1_2); //子參考 element var compDatums = new Array(axis2, plane2_1, plane2_2); //約數型態 var ConstraintType = pfcCreate("pfcComponentConstraintType"); if (constrain_way == 1) { var relation = new Array(ConstraintType.ASM_CONSTRAINT_ALIGN, ConstraintType.ASM_CONSTRAINT_MATE, ConstraintType.ASM_CONSTRAINT_AUTO); } else if (constrain_way == 2) { var relation = new Array(ConstraintType.ASM_CONSTRAINT_ALIGN, ConstraintType.ASM_CONSTRAINT_ALIGN, ConstraintType.ASM_CONSTRAINT_MATE); } else if (constrain_way == 3) { var relation = new Array(ConstraintType.ASM_CONSTRAINT_ALIGN, ConstraintType.ASM_CONSTRAINT_MATE, ConstraintType.ASM_CONSTRAINT_MATE); } else { var relation = new Array(ConstraintType.ASM_CONSTRAINT_ALIGN, ConstraintType.ASM_CONSTRAINT_ALIGN, ConstraintType.ASM_CONSTRAINT_ALIGN); } //選擇元素 形態 (ITEM_AXIS) 軸 (ITEM_SURFACE) 面 var relationItem = new Array(pfcCreate("pfcModelItemType").ITEM_AXIS, pfcCreate("pfcModelItemType").ITEM_SURFACE, pfcCreate("pfcModelItemType").ITEM_SURFACE); //約束 list 等下要應用於 子 var constrs = pfcCreate("pfcComponentConstraints"); for (var i = 0; i < 3; i++) { //選擇 父元素 var asmItem = subassembly.GetItemByName(relationItem[i], asmDatums[i]); if (asmItem == void null) { interactFlag = true; continue; } //選擇 子元素 var compItem = componentModel.GetItemByName(relationItem[i], compDatums[i]); if (compItem == void null) { interactFlag = true; continue; } //採用互動式設定相關的變數 var MpfcSelect = pfcCreate("MpfcSelect"); //互動式設定 選擇元素 父 var asmSel = MpfcSelect.CreateModelItemSelection(asmItem, subPath); //互動式設定 選擇元素 子 var compSel = MpfcSelect.CreateModelItemSelection(compItem, void null); //選擇約束形態 var constr = pfcCreate("pfcComponentConstraint").Create(relation[i]); //約束選擇 剛剛得父元素 constr.AssemblyReference = asmSel; //約束選擇 剛剛得子元素 constr.ComponentReference = compSel; //設定約束屬性 //constr.Attributes = pfcCreate("pfcConstraintAttributes").Create(true, false); constr.Attributes = pfcCreate("pfcConstraintAttributes").Create(true, false); //加入此約束 至 約束 list constrs.Append(constr); } //約束 list應用至 子 asmcomp.SetConstraints(constrs, void null); //回傳 component id return asmcomp.Id; } // // 假如 Creo 所在的操作系統不是 Windows 環境 if (!pfcIsWindows()) { // 則啟動對應的 UniversalXPConnect 執行權限 (等同 Windows 下的 ActiveX) netscape.security.PrivilegeManager.enablePrivilege("UniversalXPConnect"); } // pfcGetProESession() 是位於 pfcUtils.js 中的函式, 確定此 JavaScript 是在嵌入式瀏覽器中執行 var session = pfcGetProESession(); // 設定 config option, 不要使用元件組立流程中內建的假設約束條件 session.SetConfigOption("comp_placement_assumptions", "no"); // 建立擺放零件的位置矩陣, Pro/Web.Link 中的變數無法直接建立, 必須透過 pfcCreate() 建立 var identityMatrix = pfcCreate("pfcMatrix3D"); // 建立 identity 位置矩陣 for (var x = 0; x < 4; x++) { for (var y = 0; y < 4; y++) { if (x == y) { identityMatrix.Set(x, y, 1.0); } else { identityMatrix.Set(x, y, 0.0); } } } // 利用 identityMatrix 建立 transf 座標轉換矩陣 var transf = pfcCreate("pfcTransform3D").Create(identityMatrix); // 取得目前的工作目錄 var currentDir = session.getCurrentDirectory(); // 以目前已開檔的空白組立檔案, 作為 model var model = session.CurrentModel; // 查驗有無 model, 或 model 類別是否為組立件, 若不符合條件則丟出錯誤訊息 if (model == void null || model.Type != pfcCreate("pfcModelType").MDL_ASSEMBLY) throw new Error(0, "Current model is not an assembly."); // 將此模型設為組立物件 var assembly = model; /* three_plane_assembly(session, assembly, transf, featID, constrain_way, part2, plane1, plane2, plane3, plane4, plane5, plane6) ===================== assembly 組立檔案 transf 座標矩陣 feadID 要組裝的父 part2 要組裝的子 constrain_way 參數 1 對齊 2 貼合 else 按照 1 plane1~plane3 要組裝的父 參考面 plane4~plane6 要組裝的子 參考面 axis_plane_assembly(session, assembly, transf, featID, constrain_way, part2, axis1, plane1, axis2, plane2) ==================== assembly 組立檔案 transf 座標矩陣 feadID 要組裝的父 part2 要組裝的子 constrain_way 參數 1 對齊 對齊 2 對齊 貼合 else 按照 1 plane1~plane2 要組裝的父 參考面 plane3~plane4 要組裝的子 參考面 */ var work_directory = 'V:/home/lego/' //function three_plane_assembly(file_location, session, assembly, transf, featID, constrain_way, plane1, plane2, plane3, plane4, plane5, plane6) { var body_id = three_plane_assembly(work_directory + 'beam_angle.prt', session, assembly, transf, -1, 1, "ASM_FRONT", "ASM_TOP", "ASM_RIGHT", "FRONT", "TOP", "RIGHT"); //function one_axis_two_plane_assembly(file_location, session, assembly, transf, featID, constrain_way, axis1, plane1_1, plane1_2, axis2, plane2_1, plane2_2) var alex_10 = one_axis_two_plane_assembly(work_directory + 'axle_10.prt', session, assembly, transf, body_id, 1, "A_25", "FRONT", "TOP", "A_1", "FRONT", "TOP"); var alex_5 = one_axis_two_plane_assembly(work_directory + 'axle_5.prt', session, assembly, transf, body_id, 1, "A_26", "DTM1", "TOP", "A_1", "RIGHT", "TOP"); var crossblock_2_left = one_axis_two_plane_assembly(work_directory + 'crossblock_2.prt', session, assembly, transf, body_id, 1, "A_25", "DTM3","FRONT", "A_16", "DTM4", "DTM1"); var crossblock_2_right = one_axis_two_plane_assembly(work_directory + 'crossblock_2.prt', session, assembly, transf, body_id, 1, "A_25", "DTM2","FRONT", "A_16", "DTM5", "DTM1"); var crossblock_2_left_front = one_axis_two_plane_assembly(work_directory + 'crossblock_2.prt', session, assembly, transf, body_id, 2, "A_26", "DTM4", "DTM2", "A_16", "DTM1", "DTM4"); var crossblock_2_right_front = one_axis_two_plane_assembly(work_directory + 'crossblock_2.prt', session, assembly, transf, body_id, 2, "A_26", "DTM4", "DTM3", "A_16", "DTM1", "DTM5"); var crossblock_2_left_2 = one_axis_two_plane_assembly(work_directory + 'crossblock_2.prt', session, assembly, transf, crossblock_2_left_front, 2, "A_16", "DTM1", "DTM5", "A_16", "DTM1", "DTM4"); var crossblock_2_right_2 = one_axis_two_plane_assembly(work_directory + 'crossblock_2.prt', session, assembly, transf, crossblock_2_right_front, 2, "A_16", "DTM1", "DTM4", "A_16", "DTM1", "DTM5"); var conn_3_left = axis_plane_assembly(work_directory + 'conn_3.prt', session, assembly, transf, crossblock_2_left, 1, "A_17", "DTM10", "A_20", "DTM2"); var conn_3_left = axis_plane_assembly(work_directory + 'conn_3.prt', session, assembly, transf, crossblock_2_right, 1, "A_17", "DTM10", "A_20", "DTM2"); var beam_3 = one_axis_two_plane_assembly(work_directory + 'beam_3.prt', session, assembly, transf, crossblock_2_right, 3, "A_17", "DTM7","FRONT", "A_37", "BOTTOM", "RIGHT"); assembly.Regenerate(void null); session.GetModelWindow(assembly).Repaint(); </script> </body> </html> ''' return outstring
gpl-2.0
huguesv/PTVS
Python/Product/Miniconda/Miniconda3-x64/Lib/site-packages/win32/lib/win32timezone.py
6
31716
# -*- coding: UTF-8 -*- """ win32timezone: Module for handling datetime.tzinfo time zones using the windows registry for time zone information. The time zone names are dependent on the registry entries defined by the operating system. This module may be tested using the doctest module. Written by Jason R. Coombs ([email protected]). Copyright © 2003-2012. All Rights Reserved. This module is licenced for use in Mark Hammond's pywin32 library under the same terms as the pywin32 library. To use this time zone module with the datetime module, simply pass the TimeZoneInfo object to the datetime constructor. For example, >>> import win32timezone, datetime >>> assert 'Mountain Standard Time' in win32timezone.TimeZoneInfo.get_sorted_time_zone_names() >>> MST = win32timezone.TimeZoneInfo('Mountain Standard Time') >>> now = datetime.datetime.now(MST) The now object is now a time-zone aware object, and daylight savings- aware methods may be called on it. >>> now.utcoffset() in (datetime.timedelta(-1, 61200), datetime.timedelta(-1, 64800)) True (note that the result of utcoffset call will be different based on when now was generated, unless standard time is always used) >>> now = datetime.datetime.now(TimeZoneInfo('Mountain Standard Time', True)) >>> now.utcoffset() datetime.timedelta(-1, 61200) >>> aug2 = datetime.datetime(2003, 8, 2, tzinfo = MST) >>> tuple(aug2.utctimetuple()) (2003, 8, 2, 6, 0, 0, 5, 214, 0) >>> nov2 = datetime.datetime(2003, 11, 25, tzinfo = MST) >>> tuple(nov2.utctimetuple()) (2003, 11, 25, 7, 0, 0, 1, 329, 0) To convert from one timezone to another, just use the astimezone method. >>> aug2.isoformat() '2003-08-02T00:00:00-06:00' >>> aug2est = aug2.astimezone(win32timezone.TimeZoneInfo('Eastern Standard Time')) >>> aug2est.isoformat() '2003-08-02T02:00:00-04:00' calling the displayName member will return the display name as set in the registry. >>> est = win32timezone.TimeZoneInfo('Eastern Standard Time') >>> str(est.displayName) '(UTC-05:00) Eastern Time (US & Canada)' >>> gmt = win32timezone.TimeZoneInfo('GMT Standard Time', True) >>> str(gmt.displayName) '(UTC) Dublin, Edinburgh, Lisbon, London' To get the complete list of available time zone keys, >>> zones = win32timezone.TimeZoneInfo.get_all_time_zones() If you want to get them in an order that's sorted longitudinally >>> zones = win32timezone.TimeZoneInfo.get_sorted_time_zones() TimeZoneInfo now supports being pickled and comparison >>> import pickle >>> tz = win32timezone.TimeZoneInfo('China Standard Time') >>> tz == pickle.loads(pickle.dumps(tz)) True It's possible to construct a TimeZoneInfo from a TimeZoneDescription including the currently-defined zone. >>> tz = win32timezone.TimeZoneInfo(TimeZoneDefinition.current()) >>> tz == pickle.loads(pickle.dumps(tz)) True >>> aest = win32timezone.TimeZoneInfo('AUS Eastern Standard Time') >>> est = win32timezone.TimeZoneInfo('E. Australia Standard Time') >>> dt = datetime.datetime(2006, 11, 11, 1, 0, 0, tzinfo = aest) >>> estdt = dt.astimezone(est) >>> estdt.strftime('%Y-%m-%d %H:%M:%S') '2006-11-11 00:00:00' >>> dt = datetime.datetime(2007, 1, 12, 1, 0, 0, tzinfo = aest) >>> estdt = dt.astimezone(est) >>> estdt.strftime('%Y-%m-%d %H:%M:%S') '2007-01-12 00:00:00' >>> dt = datetime.datetime(2007, 6, 13, 1, 0, 0, tzinfo = aest) >>> estdt = dt.astimezone(est) >>> estdt.strftime('%Y-%m-%d %H:%M:%S') '2007-06-13 01:00:00' Microsoft now has a patch for handling time zones in 2007 (see http://support.microsoft.com/gp/cp_dst) As a result, patched systems will give an incorrect result for dates prior to the designated year except for Vista and its successors, which have dynamic time zone support. >>> nov2_pre_change = datetime.datetime(2003, 11, 2, tzinfo = MST) >>> old_response = (2003, 11, 2, 7, 0, 0, 6, 306, 0) >>> incorrect_patch_response = (2003, 11, 2, 6, 0, 0, 6, 306, 0) >>> pre_response = nov2_pre_change.utctimetuple() >>> pre_response in (old_response, incorrect_patch_response) True Furthermore, unpatched systems pre-Vista will give an incorrect result for dates after 2007. >>> nov2_post_change = datetime.datetime(2007, 11, 2, tzinfo = MST) >>> incorrect_unpatched_response = (2007, 11, 2, 7, 0, 0, 4, 306, 0) >>> new_response = (2007, 11, 2, 6, 0, 0, 4, 306, 0) >>> post_response = nov2_post_change.utctimetuple() >>> post_response in (new_response, incorrect_unpatched_response) True There is a function you can call to get some capabilities of the time zone data. >>> caps = GetTZCapabilities() >>> isinstance(caps, dict) True >>> 'MissingTZPatch' in caps True >>> 'DynamicTZSupport' in caps True >>> both_dates_correct = (pre_response == old_response and post_response == new_response) >>> old_dates_wrong = (pre_response == incorrect_patch_response) >>> new_dates_wrong = (post_response == incorrect_unpatched_response) >>> caps['DynamicTZSupport'] == both_dates_correct True >>> (not caps['DynamicTZSupport'] and caps['MissingTZPatch']) == new_dates_wrong True >>> (not caps['DynamicTZSupport'] and not caps['MissingTZPatch']) == old_dates_wrong True This test helps ensure language support for unicode characters >>> x = TIME_ZONE_INFORMATION(0, u'français') Test conversion from one time zone to another at a DST boundary =============================================================== >>> tz_hi = TimeZoneInfo('Hawaiian Standard Time') >>> tz_pac = TimeZoneInfo('Pacific Standard Time') >>> time_before = datetime.datetime(2011, 11, 5, 15, 59, 59, tzinfo=tz_hi) >>> tz_hi.utcoffset(time_before) datetime.timedelta(-1, 50400) >>> tz_hi.dst(time_before) datetime.timedelta(0) Hawaii doesn't need dynamic TZ info >>> getattr(tz_hi, 'dynamicInfo', None) Here's a time that gave some trouble as reported in #3523104 because one minute later, the equivalent UTC time changes from DST in the U.S. >>> dt_hi = datetime.datetime(2011, 11, 5, 15, 59, 59, 0, tzinfo=tz_hi) >>> dt_hi.timetuple() time.struct_time(tm_year=2011, tm_mon=11, tm_mday=5, tm_hour=15, tm_min=59, tm_sec=59, tm_wday=5, tm_yday=309, tm_isdst=0) >>> dt_hi.utctimetuple() time.struct_time(tm_year=2011, tm_mon=11, tm_mday=6, tm_hour=1, tm_min=59, tm_sec=59, tm_wday=6, tm_yday=310, tm_isdst=0) Convert the time to pacific time. >>> dt_pac = dt_hi.astimezone(tz_pac) >>> dt_pac.timetuple() time.struct_time(tm_year=2011, tm_mon=11, tm_mday=5, tm_hour=18, tm_min=59, tm_sec=59, tm_wday=5, tm_yday=309, tm_isdst=1) Notice that the UTC time is almost 2am. >>> dt_pac.utctimetuple() time.struct_time(tm_year=2011, tm_mon=11, tm_mday=6, tm_hour=1, tm_min=59, tm_sec=59, tm_wday=6, tm_yday=310, tm_isdst=0) Now do the same tests one minute later in Hawaii. >>> time_after = datetime.datetime(2011, 11, 5, 16, 0, 0, 0, tzinfo=tz_hi) >>> tz_hi.utcoffset(time_after) datetime.timedelta(-1, 50400) >>> tz_hi.dst(time_before) datetime.timedelta(0) >>> dt_hi = datetime.datetime(2011, 11, 5, 16, 0, 0, 0, tzinfo=tz_hi) >>> print dt_hi.timetuple() time.struct_time(tm_year=2011, tm_mon=11, tm_mday=5, tm_hour=16, tm_min=0, tm_sec=0, tm_wday=5, tm_yday=309, tm_isdst=0) >>> print dt_hi.utctimetuple() time.struct_time(tm_year=2011, tm_mon=11, tm_mday=6, tm_hour=2, tm_min=0, tm_sec=0, tm_wday=6, tm_yday=310, tm_isdst=0) According to the docs, this is what astimezone does. >>> utc = (dt_hi - dt_hi.utcoffset()).replace(tzinfo=tz_pac) >>> utc datetime.datetime(2011, 11, 6, 2, 0, tzinfo=TimeZoneInfo('Pacific Standard Time')) >>> tz_pac.fromutc(utc) == dt_hi.astimezone(tz_pac) True >>> tz_pac.fromutc(utc) datetime.datetime(2011, 11, 5, 19, 0, tzinfo=TimeZoneInfo('Pacific Standard Time')) Make sure the converted time is correct. >>> dt_pac = dt_hi.astimezone(tz_pac) >>> dt_pac.timetuple() time.struct_time(tm_year=2011, tm_mon=11, tm_mday=5, tm_hour=19, tm_min=0, tm_sec=0, tm_wday=5, tm_yday=309, tm_isdst=1) >>> dt_pac.utctimetuple() time.struct_time(tm_year=2011, tm_mon=11, tm_mday=6, tm_hour=2, tm_min=0, tm_sec=0, tm_wday=6, tm_yday=310, tm_isdst=0) Check some internal methods >>> tz_pac._getStandardBias(datetime.datetime(2011, 1, 1)) datetime.timedelta(0, 28800) >>> tz_pac._getDaylightBias(datetime.datetime(2011, 1, 1)) datetime.timedelta(0, 25200) Test the offsets >>> offset = tz_pac.utcoffset(datetime.datetime(2011, 11, 6, 2, 0)) >>> offset == datetime.timedelta(hours=-8) True >>> dst_offset = tz_pac.dst(datetime.datetime(2011, 11, 6, 2, 0) + offset) >>> dst_offset == datetime.timedelta(hours=1) True >>> (offset + dst_offset) == datetime.timedelta(hours=-7) True Test offsets that occur right at the DST changeover >>> datetime.datetime.utcfromtimestamp(1320570000).replace( ... tzinfo=TimeZoneInfo.utc()).astimezone(tz_pac) datetime.datetime(2011, 11, 6, 1, 0, tzinfo=TimeZoneInfo('Pacific Standard Time')) """ from __future__ import generators __author__ = 'Jason R. Coombs <[email protected]>' import winreg import struct import datetime import win32api import re import operator import warnings from itertools import count import logging log = logging.getLogger(__file__) # A couple of objects for working with objects as if they were native C-type # structures. class _SimpleStruct(object): _fields_ = None # must be overridden by subclasses def __init__(self, *args, **kw): for i, (name, typ) in enumerate(self._fields_): def_arg = None if i < len(args): def_arg = args[i] if name in kw: def_arg = kw[name] if def_arg is not None: if not isinstance(def_arg, tuple): def_arg = (def_arg,) else: def_arg = () if len(def_arg)==1 and isinstance(def_arg[0], typ): # already an object of this type. # XXX - should copy.copy??? def_val = def_arg[0] else: def_val = typ(*def_arg) setattr(self, name, def_val) def field_names(self): return [f[0] for f in self._fields_] def __eq__(self, other): if not hasattr(other, "_fields_"): return False if self._fields_ != other._fields_: return False for name, _ in self._fields_: if getattr(self, name) != getattr(other, name): return False return True def __ne__(self, other): return not self.__eq__(other) class SYSTEMTIME(_SimpleStruct): _fields_ = [ ('year', int), ('month', int), ('day_of_week', int), ('day', int), ('hour', int), ('minute', int), ('second', int), ('millisecond', int), ] class TIME_ZONE_INFORMATION(_SimpleStruct): _fields_ = [ ('bias', int), ('standard_name', str), ('standard_start', SYSTEMTIME), ('standard_bias', int), ('daylight_name', str), ('daylight_start', SYSTEMTIME), ('daylight_bias', int), ] class DYNAMIC_TIME_ZONE_INFORMATION(_SimpleStruct): _fields_ = TIME_ZONE_INFORMATION._fields_ + [ ('key_name', str), ('dynamic_daylight_time_disabled', bool), ] class TimeZoneDefinition(DYNAMIC_TIME_ZONE_INFORMATION): """ A time zone definition class based on the win32 DYNAMIC_TIME_ZONE_INFORMATION structure. Describes a bias against UTC (bias), and two dates at which a separate additional bias applies (standard_bias and daylight_bias). """ def __init__(self, *args, **kwargs): """ Try to construct a TimeZoneDefinition from a) [DYNAMIC_]TIME_ZONE_INFORMATION args b) another TimeZoneDefinition c) a byte structure (using _from_bytes) """ try: super(TimeZoneDefinition, self).__init__(*args, **kwargs) return except (TypeError, ValueError): pass try: self.__init_from_other(*args, **kwargs) return except TypeError: pass try: self.__init_from_bytes(*args, **kwargs) return except TypeError: pass raise TypeError("Invalid arguments for %s" % self.__class__) def __init_from_bytes(self, bytes, standard_name='', daylight_name='', key_name='', daylight_disabled=False): format = '3l8h8h' components = struct.unpack(format, bytes) bias, standard_bias, daylight_bias = components[:3] standard_start = SYSTEMTIME(*components[3:11]) daylight_start = SYSTEMTIME(*components[11:19]) super(TimeZoneDefinition, self).__init__(bias, standard_name, standard_start, standard_bias, daylight_name, daylight_start, daylight_bias, key_name, daylight_disabled,) def __init_from_other(self, other): if not isinstance(other, TIME_ZONE_INFORMATION): raise TypeError("Not a TIME_ZONE_INFORMATION") for name in other.field_names(): # explicitly get the value from the underlying structure value = super(TimeZoneDefinition, other).__getattribute__(other, name) setattr(self, name, value) # consider instead of the loop above just copying the memory directly #size = max(ctypes.sizeof(DYNAMIC_TIME_ZONE_INFO), ctypes.sizeof(other)) #ctypes.memmove(ctypes.addressof(self), other, size) def __getattribute__(self, attr): value = super(TimeZoneDefinition, self).__getattribute__(attr) if 'bias' in attr: make_minute_timedelta = lambda m: datetime.timedelta(minutes = m) value = make_minute_timedelta(value) return value @classmethod def current(class_): "Windows Platform SDK GetTimeZoneInformation" code, tzi = win32api.GetTimeZoneInformation(True) return code, class_(*tzi) def set(self): tzi = tuple(getattr(self, n) for n, t in self._fields_) win32api.SetTimeZoneInformation(tzi) def copy(self): # XXX - this is no longer a copy! return self.__class__(self) def locate_daylight_start(self, year): return self._locate_day(year, self.daylight_start) def locate_standard_start(self, year): return self._locate_day(year, self.standard_start) @staticmethod def _locate_day(year, cutoff): """ Takes a SYSTEMTIME object, such as retrieved from a TIME_ZONE_INFORMATION structure or call to GetTimeZoneInformation and interprets it based on the given year to identify the actual day. This method is necessary because the SYSTEMTIME structure refers to a day by its day of the week and week of the month (e.g. 4th saturday in March). >>> SATURDAY = 6 >>> MARCH = 3 >>> st = SYSTEMTIME(2000, MARCH, SATURDAY, 4, 0, 0, 0, 0) # according to my calendar, the 4th Saturday in March in 2009 was the 28th >>> expected_date = datetime.datetime(2009, 3, 28) >>> TimeZoneDefinition._locate_day(2009, st) == expected_date True """ # MS stores Sunday as 0, Python datetime stores Monday as zero target_weekday = (cutoff.day_of_week + 6) % 7 # For SYSTEMTIMEs relating to time zone inforamtion, cutoff.day # is the week of the month week_of_month = cutoff.day # so the following is the first day of that week day = (week_of_month - 1) * 7 + 1 result = datetime.datetime(year, cutoff.month, day, cutoff.hour, cutoff.minute, cutoff.second, cutoff.millisecond) # now the result is the correct week, but not necessarily the correct day of the week days_to_go = (target_weekday - result.weekday()) % 7 result += datetime.timedelta(days_to_go) # if we selected a day in the month following the target month, # move back a week or two. # This is necessary because Microsoft defines the fifth week in a month # to be the last week in a month and adding the time delta might have # pushed the result into the next month. while result.month == cutoff.month + 1: result -= datetime.timedelta(weeks = 1) return result class TimeZoneInfo(datetime.tzinfo): """ Main class for handling Windows time zones. Usage: TimeZoneInfo(<Time Zone Standard Name>, [<Fix Standard Time>]) If <Fix Standard Time> evaluates to True, daylight savings time is calculated in the same way as standard time. >>> tzi = TimeZoneInfo('Pacific Standard Time') >>> march31 = datetime.datetime(2000,3,31) We know that time zone definitions haven't changed from 2007 to 2012, so regardless of whether dynamic info is available, there should be consistent results for these years. >>> subsequent_years = [march31.replace(year=year) ... for year in range(2007, 2013)] >>> offsets = set(tzi.utcoffset(year) for year in subsequent_years) >>> len(offsets) 1 """ # this key works for WinNT+, but not for the Win95 line. tzRegKey = r'SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones' def __init__(self, param=None, fix_standard_time=False): if isinstance(param, TimeZoneDefinition): self._LoadFromTZI(param) if isinstance(param, str): self.timeZoneName = param self._LoadInfoFromKey() self.fixedStandardTime = fix_standard_time def _FindTimeZoneKey(self): """Find the registry key for the time zone name (self.timeZoneName).""" # for multi-language compatability, match the time zone name in the # "Std" key of the time zone key. zoneNames = dict(self._get_indexed_time_zone_keys('Std')) # Also match the time zone key name itself, to be compatible with # English-based hard-coded time zones. timeZoneName = zoneNames.get(self.timeZoneName, self.timeZoneName) key = _RegKeyDict.open(winreg.HKEY_LOCAL_MACHINE, self.tzRegKey) try: result = key.subkey(timeZoneName) except: raise ValueError('Timezone Name %s not found.' % timeZoneName) return result def _LoadInfoFromKey(self): """Loads the information from an opened time zone registry key into relevant fields of this TZI object""" key = self._FindTimeZoneKey() self.displayName = key['Display'] self.standardName = key['Std'] self.daylightName = key['Dlt'] self.staticInfo = TimeZoneDefinition(key['TZI']) self._LoadDynamicInfoFromKey(key) def _LoadFromTZI(self, tzi): self.timeZoneName = tzi.standard_name self.displayName = 'Unknown' self.standardName = tzi.standard_name self.daylightName = tzi.daylight_name self.staticInfo = tzi def _LoadDynamicInfoFromKey(self, key): """ >>> tzi = TimeZoneInfo('Central Standard Time') Here's how the RangeMap is supposed to work: >>> m = RangeMap(zip([2006,2007], 'BC'), ... sort_params = dict(reverse=True), ... key_match_comparator=operator.ge) >>> m.get(2000, 'A') 'A' >>> m[2006] 'B' >>> m[2007] 'C' >>> m[2008] 'C' >>> m[RangeMap.last_item] 'B' >>> m.get(2008, m[RangeMap.last_item]) 'C' Now test the dynamic info (but fallback to our simple RangeMap on systems that don't have dynamicInfo). >>> dinfo = getattr(tzi, 'dynamicInfo', m) >>> 2007 in dinfo True >>> 2008 in dinfo False >>> dinfo[2007] == dinfo[2008] == dinfo[2012] True """ try: info = key.subkey('Dynamic DST') except WindowsError: return del info['FirstEntry'] del info['LastEntry'] years = map(int, list(info.keys())) values = map(TimeZoneDefinition, list(info.values())) # create a range mapping that searches by descending year and matches # if the target year is greater or equal. self.dynamicInfo = RangeMap(zip(years, values), sort_params = dict(reverse=True), key_match_comparator = operator.ge) def __repr__(self): result = '%s(%s' % (self.__class__.__name__, repr(self.timeZoneName)) if self.fixedStandardTime: result += ', True' result += ')' return result def __str__(self): return self.displayName def tzname(self, dt): winInfo = self.getWinInfo(dt) if self.dst(dt) == winInfo.daylight_bias: result = self.daylightName elif self.dst(dt) == winInfo.standard_bias: result = self.standardName return result def getWinInfo(self, targetYear): """ Return the most relevant "info" for this time zone in the target year. """ if not hasattr(self, 'dynamicInfo') or not self.dynamicInfo: return self.staticInfo # Find the greatest year entry in self.dynamicInfo which is for # a year greater than or equal to our targetYear. If not found, # default to the earliest year. return self.dynamicInfo.get(targetYear, self.dynamicInfo[RangeMap.last_item]) def _getStandardBias(self, dt): winInfo = self.getWinInfo(dt.year) return winInfo.bias + winInfo.standard_bias def _getDaylightBias(self, dt): winInfo = self.getWinInfo(dt.year) return winInfo.bias + winInfo.daylight_bias def utcoffset(self, dt): "Calculates the utcoffset according to the datetime.tzinfo spec" if dt is None: return winInfo = self.getWinInfo(dt.year) return -winInfo.bias + self.dst(dt) def dst(self, dt): "Calculates the daylight savings offset according to the datetime.tzinfo spec" if dt is None: return winInfo = self.getWinInfo(dt.year) if not self.fixedStandardTime and self._inDaylightSavings(dt): result = winInfo.daylight_bias else: result = winInfo.standard_bias return -result def _inDaylightSavings(self, dt): dt = dt.replace(tzinfo=None) winInfo = self.getWinInfo(dt.year) try: dstStart = self.GetDSTStartTime(dt.year) dstEnd = self.GetDSTEndTime(dt.year) # at the end of DST, when clocks are moved back, there's a period # of daylight_bias where it's ambiguous whether we're in DST or # not. dstEndAdj = dstEnd + winInfo.daylight_bias # the same thing could theoretically happen at the start of DST # if there's a standard_bias (which I suspect is always 0). dstStartAdj = dstStart + winInfo.standard_bias if dstStart < dstEnd: in_dst = dstStartAdj <= dt < dstEndAdj else: # in the southern hemisphere, daylight savings time # typically ends before it begins in a given year. in_dst = not (dstEndAdj < dt <= dstStartAdj) except ValueError: # there was an error parsing the time zone, which is normal when a # start and end time are not specified. in_dst = False return in_dst def GetDSTStartTime(self, year): "Given a year, determines the time when daylight savings time starts" return self.getWinInfo(year).locate_daylight_start(year) def GetDSTEndTime(self, year): "Given a year, determines the time when daylight savings ends." return self.getWinInfo(year).locate_standard_start(year) def __cmp__(self, other): return cmp(self.__dict__, other.__dict__) def __eq__(self, other): return self.__dict__==other.__dict__ def __ne__(self, other): return self.__dict__!=other.__dict__ @classmethod def local(class_): """Returns the local time zone as defined by the operating system in the registry. >>> localTZ = TimeZoneInfo.local() >>> now_local = datetime.datetime.now(localTZ) >>> now_UTC = datetime.datetime.utcnow() >>> (now_UTC - now_local) < datetime.timedelta(seconds = 5) Traceback (most recent call last): ... TypeError: can't subtract offset-naive and offset-aware datetimes >>> now_UTC = now_UTC.replace(tzinfo = TimeZoneInfo('GMT Standard Time', True)) Now one can compare the results of the two offset aware values >>> (now_UTC - now_local) < datetime.timedelta(seconds = 5) True """ code, info = TimeZoneDefinition.current() # code is 0 if daylight savings is disabled or not defined # code is 1 or 2 if daylight savings is enabled, 2 if currently active fix_standard_time = not code # note that although the given information is sufficient to construct a WinTZI object, it's # not sufficient to represent the time zone in which the current user is operating due # to dynamic time zones. return class_(info, fix_standard_time) @classmethod def utc(class_): """Returns a time-zone representing UTC. Same as TimeZoneInfo('GMT Standard Time', True) but caches the result for performance. >>> isinstance(TimeZoneInfo.utc(), TimeZoneInfo) True """ if not '_tzutc' in class_.__dict__: setattr(class_, '_tzutc', class_('GMT Standard Time', True)) return class_._tzutc # helper methods for accessing the timezone info from the registry @staticmethod def _get_time_zone_key(subkey=None): "Return the registry key that stores time zone details" key = _RegKeyDict.open(winreg.HKEY_LOCAL_MACHINE, TimeZoneInfo.tzRegKey) if subkey: key = key.subkey(subkey) return key @staticmethod def _get_time_zone_key_names(): "Returns the names of the (registry keys of the) time zones" return TimeZoneInfo._get_time_zone_key().subkeys() @staticmethod def _get_indexed_time_zone_keys(index_key='Index'): """ Get the names of the registry keys indexed by a value in that key. """ key_names = list(TimeZoneInfo._get_time_zone_key_names()) def get_index_value(key_name): key = TimeZoneInfo._get_time_zone_key(key_name) return key[index_key] values = map(get_index_value, key_names) return zip(values, key_names) @staticmethod def get_sorted_time_zone_names(): "Return a list of time zone names that can be used to initialize TimeZoneInfo instances" tzs = TimeZoneInfo.get_sorted_time_zones() get_standard_name = lambda tzi: tzi.standardName return [get_standard_name(tz) for tz in tzs] @staticmethod def get_all_time_zones(): return [TimeZoneInfo(n) for n in TimeZoneInfo._get_time_zone_key_names()] @staticmethod def get_sorted_time_zones(key=None): """ Return the time zones sorted by some key. key must be a function that takes a TimeZoneInfo object and returns a value suitable for sorting on. The key defaults to the bias (descending), as is done in Windows (see http://blogs.msdn.com/michkap/archive/2006/12/22/1350684.aspx) """ key = key or (lambda tzi: -tzi.staticInfo.bias) zones = TimeZoneInfo.get_all_time_zones() zones.sort(key=key) return zones class _RegKeyDict(dict): def __init__(self, key): dict.__init__(self) self.key = key self.__load_values() @classmethod def open(cls, *args, **kargs): return _RegKeyDict(winreg.OpenKeyEx(*args, **kargs)) def subkey(self, name): return _RegKeyDict(winreg.OpenKeyEx(self.key, name)) def __load_values(self): pairs = [(n, v) for (n, v, t) in self._enumerate_reg_values(self.key)] self.update(pairs) def subkeys(self): return self._enumerate_reg_keys(self.key) @staticmethod def _enumerate_reg_values(key): return _RegKeyDict._enumerate_reg(key, winreg.EnumValue) @staticmethod def _enumerate_reg_keys(key): return _RegKeyDict._enumerate_reg(key, winreg.EnumKey) @staticmethod def _enumerate_reg(key, func): "Enumerates an open registry key as an iterable generator" try: for index in count(): yield func(key, index) except WindowsError: pass # for backward compatibility def deprecated(func, name='Unknown'): """This is a decorator which can be used to mark functions as deprecated. It will result in a warning being emmitted when the function is used.""" def newFunc(*args, **kwargs): warnings.warn("Call to deprecated function %s." % name, category=DeprecationWarning) return func(*args, **kwargs) newFunc.__name__ = func.__name__ newFunc.__doc__ = func.__doc__ newFunc.__dict__.update(func.__dict__) return newFunc GetTimeZoneNames = deprecated(TimeZoneInfo._get_time_zone_key_names, 'GetTimeZoneNames') GetIndexedTimeZoneNames = deprecated(TimeZoneInfo._get_indexed_time_zone_keys, 'GetIndexedTimeZoneNames') GetSortedTimeZoneNames = deprecated(TimeZoneInfo.get_sorted_time_zone_names, 'GetSortedTimeZoneNames') # end backward compatibility def utcnow(): """ Return the UTC time now with timezone awareness as enabled by this module >>> now = utcnow() """ now = datetime.datetime.utcnow() now = now.replace(tzinfo=TimeZoneInfo.utc()) return now def now(): """ Return the local time now with timezone awareness as enabled by this module >>> now_local = now() """ return datetime.datetime.now(TimeZoneInfo.local()) def GetTZCapabilities(): """Run a few known tests to determine the capabilities of the time zone database on this machine. Note Dynamic Time Zone support is not available on any platform at this time; this is a limitation of this library, not the platform.""" tzi = TimeZoneInfo('Mountain Standard Time') MissingTZPatch = datetime.datetime(2007,11,2,tzinfo=tzi).utctimetuple() != (2007,11,2,6,0,0,4,306,0) DynamicTZSupport = not MissingTZPatch and datetime.datetime(2003,11,2,tzinfo=tzi).utctimetuple() == (2003,11,2,7,0,0,6,306,0) del tzi return vars() class DLLHandleCache(object): def __init__(self): self.__cache = {} def __getitem__(self, filename): key = filename.lower() return self.__cache.setdefault(key, win32api.LoadLibrary(key)) DLLCache = DLLHandleCache() def resolveMUITimeZone(spec): """Resolve a multilingual user interface resource for the time zone name >>> #some pre-amble for the doc-tests to be py2k and py3k aware) >>> try: unicode and None ... except NameError: unicode=str ... >>> import sys >>> result = resolveMUITimeZone('@tzres.dll,-110') >>> expectedResultType = [type(None),unicode][sys.getwindowsversion() >= (6,)] >>> type(result) is expectedResultType True spec should be of the format @path,-stringID[;comment] see http://msdn2.microsoft.com/en-us/library/ms725481.aspx for details """ pattern = re.compile('@(?P<dllname>.*),-(?P<index>\d+)(?:;(?P<comment>.*))?') matcher = pattern.match(spec) assert matcher, 'Could not parse MUI spec' try: handle = DLLCache[matcher.groupdict()['dllname']] result = win32api.LoadString(handle, int(matcher.groupdict()['index'])) except win32api.error: result = None return result # from jaraco.util.dictlib 5.3.1 class RangeMap(dict): """ A dictionary-like object that uses the keys as bounds for a range. Inclusion of the value for that range is determined by the key_match_comparator, which defaults to less-than-or-equal. A value is returned for a key if it is the first key that matches in the sorted list of keys. One may supply keyword parameters to be passed to the sort function used to sort keys (i.e. cmp [python 2 only], keys, reverse) as sort_params. Let's create a map that maps 1-3 -> 'a', 4-6 -> 'b' >>> r = RangeMap({3: 'a', 6: 'b'}) # boy, that was easy >>> r[1], r[2], r[3], r[4], r[5], r[6] ('a', 'a', 'a', 'b', 'b', 'b') Even float values should work so long as the comparison operator supports it. >>> r[4.5] 'b' But you'll notice that the way rangemap is defined, it must be open-ended on one side. >>> r[0] 'a' >>> r[-1] 'a' One can close the open-end of the RangeMap by using undefined_value >>> r = RangeMap({0: RangeMap.undefined_value, 3: 'a', 6: 'b'}) >>> r[0] Traceback (most recent call last): ... KeyError: 0 One can get the first or last elements in the range by using RangeMap.Item >>> last_item = RangeMap.Item(-1) >>> r[last_item] 'b' .last_item is a shortcut for Item(-1) >>> r[RangeMap.last_item] 'b' Sometimes it's useful to find the bounds for a RangeMap >>> r.bounds() (0, 6) RangeMap supports .get(key, default) >>> r.get(0, 'not found') 'not found' >>> r.get(7, 'not found') 'not found' """ def __init__(self, source, sort_params = {}, key_match_comparator = operator.le): dict.__init__(self, source) self.sort_params = sort_params self.match = key_match_comparator def __getitem__(self, item): sorted_keys = sorted(list(self.keys()), **self.sort_params) if isinstance(item, RangeMap.Item): result = self.__getitem__(sorted_keys[item]) else: key = self._find_first_match_(sorted_keys, item) result = dict.__getitem__(self, key) if result is RangeMap.undefined_value: raise KeyError(key) return result def get(self, key, default=None): """ Return the value for key if key is in the dictionary, else default. If default is not given, it defaults to None, so that this method never raises a KeyError. """ try: return self[key] except KeyError: return default def _find_first_match_(self, keys, item): is_match = lambda k: self.match(item, k) matches = list(filter(is_match, keys)) if matches: return matches[0] raise KeyError(item) def bounds(self): sorted_keys = sorted(list(self.keys()), **self.sort_params) return ( sorted_keys[RangeMap.first_item], sorted_keys[RangeMap.last_item], ) # some special values for the RangeMap undefined_value = type(str('RangeValueUndefined'), (object,), {})() class Item(int): pass first_item = Item(0) last_item = Item(-1)
apache-2.0
wummel/linkchecker
linkcheck/logger/graph.py
9
3382
# -*- coding: iso-8859-1 -*- # Copyright (C) 2000-2014 Bastian Kleineidam # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. """ Base class for graph loggers. """ from . import _Logger from ..decorators import notimplemented import re class _GraphLogger (_Logger): """Provide base method to get node data.""" def __init__ (self, **kwargs): """Initialize graph node list and internal id counter.""" args = self.get_args(kwargs) super(_GraphLogger, self).__init__(**args) self.init_fileoutput(args) self.nodes = {} self.nodeid = 0 def log_filter_url(self, url_data, do_print): """Update accounting data and log all valid URLs regardless the do_print flag. """ self.stats.log_url(url_data, do_print) # ignore the do_print flag and determine ourselves if we filter the url if url_data.valid: self.log_url(url_data) def get_node (self, url_data): """Return new node data or None if node already exists.""" if not url_data.url: return None elif url_data.url in self.nodes: return None node = { "url": url_data.url, "parent_url": url_data.parent_url, "id": self.nodeid, "label": quote(url_data.title if url_data.title else url_data.name), "extern": 1 if url_data.extern else 0, "checktime": url_data.checktime, "size": url_data.size, "dltime": url_data.dltime, "edge": quote(url_data.name), "valid": 1 if url_data.valid else 0, } self.nodes[node["url"]] = node self.nodeid += 1 return node def write_edges (self): """ Write all edges we can find in the graph in a brute-force manner. """ for node in self.nodes.values(): if node["parent_url"] in self.nodes: self.write_edge(node) self.flush() @notimplemented def write_edge (self, node): """Write edge data for one node and its parent.""" pass @notimplemented def end_graph (self): """Write end-of-graph marker.""" pass def end_output (self, **kwargs): """Write edges and end of checking info as gml comment.""" self.write_edges() self.end_graph() if self.has_part("outro"): self.write_outro() self.close_fileoutput() _disallowed = re.compile(r"[^a-zA-Z0-9 '#(){}\-\[\]\.,;:\!\?]+") def quote (s): """Replace disallowed characters in node or edge labels. Also remove whitespace from beginning or end of label.""" return _disallowed.sub(" ", s).strip()
gpl-2.0
Celedhrim/persomov
libs/dateutil/zoneinfo/__init__.py
144
2773
# -*- coding: utf-8 -*- """ Copyright (c) 2003-2005 Gustavo Niemeyer <[email protected]> This module offers extensions to the standard Python datetime module. """ from dateutil.tz import tzfile from tarfile import TarFile import os __author__ = "Tomi Pieviläinen <[email protected]>" __license__ = "Simplified BSD" __all__ = ["setcachesize", "gettz", "rebuild"] CACHE = [] CACHESIZE = 10 class tzfile(tzfile): def __reduce__(self): return (gettz, (self._filename,)) def getzoneinfofile(): filenames = sorted(os.listdir(os.path.join(os.path.dirname(__file__)))) filenames.reverse() for entry in filenames: if entry.startswith("zoneinfo") and ".tar." in entry: return os.path.join(os.path.dirname(__file__), entry) return None ZONEINFOFILE = getzoneinfofile() del getzoneinfofile def setcachesize(size): global CACHESIZE, CACHE CACHESIZE = size del CACHE[size:] def gettz(name): tzinfo = None if ZONEINFOFILE: for cachedname, tzinfo in CACHE: if cachedname == name: break else: tf = TarFile.open(ZONEINFOFILE) try: zonefile = tf.extractfile(name) except KeyError: tzinfo = None else: tzinfo = tzfile(zonefile) tf.close() CACHE.insert(0, (name, tzinfo)) del CACHE[CACHESIZE:] return tzinfo def rebuild(filename, tag=None, format="gz"): import tempfile, shutil tmpdir = tempfile.mkdtemp() zonedir = os.path.join(tmpdir, "zoneinfo") moduledir = os.path.dirname(__file__) if tag: tag = "-"+tag targetname = "zoneinfo%s.tar.%s" % (tag, format) try: tf = TarFile.open(filename) # The "backwards" zone file contains links to other files, so must be # processed as last for name in sorted(tf.getnames(), key=lambda k: k != "backward" and k or "z"): if not (name.endswith(".sh") or name.endswith(".tab") or name == "leapseconds"): tf.extract(name, tmpdir) filepath = os.path.join(tmpdir, name) os.system("zic -d %s %s" % (zonedir, filepath)) tf.close() target = os.path.join(moduledir, targetname) for entry in os.listdir(moduledir): if entry.startswith("zoneinfo") and ".tar." in entry: os.unlink(os.path.join(moduledir, entry)) tf = TarFile.open(target, "w:%s" % format) for entry in os.listdir(zonedir): entrypath = os.path.join(zonedir, entry) tf.add(entrypath, entry) tf.close() finally: shutil.rmtree(tmpdir)
gpl-3.0
mrphs/TorCtl
PathSupport.py
1
81359
#!/usr/bin/python # Copyright 2007-2010 Mike Perry. See LICENSE file. """ Support classes for path construction The PathSupport package builds on top of TorCtl.TorCtl. It provides a number of interfaces that make path construction easier. The inheritance diagram for event handling is as follows: TorCtl.EventHandler <- TorCtl.ConsensusTracker <- PathBuilder <- CircuitHandler <- StreamHandler. Basically, EventHandler is what gets all the control port events packaged in nice clean classes (see help(TorCtl) for information on those). ConsensusTracker tracks the NEWCONSENSUS and NEWDESC events to maintain a view of the network that is consistent with the Tor client's current consensus. PathBuilder inherits from ConsensusTracker and is what builds all circuits based on the requirements specified in the SelectionManager instance passed to its constructor. It also handles attaching streams to circuits. It only handles one building one circuit at a time. CircuitHandler optionally inherits from PathBuilder, and overrides its circuit event handling to manage building a pool of circuits as opposed to just one. It still uses the SelectionManager for path selection. StreamHandler inherits from CircuitHandler, and is what governs the attachment of an incoming stream on to one of the multiple circuits of the circuit handler. The SelectionManager is essentially a configuration wrapper around the most elegant portions of TorFlow: NodeGenerators, NodeRestrictions, and PathRestrictions. It extends from a BaseSelectionManager that provides a basic example of using these mechanisms for custom implementations. In the SelectionManager, a NodeGenerator is used to choose the nodes probabilistically according to some distribution while obeying the NodeRestrictions. These generators (one per hop) are handed off to the PathSelector, which uses the generators to build a complete path that satisfies the PathRestriction requirements. Have a look at the class hierarchy directly below to get a feel for how the restrictions fit together, and what options are available. """ import TorCtl import re import struct import random import socket import copy import Queue import time import TorUtil import traceback import threading from TorUtil import * import sys if sys.version_info < (2, 5): from sets import Set as set __all__ = ["NodeRestrictionList", "PathRestrictionList", "PercentileRestriction", "OSRestriction", "ConserveExitsRestriction", "FlagsRestriction", "MinBWRestriction", "VersionIncludeRestriction", "VersionExcludeRestriction", "VersionRangeRestriction", "ExitPolicyRestriction", "NodeRestriction", "PathRestriction", "OrNodeRestriction", "MetaNodeRestriction", "AtLeastNNodeRestriction", "NotNodeRestriction", "Subnet16Restriction", "UniqueRestriction", "NodeGenerator", "UniformGenerator", "OrderedExitGenerator", "BwWeightedGenerator", "PathSelector", "Connection", "NickRestriction", "IdHexRestriction", "PathBuilder", "CircuitHandler", "StreamHandler", "SelectionManager", "BaseSelectionManager", "CountryCodeRestriction", "CountryRestriction", "UniqueCountryRestriction", "SingleCountryRestriction", "ContinentRestriction", "ContinentJumperRestriction", "UniqueContinentRestriction", "MetaPathRestriction", "RateLimitedRestriction", "SmartSocket"] #################### Path Support Interfaces ##################### class RestrictionError(Exception): "Error raised for issues with applying restrictions" pass class NoNodesRemain(RestrictionError): "Error raised for issues with applying restrictions" pass class NodeRestriction: "Interface for node restriction policies" def r_is_ok(self, r): "Returns true if Router 'r' is acceptable for this restriction" return True class PathRestriction: "Interface for path restriction policies" def path_is_ok(self, path): "Return true if the list of Routers in path satisfies this restriction" return True # TODO: Or, Not, N of M class MetaPathRestriction(PathRestriction): "MetaPathRestrictions are path restriction aggregators." def add_restriction(self, rstr): raise NotImplemented() def del_restriction(self, RestrictionClass): raise NotImplemented() class PathRestrictionList(MetaPathRestriction): """Class to manage a list of PathRestrictions""" def __init__(self, restrictions): "Constructor. 'restrictions' is a list of PathRestriction instances" self.restrictions = restrictions def path_is_ok(self, path): "Given list if Routers in 'path', check it against each restriction." for rs in self.restrictions: if not rs.path_is_ok(path): return False return True def add_restriction(self, rstr): "Add a PathRestriction 'rstr' to the list" self.restrictions.append(rstr) def del_restriction(self, RestrictionClass): "Remove all PathRestrictions of type RestrictionClass from the list." self.restrictions = filter( lambda r: not isinstance(r, RestrictionClass), self.restrictions) def __str__(self): return self.__class__.__name__+"("+str(map(str, self.restrictions))+")" class NodeGenerator: "Interface for node generation" def __init__(self, sorted_r, rstr_list): """Constructor. Takes a bandwidth-sorted list of Routers 'sorted_r' and a NodeRestrictionList 'rstr_list'""" self.rstr_list = rstr_list self.rebuild(sorted_r) def reset_restriction(self, rstr_list): "Reset the restriction list to a new list" self.rstr_list = rstr_list self.rebuild() def rewind(self): "Rewind the generator to the 'beginning'" self.routers = copy.copy(self.rstr_routers) if not self.routers: plog("NOTICE", "No routers left after restrictions applied: "+str(self.rstr_list)) raise NoNodesRemain(str(self.rstr_list)) def rebuild(self, sorted_r=None): """ Extra step to be performed when new routers are added or when the restrictions change. """ if sorted_r != None: self.sorted_r = sorted_r self.rstr_routers = filter(lambda r: self.rstr_list.r_is_ok(r), self.sorted_r) if not self.rstr_routers: plog("NOTICE", "No routers left after restrictions applied: "+str(self.rstr_list)) raise NoNodesRemain(str(self.rstr_list)) def mark_chosen(self, r): """Mark a router as chosen: remove it from the list of routers that can be returned in the future""" self.routers.remove(r) def all_chosen(self): "Return true if all the routers have been marked as chosen" return not self.routers def generate(self): "Return a python generator that yields routers according to the policy" raise NotImplemented() class Connection(TorCtl.Connection): """Extended Connection class that provides a method for building circuits""" def __init__(self, sock): TorCtl.Connection.__init__(self,sock) def build_circuit(self, path): "Tell Tor to build a circuit chosen by the PathSelector 'path_sel'" circ = Circuit() circ.path = path circ.exit = circ.path[len(path)-1] circ.circ_id = self.extend_circuit(0, circ.id_path()) return circ ######################## Node Restrictions ######################## # TODO: We still need more path support implementations # - NodeRestrictions: # - Uptime/LongLivedPorts (Does/should hibernation count?) # - Published/Updated # - Add a /8 restriction for ExitPolicy? # - PathRestrictions: # - NodeFamily # - GeoIP: # - Mathematical/empirical study of predecessor expectation # - If middle node on the same continent as exit, exit learns nothing # - else, exit has a bias on the continent of origin of user # - Language and browser accept string determine this anyway # - ContinentRestrictor (avoids doing more than N continent crossings) # - EchelonPhobicRestrictor # - Does not cross international boundaries for client->Entry or # Exit->destination hops class PercentileRestriction(NodeRestriction): """Restriction to cut out a percentile slice of the network.""" def __init__(self, pct_skip, pct_fast, r_list): """Constructor. Sets up the restriction such that routers in the 'pct_skip' to 'pct_fast' percentile of bandwidth rankings are returned from the sorted list 'r_list'""" self.pct_fast = pct_fast self.pct_skip = pct_skip self.sorted_r = r_list def r_is_ok(self, r): "Returns true if r is in the percentile boundaries (by rank)" if r.list_rank < len(self.sorted_r)*self.pct_skip/100: return False elif r.list_rank > len(self.sorted_r)*self.pct_fast/100: return False return True def __str__(self): return self.__class__.__name__+"("+str(self.pct_skip)+","+str(self.pct_fast)+")" class RatioPercentileRestriction(NodeRestriction): """Restriction to cut out a percentile slice of the network by ratio of consensus bw to descriptor bw.""" def __init__(self, pct_skip, pct_fast, r_list): """Constructor. Sets up the restriction such that routers in the 'pct_skip' to 'pct_fast' percentile of bandwidth rankings are returned from the sorted list 'r_list'""" self.pct_fast = pct_fast self.pct_skip = pct_skip self.sorted_r = r_list def r_is_ok(self, r): "Returns true if r is in the percentile boundaries (by rank)" if r.ratio_rank < len(self.sorted_r)*self.pct_skip/100: return False elif r.ratio_rank > len(self.sorted_r)*self.pct_fast/100: return False return True def __str__(self): return self.__class__.__name__+"("+str(self.pct_skip)+","+str(self.pct_fast)+")" class UnmeasuredPercentileRestriction(NodeRestriction): """Restriction to only accept routers from a percentile range in the unmeasured set""" def __init__(self, pct_skip, pct_fast, r_list): """Constructor. Sets up the restriction such that routers in the 'pct_skip' to 'pct_fast' percentile of bandwidth rankings are returned from the sorted list 'r_list'""" self.pct_fast = pct_fast self.pct_skip = pct_skip self.sorted_r = filter(lambda r: r.unmeasured, r_list) plog("DEBUG", "UnmeasuredPercentileRestriction built with "+str(len(self.sorted_r))+" routers") def r_is_ok(self, r): "Returns true if r is in the unmeasured percentile boundaries" if not r.unmeasured: return False # XXX: Can throw an exception somehow??? catch ValueError here.. try: idx = self.sorted_r.index(r) except ValueError: return False if idx < len(self.sorted_r)*self.pct_skip/100: return False elif idx > len(self.sorted_r)*self.pct_fast/100: return False return True def __str__(self): return self.__class__.__name__+"("+str(self.pct_skip)+","+str(self.pct_fast)+")" class UptimeRestriction(NodeRestriction): """Restriction to filter out routers with uptimes < min_uptime or > max_uptime""" def __init__(self, min_uptime=None, max_uptime=None): self.min_uptime = min_uptime self.max_uptime = max_uptime def r_is_ok(self, r): "Returns true if r is in the uptime boundaries" if self.min_uptime and r.uptime < self.min_uptime: return False if self.max_uptime and r.uptime > self.max_uptime: return False return True class UnmeasuredRestriction(NodeRestriction): """Restriction to select only unmeasured=1 routers""" def r_is_ok(self, r): "Returns true if r is unmeasured" return r.unmeasured class RankRestriction(NodeRestriction): """Restriction to cut out a list-rank slice of the network.""" def __init__(self, rank_skip, rank_stop): self.rank_skip = rank_skip self.rank_stop = rank_stop def r_is_ok(self, r): "Returns true if r is in the boundaries (by rank)" if r.list_rank < self.rank_skip: return False elif r.list_rank > self.rank_stop: return False return True def __str__(self): return self.__class__.__name__+"("+str(self.rank_skip)+","+str(self.rank_stop)+")" class OSRestriction(NodeRestriction): "Restriction based on operating system" def __init__(self, ok, bad=[]): """Constructor. Accept router OSes that match regexes in 'ok', rejects those that match regexes in 'bad'.""" self.ok = ok self.bad = bad def r_is_ok(self, r): "Returns true if r is in 'ok', false if 'r' is in 'bad'. If 'ok'" for y in self.ok: if re.search(y, r.os): return True for b in self.bad: if re.search(b, r.os): return False if self.ok: return False if self.bad: return True def __str__(self): return self.__class__.__name__+"("+str(self.ok)+","+str(self.bad)+")" class ConserveExitsRestriction(NodeRestriction): "Restriction to reject exits from selection" def __init__(self, exit_ports=None): self.exit_ports = exit_ports def r_is_ok(self, r): if self.exit_ports: for port in self.exit_ports: if r.will_exit_to("255.255.255.255", port): return False return True return not "Exit" in r.flags def __str__(self): return self.__class__.__name__+"()" class ExitPortRestriction(NodeRestriction): "Restriction to select exits that can exit to a port list" def __init__(self, exit_ports=None): self.exit_ports = exit_ports def r_is_ok(self, r): for port in self.exit_ports: if not r.will_exit_to("255.255.255.255", port): return False return True def __str__(self): return self.__class__.__name__+"()" class FlagsRestriction(NodeRestriction): "Restriction for mandatory and forbidden router flags" def __init__(self, mandatory, forbidden=[]): """Constructor. 'mandatory' and 'forbidden' are both lists of router flags as strings.""" self.mandatory = mandatory self.forbidden = forbidden def r_is_ok(self, router): for m in self.mandatory: if not m in router.flags: return False for f in self.forbidden: if f in router.flags: return False return True def __str__(self): return self.__class__.__name__+"("+str(self.mandatory)+","+str(self.forbidden)+")" class NickRestriction(NodeRestriction): """Require that the node nickname is as specified""" def __init__(self, nickname): self.nickname = nickname def r_is_ok(self, router): return router.nickname == self.nickname def __str__(self): return self.__class__.__name__+"("+str(self.nickname)+")" class IdHexRestriction(NodeRestriction): """Require that the node idhash is as specified""" def __init__(self, idhex): if idhex[0] == '$': self.idhex = idhex[1:].upper() else: self.idhex = idhex.upper() def r_is_ok(self, router): return router.idhex == self.idhex def __str__(self): return self.__class__.__name__+"("+str(self.idhex)+")" class MinBWRestriction(NodeRestriction): """Require a minimum bandwidth""" def __init__(self, minbw): self.min_bw = minbw def r_is_ok(self, router): return router.bw >= self.min_bw def __str__(self): return self.__class__.__name__+"("+str(self.min_bw)+")" class RateLimitedRestriction(NodeRestriction): def __init__(self, limited=True): self.limited = limited def r_is_ok(self, router): return router.rate_limited == self.limited def __str__(self): return self.__class__.__name__+"("+str(self.limited)+")" class VersionIncludeRestriction(NodeRestriction): """Require that the version match one in the list""" def __init__(self, eq): "Constructor. 'eq' is a list of versions as strings" self.eq = map(TorCtl.RouterVersion, eq) def r_is_ok(self, router): """Returns true if the version of 'router' matches one of the specified versions.""" for e in self.eq: if e == router.version: return True return False def __str__(self): return self.__class__.__name__+"("+str(self.eq)+")" class VersionExcludeRestriction(NodeRestriction): """Require that the version not match one in the list""" def __init__(self, exclude): "Constructor. 'exclude' is a list of versions as strings" self.exclude = map(TorCtl.RouterVersion, exclude) def r_is_ok(self, router): """Returns false if the version of 'router' matches one of the specified versions.""" for e in self.exclude: if e == router.version: return False return True def __str__(self): return self.__class__.__name__+"("+str(map(str, self.exclude))+")" class VersionRangeRestriction(NodeRestriction): """Require that the versions be inside a specified range""" def __init__(self, gr_eq, less_eq=None): self.gr_eq = TorCtl.RouterVersion(gr_eq) if less_eq: self.less_eq = TorCtl.RouterVersion(less_eq) else: self.less_eq = None def r_is_ok(self, router): return (not self.gr_eq or router.version >= self.gr_eq) and \ (not self.less_eq or router.version <= self.less_eq) def __str__(self): return self.__class__.__name__+"("+str(self.gr_eq)+","+str(self.less_eq)+")" class ExitPolicyRestriction(NodeRestriction): """Require that a router exit to an ip+port""" def __init__(self, to_ip, to_port): self.to_ip = to_ip self.to_port = to_port def r_is_ok(self, r): return r.will_exit_to(self.to_ip, self.to_port) def __str__(self): return self.__class__.__name__+"("+str(self.to_ip)+","+str(self.to_port)+")" class MetaNodeRestriction(NodeRestriction): """Interface for a NodeRestriction that is an expression consisting of multiple other NodeRestrictions""" def add_restriction(self, rstr): raise NotImplemented() # TODO: these should collapse the restriction and return a new # instance for re-insertion (or None) def next_rstr(self): raise NotImplemented() def del_restriction(self, RestrictionClass): raise NotImplemented() class OrNodeRestriction(MetaNodeRestriction): """MetaNodeRestriction that is the boolean or of two or more NodeRestrictions""" def __init__(self, rs): "Constructor. 'rs' is a list of NodeRestrictions" self.rstrs = rs def r_is_ok(self, r): "Returns true if one of 'rs' is true for this router" for rs in self.rstrs: if rs.r_is_ok(r): return True return False def __str__(self): return self.__class__.__name__+"("+str(map(str, self.rstrs))+")" class NotNodeRestriction(MetaNodeRestriction): """Negates a single restriction""" def __init__(self, a): self.a = a def r_is_ok(self, r): return not self.a.r_is_ok(r) def __str__(self): return self.__class__.__name__+"("+str(self.a)+")" class AtLeastNNodeRestriction(MetaNodeRestriction): """MetaNodeRestriction that is true if at least n member restrictions are true.""" def __init__(self, rstrs, n): self.rstrs = rstrs self.n = n def r_is_ok(self, r): cnt = 0 for rs in self.rstrs: if rs.r_is_ok(r): cnt += 1 if cnt < self.n: return False else: return True def __str__(self): return self.__class__.__name__+"("+str(map(str, self.rstrs))+","+str(self.n)+")" class NodeRestrictionList(MetaNodeRestriction): "Class to manage a list of NodeRestrictions" def __init__(self, restrictions): "Constructor. 'restrictions' is a list of NodeRestriction instances" self.restrictions = restrictions def r_is_ok(self, r): "Returns true of Router 'r' passes all of the contained restrictions" for rs in self.restrictions: if not rs.r_is_ok(r): return False return True def add_restriction(self, restr): "Add a NodeRestriction 'restr' to the list of restrictions" self.restrictions.append(restr) # TODO: This does not collapse meta restrictions.. def del_restriction(self, RestrictionClass): """Remove all restrictions of type RestrictionClass from the list. Does NOT inspect or collapse MetaNode Restrictions (though MetaRestrictions can be removed if RestrictionClass is MetaNodeRestriction)""" self.restrictions = filter( lambda r: not isinstance(r, RestrictionClass), self.restrictions) def clear(self): """ Remove all restrictions """ self.restrictions = [] def __str__(self): return self.__class__.__name__+"("+str(map(str, self.restrictions))+")" #################### Path Restrictions ##################### class Subnet16Restriction(PathRestriction): """PathRestriction that mandates that no two nodes from the same /16 subnet be in the path""" def path_is_ok(self, path): mask16 = struct.unpack(">I", socket.inet_aton("255.255.0.0"))[0] ip16 = path[0].ip & mask16 for r in path[1:]: if ip16 == (r.ip & mask16): return False return True def __str__(self): return self.__class__.__name__+"()" class UniqueRestriction(PathRestriction): """Path restriction that mandates that the same router can't appear more than once in a path""" def path_is_ok(self, path): for i in xrange(0,len(path)): if path[i] in path[:i]: return False return True def __str__(self): return self.__class__.__name__+"()" #################### GeoIP Restrictions ################### class CountryCodeRestriction(NodeRestriction): """ Ensure that the country_code is set """ def r_is_ok(self, r): return r.country_code != None def __str__(self): return self.__class__.__name__+"()" class CountryRestriction(NodeRestriction): """ Only accept nodes that are in 'country_code' """ def __init__(self, country_code): self.country_code = country_code def r_is_ok(self, r): return r.country_code == self.country_code def __str__(self): return self.__class__.__name__+"("+str(self.country_code)+")" class ExcludeCountriesRestriction(NodeRestriction): """ Exclude a list of countries """ def __init__(self, countries): self.countries = countries def r_is_ok(self, r): return not (r.country_code in self.countries) def __str__(self): return self.__class__.__name__+"("+str(self.countries)+")" class UniqueCountryRestriction(PathRestriction): """ Ensure every router to have a distinct country_code """ def path_is_ok(self, path): for i in xrange(0, len(path)-1): for j in xrange(i+1, len(path)): if path[i].country_code == path[j].country_code: return False; return True; def __str__(self): return self.__class__.__name__+"()" class SingleCountryRestriction(PathRestriction): """ Ensure every router to have the same country_code """ def path_is_ok(self, path): country_code = path[0].country_code for r in path: if country_code != r.country_code: return False return True def __str__(self): return self.__class__.__name__+"()" class ContinentRestriction(PathRestriction): """ Do not more than n continent crossings """ # TODO: Add src and dest def __init__(self, n, src=None, dest=None): self.n = n def path_is_ok(self, path): crossings = 0 prev = None # Compute crossings until now for r in path: # Jump over the first router if prev: if r.continent != prev.continent: crossings += 1 prev = r if crossings > self.n: return False else: return True def __str__(self): return self.__class__.__name__+"("+str(self.n)+")" class ContinentJumperRestriction(PathRestriction): """ Ensure continent crossings between all hops """ def path_is_ok(self, path): prev = None for r in path: # Jump over the first router if prev: if r.continent == prev.continent: return False prev = r return True def __str__(self): return self.__class__.__name__+"()" class UniqueContinentRestriction(PathRestriction): """ Ensure every hop to be on a different continent """ def path_is_ok(self, path): for i in xrange(0, len(path)-1): for j in xrange(i+1, len(path)): if path[i].continent == path[j].continent: return False; return True; def __str__(self): return self.__class__.__name__+"()" class OceanPhobicRestriction(PathRestriction): """ Not more than n ocean crossings """ # TODO: Add src and dest def __init__(self, n, src=None, dest=None): self.n = n def path_is_ok(self, path): crossings = 0 prev = None # Compute ocean crossings until now for r in path: # Jump over the first router if prev: if r.cont_group != prev.cont_group: crossings += 1 prev = r if crossings > self.n: return False else: return True def __str__(self): return self.__class__.__name__+"("+str(self.n)+")" #################### Node Generators ###################### class UniformGenerator(NodeGenerator): """NodeGenerator that produces nodes in the uniform distribution""" def generate(self): # XXX: hrmm.. this is not really the right thing to check while not self.all_chosen(): yield random.choice(self.routers) class ExactUniformGenerator(NodeGenerator): """NodeGenerator that produces nodes randomly, yet strictly uniformly over time""" def __init__(self, sorted_r, rstr_list, position=0): self.position = position NodeGenerator.__init__(self, sorted_r, rstr_list) def generate(self): min_gen = min(map(lambda r: r._generated[self.position], self.routers)) choices = filter(lambda r: r._generated[self.position]==min_gen, self.routers) while choices: r = random.choice(choices) yield r choices.remove(r) choices = filter(lambda r: r._generated[self.position]==min_gen, self.routers) plog("NOTICE", "Ran out of choices in ExactUniformGenerator. Incrementing nodes") for r in choices: r._generated[self.position] += 1 def mark_chosen(self, r): r._generated[self.position] += 1 NodeGenerator.mark_chosen(self, r) def rebuild(self, sorted_r=None): plog("DEBUG", "Rebuilding ExactUniformGenerator") NodeGenerator.rebuild(self, sorted_r) for r in self.rstr_routers: lgen = len(r._generated) if lgen < self.position+1: for i in xrange(lgen, self.position+1): r._generated.append(0) plog("DEBUG", "Rebuilt ExactUniformGenerator") class OrderedExitGenerator(NodeGenerator): """NodeGenerator that produces exits in an ordered fashion for a specific port""" def __init__(self, to_port, sorted_r, rstr_list): self.to_port = to_port self.next_exit_by_port = {} NodeGenerator.__init__(self, sorted_r, rstr_list) def rewind(self): NodeGenerator.rewind(self) if self.to_port not in self.next_exit_by_port or not self.next_exit_by_port[self.to_port]: self.next_exit_by_port[self.to_port] = 0 self.last_idx = len(self.routers) else: self.last_idx = self.next_exit_by_port[self.to_port] def set_port(self, port): self.to_port = port self.rewind() def mark_chosen(self, r): self.next_exit_by_port[self.to_port] += 1 def all_chosen(self): return self.last_idx == self.next_exit_by_port[self.to_port] def generate(self): while True: # A do..while would be real nice here.. if self.next_exit_by_port[self.to_port] >= len(self.routers): self.next_exit_by_port[self.to_port] = 0 yield self.routers[self.next_exit_by_port[self.to_port]] self.next_exit_by_port[self.to_port] += 1 if self.last_idx == self.next_exit_by_port[self.to_port]: break class BwWeightedGenerator(NodeGenerator): """ This is a generator designed to match the Tor Path Selection algorithm. It will generate nodes weighted by their bandwidth, but take the appropriate weighting into account against guard nodes and exit nodes when they are chosen for positions other than guard/exit. For background see: routerlist.c::smartlist_choose_by_bandwidth(), http://archives.seul.org/or/dev/Jul-2007/msg00021.html, http://archives.seul.org/or/dev/Jul-2007/msg00056.html, and https://tor-svn.freehaven.net/svn/tor/trunk/doc/spec/path-spec.txt The formulas used are from the first or-dev link, but are proven optimal and equivalent to the ones now used in routerlist.c in the second or-dev link. """ def __init__(self, sorted_r, rstr_list, pathlen, exit=False, guard=False): """ Pass exit=True to create a generator for exit-nodes """ self.max_bandwidth = 10000000 # Out for an exit-node? self.exit = exit # Is this a guard node? self.guard = guard # Different sums of bandwidths self.total_bw = 0 self.total_exit_bw = 0 self.total_guard_bw = 0 self.total_weighted_bw = 0 self.pathlen = pathlen NodeGenerator.__init__(self, sorted_r, rstr_list) def rebuild(self, sorted_r=None): NodeGenerator.rebuild(self, sorted_r) NodeGenerator.rewind(self) # Set the exit_weight # We are choosing a non-exit self.total_exit_bw = 0 self.total_guard_bw = 0 self.total_bw = 0 for r in self.routers: # TODO: Check max_bandwidth and cap... self.total_bw += r.bw if "Exit" in r.flags: self.total_exit_bw += r.bw if "Guard" in r.flags: self.total_guard_bw += r.bw bw_per_hop = (1.0*self.total_bw)/self.pathlen # Print some debugging info about bandwidth ratios if self.total_bw > 0: e_ratio = self.total_exit_bw/float(self.total_bw) g_ratio = self.total_guard_bw/float(self.total_bw) else: g_ratio = 0 e_ratio = 0 plog("DEBUG", "E = " + str(self.total_exit_bw) + ", G = " + str(self.total_guard_bw) + ", T = " + str(self.total_bw) + ", g_ratio = " + str(g_ratio) + ", e_ratio = " +str(e_ratio) + ", bw_per_hop = " + str(bw_per_hop)) if self.exit: self.exit_weight = 1.0 else: if self.total_exit_bw < bw_per_hop: # Don't use exit nodes at all self.exit_weight = 0 else: if self.total_exit_bw > 0: self.exit_weight = ((self.total_exit_bw-bw_per_hop)/self.total_exit_bw) else: self.exit_weight = 0 if self.guard: self.guard_weight = 1.0 else: if self.total_guard_bw < bw_per_hop: # Don't use exit nodes at all self.guard_weight = 0 else: if self.total_guard_bw > 0: self.guard_weight = ((self.total_guard_bw-bw_per_hop)/self.total_guard_bw) else: self.guard_weight = 0 for r in self.routers: bw = r.bw if "Exit" in r.flags: bw *= self.exit_weight if "Guard" in r.flags: bw *= self.guard_weight self.total_weighted_bw += bw self.total_weighted_bw = int(self.total_weighted_bw) plog("DEBUG", "Bw: "+str(self.total_weighted_bw)+"/"+str(self.total_bw) +". The exit-weight is: "+str(self.exit_weight) + ", guard weight is: "+str(self.guard_weight)) def generate(self): while True: # Choose a suitable random int i = random.randint(0, self.total_weighted_bw) # Go through the routers for r in self.routers: # Below zero here means next() -> choose a new random int+router if i < 0: break bw = r.bw if "Exit" in r.flags: bw *= self.exit_weight if "Guard" in r.flags: bw *= self.guard_weight i -= bw if i < 0: plog("DEBUG", "Chosen router with a bandwidth of: " + str(r.bw)) yield r ####################### Secret Sauce ########################### class PathError(Exception): pass class NoRouters(PathError): pass class PathSelector: """Implementation of path selection policies. Builds a path according to entry, middle, and exit generators that satisfies the path restrictions.""" def __init__(self, entry_gen, mid_gen, exit_gen, path_restrict): """Constructor. The first three arguments are NodeGenerators with their appropriate restrictions. The 'path_restrict' is a PathRestrictionList""" self.entry_gen = entry_gen self.mid_gen = mid_gen self.exit_gen = exit_gen self.path_restrict = path_restrict def rebuild_gens(self, sorted_r): "Rebuild the 3 generators with a new sorted router list" self.entry_gen.rebuild(sorted_r) self.mid_gen.rebuild(sorted_r) self.exit_gen.rebuild(sorted_r) def select_path(self, pathlen): """Creates a path of 'pathlen' hops, and returns it as a list of Router instances""" self.entry_gen.rewind() self.mid_gen.rewind() self.exit_gen.rewind() entry = self.entry_gen.generate() mid = self.mid_gen.generate() ext = self.exit_gen.generate() plog("DEBUG", "Selecting path..") while True: path = [] plog("DEBUG", "Building path..") try: if pathlen == 1: path = [ext.next()] else: path.append(entry.next()) for i in xrange(1, pathlen-1): path.append(mid.next()) path.append(ext.next()) if self.path_restrict.path_is_ok(path): self.entry_gen.mark_chosen(path[0]) for i in xrange(1, pathlen-1): self.mid_gen.mark_chosen(path[i]) self.exit_gen.mark_chosen(path[pathlen-1]) plog("DEBUG", "Marked path.") break else: plog("DEBUG", "Path rejected by path restrictions.") except StopIteration: plog("NOTICE", "Ran out of routers during buildpath.."); self.entry_gen.rewind() self.mid_gen.rewind() self.exit_gen.rewind() entry = self.entry_gen.generate() mid = self.mid_gen.generate() ext = self.exit_gen.generate() for r in path: r.refcount += 1 plog("DEBUG", "Circ refcount "+str(r.refcount)+" for "+r.idhex) return path # TODO: Implement example manager. class BaseSelectionManager: """ The BaseSelectionManager is a minimalistic node selection manager. It is meant to be used with a PathSelector that consists of an entry NodeGenerator, a middle NodeGenerator, and an exit NodeGenerator. However, none of these are absolutely necessary. It is possible to completely avoid them if you wish by hacking whatever selection mechanisms you want straight into this interface and then passing an instance to a PathBuilder implementation. """ def __init__(self): self.bad_restrictions = False self.consensus = None def reconfigure(self, consensus=None): """ This method is called whenever a significant configuration change occurs. Currently, this only happens via PathBuilder.__init__ and PathBuilder.schedule_selmgr(). This method should NOT throw any exceptions. """ pass def new_consensus(self, consensus): """ This method is called whenever a consensus change occurs. This method should NOT throw any exceptions. """ pass def set_exit(self, exit_name): """ This method provides notification that a fixed exit is desired. This method should NOT throw any exceptions. """ pass def set_target(self, host, port): """ This method provides notification that a new target endpoint is desired. May throw a RestrictionError if target is impossible to reach. """ pass def select_path(self): """ Returns a new path in the form of a list() of Router instances. May throw a RestrictionError. """ pass class SelectionManager(BaseSelectionManager): """Helper class to handle configuration updates The methods are NOT threadsafe. They may ONLY be called from EventHandler's thread. This means that to update the selection manager, you must schedule a config update job using PathBuilder.schedule_selmgr() with a worker function to modify this object. XXX: Warning. The constructor of this class is subject to change and may undergo reorganization in the near future. Watch for falling bits. """ # XXX: Hrmm, consider simplifying this. It is confusing and unweildy. def __init__(self, pathlen, order_exits, percent_fast, percent_skip, min_bw, use_all_exits, uniform, use_exit, use_guards,geoip_config=None, restrict_guards=False, extra_node_rstr=None, exit_ports=None, order_by_ratio=False, min_exits=0, only_unmeasured=False): BaseSelectionManager.__init__(self) self.__ordered_exit_gen = None self.pathlen = pathlen self.order_exits = order_exits self.percent_fast = percent_fast self.percent_skip = percent_skip self.min_bw = min_bw self.use_all_exits = use_all_exits self.uniform = uniform self.exit_id = use_exit self.use_guards = use_guards self.geoip_config = geoip_config self.restrict_guards_only = restrict_guards self.bad_restrictions = False self.consensus = None self.exit_ports = exit_ports self.extra_node_rstr=extra_node_rstr self.order_by_ratio = order_by_ratio self.min_exits = min_exits self.added_exits = [] self.only_unmeasured = only_unmeasured def reconfigure(self, consensus=None): try: self._reconfigure(consensus) self.bad_restrictions = False except NoNodesRemain: plog("WARN", "No nodes remain in selection manager") self.bad_restrictions = True return self.bad_restrictions def _reconfigure(self, consensus=None): """This function is called after a configuration change, to rebuild the RestrictionLists.""" if consensus: plog("DEBUG", "Reconfigure with consensus") self.consensus = consensus else: plog("DEBUG", "Reconfigure without consensus") sorted_r = self.consensus.sorted_r if self.use_all_exits: self.path_rstr = PathRestrictionList([UniqueRestriction()]) else: self.path_rstr = PathRestrictionList( [Subnet16Restriction(), UniqueRestriction()]) if self.use_guards: entry_flags = ["Guard", "Running"] else: entry_flags = ["Running"] if self.restrict_guards_only: nonentry_skip = 0 nonentry_fast = 100 else: nonentry_skip = self.percent_skip nonentry_fast = self.percent_fast if self.only_unmeasured: PctRstr = UnmeasuredPercentileRestriction elif self.order_by_ratio: PctRstr = RatioPercentileRestriction else: PctRstr = PercentileRestriction # XXX: sometimes we want the ability to do uniform scans # without the conserve exit restrictions.. entry_rstr = NodeRestrictionList( [PctRstr(self.percent_skip, self.percent_fast, sorted_r), OrNodeRestriction( [FlagsRestriction(["BadExit"]), ConserveExitsRestriction(self.exit_ports)]), FlagsRestriction(entry_flags, [])] ) mid_rstr = NodeRestrictionList( [PctRstr(nonentry_skip, nonentry_fast, sorted_r), OrNodeRestriction( [FlagsRestriction(["BadExit"]), ConserveExitsRestriction(self.exit_ports)]), FlagsRestriction(["Running"], [])] ) if self.exit_id: self._set_exit(self.exit_id) plog("DEBUG", "Applying Setexit: "+self.exit_id) self.exit_rstr = NodeRestrictionList([IdHexRestriction(self.exit_id)]) elif self.use_all_exits: self.exit_rstr = NodeRestrictionList( [FlagsRestriction(["Running"], ["BadExit"])]) else: self.exit_rstr = NodeRestrictionList( [PctRstr(nonentry_skip, nonentry_fast, sorted_r), FlagsRestriction(["Running"], ["BadExit"])]) if self.extra_node_rstr: entry_rstr.add_restriction(self.extra_node_rstr) mid_rstr.add_restriction(self.extra_node_rstr) self.exit_rstr.add_restriction(self.extra_node_rstr) # This is a hack just for the bw auths to avoid slices with no exits if self.min_exits and self.exit_ports: test_rstr = NodeRestrictionList( [PctRstr(nonentry_skip, nonentry_fast, sorted_r), ExitPortRestriction(self.exit_ports), FlagsRestriction(["Running"], ["BadExit"])]) exit_count = len(filter(lambda r: test_rstr.r_is_ok(r), sorted_r)) if exit_count < self.min_exits: self.added_exits = self.find_emergency_exits(sorted_r, self.min_exits-exit_count) plog("NOTICE", "Only "+str(exit_count)+" exits remain in slice "+str(nonentry_skip)+"-"+str(nonentry_fast)+" after restrictions. Adding in "+str(self.added_exits)) idhex_list = map(IdHexRestriction, self.added_exits) idhex_list.append(self.exit_rstr) self.exit_rstr = NodeRestrictionList([OrNodeRestriction(idhex_list)]) # GeoIP configuration if self.geoip_config: # Every node needs country_code entry_rstr.add_restriction(CountryCodeRestriction()) mid_rstr.add_restriction(CountryCodeRestriction()) self.exit_rstr.add_restriction(CountryCodeRestriction()) # Specified countries for different positions if self.geoip_config.entry_country: entry_rstr.add_restriction(CountryRestriction(self.geoip_config.entry_country)) if self.geoip_config.middle_country: mid_rstr.add_restriction(CountryRestriction(self.geoip_config.middle_country)) if self.geoip_config.exit_country: self.exit_rstr.add_restriction(CountryRestriction(self.geoip_config.exit_country)) # Excluded countries if self.geoip_config.excludes: plog("INFO", "Excluded countries: " + str(self.geoip_config.excludes)) if len(self.geoip_config.excludes) > 0: entry_rstr.add_restriction(ExcludeCountriesRestriction(self.geoip_config.excludes)) mid_rstr.add_restriction(ExcludeCountriesRestriction(self.geoip_config.excludes)) self.exit_rstr.add_restriction(ExcludeCountriesRestriction(self.geoip_config.excludes)) # Unique countries set? None --> pass if self.geoip_config.unique_countries != None: if self.geoip_config.unique_countries: # If True: unique countries self.path_rstr.add_restriction(UniqueCountryRestriction()) else: # False: use the same country for all nodes in a path self.path_rstr.add_restriction(SingleCountryRestriction()) # Specify max number of continent crossings, None means UniqueContinents if self.geoip_config.continent_crossings == None: self.path_rstr.add_restriction(UniqueContinentRestriction()) else: self.path_rstr.add_restriction(ContinentRestriction(self.geoip_config.continent_crossings)) # Should even work in combination with continent crossings if self.geoip_config.ocean_crossings != None: self.path_rstr.add_restriction(OceanPhobicRestriction(self.geoip_config.ocean_crossings)) # This is kind of hokey.. if self.order_exits: if self.__ordered_exit_gen: exitgen = self.__ordered_exit_gen exitgen.reset_restriction(self.exit_rstr) else: exitgen = self.__ordered_exit_gen = \ OrderedExitGenerator(80, sorted_r, self.exit_rstr) elif self.uniform: exitgen = ExactUniformGenerator(sorted_r, self.exit_rstr) else: exitgen = BwWeightedGenerator(sorted_r, self.exit_rstr, self.pathlen, exit=True) if self.uniform: self.path_selector = PathSelector( ExactUniformGenerator(sorted_r, entry_rstr), ExactUniformGenerator(sorted_r, mid_rstr), exitgen, self.path_rstr) else: # Remove ConserveExitsRestriction for entry and middle positions # by removing the OrNodeRestriction that contains it... # FIXME: This is a landmine for a poor soul to hit. # Then again, most of the rest of this function is, too. entry_rstr.del_restriction(OrNodeRestriction) mid_rstr.del_restriction(OrNodeRestriction) self.path_selector = PathSelector( BwWeightedGenerator(sorted_r, entry_rstr, self.pathlen, guard=self.use_guards), BwWeightedGenerator(sorted_r, mid_rstr, self.pathlen), exitgen, self.path_rstr) return # Picks num of the top_n fastest exits that can handle our exit ports. def find_emergency_exits(self, sorted_r, num, top_n=100): new_exits = [] test_rstr = NodeRestrictionList( [ExitPortRestriction(self.exit_ports), FlagsRestriction(["Running"], ["BadExit"])]) for r in sorted_r: if test_rstr.r_is_ok(r): new_exits.append(r.idhex) if len(new_exits) >= top_n: break return random.sample(new_exits, min(len(new_exits), num)) def _set_exit(self, exit_name): # sets an exit, if bad, sets bad_exit exit_id = None if exit_name: if exit_name[0] == '$': exit_id = exit_name elif exit_name in self.consensus.name_to_key: exit_id = self.consensus.name_to_key[exit_name] self.exit_id = exit_id def set_exit(self, exit_name): self._set_exit(exit_name) self.exit_rstr.clear() if not self.exit_id: plog("NOTICE", "Requested null exit "+str(self.exit_id)) self.bad_restrictions = True elif self.exit_id[1:] not in self.consensus.routers: plog("NOTICE", "Requested absent exit "+str(self.exit_id)) self.bad_restrictions = True elif self.consensus.routers[self.exit_id[1:]].down: e = self.consensus.routers[self.exit_id[1:]] plog("NOTICE", "Requested downed exit "+str(self.exit_id)+" (bw: "+str(e.bw)+", flags: "+str(e.flags)+")") self.bad_restrictions = True elif self.consensus.routers[self.exit_id[1:]].deleted: e = self.consensus.routers[self.exit_id[1:]] plog("NOTICE", "Requested deleted exit "+str(self.exit_id)+" (bw: "+str(e.bw)+", flags: "+str(e.flags)+", Down: "+str(e.down)+", ref: "+str(e.refcount)+")") self.bad_restrictions = True else: self.exit_rstr.add_restriction(IdHexRestriction(self.exit_id)) plog("DEBUG", "Added exit restriction for "+self.exit_id) try: self.path_selector.exit_gen.rebuild() self.bad_restrictions = False except RestrictionError, e: plog("WARN", "Restriction error "+str(e)+" after set_exit") self.bad_restrictions = True return self.bad_restrictions def new_consensus(self, consensus): self.consensus = consensus try: self.path_selector.rebuild_gens(self.consensus.sorted_r) if self.exit_id: self.set_exit(self.exit_id) except NoNodesRemain: plog("NOTICE", "No viable nodes in consensus for restrictions.") # Punting + performing reconfigure..") #self.reconfigure(consensus) def set_target(self, ip, port): # sets an exit policy, if bad, rasies exception.. "Called to update the ExitPolicyRestrictions with a new ip and port" if self.bad_restrictions: plog("WARN", "Requested target with bad restrictions") raise RestrictionError() self.exit_rstr.del_restriction(ExitPolicyRestriction) self.exit_rstr.add_restriction(ExitPolicyRestriction(ip, port)) if self.__ordered_exit_gen: self.__ordered_exit_gen.set_port(port) # Try to choose an exit node in the destination country # needs an IP != 255.255.255.255 if self.geoip_config and self.geoip_config.echelon: import GeoIPSupport c = GeoIPSupport.get_country(ip) if c: plog("INFO", "[Echelon] IP "+ip+" is in ["+c+"]") self.exit_rstr.del_restriction(CountryRestriction) self.exit_rstr.add_restriction(CountryRestriction(c)) else: plog("INFO", "[Echelon] Could not determine destination country of IP "+ip) # Try to use a backup country if self.geoip_config.exit_country: self.exit_rstr.del_restriction(CountryRestriction) self.exit_rstr.add_restriction(CountryRestriction(self.geoip_config.exit_country)) # Need to rebuild exit generator self.path_selector.exit_gen.rebuild() def select_path(self): if self.bad_restrictions: plog("WARN", "Requested target with bad restrictions") raise RestrictionError() return self.path_selector.select_path(self.pathlen) class Circuit: "Class to describe a circuit" def __init__(self): self.circ_id = 0 self.path = [] # routers self.exit = None self.built = False self.failed = False self.dirty = False self.requested_closed = False self.detached_cnt = 0 self.last_extended_at = time.time() self.extend_times = [] # List of all extend-durations self.setup_duration = None # Sum of extend-times self.pending_streams = [] # Which stream IDs are pending us # XXX: Unused.. Need to use for refcounting because # sometimes circuit closed events come before the stream # close and we need to track those failures.. self.carried_streams = [] def id_path(self): "Returns a list of idhex keys for the path of Routers" return map(lambda r: r.idhex, self.path) class Stream: "Class to describe a stream" def __init__(self, sid, host, port, kind): self.strm_id = sid self.detached_from = [] # circ id #'s self.pending_circ = None self.circ = None self.host = host self.port = port self.kind = kind self.attached_at = 0 self.bytes_read = 0 self.bytes_written = 0 self.failed = False self.ignored = False # Set if PURPOSE=DIR_* self.failed_reason = None # Cheating a little.. Only used by StatsHandler def lifespan(self, now): "Returns the age of the stream" return now-self.attached_at _origsocket = socket.socket class _SocketWrapper(socket.socket): """ Ghetto wrapper to workaround python same_slots_added() and socket __base__ braindamage """ pass class SmartSocket(_SocketWrapper): """ A SmartSocket is a socket that tracks global socket creation for local ports. It has a member StreamSelector that can be used as a PathBuilder stream StreamSelector (see below). Most users will want to reset the base class of SocksiPy to use this class: __oldsocket = socket.socket socket.socket = PathSupport.SmartSocket import SocksiPy socket.socket = __oldsocket """ port_table = set() _table_lock = threading.Lock() def __init__(self, family=2, type=1, proto=0, _sock=None): ret = super(SmartSocket, self).__init__(family, type, proto, _sock) self.__local_addr = None plog("DEBUG", "New socket constructor") return ret def connect(self, args): ret = super(SmartSocket, self).connect(args) myaddr = self.getsockname() self.__local_addr = myaddr[0]+":"+str(myaddr[1]) SmartSocket._table_lock.acquire() assert(self.__local_addr not in SmartSocket.port_table) SmartSocket.port_table.add(myaddr[0]+":"+str(myaddr[1])) SmartSocket._table_lock.release() plog("DEBUG", "Added "+self.__local_addr+" to our local port list") return ret def connect_ex(self, args): ret = super(SmartSocket, self).connect_ex(args) myaddr = ret.getsockname() self.__local_addr = myaddr[0]+":"+str(myaddr[1]) SmartSocket._table_lock.acquire() assert(self.__local_addr not in SmartSocket.port_table) SmartSocket.port_table.add(myaddr[0]+":"+str(myaddr[1])) SmartSocket._table_lock.release() plog("DEBUG", "Added "+self.__local_addr+" to our local port list") return ret def __del__(self): if self.__local_addr: SmartSocket._table_lock.acquire() SmartSocket.port_table.remove(self.__local_addr) plog("DEBUG", "Removed "+self.__local_addr+" from our local port list") SmartSocket._table_lock.release() else: plog("DEBUG", "Got a socket deletion with no address") def table_size(): SmartSocket._table_lock.acquire() ret = len(SmartSocket.port_table) SmartSocket._table_lock.release() return ret table_size = Callable(table_size) def clear_port_table(): """ WARNING: Calling this periodically is a *really good idea*. Relying on __del__ can expose you to race conditions on garbage collection between your processes. """ SmartSocket._table_lock.acquire() for i in list(SmartSocket.port_table): plog("DEBUG", "Cleared "+i+" from our local port list") SmartSocket.port_table.remove(i) SmartSocket._table_lock.release() clear_port_table = Callable(clear_port_table) def StreamSelector(host, port): to_test = host+":"+str(port) SmartSocket._table_lock.acquire() ret = (to_test in SmartSocket.port_table) SmartSocket._table_lock.release() return ret StreamSelector = Callable(StreamSelector) def StreamSelector(host, port): """ A StreamSelector is a function that takes a host and a port as arguments (parsed from Tor's SOURCE_ADDR field in STREAM NEW events) and decides if it is a stream from this process or not. This StreamSelector is just a placeholder that always returns True. When you define your own, be aware that you MUST DO YOUR OWN LOCKING inside this function, as it is called from the Eventhandler thread. See PathSupport.SmartSocket.StreamSelctor for an actual implementation. """ return True # TODO: Make passive "PathWatcher" so people can get aggregate # node reliability stats for normal usage without us attaching streams # Can use __metaclass__ and type class PathBuilder(TorCtl.ConsensusTracker): """ PathBuilder implementation. Handles circuit construction, subject to the constraints of the SelectionManager selmgr. Do not access this object from other threads. Instead, use the schedule_* functions to schedule work to be done in the thread of the EventHandler. """ def __init__(self, c, selmgr, RouterClass=TorCtl.Router, strm_selector=StreamSelector): """Constructor. 'c' is a Connection, 'selmgr' is a SelectionManager, and 'RouterClass' is a class that inherits from Router and is used to create annotated Routers.""" TorCtl.ConsensusTracker.__init__(self, c, RouterClass) self.last_exit = None self.new_nym = False self.resolve_port = 0 self.num_circuits = 1 self.circuits = {} self.streams = {} self.selmgr = selmgr self.selmgr.reconfigure(self.current_consensus()) self.imm_jobs = Queue.Queue() self.low_prio_jobs = Queue.Queue() self.run_all_jobs = False self.do_reconfigure = False self.strm_selector = strm_selector plog("INFO", "Read "+str(len(self.sorted_r))+"/"+str(len(self.ns_map))+" routers") def schedule_immediate(self, job): """ Schedules an immediate job to be run before the next event is processed. """ assert(self.c.is_live()) self.imm_jobs.put(job) def schedule_low_prio(self, job): """ Schedules a job to be run when a non-time critical event arrives. """ assert(self.c.is_live()) self.low_prio_jobs.put(job) def reset(self): """ Resets accumulated state. Currently only clears the ExactUniformGenerator state. """ plog("DEBUG", "Resetting _generated values for ExactUniformGenerator") for r in self.routers.itervalues(): for g in xrange(0, len(r._generated)): r._generated[g] = 0 def is_urgent_event(event): # If event is stream:NEW*/DETACHED or circ BUILT/FAILED, # it is high priority and requires immediate action. if isinstance(event, TorCtl.CircuitEvent): if event.status in ("BUILT", "FAILED", "CLOSED"): return True elif isinstance(event, TorCtl.StreamEvent): if event.status in ("NEW", "NEWRESOLVE", "DETACHED"): return True return False is_urgent_event = Callable(is_urgent_event) def schedule_selmgr(self, job): """ Schedules an immediate job to be run before the next event is processed. Also notifies the selection manager that it needs to update itself. """ assert(self.c.is_live()) def notlambda(this): job(this.selmgr) this.do_reconfigure = True self.schedule_immediate(notlambda) def heartbeat_event(self, event): """This function handles dispatching scheduled jobs. If you extend PathBuilder and want to implement this function for some reason, be sure to call the parent class""" while not self.imm_jobs.empty(): imm_job = self.imm_jobs.get_nowait() imm_job(self) if self.do_reconfigure: self.selmgr.reconfigure(self.current_consensus()) self.do_reconfigure = False if self.run_all_jobs: while not self.low_prio_jobs.empty() and self.run_all_jobs: imm_job = self.low_prio_jobs.get_nowait() imm_job(self) self.run_all_jobs = False return # If event is stream:NEW*/DETACHED or circ BUILT/FAILED, # don't run low prio jobs.. No need to delay streams for them. if PathBuilder.is_urgent_event(event): return # Do the low prio jobs one at a time in case a # higher priority event is queued if not self.low_prio_jobs.empty(): delay_job = self.low_prio_jobs.get_nowait() delay_job(self) def build_path(self): """ Get a path from the SelectionManager's PathSelector, can be used e.g. for generating paths without actually creating any circuits """ return self.selmgr.select_path() def close_all_streams(self, reason): """ Close all open streams """ for strm in self.streams.itervalues(): if not strm.ignored: try: self.c.close_stream(strm.strm_id, reason) except TorCtl.ErrorReply, e: # This can happen. Streams can timeout before this call. plog("NOTICE", "Error closing stream "+str(strm.strm_id)+": "+str(e)) def close_all_circuits(self): """ Close all open circuits """ for circ in self.circuits.itervalues(): self.close_circuit(circ.circ_id) def close_circuit(self, id): """ Close a circuit with given id """ # TODO: Pass streams to another circ before closing? plog("DEBUG", "Requesting close of circuit id: "+str(id)) if self.circuits[id].requested_closed: return self.circuits[id].requested_closed = True try: self.c.close_circuit(id) except TorCtl.ErrorReply, e: plog("ERROR", "Failed closing circuit " + str(id) + ": " + str(e)) def circuit_list(self): """ Return an iterator or a list of circuits prioritized for stream selection.""" return self.circuits.itervalues() def attach_stream_any(self, stream, badcircs): "Attach a stream to a valid circuit, avoiding any in 'badcircs'" # Newnym, and warn if not built plus pending unattached_streams = [stream] if self.new_nym: self.new_nym = False plog("DEBUG", "Obeying new nym") for key in self.circuits.keys(): if (not self.circuits[key].dirty and len(self.circuits[key].pending_streams)): plog("WARN", "New nym called, destroying circuit "+str(key) +" with "+str(len(self.circuits[key].pending_streams)) +" pending streams") unattached_streams.extend(self.circuits[key].pending_streams) self.circuits[key].pending_streams = [] # FIXME: Consider actually closing circ if no streams. self.circuits[key].dirty = True for circ in self.circuit_list(): if circ.built and not circ.requested_closed and not circ.dirty \ and circ.circ_id not in badcircs: # XXX: Fails for 'tor-resolve 530.19.6.80' -> NEWRESOLVE if circ.exit.will_exit_to(stream.host, stream.port): try: self.c.attach_stream(stream.strm_id, circ.circ_id) stream.pending_circ = circ # Only one possible here circ.pending_streams.append(stream) except TorCtl.ErrorReply, e: # No need to retry here. We should get the failed # event for either the circ or stream next plog("WARN", "Error attaching new stream: "+str(e.args)) return break # This else clause is executed when we go through the circuit # list without finding an entry (or it is empty). # http://docs.python.org/tutorial/controlflow.html#break-and-continue-statements-and-else-clauses-on-loops else: circ = None try: self.selmgr.set_target(stream.host, stream.port) circ = self.c.build_circuit(self.selmgr.select_path()) except RestrictionError, e: # XXX: Dress this up a bit self.last_exit = None # Kill this stream plog("WARN", "Closing impossible stream "+str(stream.strm_id)+" ("+str(e)+")") try: self.c.close_stream(stream.strm_id, "4") # END_STREAM_REASON_EXITPOLICY except TorCtl.ErrorReply, e: plog("WARN", "Error closing stream: "+str(e)) return except TorCtl.ErrorReply, e: plog("WARN", "Error building circ: "+str(e.args)) self.last_exit = None # Kill this stream plog("NOTICE", "Closing stream "+str(stream.strm_id)) try: self.c.close_stream(stream.strm_id, "5") # END_STREAM_REASON_DESTROY except TorCtl.ErrorReply, e: plog("WARN", "Error closing stream: "+str(e)) return for u in unattached_streams: plog("DEBUG", "Attaching "+str(u.strm_id)+" pending build of "+str(circ.circ_id)) u.pending_circ = circ circ.pending_streams.extend(unattached_streams) self.circuits[circ.circ_id] = circ self.last_exit = circ.exit plog("DEBUG", "Set last exit to "+self.last_exit.idhex) def circ_status_event(self, c): output = [str(time.time()-c.arrived_at), c.event_name, str(c.circ_id), c.status] if c.path: output.append(",".join(c.path)) if c.reason: output.append("REASON=" + c.reason) if c.remote_reason: output.append("REMOTE_REASON=" + c.remote_reason) plog("DEBUG", " ".join(output)) # Circuits we don't control get built by Tor if c.circ_id not in self.circuits: plog("DEBUG", "Ignoring circ " + str(c.circ_id)) return if c.status == "EXTENDED": self.circuits[c.circ_id].last_extended_at = c.arrived_at elif c.status == "FAILED" or c.status == "CLOSED": # XXX: Can still get a STREAM FAILED for this circ after this circ = self.circuits[c.circ_id] for r in circ.path: r.refcount -= 1 plog("DEBUG", "Close refcount "+str(r.refcount)+" for "+r.idhex) if r.deleted and r.refcount == 0: # XXX: This shouldn't happen with StatsRouters.. if r.__class__.__name__ == "StatsRouter": plog("WARN", "Purging expired StatsRouter "+r.idhex) else: plog("INFO", "Purging expired router "+r.idhex) del self.routers[r.idhex] self.selmgr.new_consensus(self.current_consensus()) del self.circuits[c.circ_id] for stream in circ.pending_streams: # If it was built, let Tor decide to detach or fail the stream if not circ.built: plog("DEBUG", "Finding new circ for " + str(stream.strm_id)) self.attach_stream_any(stream, stream.detached_from) else: plog("NOTICE", "Waiting on Tor to hint about stream "+str(stream.strm_id)+" on closed circ "+str(circ.circ_id)) elif c.status == "BUILT": self.circuits[c.circ_id].built = True try: for stream in self.circuits[c.circ_id].pending_streams: self.c.attach_stream(stream.strm_id, c.circ_id) except TorCtl.ErrorReply, e: # No need to retry here. We should get the failed # event for either the circ or stream in the next event plog("NOTICE", "Error attaching pending stream: "+str(e.args)) return def stream_status_event(self, s): output = [str(time.time()-s.arrived_at), s.event_name, str(s.strm_id), s.status, str(s.circ_id), s.target_host, str(s.target_port)] if s.reason: output.append("REASON=" + s.reason) if s.remote_reason: output.append("REMOTE_REASON=" + s.remote_reason) if s.purpose: output.append("PURPOSE=" + s.purpose) if s.source_addr: output.append("SOURCE_ADDR="+s.source_addr) if not re.match(r"\d+.\d+.\d+.\d+", s.target_host): s.target_host = "255.255.255.255" # ignore DNS for exit policy check # Hack to ignore Tor-handled streams if s.strm_id in self.streams and self.streams[s.strm_id].ignored: if s.status == "CLOSED": plog("DEBUG", "Deleting ignored stream: " + str(s.strm_id)) del self.streams[s.strm_id] else: plog("DEBUG", "Ignoring stream: " + str(s.strm_id)) return plog("DEBUG", " ".join(output)) # XXX: Copy s.circ_id==0 check+reset from StatsSupport here too? if s.status == "NEW" or s.status == "NEWRESOLVE": if s.status == "NEWRESOLVE" and not s.target_port: s.target_port = self.resolve_port if s.circ_id == 0: self.streams[s.strm_id] = Stream(s.strm_id, s.target_host, s.target_port, s.status) elif s.strm_id not in self.streams: plog("NOTICE", "Got new stream "+str(s.strm_id)+" with circuit " +str(s.circ_id)+" already attached.") self.streams[s.strm_id] = Stream(s.strm_id, s.target_host, s.target_port, s.status) self.streams[s.strm_id].circ_id = s.circ_id # Remember Tor-handled streams (Currently only directory streams) if s.purpose and s.purpose.find("DIR_") == 0: self.streams[s.strm_id].ignored = True plog("DEBUG", "Ignoring stream: " + str(s.strm_id)) return elif s.source_addr: src_addr = s.source_addr.split(":") src_addr[1] = int(src_addr[1]) if not self.strm_selector(*src_addr): self.streams[s.strm_id].ignored = True plog("INFO", "Ignoring foreign stream: " + str(s.strm_id)) return if s.circ_id == 0: self.attach_stream_any(self.streams[s.strm_id], self.streams[s.strm_id].detached_from) elif s.status == "DETACHED": if s.strm_id not in self.streams: plog("WARN", "Detached stream "+str(s.strm_id)+" not found") self.streams[s.strm_id] = Stream(s.strm_id, s.target_host, s.target_port, "NEW") # FIXME Stats (differentiate Resolved streams also..) if not s.circ_id: if s.reason == "TIMEOUT" or s.reason == "EXITPOLICY": plog("NOTICE", "Stream "+str(s.strm_id)+" detached with "+s.reason) else: plog("WARN", "Stream "+str(s.strm_id)+" detached from no circuit with reason: "+str(s.reason)) else: self.streams[s.strm_id].detached_from.append(s.circ_id) if self.streams[s.strm_id].pending_circ and \ self.streams[s.strm_id] in \ self.streams[s.strm_id].pending_circ.pending_streams: self.streams[s.strm_id].pending_circ.pending_streams.remove( self.streams[s.strm_id]) self.streams[s.strm_id].pending_circ = None self.attach_stream_any(self.streams[s.strm_id], self.streams[s.strm_id].detached_from) elif s.status == "SUCCEEDED": if s.strm_id not in self.streams: plog("NOTICE", "Succeeded stream "+str(s.strm_id)+" not found") return if s.circ_id and self.streams[s.strm_id].pending_circ.circ_id != s.circ_id: # Hrmm.. this can happen on a new-nym.. Very rare, putting warn # in because I'm still not sure this is correct plog("WARN", "Mismatch of pending: " +str(self.streams[s.strm_id].pending_circ.circ_id)+" vs " +str(s.circ_id)) # This can happen if the circuit existed before we started up if s.circ_id in self.circuits: self.streams[s.strm_id].circ = self.circuits[s.circ_id] else: plog("NOTICE", "Stream "+str(s.strm_id)+" has unknown circuit: "+str(s.circ_id)) else: self.streams[s.strm_id].circ = self.streams[s.strm_id].pending_circ self.streams[s.strm_id].pending_circ.pending_streams.remove(self.streams[s.strm_id]) self.streams[s.strm_id].pending_circ = None self.streams[s.strm_id].attached_at = s.arrived_at elif s.status == "FAILED" or s.status == "CLOSED": # FIXME stats if s.strm_id not in self.streams: plog("NOTICE", "Failed stream "+str(s.strm_id)+" not found") return # XXX: Can happen on timeout if not s.circ_id: if s.reason == "TIMEOUT" or s.reason == "EXITPOLICY": plog("NOTICE", "Stream "+str(s.strm_id)+" "+s.status+" with "+s.reason) else: plog("WARN", "Stream "+str(s.strm_id)+" "+s.status+" from no circuit with reason: "+str(s.reason)) # We get failed and closed for each stream. OK to return # and let the closed do the cleanup if s.status == "FAILED": # Avoid busted circuits that will not resolve or carry # traffic. self.streams[s.strm_id].failed = True if s.circ_id in self.circuits: self.circuits[s.circ_id].dirty = True elif s.circ_id != 0: plog("WARN", "Failed stream "+str(s.strm_id)+" on unknown circ "+str(s.circ_id)) return if self.streams[s.strm_id].pending_circ: self.streams[s.strm_id].pending_circ.pending_streams.remove(self.streams[s.strm_id]) del self.streams[s.strm_id] elif s.status == "REMAP": if s.strm_id not in self.streams: plog("WARN", "Remap id "+str(s.strm_id)+" not found") else: if not re.match(r"\d+.\d+.\d+.\d+", s.target_host): s.target_host = "255.255.255.255" plog("NOTICE", "Non-IP remap for "+str(s.strm_id)+" to " + s.target_host) self.streams[s.strm_id].host = s.target_host self.streams[s.strm_id].port = s.target_port def stream_bw_event(self, s): output = [str(time.time()-s.arrived_at), s.event_name, str(s.strm_id), str(s.bytes_written), str(s.bytes_read)] if not s.strm_id in self.streams: plog("DEBUG", " ".join(output)) plog("WARN", "BW event for unknown stream id: "+str(s.strm_id)) else: if not self.streams[s.strm_id].ignored: plog("DEBUG", " ".join(output)) self.streams[s.strm_id].bytes_read += s.bytes_read self.streams[s.strm_id].bytes_written += s.bytes_written def new_consensus_event(self, n): TorCtl.ConsensusTracker.new_consensus_event(self, n) self.selmgr.new_consensus(self.current_consensus()) def new_desc_event(self, d): if TorCtl.ConsensusTracker.new_desc_event(self, d): self.selmgr.new_consensus(self.current_consensus()) def bandwidth_event(self, b): pass # For heartbeat only.. ################### CircuitHandler ############################# class CircuitHandler(PathBuilder): """ CircuitHandler that extends from PathBuilder to handle multiple circuits as opposed to just one. """ def __init__(self, c, selmgr, num_circuits, RouterClass): """Constructor. 'c' is a Connection, 'selmgr' is a SelectionManager, 'num_circuits' is the number of circuits to keep in the pool, and 'RouterClass' is a class that inherits from Router and is used to create annotated Routers.""" PathBuilder.__init__(self, c, selmgr, RouterClass) # Set handler to the connection here to # not miss any circuit events on startup c.set_event_handler(self) self.num_circuits = num_circuits # Size of the circuit pool self.check_circuit_pool() # Bring up the pool of circs def check_circuit_pool(self): """ Init or check the status of the circuit-pool """ # Get current number of circuits n = len(self.circuits.values()) i = self.num_circuits-n if i > 0: plog("INFO", "Checked pool of circuits: we need to build " + str(i) + " circuits") # Schedule (num_circs-n) circuit-buildups while (n < self.num_circuits): # TODO: Should mimic Tor's learning here self.build_circuit("255.255.255.255", 80) plog("DEBUG", "Scheduled circuit No. " + str(n+1)) n += 1 def build_circuit(self, host, port): """ Build a circuit """ circ = None while circ == None: try: self.selmgr.set_target(host, port) circ = self.c.build_circuit(self.selmgr.select_path()) self.circuits[circ.circ_id] = circ return circ except RestrictionError, e: # XXX: Dress this up a bit traceback.print_exc() plog("ERROR", "Impossible restrictions: "+str(e)) except TorCtl.ErrorReply, e: traceback.print_exc() plog("WARN", "Error building circuit: " + str(e.args)) def circ_status_event(self, c): """ Handle circuit status events """ output = [c.event_name, str(c.circ_id), c.status] if c.path: output.append(",".join(c.path)) if c.reason: output.append("REASON=" + c.reason) if c.remote_reason: output.append("REMOTE_REASON=" + c.remote_reason) plog("DEBUG", " ".join(output)) # Circuits we don't control get built by Tor if c.circ_id not in self.circuits: plog("DEBUG", "Ignoring circuit " + str(c.circ_id) + " (controlled by Tor)") return # EXTENDED if c.status == "EXTENDED": # Compute elapsed time extend_time = c.arrived_at-self.circuits[c.circ_id].last_extended_at self.circuits[c.circ_id].extend_times.append(extend_time) plog("INFO", "Circuit " + str(c.circ_id) + " extended in " + str(extend_time) + " sec") self.circuits[c.circ_id].last_extended_at = c.arrived_at # FAILED & CLOSED elif c.status == "FAILED" or c.status == "CLOSED": PathBuilder.circ_status_event(self, c) # Check if there are enough circs self.check_circuit_pool() return # BUILT elif c.status == "BUILT": PathBuilder.circ_status_event(self, c) # Compute duration by summing up extend_times circ = self.circuits[c.circ_id] duration = reduce(lambda x, y: x+y, circ.extend_times, 0.0) plog("INFO", "Circuit " + str(c.circ_id) + " needed " + str(duration) + " seconds to be built") # Save the duration to the circuit for later use circ.setup_duration = duration # OTHER? else: # If this was e.g. a LAUNCHED pass ################### StreamHandler ############################## class StreamHandler(CircuitHandler): """ StreamHandler that extends from the CircuitHandler to handle attaching streams to an appropriate circuit in the pool. """ def __init__(self, c, selmgr, num_circs, RouterClass): CircuitHandler.__init__(self, c, selmgr, num_circs, RouterClass) def clear_dns_cache(self): """ Send signal CLEARDNSCACHE """ lines = self.c.sendAndRecv("SIGNAL CLEARDNSCACHE\r\n") for _, msg, more in lines: plog("DEBUG", "CLEARDNSCACHE: " + msg) def close_stream(self, id, reason): """ Close a stream with given id and reason """ self.c.close_stream(id, reason) def address_mapped_event(self, event): """ It is necessary to listen to ADDRMAP events to be able to perform DNS lookups using Tor """ output = [event.event_name, event.from_addr, event.to_addr, time.asctime(event.when)] plog("DEBUG", " ".join(output)) def unknown_event(self, event): plog("DEBUG", "UNKNOWN EVENT '" + event.event_name + "':" + event.event_string) ########################## Unit tests ########################## def do_gen_unit(gen, r_list, weight_bw, num_print): trials = 0 for r in r_list: if gen.rstr_list.r_is_ok(r): trials += weight_bw(gen, r) trials = int(trials/1024) print "Running "+str(trials)+" trials" # 0. Reset r.chosen = 0 for all routers for r in r_list: r.chosen = 0 # 1. Generate 'trials' choices: # 1a. r.chosen++ loglevel = TorUtil.loglevel TorUtil.loglevel = "INFO" gen.rewind() rtrs = gen.generate() for i in xrange(1, trials): r = rtrs.next() r.chosen += 1 TorUtil.loglevel = loglevel # 2. Print top num_print routers choices+bandwidth stats+flags i = 0 copy_rlist = copy.copy(r_list) copy_rlist.sort(lambda x, y: cmp(y.chosen, x.chosen)) for r in copy_rlist: if r.chosen and not gen.rstr_list.r_is_ok(r): print "WARN: Restriction fail at "+r.idhex if not r.chosen and gen.rstr_list.r_is_ok(r): print "WARN: Generation fail at "+r.idhex if not gen.rstr_list.r_is_ok(r): continue flag = "" bw = int(weight_bw(gen, r)) if "Exit" in r.flags: flag += "E" if "Guard" in r.flags: flag += "G" print str(r.list_rank)+". "+r.nickname+" "+str(r.bw/1024.0)+"/"+str(bw/1024.0)+": "+str(r.chosen)+", "+flag i += 1 if i > num_print: break def do_unit(rst, r_list, plamb): print "\n" print "-----------------------------------" print rst.r_is_ok.im_class above_i = 0 above_bw = 0 below_i = 0 below_bw = 0 for r in r_list: if rst.r_is_ok(r): print r.nickname+" "+plamb(r)+"="+str(rst.r_is_ok(r))+" "+str(r.bw) if r.bw > 400000: above_i = above_i + 1 above_bw += r.bw else: below_i = below_i + 1 below_bw += r.bw print "Routers above: " + str(above_i) + " bw: " + str(above_bw) print "Routers below: " + str(below_i) + " bw: " + str(below_bw) # TODO: Tests: # - Test each NodeRestriction and print in/out lines for it # - Test NodeGenerator and reapply NodeRestrictions # - Same for PathSelector and PathRestrictions # - Also Reapply each restriction by hand to path. Verify returns true if __name__ == '__main__': s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect((TorUtil.control_host,TorUtil.control_port)) c = Connection(s) c.debug(file("control.log", "w")) c.authenticate(TorUtil.control_pass) nslist = c.get_network_status() sorted_rlist = c.read_routers(c.get_network_status()) sorted_rlist.sort(lambda x, y: cmp(y.bw, x.bw)) for i in xrange(len(sorted_rlist)): sorted_rlist[i].list_rank = i def flag_weighting(bwgen, r): bw = r.bw if "Exit" in r.flags: bw *= bwgen.exit_weight if "Guard" in r.flags: bw *= bwgen.guard_weight return bw def uniform_weighting(bwgen, r): return 10240000 # XXX: Test OrderedexitGenerators do_gen_unit( UniformGenerator(sorted_rlist, NodeRestrictionList([PercentileRestriction(20,30,sorted_rlist), FlagsRestriction(["Valid"])])), sorted_rlist, uniform_weighting, 1500) do_gen_unit(BwWeightedGenerator(sorted_rlist, FlagsRestriction(["Exit"]), 3, exit=True), sorted_rlist, flag_weighting, 500) do_gen_unit(BwWeightedGenerator(sorted_rlist, FlagsRestriction(["Guard"]), 3, guard=True), sorted_rlist, flag_weighting, 500) do_gen_unit( BwWeightedGenerator(sorted_rlist, FlagsRestriction(["Valid"]), 3), sorted_rlist, flag_weighting, 500) for r in sorted_rlist: if r.will_exit_to("211.11.21.22", 465): print r.nickname+" "+str(r.bw) do_unit(FlagsRestriction(["Guard"], []), sorted_rlist, lambda r: " ".join(r.flags)) do_unit(FlagsRestriction(["Fast"], []), sorted_rlist, lambda r: " ".join(r.flags)) do_unit(ExitPolicyRestriction("2.11.2.2", 80), sorted_rlist, lambda r: "exits to 80") do_unit(PercentileRestriction(0, 100, sorted_rlist), sorted_rlist, lambda r: "") do_unit(PercentileRestriction(10, 20, sorted_rlist), sorted_rlist, lambda r: "") do_unit(OSRestriction([r"[lL]inux", r"BSD", "Darwin"], []), sorted_rlist, lambda r: r.os) do_unit(OSRestriction([], ["Windows", "Solaris"]), sorted_rlist, lambda r: r.os) do_unit(VersionRangeRestriction("0.1.2.0"), sorted_rlist, lambda r: str(r.version)) do_unit(VersionRangeRestriction("0.1.2.0", "0.1.2.5"), sorted_rlist, lambda r: str(r.version)) do_unit(VersionIncludeRestriction(["0.1.1.26-alpha", "0.1.2.7-ignored"]), sorted_rlist, lambda r: str(r.version)) do_unit(VersionExcludeRestriction(["0.1.1.26"]), sorted_rlist, lambda r: str(r.version)) do_unit(ConserveExitsRestriction(), sorted_rlist, lambda r: " ".join(r.flags)) do_unit(FlagsRestriction([], ["Valid"]), sorted_rlist, lambda r: " ".join(r.flags)) do_unit(IdHexRestriction("$FFCB46DB1339DA84674C70D7CB586434C4370441"), sorted_rlist, lambda r: r.idhex) rl = [AtLeastNNodeRestriction([ExitPolicyRestriction("255.255.255.255", 80), ExitPolicyRestriction("255.255.255.255", 443), ExitPolicyRestriction("255.255.255.255", 6667)], 2), FlagsRestriction([], ["BadExit"])] exit_rstr = NodeRestrictionList(rl) ug = UniformGenerator(sorted_rlist, exit_rstr) ug.rewind() rlist = [] for r in ug.generate(): print "Checking: " + r.nickname for rs in rl: if not rs.r_is_ok(r): raise PathError() if not "Exit" in r.flags: print "No exit in flags of "+r.idhex for e in r.exitpolicy: print " "+str(e) print " 80: "+str(r.will_exit_to("255.255.255.255", 80)) print " 443: "+str(r.will_exit_to("255.255.255.255", 443)) print " 6667: "+str(r.will_exit_to("255.255.255.255", 6667)) ug.mark_chosen(r) rlist.append(r) for r in sorted_rlist: if "Exit" in r.flags and not r in rlist: print r.idhex+" is an exit not in rl!"
bsd-3-clause
dhocker/athomepowerlineserver
commands/group_off.py
1
1500
# -*- coding: utf-8 -*- # # Group off # Copyright © 2020 Dave Hocker # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, version 3 of the License. # # See the LICENSE file for more details. # from commands.ServerCommand import ServerCommand from database.action_group_devices import ActionGroupDevices ####################################################################### # Command handler for group off command class GroupOff(ServerCommand): ####################################################################### # Execute the "of" command. def Execute(self, request): group_id = int(request["args"]["group-id"]) r = self.CreateResponse(request["request"]) agd = ActionGroupDevices() group_devices = agd.get_group_devices(group_id) if group_devices is None: r['result-code'] = agd.last_error_code r['message'] = agd.last_error return r for group_device in group_devices: driver = self.get_driver_for_id(group_device["id"]) result = driver.device_off(group_device["mfg"], group_device["name"], group_device["address"], group_device["channel"]) r['result-code'] = driver.last_error_code if result: r['message'] = ServerCommand.MSG_SUCCESS else: r['message'] = driver.last_error return r
gpl-3.0
Impactstory/total-impact-webapp
totalimpact/providers/bibtex_lookup.py
4
75780
# from https://gist.github.com/jalavik/976294 # original XML at http://www.w3.org/Math/characters/unicode.xml # XSL for conversion: https://gist.github.com/798546 unicode_to_latex = { u"\u0020": "\\space ", u"\u0023": "\\#", u"\u0024": "\\textdollar ", u"\u0025": "\\%", u"\u0026": "\\&amp;", u"\u0027": "\\textquotesingle ", u"\u002A": "\\ast ", u"\u005C": "\\textbackslash ", u"\u005E": "\\^{}", u"\u005F": "\\_", u"\u0060": "\\textasciigrave ", u"\u007B": "\\lbrace ", u"\u007C": "\\vert ", u"\u007D": "\\rbrace ", u"\u007E": "\\textasciitilde ", u"\u00A1": "\\textexclamdown ", u"\u00A2": "\\textcent ", u"\u00A3": "\\textsterling ", u"\u00A4": "\\textcurrency ", u"\u00A5": "\\textyen ", u"\u00A6": "\\textbrokenbar ", u"\u00A7": "\\textsection ", u"\u00A8": "\\textasciidieresis ", u"\u00A9": "\\textcopyright ", u"\u00AA": "\\textordfeminine ", u"\u00AB": "\\guillemotleft ", u"\u00AC": "\\lnot ", u"\u00AD": "\\-", u"\u00AE": "\\textregistered ", u"\u00AF": "\\textasciimacron ", u"\u00B0": "\\textdegree ", u"\u00B1": "\\pm ", u"\u00B2": "{^2}", u"\u00B3": "{^3}", u"\u00B4": "\\textasciiacute ", u"\u00B5": "\\mathrm{\\mu}", u"\u00B6": "\\textparagraph ", u"\u00B7": "\\cdot ", u"\u00B8": "\\c{}", u"\u00B9": "{^1}", u"\u00BA": "\\textordmasculine ", u"\u00BB": "\\guillemotright ", u"\u00BC": "\\textonequarter ", u"\u00BD": "\\textonehalf ", u"\u00BE": "\\textthreequarters ", u"\u00BF": "\\textquestiondown ", u"\u00C0": "\\`{A}", u"\u00C1": "\\'{A}", u"\u00C2": "\\^{A}", u"\u00C3": "\\~{A}", u"\u00C4": "\\\"{A}", u"\u00C5": "\\AA ", u"\u00C6": "\\AE ", u"\u00C7": "\\c{C}", u"\u00C8": "\\`{E}", u"\u00C9": "\\'{E}", u"\u00CA": "\\^{E}", u"\u00CB": "\\\"{E}", u"\u00CC": "\\`{I}", u"\u00CD": "\\'{I}", u"\u00CE": "\\^{I}", u"\u00CF": "\\\"{I}", u"\u00D0": "\\DH ", u"\u00D1": "\\~{N}", u"\u00D2": "\\`{O}", u"\u00D3": "\\'{O}", u"\u00D4": "\\^{O}", u"\u00D5": "\\~{O}", u"\u00D6": "\\\"{O}", u"\u00D7": "\\texttimes ", u"\u00D8": "\\O ", u"\u00D9": "\\`{U}", u"\u00DA": "\\'{U}", u"\u00DB": "\\^{U}", u"\u00DC": "\\\"{U}", u"\u00DD": "\\'{Y}", u"\u00DE": "\\TH ", u"\u00DF": "\\ss ", u"\u00E0": "\\`{a}", u"\u00E1": "\\'{a}", u"\u00E2": "\\^{a}", u"\u00E3": "\\~{a}", u"\u00E4": "\\\"{a}", u"\u00E5": "\\aa ", u"\u00E6": "\\ae ", u"\u00E7": "\\c{c}", u"\u00E8": "\\`{e}", u"\u00E9": "\\'{e}", u"\u00EA": "\\^{e}", u"\u00EB": "\\\"{e}", u"\u00EC": "\\`{\\i}", u"\u00ED": "\\'{\\i}", u"\u00EE": "\\^{\\i}", u"\u00EF": "\\\"{\\i}", u"\u00F0": "\\dh ", u"\u00F1": "\\~{n}", u"\u00F2": "\\`{o}", u"\u00F3": "\\'{o}", u"\u00F4": "\\^{o}", u"\u00F5": "\\~{o}", u"\u00F6": "\\\"{o}", u"\u00F7": "\\div ", u"\u00F8": "\\o ", u"\u00F9": "\\`{u}", u"\u00FA": "\\'{u}", u"\u00FB": "\\^{u}", u"\u00FC": "\\\"{u}", u"\u00FD": "\\'{y}", u"\u00FE": "\\th ", u"\u00FF": "\\\"{y}", u"\u0100": "\\={A}", u"\u0101": "\\={a}", u"\u0102": "\\u{A}", u"\u0103": "\\u{a}", u"\u0104": "\\k{A}", u"\u0105": "\\k{a}", u"\u0106": "\\'{C}", u"\u0107": "\\'{c}", u"\u0108": "\\^{C}", u"\u0109": "\\^{c}", u"\u010A": "\\.{C}", u"\u010B": "\\.{c}", u"\u010C": "\\v{C}", u"\u010D": "\\v{c}", u"\u010E": "\\v{D}", u"\u010F": "\\v{d}", u"\u0110": "\\DJ ", u"\u0111": "\\dj ", u"\u0112": "\\={E}", u"\u0113": "\\={e}", u"\u0114": "\\u{E}", u"\u0115": "\\u{e}", u"\u0116": "\\.{E}", u"\u0117": "\\.{e}", u"\u0118": "\\k{E}", u"\u0119": "\\k{e}", u"\u011A": "\\v{E}", u"\u011B": "\\v{e}", u"\u011C": "\\^{G}", u"\u011D": "\\^{g}", u"\u011E": "\\u{G}", u"\u011F": "\\u{g}", u"\u0120": "\\.{G}", u"\u0121": "\\.{g}", u"\u0122": "\\c{G}", u"\u0123": "\\c{g}", u"\u0124": "\\^{H}", u"\u0125": "\\^{h}", u"\u0126": "{\\fontencoding{LELA}\\selectfont\\char40}", u"\u0127": "\\Elzxh ", u"\u0128": "\\~{I}", u"\u0129": "\\~{\\i}", u"\u012A": "\\={I}", u"\u012B": "\\={\\i}", u"\u012C": "\\u{I}", u"\u012D": "\\u{\\i}", u"\u012E": "\\k{I}", u"\u012F": "\\k{i}", u"\u0130": "\\.{I}", u"\u0131": "\\i ", u"\u0132": "IJ", u"\u0133": "ij", u"\u0134": "\\^{J}", u"\u0135": "\\^{\\j}", u"\u0136": "\\c{K}", u"\u0137": "\\c{k}", u"\u0138": "{\\fontencoding{LELA}\\selectfont\\char91}", u"\u0139": "\\'{L}", u"\u013A": "\\'{l}", u"\u013B": "\\c{L}", u"\u013C": "\\c{l}", u"\u013D": "\\v{L}", u"\u013E": "\\v{l}", u"\u013F": "{\\fontencoding{LELA}\\selectfont\\char201}", u"\u0140": "{\\fontencoding{LELA}\\selectfont\\char202}", u"\u0141": "\\L ", u"\u0142": "\\l ", u"\u0143": "\\'{N}", u"\u0144": "\\'{n}", u"\u0145": "\\c{N}", u"\u0146": "\\c{n}", u"\u0147": "\\v{N}", u"\u0148": "\\v{n}", u"\u0149": "'n", u"\u014A": "\\NG ", u"\u014B": "\\ng ", u"\u014C": "\\={O}", u"\u014D": "\\={o}", u"\u014E": "\\u{O}", u"\u014F": "\\u{o}", u"\u0150": "\\H{O}", u"\u0151": "\\H{o}", u"\u0152": "\\OE ", u"\u0153": "\\oe ", u"\u0154": "\\'{R}", u"\u0155": "\\'{r}", u"\u0156": "\\c{R}", u"\u0157": "\\c{r}", u"\u0158": "\\v{R}", u"\u0159": "\\v{r}", u"\u015A": "\\'{S}", u"\u015B": "\\'{s}", u"\u015C": "\\^{S}", u"\u015D": "\\^{s}", u"\u015E": "\\c{S}", u"\u015F": "\\c{s}", u"\u0160": "\\v{S}", u"\u0161": "\\v{s}", u"\u0162": "\\c{T}", u"\u0163": "\\c{t}", u"\u0164": "\\v{T}", u"\u0165": "\\v{t}", u"\u0166": "{\\fontencoding{LELA}\\selectfont\\char47}", u"\u0167": "{\\fontencoding{LELA}\\selectfont\\char63}", u"\u0168": "\\~{U}", u"\u0169": "\\~{u}", u"\u016A": "\\={U}", u"\u016B": "\\={u}", u"\u016C": "\\u{U}", u"\u016D": "\\u{u}", u"\u016E": "\\r{U}", u"\u016F": "\\r{u}", u"\u0170": "\\H{U}", u"\u0171": "\\H{u}", u"\u0172": "\\k{U}", u"\u0173": "\\k{u}", u"\u0174": "\\^{W}", u"\u0175": "\\^{w}", u"\u0176": "\\^{Y}", u"\u0177": "\\^{y}", u"\u0178": "\\\"{Y}", u"\u0179": "\\'{Z}", u"\u017A": "\\'{z}", u"\u017B": "\\.{Z}", u"\u017C": "\\.{z}", u"\u017D": "\\v{Z}", u"\u017E": "\\v{z}", u"\u0195": "\\texthvlig ", u"\u019E": "\\textnrleg ", u"\u01AA": "\\eth ", u"\u01BA": "{\\fontencoding{LELA}\\selectfont\\char195}", u"\u01C2": "\\textdoublepipe ", u"\u01F5": "\\'{g}", u"\u0250": "\\Elztrna ", u"\u0252": "\\Elztrnsa ", u"\u0254": "\\Elzopeno ", u"\u0256": "\\Elzrtld ", u"\u0258": "{\\fontencoding{LEIP}\\selectfont\\char61}", u"\u0259": "\\Elzschwa ", u"\u025B": "\\varepsilon ", u"\u0263": "\\Elzpgamma ", u"\u0264": "\\Elzpbgam ", u"\u0265": "\\Elztrnh ", u"\u026C": "\\Elzbtdl ", u"\u026D": "\\Elzrtll ", u"\u026F": "\\Elztrnm ", u"\u0270": "\\Elztrnmlr ", u"\u0271": "\\Elzltlmr ", u"\u0272": "\\Elzltln ", u"\u0273": "\\Elzrtln ", u"\u0277": "\\Elzclomeg ", u"\u0278": "\\textphi ", u"\u0279": "\\Elztrnr ", u"\u027A": "\\Elztrnrl ", u"\u027B": "\\Elzrttrnr ", u"\u027C": "\\Elzrl ", u"\u027D": "\\Elzrtlr ", u"\u027E": "\\Elzfhr ", u"\u027F": "{\\fontencoding{LEIP}\\selectfont\\char202}", u"\u0282": "\\Elzrtls ", u"\u0283": "\\Elzesh ", u"\u0287": "\\Elztrnt ", u"\u0288": "\\Elzrtlt ", u"\u028A": "\\Elzpupsil ", u"\u028B": "\\Elzpscrv ", u"\u028C": "\\Elzinvv ", u"\u028D": "\\Elzinvw ", u"\u028E": "\\Elztrny ", u"\u0290": "\\Elzrtlz ", u"\u0292": "\\Elzyogh ", u"\u0294": "\\Elzglst ", u"\u0295": "\\Elzreglst ", u"\u0296": "\\Elzinglst ", u"\u029E": "\\textturnk ", u"\u02A4": "\\Elzdyogh ", u"\u02A7": "\\Elztesh ", u"\u02C7": "\\textasciicaron ", u"\u02C8": "\\Elzverts ", u"\u02CC": "\\Elzverti ", u"\u02D0": "\\Elzlmrk ", u"\u02D1": "\\Elzhlmrk ", u"\u02D2": "\\Elzsbrhr ", u"\u02D3": "\\Elzsblhr ", u"\u02D4": "\\Elzrais ", u"\u02D5": "\\Elzlow ", u"\u02D8": "\\textasciibreve ", u"\u02D9": "\\textperiodcentered ", u"\u02DA": "\\r{}", u"\u02DB": "\\k{}", u"\u02DC": "\\texttildelow ", u"\u02DD": "\\H{}", u"\u02E5": "\\tone{55}", u"\u02E6": "\\tone{44}", u"\u02E7": "\\tone{33}", u"\u02E8": "\\tone{22}", u"\u02E9": "\\tone{11}", u"\u0300": "\\`", u"\u0301": "\\'", u"\u0302": "\\^", u"\u0303": "\\~", u"\u0304": "\\=", u"\u0306": "\\u", u"\u0307": "\\.", u"\u0308": "\\\"", u"\u030A": "\\r", u"\u030B": "\\H", u"\u030C": "\\v", u"\u030F": "\\cyrchar\\C", u"\u0311": "{\\fontencoding{LECO}\\selectfont\\char177}", u"\u0318": "{\\fontencoding{LECO}\\selectfont\\char184}", u"\u0319": "{\\fontencoding{LECO}\\selectfont\\char185}", u"\u0321": "\\Elzpalh ", u"\u0322": "\\Elzrh ", u"\u0327": "\\c", u"\u0328": "\\k", u"\u032A": "\\Elzsbbrg ", u"\u032B": "{\\fontencoding{LECO}\\selectfont\\char203}", u"\u032F": "{\\fontencoding{LECO}\\selectfont\\char207}", u"\u0335": "\\Elzxl ", u"\u0336": "\\Elzbar ", u"\u0337": "{\\fontencoding{LECO}\\selectfont\\char215}", u"\u0338": "{\\fontencoding{LECO}\\selectfont\\char216}", u"\u033A": "{\\fontencoding{LECO}\\selectfont\\char218}", u"\u033B": "{\\fontencoding{LECO}\\selectfont\\char219}", u"\u033C": "{\\fontencoding{LECO}\\selectfont\\char220}", u"\u033D": "{\\fontencoding{LECO}\\selectfont\\char221}", u"\u0361": "{\\fontencoding{LECO}\\selectfont\\char225}", u"\u0386": "\\'{A}", u"\u0388": "\\'{E}", u"\u0389": "\\'{H}", u"\u038A": "\\'{}{I}", u"\u038C": "\\'{}O", u"\u038E": "\\mathrm{'Y}", u"\u038F": "\\mathrm{'\\Omega}", u"\u0390": "\\acute{\\ddot{\\iota}}", u"\u0391": "\\Alpha ", u"\u0392": "\\Beta ", u"\u0393": "\\Gamma ", u"\u0394": "\\Delta ", u"\u0395": "\\Epsilon ", u"\u0396": "\\Zeta ", u"\u0397": "\\Eta ", u"\u0398": "\\Theta ", u"\u0399": "\\Iota ", u"\u039A": "\\Kappa ", u"\u039B": "\\Lambda ", u"\u039E": "\\Xi ", u"\u03A0": "\\Pi ", u"\u03A1": "\\Rho ", u"\u03A3": "\\Sigma ", u"\u03A4": "\\Tau ", u"\u03A5": "\\Upsilon ", u"\u03A6": "\\Phi ", u"\u03A7": "\\Chi ", u"\u03A8": "\\Psi ", u"\u03A9": "\\Omega ", u"\u03AA": "\\mathrm{\\ddot{I}}", u"\u03AB": "\\mathrm{\\ddot{Y}}", u"\u03AC": "\\'{$\\alpha$}", u"\u03AD": "\\acute{\\epsilon}", u"\u03AE": "\\acute{\\eta}", u"\u03AF": "\\acute{\\iota}", u"\u03B0": "\\acute{\\ddot{\\upsilon}}", u"\u03B1": "\\alpha ", u"\u03B2": "\\beta ", u"\u03B3": "\\gamma ", u"\u03B4": "\\delta ", u"\u03B5": "\\epsilon ", u"\u03B6": "\\zeta ", u"\u03B7": "\\eta ", u"\u03B8": "\\texttheta ", u"\u03B9": "\\iota ", u"\u03BA": "\\kappa ", u"\u03BB": "\\lambda ", u"\u03BC": "\\mu ", u"\u03BD": "\\nu ", u"\u03BE": "\\xi ", u"\u03C0": "\\pi ", u"\u03C1": "\\rho ", u"\u03C2": "\\varsigma ", u"\u03C3": "\\sigma ", u"\u03C4": "\\tau ", u"\u03C5": "\\upsilon ", u"\u03C6": "\\varphi ", u"\u03C7": "\\chi ", u"\u03C8": "\\psi ", u"\u03C9": "\\omega ", u"\u03CA": "\\ddot{\\iota}", u"\u03CB": "\\ddot{\\upsilon}", u"\u03CC": "\\'{o}", u"\u03CD": "\\acute{\\upsilon}", u"\u03CE": "\\acute{\\omega}", u"\u03D0": "\\Pisymbol{ppi022}{87}", u"\u03D1": "\\textvartheta ", u"\u03D2": "\\Upsilon ", u"\u03D5": "\\phi ", u"\u03D6": "\\varpi ", u"\u03DA": "\\Stigma ", u"\u03DC": "\\Digamma ", u"\u03DD": "\\digamma ", u"\u03DE": "\\Koppa ", u"\u03E0": "\\Sampi ", u"\u03F0": "\\varkappa ", u"\u03F1": "\\varrho ", u"\u03F4": "\\textTheta ", u"\u03F6": "\\backepsilon ", u"\u0401": "\\cyrchar\\CYRYO ", u"\u0402": "\\cyrchar\\CYRDJE ", u"\u0403": "\\cyrchar{\\'\\CYRG}", u"\u0404": "\\cyrchar\\CYRIE ", u"\u0405": "\\cyrchar\\CYRDZE ", u"\u0406": "\\cyrchar\\CYRII ", u"\u0407": "\\cyrchar\\CYRYI ", u"\u0408": "\\cyrchar\\CYRJE ", u"\u0409": "\\cyrchar\\CYRLJE ", u"\u040A": "\\cyrchar\\CYRNJE ", u"\u040B": "\\cyrchar\\CYRTSHE ", u"\u040C": "\\cyrchar{\\'\\CYRK}", u"\u040E": "\\cyrchar\\CYRUSHRT ", u"\u040F": "\\cyrchar\\CYRDZHE ", u"\u0410": "\\cyrchar\\CYRA ", u"\u0411": "\\cyrchar\\CYRB ", u"\u0412": "\\cyrchar\\CYRV ", u"\u0413": "\\cyrchar\\CYRG ", u"\u0414": "\\cyrchar\\CYRD ", u"\u0415": "\\cyrchar\\CYRE ", u"\u0416": "\\cyrchar\\CYRZH ", u"\u0417": "\\cyrchar\\CYRZ ", u"\u0418": "\\cyrchar\\CYRI ", u"\u0419": "\\cyrchar\\CYRISHRT ", u"\u041A": "\\cyrchar\\CYRK ", u"\u041B": "\\cyrchar\\CYRL ", u"\u041C": "\\cyrchar\\CYRM ", u"\u041D": "\\cyrchar\\CYRN ", u"\u041E": "\\cyrchar\\CYRO ", u"\u041F": "\\cyrchar\\CYRP ", u"\u0420": "\\cyrchar\\CYRR ", u"\u0421": "\\cyrchar\\CYRS ", u"\u0422": "\\cyrchar\\CYRT ", u"\u0423": "\\cyrchar\\CYRU ", u"\u0424": "\\cyrchar\\CYRF ", u"\u0425": "\\cyrchar\\CYRH ", u"\u0426": "\\cyrchar\\CYRC ", u"\u0427": "\\cyrchar\\CYRCH ", u"\u0428": "\\cyrchar\\CYRSH ", u"\u0429": "\\cyrchar\\CYRSHCH ", u"\u042A": "\\cyrchar\\CYRHRDSN ", u"\u042B": "\\cyrchar\\CYRERY ", u"\u042C": "\\cyrchar\\CYRSFTSN ", u"\u042D": "\\cyrchar\\CYREREV ", u"\u042E": "\\cyrchar\\CYRYU ", u"\u042F": "\\cyrchar\\CYRYA ", u"\u0430": "\\cyrchar\\cyra ", u"\u0431": "\\cyrchar\\cyrb ", u"\u0432": "\\cyrchar\\cyrv ", u"\u0433": "\\cyrchar\\cyrg ", u"\u0434": "\\cyrchar\\cyrd ", u"\u0435": "\\cyrchar\\cyre ", u"\u0436": "\\cyrchar\\cyrzh ", u"\u0437": "\\cyrchar\\cyrz ", u"\u0438": "\\cyrchar\\cyri ", u"\u0439": "\\cyrchar\\cyrishrt ", u"\u043A": "\\cyrchar\\cyrk ", u"\u043B": "\\cyrchar\\cyrl ", u"\u043C": "\\cyrchar\\cyrm ", u"\u043D": "\\cyrchar\\cyrn ", u"\u043E": "\\cyrchar\\cyro ", u"\u043F": "\\cyrchar\\cyrp ", u"\u0440": "\\cyrchar\\cyrr ", u"\u0441": "\\cyrchar\\cyrs ", u"\u0442": "\\cyrchar\\cyrt ", u"\u0443": "\\cyrchar\\cyru ", u"\u0444": "\\cyrchar\\cyrf ", u"\u0445": "\\cyrchar\\cyrh ", u"\u0446": "\\cyrchar\\cyrc ", u"\u0447": "\\cyrchar\\cyrch ", u"\u0448": "\\cyrchar\\cyrsh ", u"\u0449": "\\cyrchar\\cyrshch ", u"\u044A": "\\cyrchar\\cyrhrdsn ", u"\u044B": "\\cyrchar\\cyrery ", u"\u044C": "\\cyrchar\\cyrsftsn ", u"\u044D": "\\cyrchar\\cyrerev ", u"\u044E": "\\cyrchar\\cyryu ", u"\u044F": "\\cyrchar\\cyrya ", u"\u0451": "\\cyrchar\\cyryo ", u"\u0452": "\\cyrchar\\cyrdje ", u"\u0453": "\\cyrchar{\\'\\cyrg}", u"\u0454": "\\cyrchar\\cyrie ", u"\u0455": "\\cyrchar\\cyrdze ", u"\u0456": "\\cyrchar\\cyrii ", u"\u0457": "\\cyrchar\\cyryi ", u"\u0458": "\\cyrchar\\cyrje ", u"\u0459": "\\cyrchar\\cyrlje ", u"\u045A": "\\cyrchar\\cyrnje ", u"\u045B": "\\cyrchar\\cyrtshe ", u"\u045C": "\\cyrchar{\\'\\cyrk}", u"\u045E": "\\cyrchar\\cyrushrt ", u"\u045F": "\\cyrchar\\cyrdzhe ", u"\u0460": "\\cyrchar\\CYROMEGA ", u"\u0461": "\\cyrchar\\cyromega ", u"\u0462": "\\cyrchar\\CYRYAT ", u"\u0464": "\\cyrchar\\CYRIOTE ", u"\u0465": "\\cyrchar\\cyriote ", u"\u0466": "\\cyrchar\\CYRLYUS ", u"\u0467": "\\cyrchar\\cyrlyus ", u"\u0468": "\\cyrchar\\CYRIOTLYUS ", u"\u0469": "\\cyrchar\\cyriotlyus ", u"\u046A": "\\cyrchar\\CYRBYUS ", u"\u046C": "\\cyrchar\\CYRIOTBYUS ", u"\u046D": "\\cyrchar\\cyriotbyus ", u"\u046E": "\\cyrchar\\CYRKSI ", u"\u046F": "\\cyrchar\\cyrksi ", u"\u0470": "\\cyrchar\\CYRPSI ", u"\u0471": "\\cyrchar\\cyrpsi ", u"\u0472": "\\cyrchar\\CYRFITA ", u"\u0474": "\\cyrchar\\CYRIZH ", u"\u0478": "\\cyrchar\\CYRUK ", u"\u0479": "\\cyrchar\\cyruk ", u"\u047A": "\\cyrchar\\CYROMEGARND ", u"\u047B": "\\cyrchar\\cyromegarnd ", u"\u047C": "\\cyrchar\\CYROMEGATITLO ", u"\u047D": "\\cyrchar\\cyromegatitlo ", u"\u047E": "\\cyrchar\\CYROT ", u"\u047F": "\\cyrchar\\cyrot ", u"\u0480": "\\cyrchar\\CYRKOPPA ", u"\u0481": "\\cyrchar\\cyrkoppa ", u"\u0482": "\\cyrchar\\cyrthousands ", u"\u0488": "\\cyrchar\\cyrhundredthousands ", u"\u0489": "\\cyrchar\\cyrmillions ", u"\u048C": "\\cyrchar\\CYRSEMISFTSN ", u"\u048D": "\\cyrchar\\cyrsemisftsn ", u"\u048E": "\\cyrchar\\CYRRTICK ", u"\u048F": "\\cyrchar\\cyrrtick ", u"\u0490": "\\cyrchar\\CYRGUP ", u"\u0491": "\\cyrchar\\cyrgup ", u"\u0492": "\\cyrchar\\CYRGHCRS ", u"\u0493": "\\cyrchar\\cyrghcrs ", u"\u0494": "\\cyrchar\\CYRGHK ", u"\u0495": "\\cyrchar\\cyrghk ", u"\u0496": "\\cyrchar\\CYRZHDSC ", u"\u0497": "\\cyrchar\\cyrzhdsc ", u"\u0498": "\\cyrchar\\CYRZDSC ", u"\u0499": "\\cyrchar\\cyrzdsc ", u"\u049A": "\\cyrchar\\CYRKDSC ", u"\u049B": "\\cyrchar\\cyrkdsc ", u"\u049C": "\\cyrchar\\CYRKVCRS ", u"\u049D": "\\cyrchar\\cyrkvcrs ", u"\u049E": "\\cyrchar\\CYRKHCRS ", u"\u049F": "\\cyrchar\\cyrkhcrs ", u"\u04A0": "\\cyrchar\\CYRKBEAK ", u"\u04A1": "\\cyrchar\\cyrkbeak ", u"\u04A2": "\\cyrchar\\CYRNDSC ", u"\u04A3": "\\cyrchar\\cyrndsc ", u"\u04A4": "\\cyrchar\\CYRNG ", u"\u04A5": "\\cyrchar\\cyrng ", u"\u04A6": "\\cyrchar\\CYRPHK ", u"\u04A7": "\\cyrchar\\cyrphk ", u"\u04A8": "\\cyrchar\\CYRABHHA ", u"\u04A9": "\\cyrchar\\cyrabhha ", u"\u04AA": "\\cyrchar\\CYRSDSC ", u"\u04AB": "\\cyrchar\\cyrsdsc ", u"\u04AC": "\\cyrchar\\CYRTDSC ", u"\u04AD": "\\cyrchar\\cyrtdsc ", u"\u04AE": "\\cyrchar\\CYRY ", u"\u04AF": "\\cyrchar\\cyry ", u"\u04B0": "\\cyrchar\\CYRYHCRS ", u"\u04B1": "\\cyrchar\\cyryhcrs ", u"\u04B2": "\\cyrchar\\CYRHDSC ", u"\u04B3": "\\cyrchar\\cyrhdsc ", u"\u04B4": "\\cyrchar\\CYRTETSE ", u"\u04B5": "\\cyrchar\\cyrtetse ", u"\u04B6": "\\cyrchar\\CYRCHRDSC ", u"\u04B7": "\\cyrchar\\cyrchrdsc ", u"\u04B8": "\\cyrchar\\CYRCHVCRS ", u"\u04B9": "\\cyrchar\\cyrchvcrs ", u"\u04BA": "\\cyrchar\\CYRSHHA ", u"\u04BB": "\\cyrchar\\cyrshha ", u"\u04BC": "\\cyrchar\\CYRABHCH ", u"\u04BD": "\\cyrchar\\cyrabhch ", u"\u04BE": "\\cyrchar\\CYRABHCHDSC ", u"\u04BF": "\\cyrchar\\cyrabhchdsc ", u"\u04C0": "\\cyrchar\\CYRpalochka ", u"\u04C3": "\\cyrchar\\CYRKHK ", u"\u04C4": "\\cyrchar\\cyrkhk ", u"\u04C7": "\\cyrchar\\CYRNHK ", u"\u04C8": "\\cyrchar\\cyrnhk ", u"\u04CB": "\\cyrchar\\CYRCHLDSC ", u"\u04CC": "\\cyrchar\\cyrchldsc ", u"\u04D4": "\\cyrchar\\CYRAE ", u"\u04D5": "\\cyrchar\\cyrae ", u"\u04D8": "\\cyrchar\\CYRSCHWA ", u"\u04D9": "\\cyrchar\\cyrschwa ", u"\u04E0": "\\cyrchar\\CYRABHDZE ", u"\u04E1": "\\cyrchar\\cyrabhdze ", u"\u04E8": "\\cyrchar\\CYROTLD ", u"\u04E9": "\\cyrchar\\cyrotld ", u"\u2002": "\\hspace{0.6em}", u"\u2003": "\\hspace{1em}", u"\u2004": "\\hspace{0.33em}", u"\u2005": "\\hspace{0.25em}", u"\u2006": "\\hspace{0.166em}", u"\u2007": "\\hphantom{0}", u"\u2008": "\\hphantom{,}", u"\u2009": "\\hspace{0.167em}", u"\u2009-0200A-0200A": "\\;", u"\u200A": "\\mkern1mu ", u"\u2013": "\\textendash ", u"\u2014": "\\textemdash ", u"\u2015": "\\rule{1em}{1pt}", u"\u2016": "\\Vert ", u"\u201B": "\\Elzreapos ", u"\u201C": "\\textquotedblleft ", u"\u201D": "\\textquotedblright ", u"\u201E": ",,", u"\u2020": "\\textdagger ", u"\u2021": "\\textdaggerdbl ", u"\u2022": "\\textbullet ", u"\u2025": "..", u"\u2026": "\\ldots ", u"\u2030": "\\textperthousand ", u"\u2031": "\\textpertenthousand ", u"\u2032": "{'}", u"\u2033": "{''}", u"\u2034": "{'''}", u"\u2035": "\\backprime ", u"\u2039": "\\guilsinglleft ", u"\u203A": "\\guilsinglright ", u"\u2057": "''''", u"\u205F": "\\mkern4mu ", u"\u2060": "\\nolinebreak ", u"\u20A7": "\\ensuremath{\\Elzpes}", u"\u20AC": "\\mbox{\\texteuro} ", u"\u20DB": "\\dddot ", u"\u20DC": "\\ddddot ", u"\u2102": "\\mathbb{C}", u"\u210A": "\\mathscr{g}", u"\u210B": "\\mathscr{H}", u"\u210C": "\\mathfrak{H}", u"\u210D": "\\mathbb{H}", u"\u210F": "\\hslash ", u"\u2110": "\\mathscr{I}", u"\u2111": "\\mathfrak{I}", u"\u2112": "\\mathscr{L}", u"\u2113": "\\mathscr{l}", u"\u2115": "\\mathbb{N}", u"\u2116": "\\cyrchar\\textnumero ", u"\u2118": "\\wp ", u"\u2119": "\\mathbb{P}", u"\u211A": "\\mathbb{Q}", u"\u211B": "\\mathscr{R}", u"\u211C": "\\mathfrak{R}", u"\u211D": "\\mathbb{R}", u"\u211E": "\\Elzxrat ", u"\u2122": "\\texttrademark ", u"\u2124": "\\mathbb{Z}", u"\u2126": "\\Omega ", u"\u2127": "\\mho ", u"\u2128": "\\mathfrak{Z}", u"\u2129": "\\ElsevierGlyph{2129}", u"\u212B": "\\AA ", u"\u212C": "\\mathscr{B}", u"\u212D": "\\mathfrak{C}", u"\u212F": "\\mathscr{e}", u"\u2130": "\\mathscr{E}", u"\u2131": "\\mathscr{F}", u"\u2133": "\\mathscr{M}", u"\u2134": "\\mathscr{o}", u"\u2135": "\\aleph ", u"\u2136": "\\beth ", u"\u2137": "\\gimel ", u"\u2138": "\\daleth ", u"\u2153": "\\textfrac{1}{3}", u"\u2154": "\\textfrac{2}{3}", u"\u2155": "\\textfrac{1}{5}", u"\u2156": "\\textfrac{2}{5}", u"\u2157": "\\textfrac{3}{5}", u"\u2158": "\\textfrac{4}{5}", u"\u2159": "\\textfrac{1}{6}", u"\u215A": "\\textfrac{5}{6}", u"\u215B": "\\textfrac{1}{8}", u"\u215C": "\\textfrac{3}{8}", u"\u215D": "\\textfrac{5}{8}", u"\u215E": "\\textfrac{7}{8}", u"\u2190": "\\leftarrow ", u"\u2191": "\\uparrow ", u"\u2192": "\\rightarrow ", u"\u2193": "\\downarrow ", u"\u2194": "\\leftrightarrow ", u"\u2195": "\\updownarrow ", u"\u2196": "\\nwarrow ", u"\u2197": "\\nearrow ", u"\u2198": "\\searrow ", u"\u2199": "\\swarrow ", u"\u219A": "\\nleftarrow ", u"\u219B": "\\nrightarrow ", u"\u219C": "\\arrowwaveright ", u"\u219D": "\\arrowwaveright ", u"\u219E": "\\twoheadleftarrow ", u"\u21A0": "\\twoheadrightarrow ", u"\u21A2": "\\leftarrowtail ", u"\u21A3": "\\rightarrowtail ", u"\u21A6": "\\mapsto ", u"\u21A9": "\\hookleftarrow ", u"\u21AA": "\\hookrightarrow ", u"\u21AB": "\\looparrowleft ", u"\u21AC": "\\looparrowright ", u"\u21AD": "\\leftrightsquigarrow ", u"\u21AE": "\\nleftrightarrow ", u"\u21B0": "\\Lsh ", u"\u21B1": "\\Rsh ", u"\u21B3": "\\ElsevierGlyph{21B3}", u"\u21B6": "\\curvearrowleft ", u"\u21B7": "\\curvearrowright ", u"\u21BA": "\\circlearrowleft ", u"\u21BB": "\\circlearrowright ", u"\u21BC": "\\leftharpoonup ", u"\u21BD": "\\leftharpoondown ", u"\u21BE": "\\upharpoonright ", u"\u21BF": "\\upharpoonleft ", u"\u21C0": "\\rightharpoonup ", u"\u21C1": "\\rightharpoondown ", u"\u21C2": "\\downharpoonright ", u"\u21C3": "\\downharpoonleft ", u"\u21C4": "\\rightleftarrows ", u"\u21C5": "\\dblarrowupdown ", u"\u21C6": "\\leftrightarrows ", u"\u21C7": "\\leftleftarrows ", u"\u21C8": "\\upuparrows ", u"\u21C9": "\\rightrightarrows ", u"\u21CA": "\\downdownarrows ", u"\u21CB": "\\leftrightharpoons ", u"\u21CC": "\\rightleftharpoons ", u"\u21CD": "\\nLeftarrow ", u"\u21CE": "\\nLeftrightarrow ", u"\u21CF": "\\nRightarrow ", u"\u21D0": "\\Leftarrow ", u"\u21D1": "\\Uparrow ", u"\u21D2": "\\Rightarrow ", u"\u21D3": "\\Downarrow ", u"\u21D4": "\\Leftrightarrow ", u"\u21D5": "\\Updownarrow ", u"\u21DA": "\\Lleftarrow ", u"\u21DB": "\\Rrightarrow ", u"\u21DD": "\\rightsquigarrow ", u"\u21F5": "\\DownArrowUpArrow ", u"\u2200": "\\forall ", u"\u2201": "\\complement ", u"\u2202": "\\partial ", u"\u2203": "\\exists ", u"\u2204": "\\nexists ", u"\u2205": "\\varnothing ", u"\u2207": "\\nabla ", u"\u2208": "\\in ", u"\u2209": "\\not\\in ", u"\u220B": "\\ni ", u"\u220C": "\\not\\ni ", u"\u220F": "\\prod ", u"\u2210": "\\coprod ", u"\u2211": "\\sum ", u"\u2213": "\\mp ", u"\u2214": "\\dotplus ", u"\u2216": "\\setminus ", u"\u2217": "{_\\ast}", u"\u2218": "\\circ ", u"\u2219": "\\bullet ", u"\u221A": "\\surd ", u"\u221D": "\\propto ", u"\u221E": "\\infty ", u"\u221F": "\\rightangle ", u"\u2220": "\\angle ", u"\u2221": "\\measuredangle ", u"\u2222": "\\sphericalangle ", u"\u2223": "\\mid ", u"\u2224": "\\nmid ", u"\u2225": "\\parallel ", u"\u2226": "\\nparallel ", u"\u2227": "\\wedge ", u"\u2228": "\\vee ", u"\u2229": "\\cap ", u"\u222A": "\\cup ", u"\u222B": "\\int ", u"\u222C": "\\int\\!\\int ", u"\u222D": "\\int\\!\\int\\!\\int ", u"\u222E": "\\oint ", u"\u222F": "\\surfintegral ", u"\u2230": "\\volintegral ", u"\u2231": "\\clwintegral ", u"\u2232": "\\ElsevierGlyph{2232}", u"\u2233": "\\ElsevierGlyph{2233}", u"\u2234": "\\therefore ", u"\u2235": "\\because ", u"\u2237": "\\Colon ", u"\u2238": "\\ElsevierGlyph{2238}", u"\u223A": "\\mathbin{{:}\\!\\!{-}\\!\\!{:}}", u"\u223B": "\\homothetic ", u"\u223C": "\\sim ", u"\u223D": "\\backsim ", u"\u223E": "\\lazysinv ", u"\u2240": "\\wr ", u"\u2241": "\\not\\sim ", u"\u2242": "\\ElsevierGlyph{2242}", u"\u2242-00338": "\\NotEqualTilde ", u"\u2243": "\\simeq ", u"\u2244": "\\not\\simeq ", u"\u2245": "\\cong ", u"\u2246": "\\approxnotequal ", u"\u2247": "\\not\\cong ", u"\u2248": "\\approx ", u"\u2249": "\\not\\approx ", u"\u224A": "\\approxeq ", u"\u224B": "\\tildetrpl ", u"\u224B-00338": "\\not\\apid ", u"\u224C": "\\allequal ", u"\u224D": "\\asymp ", u"\u224E": "\\Bumpeq ", u"\u224E-00338": "\\NotHumpDownHump ", u"\u224F": "\\bumpeq ", u"\u224F-00338": "\\NotHumpEqual ", u"\u2250": "\\doteq ", u"\u2250-00338": "\\not\\doteq", u"\u2251": "\\doteqdot ", u"\u2252": "\\fallingdotseq ", u"\u2253": "\\risingdotseq ", u"\u2254": ":=", u"\u2255": "=:", u"\u2256": "\\eqcirc ", u"\u2257": "\\circeq ", u"\u2259": "\\estimates ", u"\u225A": "\\ElsevierGlyph{225A}", u"\u225B": "\\starequal ", u"\u225C": "\\triangleq ", u"\u225F": "\\ElsevierGlyph{225F}", u"\u2260": "\\not =", u"\u2261": "\\equiv ", u"\u2262": "\\not\\equiv ", u"\u2264": "\\leq ", u"\u2265": "\\geq ", u"\u2266": "\\leqq ", u"\u2267": "\\geqq ", u"\u2268": "\\lneqq ", u"\u2268-0FE00": "\\lvertneqq ", u"\u2269": "\\gneqq ", u"\u2269-0FE00": "\\gvertneqq ", u"\u226A": "\\ll ", u"\u226A-00338": "\\NotLessLess ", u"\u226B": "\\gg ", u"\u226B-00338": "\\NotGreaterGreater ", u"\u226C": "\\between ", u"\u226D": "\\not\\kern-0.3em\\times ", u"\u226E": "\\not&lt;", u"\u226F": "\\not&gt;", u"\u2270": "\\not\\leq ", u"\u2271": "\\not\\geq ", u"\u2272": "\\lessequivlnt ", u"\u2273": "\\greaterequivlnt ", u"\u2274": "\\ElsevierGlyph{2274}", u"\u2275": "\\ElsevierGlyph{2275}", u"\u2276": "\\lessgtr ", u"\u2277": "\\gtrless ", u"\u2278": "\\notlessgreater ", u"\u2279": "\\notgreaterless ", u"\u227A": "\\prec ", u"\u227B": "\\succ ", u"\u227C": "\\preccurlyeq ", u"\u227D": "\\succcurlyeq ", u"\u227E": "\\precapprox ", u"\u227E-00338": "\\NotPrecedesTilde ", u"\u227F": "\\succapprox ", u"\u227F-00338": "\\NotSucceedsTilde ", u"\u2280": "\\not\\prec ", u"\u2281": "\\not\\succ ", u"\u2282": "\\subset ", u"\u2283": "\\supset ", u"\u2284": "\\not\\subset ", u"\u2285": "\\not\\supset ", u"\u2286": "\\subseteq ", u"\u2287": "\\supseteq ", u"\u2288": "\\not\\subseteq ", u"\u2289": "\\not\\supseteq ", u"\u228A": "\\subsetneq ", u"\u228A-0FE00": "\\varsubsetneqq ", u"\u228B": "\\supsetneq ", u"\u228B-0FE00": "\\varsupsetneq ", u"\u228E": "\\uplus ", u"\u228F": "\\sqsubset ", u"\u228F-00338": "\\NotSquareSubset ", u"\u2290": "\\sqsupset ", u"\u2290-00338": "\\NotSquareSuperset ", u"\u2291": "\\sqsubseteq ", u"\u2292": "\\sqsupseteq ", u"\u2293": "\\sqcap ", u"\u2294": "\\sqcup ", u"\u2295": "\\oplus ", u"\u2296": "\\ominus ", u"\u2297": "\\otimes ", u"\u2298": "\\oslash ", u"\u2299": "\\odot ", u"\u229A": "\\circledcirc ", u"\u229B": "\\circledast ", u"\u229D": "\\circleddash ", u"\u229E": "\\boxplus ", u"\u229F": "\\boxminus ", u"\u22A0": "\\boxtimes ", u"\u22A1": "\\boxdot ", u"\u22A2": "\\vdash ", u"\u22A3": "\\dashv ", u"\u22A4": "\\top ", u"\u22A5": "\\perp ", u"\u22A7": "\\truestate ", u"\u22A8": "\\forcesextra ", u"\u22A9": "\\Vdash ", u"\u22AA": "\\Vvdash ", u"\u22AB": "\\VDash ", u"\u22AC": "\\nvdash ", u"\u22AD": "\\nvDash ", u"\u22AE": "\\nVdash ", u"\u22AF": "\\nVDash ", u"\u22B2": "\\vartriangleleft ", u"\u22B3": "\\vartriangleright ", u"\u22B4": "\\trianglelefteq ", u"\u22B5": "\\trianglerighteq ", u"\u22B6": "\\original ", u"\u22B7": "\\image ", u"\u22B8": "\\multimap ", u"\u22B9": "\\hermitconjmatrix ", u"\u22BA": "\\intercal ", u"\u22BB": "\\veebar ", u"\u22BE": "\\rightanglearc ", u"\u22C0": "\\ElsevierGlyph{22C0}", u"\u22C1": "\\ElsevierGlyph{22C1}", u"\u22C2": "\\bigcap ", u"\u22C3": "\\bigcup ", u"\u22C4": "\\diamond ", u"\u22C5": "\\cdot ", u"\u22C6": "\\star ", u"\u22C7": "\\divideontimes ", u"\u22C8": "\\bowtie ", u"\u22C9": "\\ltimes ", u"\u22CA": "\\rtimes ", u"\u22CB": "\\leftthreetimes ", u"\u22CC": "\\rightthreetimes ", u"\u22CD": "\\backsimeq ", u"\u22CE": "\\curlyvee ", u"\u22CF": "\\curlywedge ", u"\u22D0": "\\Subset ", u"\u22D1": "\\Supset ", u"\u22D2": "\\Cap ", u"\u22D3": "\\Cup ", u"\u22D4": "\\pitchfork ", u"\u22D6": "\\lessdot ", u"\u22D7": "\\gtrdot ", u"\u22D8": "\\verymuchless ", u"\u22D9": "\\verymuchgreater ", u"\u22DA": "\\lesseqgtr ", u"\u22DB": "\\gtreqless ", u"\u22DE": "\\curlyeqprec ", u"\u22DF": "\\curlyeqsucc ", u"\u22E2": "\\not\\sqsubseteq ", u"\u22E3": "\\not\\sqsupseteq ", u"\u22E5": "\\Elzsqspne ", u"\u22E6": "\\lnsim ", u"\u22E7": "\\gnsim ", u"\u22E8": "\\precedesnotsimilar ", u"\u22E9": "\\succnsim ", u"\u22EA": "\\ntriangleleft ", u"\u22EB": "\\ntriangleright ", u"\u22EC": "\\ntrianglelefteq ", u"\u22ED": "\\ntrianglerighteq ", u"\u22EE": "\\vdots ", u"\u22EF": "\\cdots ", u"\u22F0": "\\upslopeellipsis ", u"\u22F1": "\\downslopeellipsis ", u"\u2305": "\\barwedge ", u"\u2306": "\\perspcorrespond ", u"\u2308": "\\lceil ", u"\u2309": "\\rceil ", u"\u230A": "\\lfloor ", u"\u230B": "\\rfloor ", u"\u2315": "\\recorder ", u"\u2316": "\\mathchar\"2208", u"\u231C": "\\ulcorner ", u"\u231D": "\\urcorner ", u"\u231E": "\\llcorner ", u"\u231F": "\\lrcorner ", u"\u2322": "\\frown ", u"\u2323": "\\smile ", u"\u2329": "\\langle ", u"\u232A": "\\rangle ", u"\u233D": "\\ElsevierGlyph{E838}", u"\u23A3": "\\Elzdlcorn ", u"\u23B0": "\\lmoustache ", u"\u23B1": "\\rmoustache ", u"\u2423": "\\textvisiblespace ", u"\u2460": "\\ding{172}", u"\u2461": "\\ding{173}", u"\u2462": "\\ding{174}", u"\u2463": "\\ding{175}", u"\u2464": "\\ding{176}", u"\u2465": "\\ding{177}", u"\u2466": "\\ding{178}", u"\u2467": "\\ding{179}", u"\u2468": "\\ding{180}", u"\u2469": "\\ding{181}", u"\u24C8": "\\circledS ", u"\u2506": "\\Elzdshfnc ", u"\u2519": "\\Elzsqfnw ", u"\u2571": "\\diagup ", u"\u25A0": "\\ding{110}", u"\u25A1": "\\square ", u"\u25AA": "\\blacksquare ", u"\u25AD": "\\fbox{~~}", u"\u25AF": "\\Elzvrecto ", u"\u25B1": "\\ElsevierGlyph{E381}", u"\u25B2": "\\ding{115}", u"\u25B3": "\\bigtriangleup ", u"\u25B4": "\\blacktriangle ", u"\u25B5": "\\vartriangle ", u"\u25B8": "\\blacktriangleright ", u"\u25B9": "\\triangleright ", u"\u25BC": "\\ding{116}", u"\u25BD": "\\bigtriangledown ", u"\u25BE": "\\blacktriangledown ", u"\u25BF": "\\triangledown ", u"\u25C2": "\\blacktriangleleft ", u"\u25C3": "\\triangleleft ", u"\u25C6": "\\ding{117}", u"\u25CA": "\\lozenge ", u"\u25CB": "\\bigcirc ", u"\u25CF": "\\ding{108}", u"\u25D0": "\\Elzcirfl ", u"\u25D1": "\\Elzcirfr ", u"\u25D2": "\\Elzcirfb ", u"\u25D7": "\\ding{119}", u"\u25D8": "\\Elzrvbull ", u"\u25E7": "\\Elzsqfl ", u"\u25E8": "\\Elzsqfr ", u"\u25EA": "\\Elzsqfse ", u"\u25EF": "\\bigcirc ", u"\u2605": "\\ding{72}", u"\u2606": "\\ding{73}", u"\u260E": "\\ding{37}", u"\u261B": "\\ding{42}", u"\u261E": "\\ding{43}", u"\u263E": "\\rightmoon ", u"\u263F": "\\mercury ", u"\u2640": "\\venus ", u"\u2642": "\\male ", u"\u2643": "\\jupiter ", u"\u2644": "\\saturn ", u"\u2645": "\\uranus ", u"\u2646": "\\neptune ", u"\u2647": "\\pluto ", u"\u2648": "\\aries ", u"\u2649": "\\taurus ", u"\u264A": "\\gemini ", u"\u264B": "\\cancer ", u"\u264C": "\\leo ", u"\u264D": "\\virgo ", u"\u264E": "\\libra ", u"\u264F": "\\scorpio ", u"\u2650": "\\sagittarius ", u"\u2651": "\\capricornus ", u"\u2652": "\\aquarius ", u"\u2653": "\\pisces ", u"\u2660": "\\ding{171}", u"\u2662": "\\diamond ", u"\u2663": "\\ding{168}", u"\u2665": "\\ding{170}", u"\u2666": "\\ding{169}", u"\u2669": "\\quarternote ", u"\u266A": "\\eighthnote ", u"\u266D": "\\flat ", u"\u266E": "\\natural ", u"\u266F": "\\sharp ", u"\u2701": "\\ding{33}", u"\u2702": "\\ding{34}", u"\u2703": "\\ding{35}", u"\u2704": "\\ding{36}", u"\u2706": "\\ding{38}", u"\u2707": "\\ding{39}", u"\u2708": "\\ding{40}", u"\u2709": "\\ding{41}", u"\u270C": "\\ding{44}", u"\u270D": "\\ding{45}", u"\u270E": "\\ding{46}", u"\u270F": "\\ding{47}", u"\u2710": "\\ding{48}", u"\u2711": "\\ding{49}", u"\u2712": "\\ding{50}", u"\u2713": "\\ding{51}", u"\u2714": "\\ding{52}", u"\u2715": "\\ding{53}", u"\u2716": "\\ding{54}", u"\u2717": "\\ding{55}", u"\u2718": "\\ding{56}", u"\u2719": "\\ding{57}", u"\u271A": "\\ding{58}", u"\u271B": "\\ding{59}", u"\u271C": "\\ding{60}", u"\u271D": "\\ding{61}", u"\u271E": "\\ding{62}", u"\u271F": "\\ding{63}", u"\u2720": "\\ding{64}", u"\u2721": "\\ding{65}", u"\u2722": "\\ding{66}", u"\u2723": "\\ding{67}", u"\u2724": "\\ding{68}", u"\u2725": "\\ding{69}", u"\u2726": "\\ding{70}", u"\u2727": "\\ding{71}", u"\u2729": "\\ding{73}", u"\u272A": "\\ding{74}", u"\u272B": "\\ding{75}", u"\u272C": "\\ding{76}", u"\u272D": "\\ding{77}", u"\u272E": "\\ding{78}", u"\u272F": "\\ding{79}", u"\u2730": "\\ding{80}", u"\u2731": "\\ding{81}", u"\u2732": "\\ding{82}", u"\u2733": "\\ding{83}", u"\u2734": "\\ding{84}", u"\u2735": "\\ding{85}", u"\u2736": "\\ding{86}", u"\u2737": "\\ding{87}", u"\u2738": "\\ding{88}", u"\u2739": "\\ding{89}", u"\u273A": "\\ding{90}", u"\u273B": "\\ding{91}", u"\u273C": "\\ding{92}", u"\u273D": "\\ding{93}", u"\u273E": "\\ding{94}", u"\u273F": "\\ding{95}", u"\u2740": "\\ding{96}", u"\u2741": "\\ding{97}", u"\u2742": "\\ding{98}", u"\u2743": "\\ding{99}", u"\u2744": "\\ding{100}", u"\u2745": "\\ding{101}", u"\u2746": "\\ding{102}", u"\u2747": "\\ding{103}", u"\u2748": "\\ding{104}", u"\u2749": "\\ding{105}", u"\u274A": "\\ding{106}", u"\u274B": "\\ding{107}", u"\u274D": "\\ding{109}", u"\u274F": "\\ding{111}", u"\u2750": "\\ding{112}", u"\u2751": "\\ding{113}", u"\u2752": "\\ding{114}", u"\u2756": "\\ding{118}", u"\u2758": "\\ding{120}", u"\u2759": "\\ding{121}", u"\u275A": "\\ding{122}", u"\u275B": "\\ding{123}", u"\u275C": "\\ding{124}", u"\u275D": "\\ding{125}", u"\u275E": "\\ding{126}", u"\u2761": "\\ding{161}", u"\u2762": "\\ding{162}", u"\u2763": "\\ding{163}", u"\u2764": "\\ding{164}", u"\u2765": "\\ding{165}", u"\u2766": "\\ding{166}", u"\u2767": "\\ding{167}", u"\u2776": "\\ding{182}", u"\u2777": "\\ding{183}", u"\u2778": "\\ding{184}", u"\u2779": "\\ding{185}", u"\u277A": "\\ding{186}", u"\u277B": "\\ding{187}", u"\u277C": "\\ding{188}", u"\u277D": "\\ding{189}", u"\u277E": "\\ding{190}", u"\u277F": "\\ding{191}", u"\u2780": "\\ding{192}", u"\u2781": "\\ding{193}", u"\u2782": "\\ding{194}", u"\u2783": "\\ding{195}", u"\u2784": "\\ding{196}", u"\u2785": "\\ding{197}", u"\u2786": "\\ding{198}", u"\u2787": "\\ding{199}", u"\u2788": "\\ding{200}", u"\u2789": "\\ding{201}", u"\u278A": "\\ding{202}", u"\u278B": "\\ding{203}", u"\u278C": "\\ding{204}", u"\u278D": "\\ding{205}", u"\u278E": "\\ding{206}", u"\u278F": "\\ding{207}", u"\u2790": "\\ding{208}", u"\u2791": "\\ding{209}", u"\u2792": "\\ding{210}", u"\u2793": "\\ding{211}", u"\u2794": "\\ding{212}", u"\u2798": "\\ding{216}", u"\u2799": "\\ding{217}", u"\u279A": "\\ding{218}", u"\u279B": "\\ding{219}", u"\u279C": "\\ding{220}", u"\u279D": "\\ding{221}", u"\u279E": "\\ding{222}", u"\u279F": "\\ding{223}", u"\u27A0": "\\ding{224}", u"\u27A1": "\\ding{225}", u"\u27A2": "\\ding{226}", u"\u27A3": "\\ding{227}", u"\u27A4": "\\ding{228}", u"\u27A5": "\\ding{229}", u"\u27A6": "\\ding{230}", u"\u27A7": "\\ding{231}", u"\u27A8": "\\ding{232}", u"\u27A9": "\\ding{233}", u"\u27AA": "\\ding{234}", u"\u27AB": "\\ding{235}", u"\u27AC": "\\ding{236}", u"\u27AD": "\\ding{237}", u"\u27AE": "\\ding{238}", u"\u27AF": "\\ding{239}", u"\u27B1": "\\ding{241}", u"\u27B2": "\\ding{242}", u"\u27B3": "\\ding{243}", u"\u27B4": "\\ding{244}", u"\u27B5": "\\ding{245}", u"\u27B6": "\\ding{246}", u"\u27B7": "\\ding{247}", u"\u27B8": "\\ding{248}", u"\u27B9": "\\ding{249}", u"\u27BA": "\\ding{250}", u"\u27BB": "\\ding{251}", u"\u27BC": "\\ding{252}", u"\u27BD": "\\ding{253}", u"\u27BE": "\\ding{254}", u"\u27F5": "\\longleftarrow ", u"\u27F6": "\\longrightarrow ", u"\u27F7": "\\longleftrightarrow ", u"\u27F8": "\\Longleftarrow ", u"\u27F9": "\\Longrightarrow ", u"\u27FA": "\\Longleftrightarrow ", u"\u27FC": "\\longmapsto ", u"\u27FF": "\\sim\\joinrel\\leadsto", u"\u2905": "\\ElsevierGlyph{E212}", u"\u2912": "\\UpArrowBar ", u"\u2913": "\\DownArrowBar ", u"\u2923": "\\ElsevierGlyph{E20C}", u"\u2924": "\\ElsevierGlyph{E20D}", u"\u2925": "\\ElsevierGlyph{E20B}", u"\u2926": "\\ElsevierGlyph{E20A}", u"\u2927": "\\ElsevierGlyph{E211}", u"\u2928": "\\ElsevierGlyph{E20E}", u"\u2929": "\\ElsevierGlyph{E20F}", u"\u292A": "\\ElsevierGlyph{E210}", u"\u2933": "\\ElsevierGlyph{E21C}", u"\u2933-00338": "\\ElsevierGlyph{E21D}", u"\u2936": "\\ElsevierGlyph{E21A}", u"\u2937": "\\ElsevierGlyph{E219}", u"\u2940": "\\Elolarr ", u"\u2941": "\\Elorarr ", u"\u2942": "\\ElzRlarr ", u"\u2944": "\\ElzrLarr ", u"\u2947": "\\Elzrarrx ", u"\u294E": "\\LeftRightVector ", u"\u294F": "\\RightUpDownVector ", u"\u2950": "\\DownLeftRightVector ", u"\u2951": "\\LeftUpDownVector ", u"\u2952": "\\LeftVectorBar ", u"\u2953": "\\RightVectorBar ", u"\u2954": "\\RightUpVectorBar ", u"\u2955": "\\RightDownVectorBar ", u"\u2956": "\\DownLeftVectorBar ", u"\u2957": "\\DownRightVectorBar ", u"\u2958": "\\LeftUpVectorBar ", u"\u2959": "\\LeftDownVectorBar ", u"\u295A": "\\LeftTeeVector ", u"\u295B": "\\RightTeeVector ", u"\u295C": "\\RightUpTeeVector ", u"\u295D": "\\RightDownTeeVector ", u"\u295E": "\\DownLeftTeeVector ", u"\u295F": "\\DownRightTeeVector ", u"\u2960": "\\LeftUpTeeVector ", u"\u2961": "\\LeftDownTeeVector ", u"\u296E": "\\UpEquilibrium ", u"\u296F": "\\ReverseUpEquilibrium ", u"\u2970": "\\RoundImplies ", u"\u297C": "\\ElsevierGlyph{E214}", u"\u297D": "\\ElsevierGlyph{E215}", u"\u2980": "\\Elztfnc ", u"\u2985": "\\ElsevierGlyph{3018}", u"\u2986": "\\Elroang ", u"\u2993": "&lt;\\kern-0.58em(", u"\u2994": "\\ElsevierGlyph{E291}", u"\u2999": "\\Elzddfnc ", u"\u299C": "\\Angle ", u"\u29A0": "\\Elzlpargt ", u"\u29B5": "\\ElsevierGlyph{E260}", u"\u29B6": "\\ElsevierGlyph{E61B}", u"\u29CA": "\\ElzLap ", u"\u29CB": "\\Elzdefas ", u"\u29CF": "\\LeftTriangleBar ", u"\u29CF-00338": "\\NotLeftTriangleBar ", u"\u29D0": "\\RightTriangleBar ", u"\u29D0-00338": "\\NotRightTriangleBar ", u"\u29DC": "\\ElsevierGlyph{E372}", u"\u29EB": "\\blacklozenge ", u"\u29F4": "\\RuleDelayed ", u"\u2A04": "\\Elxuplus ", u"\u2A05": "\\ElzThr ", u"\u2A06": "\\Elxsqcup ", u"\u2A07": "\\ElzInf ", u"\u2A08": "\\ElzSup ", u"\u2A0D": "\\ElzCint ", u"\u2A0F": "\\clockoint ", u"\u2A10": "\\ElsevierGlyph{E395}", u"\u2A16": "\\sqrint ", u"\u2A25": "\\ElsevierGlyph{E25A}", u"\u2A2A": "\\ElsevierGlyph{E25B}", u"\u2A2D": "\\ElsevierGlyph{E25C}", u"\u2A2E": "\\ElsevierGlyph{E25D}", u"\u2A2F": "\\ElzTimes ", u"\u2A34": "\\ElsevierGlyph{E25E}", u"\u2A35": "\\ElsevierGlyph{E25E}", u"\u2A3C": "\\ElsevierGlyph{E259}", u"\u2A3F": "\\amalg ", u"\u2A53": "\\ElzAnd ", u"\u2A54": "\\ElzOr ", u"\u2A55": "\\ElsevierGlyph{E36E}", u"\u2A56": "\\ElOr ", u"\u2A5E": "\\perspcorrespond ", u"\u2A5F": "\\Elzminhat ", u"\u2A63": "\\ElsevierGlyph{225A}", u"\u2A6E": "\\stackrel{*}{=}", u"\u2A75": "\\Equal ", u"\u2A7D": "\\leqslant ", u"\u2A7D-00338": "\\nleqslant ", u"\u2A7E": "\\geqslant ", u"\u2A7E-00338": "\\ngeqslant ", u"\u2A85": "\\lessapprox ", u"\u2A86": "\\gtrapprox ", u"\u2A87": "\\lneq ", u"\u2A88": "\\gneq ", u"\u2A89": "\\lnapprox ", u"\u2A8A": "\\gnapprox ", u"\u2A8B": "\\lesseqqgtr ", u"\u2A8C": "\\gtreqqless ", u"\u2A95": "\\eqslantless ", u"\u2A96": "\\eqslantgtr ", u"\u2A9D": "\\Pisymbol{ppi020}{117}", u"\u2A9E": "\\Pisymbol{ppi020}{105}", u"\u2AA1": "\\NestedLessLess ", u"\u2AA1-00338": "\\NotNestedLessLess ", u"\u2AA2": "\\NestedGreaterGreater ", u"\u2AA2-00338": "\\NotNestedGreaterGreater ", u"\u2AAF": "\\preceq ", u"\u2AAF-00338": "\\not\\preceq ", u"\u2AB0": "\\succeq ", u"\u2AB0-00338": "\\not\\succeq ", u"\u2AB5": "\\precneqq ", u"\u2AB6": "\\succneqq ", u"\u2AB7": "\\precapprox ", u"\u2AB8": "\\succapprox ", u"\u2AB9": "\\precnapprox ", u"\u2ABA": "\\succnapprox ", u"\u2AC5": "\\subseteqq ", u"\u2AC5-00338": "\\nsubseteqq ", u"\u2AC6": "\\supseteqq ", u"\u2AC6-00338": "\\nsupseteqq", u"\u2ACB": "\\subsetneqq ", u"\u2ACC": "\\supsetneqq ", u"\u2AEB": "\\ElsevierGlyph{E30D}", u"\u2AF6": "\\Elztdcol ", u"\u2AFD": "{{/}\\!\\!{/}}", u"\u2AFD-020E5": "{\\rlap{\\textbackslash}{{/}\\!\\!{/}}}", u"\u300A": "\\ElsevierGlyph{300A}", u"\u300B": "\\ElsevierGlyph{300B}", u"\u3018": "\\ElsevierGlyph{3018}", u"\u3019": "\\ElsevierGlyph{3019}", u"\u301A": "\\openbracketleft ", u"\u301B": "\\openbracketright ", u"\uFB00": "ff", u"\uFB01": "fi", u"\uFB02": "fl", u"\uFB03": "ffi", u"\uFB04": "ffl", u"\uD400": "\\mathbf{A}", u"\uD401": "\\mathbf{B}", u"\uD402": "\\mathbf{C}", u"\uD403": "\\mathbf{D}", u"\uD404": "\\mathbf{E}", u"\uD405": "\\mathbf{F}", u"\uD406": "\\mathbf{G}", u"\uD407": "\\mathbf{H}", u"\uD408": "\\mathbf{I}", u"\uD409": "\\mathbf{J}", u"\uD40A": "\\mathbf{K}", u"\uD40B": "\\mathbf{L}", u"\uD40C": "\\mathbf{M}", u"\uD40D": "\\mathbf{N}", u"\uD40E": "\\mathbf{O}", u"\uD40F": "\\mathbf{P}", u"\uD410": "\\mathbf{Q}", u"\uD411": "\\mathbf{R}", u"\uD412": "\\mathbf{S}", u"\uD413": "\\mathbf{T}", u"\uD414": "\\mathbf{U}", u"\uD415": "\\mathbf{V}", u"\uD416": "\\mathbf{W}", u"\uD417": "\\mathbf{X}", u"\uD418": "\\mathbf{Y}", u"\uD419": "\\mathbf{Z}", u"\uD41A": "\\mathbf{a}", u"\uD41B": "\\mathbf{b}", u"\uD41C": "\\mathbf{c}", u"\uD41D": "\\mathbf{d}", u"\uD41E": "\\mathbf{e}", u"\uD41F": "\\mathbf{f}", u"\uD420": "\\mathbf{g}", u"\uD421": "\\mathbf{h}", u"\uD422": "\\mathbf{i}", u"\uD423": "\\mathbf{j}", u"\uD424": "\\mathbf{k}", u"\uD425": "\\mathbf{l}", u"\uD426": "\\mathbf{m}", u"\uD427": "\\mathbf{n}", u"\uD428": "\\mathbf{o}", u"\uD429": "\\mathbf{p}", u"\uD42A": "\\mathbf{q}", u"\uD42B": "\\mathbf{r}", u"\uD42C": "\\mathbf{s}", u"\uD42D": "\\mathbf{t}", u"\uD42E": "\\mathbf{u}", u"\uD42F": "\\mathbf{v}", u"\uD430": "\\mathbf{w}", u"\uD431": "\\mathbf{x}", u"\uD432": "\\mathbf{y}", u"\uD433": "\\mathbf{z}", u"\uD434": "\\mathsl{A}", u"\uD435": "\\mathsl{B}", u"\uD436": "\\mathsl{C}", u"\uD437": "\\mathsl{D}", u"\uD438": "\\mathsl{E}", u"\uD439": "\\mathsl{F}", u"\uD43A": "\\mathsl{G}", u"\uD43B": "\\mathsl{H}", u"\uD43C": "\\mathsl{I}", u"\uD43D": "\\mathsl{J}", u"\uD43E": "\\mathsl{K}", u"\uD43F": "\\mathsl{L}", u"\uD440": "\\mathsl{M}", u"\uD441": "\\mathsl{N}", u"\uD442": "\\mathsl{O}", u"\uD443": "\\mathsl{P}", u"\uD444": "\\mathsl{Q}", u"\uD445": "\\mathsl{R}", u"\uD446": "\\mathsl{S}", u"\uD447": "\\mathsl{T}", u"\uD448": "\\mathsl{U}", u"\uD449": "\\mathsl{V}", u"\uD44A": "\\mathsl{W}", u"\uD44B": "\\mathsl{X}", u"\uD44C": "\\mathsl{Y}", u"\uD44D": "\\mathsl{Z}", u"\uD44E": "\\mathsl{a}", u"\uD44F": "\\mathsl{b}", u"\uD450": "\\mathsl{c}", u"\uD451": "\\mathsl{d}", u"\uD452": "\\mathsl{e}", u"\uD453": "\\mathsl{f}", u"\uD454": "\\mathsl{g}", u"\uD456": "\\mathsl{i}", u"\uD457": "\\mathsl{j}", u"\uD458": "\\mathsl{k}", u"\uD459": "\\mathsl{l}", u"\uD45A": "\\mathsl{m}", u"\uD45B": "\\mathsl{n}", u"\uD45C": "\\mathsl{o}", u"\uD45D": "\\mathsl{p}", u"\uD45E": "\\mathsl{q}", u"\uD45F": "\\mathsl{r}", u"\uD460": "\\mathsl{s}", u"\uD461": "\\mathsl{t}", u"\uD462": "\\mathsl{u}", u"\uD463": "\\mathsl{v}", u"\uD464": "\\mathsl{w}", u"\uD465": "\\mathsl{x}", u"\uD466": "\\mathsl{y}", u"\uD467": "\\mathsl{z}", u"\uD468": "\\mathbit{A}", u"\uD469": "\\mathbit{B}", u"\uD46A": "\\mathbit{C}", u"\uD46B": "\\mathbit{D}", u"\uD46C": "\\mathbit{E}", u"\uD46D": "\\mathbit{F}", u"\uD46E": "\\mathbit{G}", u"\uD46F": "\\mathbit{H}", u"\uD470": "\\mathbit{I}", u"\uD471": "\\mathbit{J}", u"\uD472": "\\mathbit{K}", u"\uD473": "\\mathbit{L}", u"\uD474": "\\mathbit{M}", u"\uD475": "\\mathbit{N}", u"\uD476": "\\mathbit{O}", u"\uD477": "\\mathbit{P}", u"\uD478": "\\mathbit{Q}", u"\uD479": "\\mathbit{R}", u"\uD47A": "\\mathbit{S}", u"\uD47B": "\\mathbit{T}", u"\uD47C": "\\mathbit{U}", u"\uD47D": "\\mathbit{V}", u"\uD47E": "\\mathbit{W}", u"\uD47F": "\\mathbit{X}", u"\uD480": "\\mathbit{Y}", u"\uD481": "\\mathbit{Z}", u"\uD482": "\\mathbit{a}", u"\uD483": "\\mathbit{b}", u"\uD484": "\\mathbit{c}", u"\uD485": "\\mathbit{d}", u"\uD486": "\\mathbit{e}", u"\uD487": "\\mathbit{f}", u"\uD488": "\\mathbit{g}", u"\uD489": "\\mathbit{h}", u"\uD48A": "\\mathbit{i}", u"\uD48B": "\\mathbit{j}", u"\uD48C": "\\mathbit{k}", u"\uD48D": "\\mathbit{l}", u"\uD48E": "\\mathbit{m}", u"\uD48F": "\\mathbit{n}", u"\uD490": "\\mathbit{o}", u"\uD491": "\\mathbit{p}", u"\uD492": "\\mathbit{q}", u"\uD493": "\\mathbit{r}", u"\uD494": "\\mathbit{s}", u"\uD495": "\\mathbit{t}", u"\uD496": "\\mathbit{u}", u"\uD497": "\\mathbit{v}", u"\uD498": "\\mathbit{w}", u"\uD499": "\\mathbit{x}", u"\uD49A": "\\mathbit{y}", u"\uD49B": "\\mathbit{z}", u"\uD49C": "\\mathscr{A}", u"\uD49E": "\\mathscr{C}", u"\uD49F": "\\mathscr{D}", u"\uD4A2": "\\mathscr{G}", u"\uD4A5": "\\mathscr{J}", u"\uD4A6": "\\mathscr{K}", u"\uD4A9": "\\mathscr{N}", u"\uD4AA": "\\mathscr{O}", u"\uD4AB": "\\mathscr{P}", u"\uD4AC": "\\mathscr{Q}", u"\uD4AE": "\\mathscr{S}", u"\uD4AF": "\\mathscr{T}", u"\uD4B0": "\\mathscr{U}", u"\uD4B1": "\\mathscr{V}", u"\uD4B2": "\\mathscr{W}", u"\uD4B3": "\\mathscr{X}", u"\uD4B4": "\\mathscr{Y}", u"\uD4B5": "\\mathscr{Z}", u"\uD4B6": "\\mathscr{a}", u"\uD4B7": "\\mathscr{b}", u"\uD4B8": "\\mathscr{c}", u"\uD4B9": "\\mathscr{d}", u"\uD4BB": "\\mathscr{f}", u"\uD4BD": "\\mathscr{h}", u"\uD4BE": "\\mathscr{i}", u"\uD4BF": "\\mathscr{j}", u"\uD4C0": "\\mathscr{k}", u"\uD4C1": "\\mathscr{l}", u"\uD4C2": "\\mathscr{m}", u"\uD4C3": "\\mathscr{n}", u"\uD4C5": "\\mathscr{p}", u"\uD4C6": "\\mathscr{q}", u"\uD4C7": "\\mathscr{r}", u"\uD4C8": "\\mathscr{s}", u"\uD4C9": "\\mathscr{t}", u"\uD4CA": "\\mathscr{u}", u"\uD4CB": "\\mathscr{v}", u"\uD4CC": "\\mathscr{w}", u"\uD4CD": "\\mathscr{x}", u"\uD4CE": "\\mathscr{y}", u"\uD4CF": "\\mathscr{z}", u"\uD4D0": "\\mathmit{A}", u"\uD4D1": "\\mathmit{B}", u"\uD4D2": "\\mathmit{C}", u"\uD4D3": "\\mathmit{D}", u"\uD4D4": "\\mathmit{E}", u"\uD4D5": "\\mathmit{F}", u"\uD4D6": "\\mathmit{G}", u"\uD4D7": "\\mathmit{H}", u"\uD4D8": "\\mathmit{I}", u"\uD4D9": "\\mathmit{J}", u"\uD4DA": "\\mathmit{K}", u"\uD4DB": "\\mathmit{L}", u"\uD4DC": "\\mathmit{M}", u"\uD4DD": "\\mathmit{N}", u"\uD4DE": "\\mathmit{O}", u"\uD4DF": "\\mathmit{P}", u"\uD4E0": "\\mathmit{Q}", u"\uD4E1": "\\mathmit{R}", u"\uD4E2": "\\mathmit{S}", u"\uD4E3": "\\mathmit{T}", u"\uD4E4": "\\mathmit{U}", u"\uD4E5": "\\mathmit{V}", u"\uD4E6": "\\mathmit{W}", u"\uD4E7": "\\mathmit{X}", u"\uD4E8": "\\mathmit{Y}", u"\uD4E9": "\\mathmit{Z}", u"\uD4EA": "\\mathmit{a}", u"\uD4EB": "\\mathmit{b}", u"\uD4EC": "\\mathmit{c}", u"\uD4ED": "\\mathmit{d}", u"\uD4EE": "\\mathmit{e}", u"\uD4EF": "\\mathmit{f}", u"\uD4F0": "\\mathmit{g}", u"\uD4F1": "\\mathmit{h}", u"\uD4F2": "\\mathmit{i}", u"\uD4F3": "\\mathmit{j}", u"\uD4F4": "\\mathmit{k}", u"\uD4F5": "\\mathmit{l}", u"\uD4F6": "\\mathmit{m}", u"\uD4F7": "\\mathmit{n}", u"\uD4F8": "\\mathmit{o}", u"\uD4F9": "\\mathmit{p}", u"\uD4FA": "\\mathmit{q}", u"\uD4FB": "\\mathmit{r}", u"\uD4FC": "\\mathmit{s}", u"\uD4FD": "\\mathmit{t}", u"\uD4FE": "\\mathmit{u}", u"\uD4FF": "\\mathmit{v}", u"\uD500": "\\mathmit{w}", u"\uD501": "\\mathmit{x}", u"\uD502": "\\mathmit{y}", u"\uD503": "\\mathmit{z}", u"\uD504": "\\mathfrak{A}", u"\uD505": "\\mathfrak{B}", u"\uD507": "\\mathfrak{D}", u"\uD508": "\\mathfrak{E}", u"\uD509": "\\mathfrak{F}", u"\uD50A": "\\mathfrak{G}", u"\uD50D": "\\mathfrak{J}", u"\uD50E": "\\mathfrak{K}", u"\uD50F": "\\mathfrak{L}", u"\uD510": "\\mathfrak{M}", u"\uD511": "\\mathfrak{N}", u"\uD512": "\\mathfrak{O}", u"\uD513": "\\mathfrak{P}", u"\uD514": "\\mathfrak{Q}", u"\uD516": "\\mathfrak{S}", u"\uD517": "\\mathfrak{T}", u"\uD518": "\\mathfrak{U}", u"\uD519": "\\mathfrak{V}", u"\uD51A": "\\mathfrak{W}", u"\uD51B": "\\mathfrak{X}", u"\uD51C": "\\mathfrak{Y}", u"\uD51E": "\\mathfrak{a}", u"\uD51F": "\\mathfrak{b}", u"\uD520": "\\mathfrak{c}", u"\uD521": "\\mathfrak{d}", u"\uD522": "\\mathfrak{e}", u"\uD523": "\\mathfrak{f}", u"\uD524": "\\mathfrak{g}", u"\uD525": "\\mathfrak{h}", u"\uD526": "\\mathfrak{i}", u"\uD527": "\\mathfrak{j}", u"\uD528": "\\mathfrak{k}", u"\uD529": "\\mathfrak{l}", u"\uD52A": "\\mathfrak{m}", u"\uD52B": "\\mathfrak{n}", u"\uD52C": "\\mathfrak{o}", u"\uD52D": "\\mathfrak{p}", u"\uD52E": "\\mathfrak{q}", u"\uD52F": "\\mathfrak{r}", u"\uD530": "\\mathfrak{s}", u"\uD531": "\\mathfrak{t}", u"\uD532": "\\mathfrak{u}", u"\uD533": "\\mathfrak{v}", u"\uD534": "\\mathfrak{w}", u"\uD535": "\\mathfrak{x}", u"\uD536": "\\mathfrak{y}", u"\uD537": "\\mathfrak{z}", u"\uD538": "\\mathbb{A}", u"\uD539": "\\mathbb{B}", u"\uD53B": "\\mathbb{D}", u"\uD53C": "\\mathbb{E}", u"\uD53D": "\\mathbb{F}", u"\uD53E": "\\mathbb{G}", u"\uD540": "\\mathbb{I}", u"\uD541": "\\mathbb{J}", u"\uD542": "\\mathbb{K}", u"\uD543": "\\mathbb{L}", u"\uD544": "\\mathbb{M}", u"\uD546": "\\mathbb{O}", u"\uD54A": "\\mathbb{S}", u"\uD54B": "\\mathbb{T}", u"\uD54C": "\\mathbb{U}", u"\uD54D": "\\mathbb{V}", u"\uD54E": "\\mathbb{W}", u"\uD54F": "\\mathbb{X}", u"\uD550": "\\mathbb{Y}", u"\uD552": "\\mathbb{a}", u"\uD553": "\\mathbb{b}", u"\uD554": "\\mathbb{c}", u"\uD555": "\\mathbb{d}", u"\uD556": "\\mathbb{e}", u"\uD557": "\\mathbb{f}", u"\uD558": "\\mathbb{g}", u"\uD559": "\\mathbb{h}", u"\uD55A": "\\mathbb{i}", u"\uD55B": "\\mathbb{j}", u"\uD55C": "\\mathbb{k}", u"\uD55D": "\\mathbb{l}", u"\uD55E": "\\mathbb{m}", u"\uD55F": "\\mathbb{n}", u"\uD560": "\\mathbb{o}", u"\uD561": "\\mathbb{p}", u"\uD562": "\\mathbb{q}", u"\uD563": "\\mathbb{r}", u"\uD564": "\\mathbb{s}", u"\uD565": "\\mathbb{t}", u"\uD566": "\\mathbb{u}", u"\uD567": "\\mathbb{v}", u"\uD568": "\\mathbb{w}", u"\uD569": "\\mathbb{x}", u"\uD56A": "\\mathbb{y}", u"\uD56B": "\\mathbb{z}", u"\uD56C": "\\mathslbb{A}", u"\uD56D": "\\mathslbb{B}", u"\uD56E": "\\mathslbb{C}", u"\uD56F": "\\mathslbb{D}", u"\uD570": "\\mathslbb{E}", u"\uD571": "\\mathslbb{F}", u"\uD572": "\\mathslbb{G}", u"\uD573": "\\mathslbb{H}", u"\uD574": "\\mathslbb{I}", u"\uD575": "\\mathslbb{J}", u"\uD576": "\\mathslbb{K}", u"\uD577": "\\mathslbb{L}", u"\uD578": "\\mathslbb{M}", u"\uD579": "\\mathslbb{N}", u"\uD57A": "\\mathslbb{O}", u"\uD57B": "\\mathslbb{P}", u"\uD57C": "\\mathslbb{Q}", u"\uD57D": "\\mathslbb{R}", u"\uD57E": "\\mathslbb{S}", u"\uD57F": "\\mathslbb{T}", u"\uD580": "\\mathslbb{U}", u"\uD581": "\\mathslbb{V}", u"\uD582": "\\mathslbb{W}", u"\uD583": "\\mathslbb{X}", u"\uD584": "\\mathslbb{Y}", u"\uD585": "\\mathslbb{Z}", u"\uD586": "\\mathslbb{a}", u"\uD587": "\\mathslbb{b}", u"\uD588": "\\mathslbb{c}", u"\uD589": "\\mathslbb{d}", u"\uD58A": "\\mathslbb{e}", u"\uD58B": "\\mathslbb{f}", u"\uD58C": "\\mathslbb{g}", u"\uD58D": "\\mathslbb{h}", u"\uD58E": "\\mathslbb{i}", u"\uD58F": "\\mathslbb{j}", u"\uD590": "\\mathslbb{k}", u"\uD591": "\\mathslbb{l}", u"\uD592": "\\mathslbb{m}", u"\uD593": "\\mathslbb{n}", u"\uD594": "\\mathslbb{o}", u"\uD595": "\\mathslbb{p}", u"\uD596": "\\mathslbb{q}", u"\uD597": "\\mathslbb{r}", u"\uD598": "\\mathslbb{s}", u"\uD599": "\\mathslbb{t}", u"\uD59A": "\\mathslbb{u}", u"\uD59B": "\\mathslbb{v}", u"\uD59C": "\\mathslbb{w}", u"\uD59D": "\\mathslbb{x}", u"\uD59E": "\\mathslbb{y}", u"\uD59F": "\\mathslbb{z}", u"\uD5A0": "\\mathsf{A}", u"\uD5A1": "\\mathsf{B}", u"\uD5A2": "\\mathsf{C}", u"\uD5A3": "\\mathsf{D}", u"\uD5A4": "\\mathsf{E}", u"\uD5A5": "\\mathsf{F}", u"\uD5A6": "\\mathsf{G}", u"\uD5A7": "\\mathsf{H}", u"\uD5A8": "\\mathsf{I}", u"\uD5A9": "\\mathsf{J}", u"\uD5AA": "\\mathsf{K}", u"\uD5AB": "\\mathsf{L}", u"\uD5AC": "\\mathsf{M}", u"\uD5AD": "\\mathsf{N}", u"\uD5AE": "\\mathsf{O}", u"\uD5AF": "\\mathsf{P}", u"\uD5B0": "\\mathsf{Q}", u"\uD5B1": "\\mathsf{R}", u"\uD5B2": "\\mathsf{S}", u"\uD5B3": "\\mathsf{T}", u"\uD5B4": "\\mathsf{U}", u"\uD5B5": "\\mathsf{V}", u"\uD5B6": "\\mathsf{W}", u"\uD5B7": "\\mathsf{X}", u"\uD5B8": "\\mathsf{Y}", u"\uD5B9": "\\mathsf{Z}", u"\uD5BA": "\\mathsf{a}", u"\uD5BB": "\\mathsf{b}", u"\uD5BC": "\\mathsf{c}", u"\uD5BD": "\\mathsf{d}", u"\uD5BE": "\\mathsf{e}", u"\uD5BF": "\\mathsf{f}", u"\uD5C0": "\\mathsf{g}", u"\uD5C1": "\\mathsf{h}", u"\uD5C2": "\\mathsf{i}", u"\uD5C3": "\\mathsf{j}", u"\uD5C4": "\\mathsf{k}", u"\uD5C5": "\\mathsf{l}", u"\uD5C6": "\\mathsf{m}", u"\uD5C7": "\\mathsf{n}", u"\uD5C8": "\\mathsf{o}", u"\uD5C9": "\\mathsf{p}", u"\uD5CA": "\\mathsf{q}", u"\uD5CB": "\\mathsf{r}", u"\uD5CC": "\\mathsf{s}", u"\uD5CD": "\\mathsf{t}", u"\uD5CE": "\\mathsf{u}", u"\uD5CF": "\\mathsf{v}", u"\uD5D0": "\\mathsf{w}", u"\uD5D1": "\\mathsf{x}", u"\uD5D2": "\\mathsf{y}", u"\uD5D3": "\\mathsf{z}", u"\uD5D4": "\\mathsfbf{A}", u"\uD5D5": "\\mathsfbf{B}", u"\uD5D6": "\\mathsfbf{C}", u"\uD5D7": "\\mathsfbf{D}", u"\uD5D8": "\\mathsfbf{E}", u"\uD5D9": "\\mathsfbf{F}", u"\uD5DA": "\\mathsfbf{G}", u"\uD5DB": "\\mathsfbf{H}", u"\uD5DC": "\\mathsfbf{I}", u"\uD5DD": "\\mathsfbf{J}", u"\uD5DE": "\\mathsfbf{K}", u"\uD5DF": "\\mathsfbf{L}", u"\uD5E0": "\\mathsfbf{M}", u"\uD5E1": "\\mathsfbf{N}", u"\uD5E2": "\\mathsfbf{O}", u"\uD5E3": "\\mathsfbf{P}", u"\uD5E4": "\\mathsfbf{Q}", u"\uD5E5": "\\mathsfbf{R}", u"\uD5E6": "\\mathsfbf{S}", u"\uD5E7": "\\mathsfbf{T}", u"\uD5E8": "\\mathsfbf{U}", u"\uD5E9": "\\mathsfbf{V}", u"\uD5EA": "\\mathsfbf{W}", u"\uD5EB": "\\mathsfbf{X}", u"\uD5EC": "\\mathsfbf{Y}", u"\uD5ED": "\\mathsfbf{Z}", u"\uD5EE": "\\mathsfbf{a}", u"\uD5EF": "\\mathsfbf{b}", u"\uD5F0": "\\mathsfbf{c}", u"\uD5F1": "\\mathsfbf{d}", u"\uD5F2": "\\mathsfbf{e}", u"\uD5F3": "\\mathsfbf{f}", u"\uD5F4": "\\mathsfbf{g}", u"\uD5F5": "\\mathsfbf{h}", u"\uD5F6": "\\mathsfbf{i}", u"\uD5F7": "\\mathsfbf{j}", u"\uD5F8": "\\mathsfbf{k}", u"\uD5F9": "\\mathsfbf{l}", u"\uD5FA": "\\mathsfbf{m}", u"\uD5FB": "\\mathsfbf{n}", u"\uD5FC": "\\mathsfbf{o}", u"\uD5FD": "\\mathsfbf{p}", u"\uD5FE": "\\mathsfbf{q}", u"\uD5FF": "\\mathsfbf{r}", u"\uD600": "\\mathsfbf{s}", u"\uD601": "\\mathsfbf{t}", u"\uD602": "\\mathsfbf{u}", u"\uD603": "\\mathsfbf{v}", u"\uD604": "\\mathsfbf{w}", u"\uD605": "\\mathsfbf{x}", u"\uD606": "\\mathsfbf{y}", u"\uD607": "\\mathsfbf{z}", u"\uD608": "\\mathsfsl{A}", u"\uD609": "\\mathsfsl{B}", u"\uD60A": "\\mathsfsl{C}", u"\uD60B": "\\mathsfsl{D}", u"\uD60C": "\\mathsfsl{E}", u"\uD60D": "\\mathsfsl{F}", u"\uD60E": "\\mathsfsl{G}", u"\uD60F": "\\mathsfsl{H}", u"\uD610": "\\mathsfsl{I}", u"\uD611": "\\mathsfsl{J}", u"\uD612": "\\mathsfsl{K}", u"\uD613": "\\mathsfsl{L}", u"\uD614": "\\mathsfsl{M}", u"\uD615": "\\mathsfsl{N}", u"\uD616": "\\mathsfsl{O}", u"\uD617": "\\mathsfsl{P}", u"\uD618": "\\mathsfsl{Q}", u"\uD619": "\\mathsfsl{R}", u"\uD61A": "\\mathsfsl{S}", u"\uD61B": "\\mathsfsl{T}", u"\uD61C": "\\mathsfsl{U}", u"\uD61D": "\\mathsfsl{V}", u"\uD61E": "\\mathsfsl{W}", u"\uD61F": "\\mathsfsl{X}", u"\uD620": "\\mathsfsl{Y}", u"\uD621": "\\mathsfsl{Z}", u"\uD622": "\\mathsfsl{a}", u"\uD623": "\\mathsfsl{b}", u"\uD624": "\\mathsfsl{c}", u"\uD625": "\\mathsfsl{d}", u"\uD626": "\\mathsfsl{e}", u"\uD627": "\\mathsfsl{f}", u"\uD628": "\\mathsfsl{g}", u"\uD629": "\\mathsfsl{h}", u"\uD62A": "\\mathsfsl{i}", u"\uD62B": "\\mathsfsl{j}", u"\uD62C": "\\mathsfsl{k}", u"\uD62D": "\\mathsfsl{l}", u"\uD62E": "\\mathsfsl{m}", u"\uD62F": "\\mathsfsl{n}", u"\uD630": "\\mathsfsl{o}", u"\uD631": "\\mathsfsl{p}", u"\uD632": "\\mathsfsl{q}", u"\uD633": "\\mathsfsl{r}", u"\uD634": "\\mathsfsl{s}", u"\uD635": "\\mathsfsl{t}", u"\uD636": "\\mathsfsl{u}", u"\uD637": "\\mathsfsl{v}", u"\uD638": "\\mathsfsl{w}", u"\uD639": "\\mathsfsl{x}", u"\uD63A": "\\mathsfsl{y}", u"\uD63B": "\\mathsfsl{z}", u"\uD63C": "\\mathsfbfsl{A}", u"\uD63D": "\\mathsfbfsl{B}", u"\uD63E": "\\mathsfbfsl{C}", u"\uD63F": "\\mathsfbfsl{D}", u"\uD640": "\\mathsfbfsl{E}", u"\uD641": "\\mathsfbfsl{F}", u"\uD642": "\\mathsfbfsl{G}", u"\uD643": "\\mathsfbfsl{H}", u"\uD644": "\\mathsfbfsl{I}", u"\uD645": "\\mathsfbfsl{J}", u"\uD646": "\\mathsfbfsl{K}", u"\uD647": "\\mathsfbfsl{L}", u"\uD648": "\\mathsfbfsl{M}", u"\uD649": "\\mathsfbfsl{N}", u"\uD64A": "\\mathsfbfsl{O}", u"\uD64B": "\\mathsfbfsl{P}", u"\uD64C": "\\mathsfbfsl{Q}", u"\uD64D": "\\mathsfbfsl{R}", u"\uD64E": "\\mathsfbfsl{S}", u"\uD64F": "\\mathsfbfsl{T}", u"\uD650": "\\mathsfbfsl{U}", u"\uD651": "\\mathsfbfsl{V}", u"\uD652": "\\mathsfbfsl{W}", u"\uD653": "\\mathsfbfsl{X}", u"\uD654": "\\mathsfbfsl{Y}", u"\uD655": "\\mathsfbfsl{Z}", u"\uD656": "\\mathsfbfsl{a}", u"\uD657": "\\mathsfbfsl{b}", u"\uD658": "\\mathsfbfsl{c}", u"\uD659": "\\mathsfbfsl{d}", u"\uD65A": "\\mathsfbfsl{e}", u"\uD65B": "\\mathsfbfsl{f}", u"\uD65C": "\\mathsfbfsl{g}", u"\uD65D": "\\mathsfbfsl{h}", u"\uD65E": "\\mathsfbfsl{i}", u"\uD65F": "\\mathsfbfsl{j}", u"\uD660": "\\mathsfbfsl{k}", u"\uD661": "\\mathsfbfsl{l}", u"\uD662": "\\mathsfbfsl{m}", u"\uD663": "\\mathsfbfsl{n}", u"\uD664": "\\mathsfbfsl{o}", u"\uD665": "\\mathsfbfsl{p}", u"\uD666": "\\mathsfbfsl{q}", u"\uD667": "\\mathsfbfsl{r}", u"\uD668": "\\mathsfbfsl{s}", u"\uD669": "\\mathsfbfsl{t}", u"\uD66A": "\\mathsfbfsl{u}", u"\uD66B": "\\mathsfbfsl{v}", u"\uD66C": "\\mathsfbfsl{w}", u"\uD66D": "\\mathsfbfsl{x}", u"\uD66E": "\\mathsfbfsl{y}", u"\uD66F": "\\mathsfbfsl{z}", u"\uD670": "\\mathtt{A}", u"\uD671": "\\mathtt{B}", u"\uD672": "\\mathtt{C}", u"\uD673": "\\mathtt{D}", u"\uD674": "\\mathtt{E}", u"\uD675": "\\mathtt{F}", u"\uD676": "\\mathtt{G}", u"\uD677": "\\mathtt{H}", u"\uD678": "\\mathtt{I}", u"\uD679": "\\mathtt{J}", u"\uD67A": "\\mathtt{K}", u"\uD67B": "\\mathtt{L}", u"\uD67C": "\\mathtt{M}", u"\uD67D": "\\mathtt{N}", u"\uD67E": "\\mathtt{O}", u"\uD67F": "\\mathtt{P}", u"\uD680": "\\mathtt{Q}", u"\uD681": "\\mathtt{R}", u"\uD682": "\\mathtt{S}", u"\uD683": "\\mathtt{T}", u"\uD684": "\\mathtt{U}", u"\uD685": "\\mathtt{V}", u"\uD686": "\\mathtt{W}", u"\uD687": "\\mathtt{X}", u"\uD688": "\\mathtt{Y}", u"\uD689": "\\mathtt{Z}", u"\uD68A": "\\mathtt{a}", u"\uD68B": "\\mathtt{b}", u"\uD68C": "\\mathtt{c}", u"\uD68D": "\\mathtt{d}", u"\uD68E": "\\mathtt{e}", u"\uD68F": "\\mathtt{f}", u"\uD690": "\\mathtt{g}", u"\uD691": "\\mathtt{h}", u"\uD692": "\\mathtt{i}", u"\uD693": "\\mathtt{j}", u"\uD694": "\\mathtt{k}", u"\uD695": "\\mathtt{l}", u"\uD696": "\\mathtt{m}", u"\uD697": "\\mathtt{n}", u"\uD698": "\\mathtt{o}", u"\uD699": "\\mathtt{p}", u"\uD69A": "\\mathtt{q}", u"\uD69B": "\\mathtt{r}", u"\uD69C": "\\mathtt{s}", u"\uD69D": "\\mathtt{t}", u"\uD69E": "\\mathtt{u}", u"\uD69F": "\\mathtt{v}", u"\uD6A0": "\\mathtt{w}", u"\uD6A1": "\\mathtt{x}", u"\uD6A2": "\\mathtt{y}", u"\uD6A3": "\\mathtt{z}", u"\uD6A8": "\\mathbf{\\Alpha}", u"\uD6A9": "\\mathbf{\\Beta}", u"\uD6AA": "\\mathbf{\\Gamma}", u"\uD6AB": "\\mathbf{\\Delta}", u"\uD6AC": "\\mathbf{\\Epsilon}", u"\uD6AD": "\\mathbf{\\Zeta}", u"\uD6AE": "\\mathbf{\\Eta}", u"\uD6AF": "\\mathbf{\\Theta}", u"\uD6B0": "\\mathbf{\\Iota}", u"\uD6B1": "\\mathbf{\\Kappa}", u"\uD6B2": "\\mathbf{\\Lambda}", u"\uD6B5": "\\mathbf{\\Xi}", u"\uD6B7": "\\mathbf{\\Pi}", u"\uD6B8": "\\mathbf{\\Rho}", u"\uD6B9": "\\mathbf{\\vartheta}", u"\uD6BA": "\\mathbf{\\Sigma}", u"\uD6BB": "\\mathbf{\\Tau}", u"\uD6BC": "\\mathbf{\\Upsilon}", u"\uD6BD": "\\mathbf{\\Phi}", u"\uD6BE": "\\mathbf{\\Chi}", u"\uD6BF": "\\mathbf{\\Psi}", u"\uD6C0": "\\mathbf{\\Omega}", u"\uD6C1": "\\mathbf{\\nabla}", u"\uD6C2": "\\mathbf{\\Alpha}", u"\uD6C3": "\\mathbf{\\Beta}", u"\uD6C4": "\\mathbf{\\Gamma}", u"\uD6C5": "\\mathbf{\\Delta}", u"\uD6C6": "\\mathbf{\\Epsilon}", u"\uD6C7": "\\mathbf{\\Zeta}", u"\uD6C8": "\\mathbf{\\Eta}", u"\uD6C9": "\\mathbf{\\theta}", u"\uD6CA": "\\mathbf{\\Iota}", u"\uD6CB": "\\mathbf{\\Kappa}", u"\uD6CC": "\\mathbf{\\Lambda}", u"\uD6CF": "\\mathbf{\\Xi}", u"\uD6D1": "\\mathbf{\\Pi}", u"\uD6D2": "\\mathbf{\\Rho}", u"\uD6D3": "\\mathbf{\\varsigma}", u"\uD6D4": "\\mathbf{\\Sigma}", u"\uD6D5": "\\mathbf{\\Tau}", u"\uD6D6": "\\mathbf{\\Upsilon}", u"\uD6D7": "\\mathbf{\\Phi}", u"\uD6D8": "\\mathbf{\\Chi}", u"\uD6D9": "\\mathbf{\\Psi}", u"\uD6DA": "\\mathbf{\\Omega}", u"\uD6DB": "\\partial ", u"\uD6DC": "\\in", u"\uD6DD": "\\mathbf{\\vartheta}", u"\uD6DE": "\\mathbf{\\varkappa}", u"\uD6DF": "\\mathbf{\\phi}", u"\uD6E0": "\\mathbf{\\varrho}", u"\uD6E1": "\\mathbf{\\varpi}", u"\uD6E2": "\\mathsl{\\Alpha}", u"\uD6E3": "\\mathsl{\\Beta}", u"\uD6E4": "\\mathsl{\\Gamma}", u"\uD6E5": "\\mathsl{\\Delta}", u"\uD6E6": "\\mathsl{\\Epsilon}", u"\uD6E7": "\\mathsl{\\Zeta}", u"\uD6E8": "\\mathsl{\\Eta}", u"\uD6E9": "\\mathsl{\\Theta}", u"\uD6EA": "\\mathsl{\\Iota}", u"\uD6EB": "\\mathsl{\\Kappa}", u"\uD6EC": "\\mathsl{\\Lambda}", u"\uD6EF": "\\mathsl{\\Xi}", u"\uD6F1": "\\mathsl{\\Pi}", u"\uD6F2": "\\mathsl{\\Rho}", u"\uD6F3": "\\mathsl{\\vartheta}", u"\uD6F4": "\\mathsl{\\Sigma}", u"\uD6F5": "\\mathsl{\\Tau}", u"\uD6F6": "\\mathsl{\\Upsilon}", u"\uD6F7": "\\mathsl{\\Phi}", u"\uD6F8": "\\mathsl{\\Chi}", u"\uD6F9": "\\mathsl{\\Psi}", u"\uD6FA": "\\mathsl{\\Omega}", u"\uD6FB": "\\mathsl{\\nabla}", u"\uD6FC": "\\mathsl{\\Alpha}", u"\uD6FD": "\\mathsl{\\Beta}", u"\uD6FE": "\\mathsl{\\Gamma}", u"\uD6FF": "\\mathsl{\\Delta}", u"\uD700": "\\mathsl{\\Epsilon}", u"\uD701": "\\mathsl{\\Zeta}", u"\uD702": "\\mathsl{\\Eta}", u"\uD703": "\\mathsl{\\Theta}", u"\uD704": "\\mathsl{\\Iota}", u"\uD705": "\\mathsl{\\Kappa}", u"\uD706": "\\mathsl{\\Lambda}", u"\uD709": "\\mathsl{\\Xi}", u"\uD70B": "\\mathsl{\\Pi}", u"\uD70C": "\\mathsl{\\Rho}", u"\uD70D": "\\mathsl{\\varsigma}", u"\uD70E": "\\mathsl{\\Sigma}", u"\uD70F": "\\mathsl{\\Tau}", u"\uD710": "\\mathsl{\\Upsilon}", u"\uD711": "\\mathsl{\\Phi}", u"\uD712": "\\mathsl{\\Chi}", u"\uD713": "\\mathsl{\\Psi}", u"\uD714": "\\mathsl{\\Omega}", u"\uD715": "\\partial ", u"\uD716": "\\in", u"\uD717": "\\mathsl{\\vartheta}", u"\uD718": "\\mathsl{\\varkappa}", u"\uD719": "\\mathsl{\\phi}", u"\uD71A": "\\mathsl{\\varrho}", u"\uD71B": "\\mathsl{\\varpi}", u"\uD71C": "\\mathbit{\\Alpha}", u"\uD71D": "\\mathbit{\\Beta}", u"\uD71E": "\\mathbit{\\Gamma}", u"\uD71F": "\\mathbit{\\Delta}", u"\uD720": "\\mathbit{\\Epsilon}", u"\uD721": "\\mathbit{\\Zeta}", u"\uD722": "\\mathbit{\\Eta}", u"\uD723": "\\mathbit{\\Theta}", u"\uD724": "\\mathbit{\\Iota}", u"\uD725": "\\mathbit{\\Kappa}", u"\uD726": "\\mathbit{\\Lambda}", u"\uD729": "\\mathbit{\\Xi}", u"\uD72B": "\\mathbit{\\Pi}", u"\uD72C": "\\mathbit{\\Rho}", u"\uD72D": "\\mathbit{O}", u"\uD72E": "\\mathbit{\\Sigma}", u"\uD72F": "\\mathbit{\\Tau}", u"\uD730": "\\mathbit{\\Upsilon}", u"\uD731": "\\mathbit{\\Phi}", u"\uD732": "\\mathbit{\\Chi}", u"\uD733": "\\mathbit{\\Psi}", u"\uD734": "\\mathbit{\\Omega}", u"\uD735": "\\mathbit{\\nabla}", u"\uD736": "\\mathbit{\\Alpha}", u"\uD737": "\\mathbit{\\Beta}", u"\uD738": "\\mathbit{\\Gamma}", u"\uD739": "\\mathbit{\\Delta}", u"\uD73A": "\\mathbit{\\Epsilon}", u"\uD73B": "\\mathbit{\\Zeta}", u"\uD73C": "\\mathbit{\\Eta}", u"\uD73D": "\\mathbit{\\Theta}", u"\uD73E": "\\mathbit{\\Iota}", u"\uD73F": "\\mathbit{\\Kappa}", u"\uD740": "\\mathbit{\\Lambda}", u"\uD743": "\\mathbit{\\Xi}", u"\uD745": "\\mathbit{\\Pi}", u"\uD746": "\\mathbit{\\Rho}", u"\uD747": "\\mathbit{\\varsigma}", u"\uD748": "\\mathbit{\\Sigma}", u"\uD749": "\\mathbit{\\Tau}", u"\uD74A": "\\mathbit{\\Upsilon}", u"\uD74B": "\\mathbit{\\Phi}", u"\uD74C": "\\mathbit{\\Chi}", u"\uD74D": "\\mathbit{\\Psi}", u"\uD74E": "\\mathbit{\\Omega}", u"\uD74F": "\\partial ", u"\uD750": "\\in", u"\uD751": "\\mathbit{\\vartheta}", u"\uD752": "\\mathbit{\\varkappa}", u"\uD753": "\\mathbit{\\phi}", u"\uD754": "\\mathbit{\\varrho}", u"\uD755": "\\mathbit{\\varpi}", u"\uD756": "\\mathsfbf{\\Alpha}", u"\uD757": "\\mathsfbf{\\Beta}", u"\uD758": "\\mathsfbf{\\Gamma}", u"\uD759": "\\mathsfbf{\\Delta}", u"\uD75A": "\\mathsfbf{\\Epsilon}", u"\uD75B": "\\mathsfbf{\\Zeta}", u"\uD75C": "\\mathsfbf{\\Eta}", u"\uD75D": "\\mathsfbf{\\Theta}", u"\uD75E": "\\mathsfbf{\\Iota}", u"\uD75F": "\\mathsfbf{\\Kappa}", u"\uD760": "\\mathsfbf{\\Lambda}", u"\uD763": "\\mathsfbf{\\Xi}", u"\uD765": "\\mathsfbf{\\Pi}", u"\uD766": "\\mathsfbf{\\Rho}", u"\uD767": "\\mathsfbf{\\vartheta}", u"\uD768": "\\mathsfbf{\\Sigma}", u"\uD769": "\\mathsfbf{\\Tau}", u"\uD76A": "\\mathsfbf{\\Upsilon}", u"\uD76B": "\\mathsfbf{\\Phi}", u"\uD76C": "\\mathsfbf{\\Chi}", u"\uD76D": "\\mathsfbf{\\Psi}", u"\uD76E": "\\mathsfbf{\\Omega}", u"\uD76F": "\\mathsfbf{\\nabla}", u"\uD770": "\\mathsfbf{\\Alpha}", u"\uD771": "\\mathsfbf{\\Beta}", u"\uD772": "\\mathsfbf{\\Gamma}", u"\uD773": "\\mathsfbf{\\Delta}", u"\uD774": "\\mathsfbf{\\Epsilon}", u"\uD775": "\\mathsfbf{\\Zeta}", u"\uD776": "\\mathsfbf{\\Eta}", u"\uD777": "\\mathsfbf{\\Theta}", u"\uD778": "\\mathsfbf{\\Iota}", u"\uD779": "\\mathsfbf{\\Kappa}", u"\uD77A": "\\mathsfbf{\\Lambda}", u"\uD77D": "\\mathsfbf{\\Xi}", u"\uD77F": "\\mathsfbf{\\Pi}", u"\uD780": "\\mathsfbf{\\Rho}", u"\uD781": "\\mathsfbf{\\varsigma}", u"\uD782": "\\mathsfbf{\\Sigma}", u"\uD783": "\\mathsfbf{\\Tau}", u"\uD784": "\\mathsfbf{\\Upsilon}", u"\uD785": "\\mathsfbf{\\Phi}", u"\uD786": "\\mathsfbf{\\Chi}", u"\uD787": "\\mathsfbf{\\Psi}", u"\uD788": "\\mathsfbf{\\Omega}", u"\uD789": "\\partial ", u"\uD78A": "\\in", u"\uD78B": "\\mathsfbf{\\vartheta}", u"\uD78C": "\\mathsfbf{\\varkappa}", u"\uD78D": "\\mathsfbf{\\phi}", u"\uD78E": "\\mathsfbf{\\varrho}", u"\uD78F": "\\mathsfbf{\\varpi}", u"\uD790": "\\mathsfbfsl{\\Alpha}", u"\uD791": "\\mathsfbfsl{\\Beta}", u"\uD792": "\\mathsfbfsl{\\Gamma}", u"\uD793": "\\mathsfbfsl{\\Delta}", u"\uD794": "\\mathsfbfsl{\\Epsilon}", u"\uD795": "\\mathsfbfsl{\\Zeta}", u"\uD796": "\\mathsfbfsl{\\Eta}", u"\uD797": "\\mathsfbfsl{\\vartheta}", u"\uD798": "\\mathsfbfsl{\\Iota}", u"\uD799": "\\mathsfbfsl{\\Kappa}", u"\uD79A": "\\mathsfbfsl{\\Lambda}", u"\uD79D": "\\mathsfbfsl{\\Xi}", u"\uD79F": "\\mathsfbfsl{\\Pi}", u"\uD7A0": "\\mathsfbfsl{\\Rho}", u"\uD7A1": "\\mathsfbfsl{\\vartheta}", u"\uD7A2": "\\mathsfbfsl{\\Sigma}", u"\uD7A3": "\\mathsfbfsl{\\Tau}", u"\uD7A4": "\\mathsfbfsl{\\Upsilon}", u"\uD7A5": "\\mathsfbfsl{\\Phi}", u"\uD7A6": "\\mathsfbfsl{\\Chi}", u"\uD7A7": "\\mathsfbfsl{\\Psi}", u"\uD7A8": "\\mathsfbfsl{\\Omega}", u"\uD7A9": "\\mathsfbfsl{\\nabla}", u"\uD7AA": "\\mathsfbfsl{\\Alpha}", u"\uD7AB": "\\mathsfbfsl{\\Beta}", u"\uD7AC": "\\mathsfbfsl{\\Gamma}", u"\uD7AD": "\\mathsfbfsl{\\Delta}", u"\uD7AE": "\\mathsfbfsl{\\Epsilon}", u"\uD7AF": "\\mathsfbfsl{\\Zeta}", u"\uD7B0": "\\mathsfbfsl{\\Eta}", u"\uD7B1": "\\mathsfbfsl{\\vartheta}", u"\uD7B2": "\\mathsfbfsl{\\Iota}", u"\uD7B3": "\\mathsfbfsl{\\Kappa}", u"\uD7B4": "\\mathsfbfsl{\\Lambda}", u"\uD7B7": "\\mathsfbfsl{\\Xi}", u"\uD7B9": "\\mathsfbfsl{\\Pi}", u"\uD7BA": "\\mathsfbfsl{\\Rho}", u"\uD7BB": "\\mathsfbfsl{\\varsigma}", u"\uD7BC": "\\mathsfbfsl{\\Sigma}", u"\uD7BD": "\\mathsfbfsl{\\Tau}", u"\uD7BE": "\\mathsfbfsl{\\Upsilon}", u"\uD7BF": "\\mathsfbfsl{\\Phi}", u"\uD7C0": "\\mathsfbfsl{\\Chi}", u"\uD7C1": "\\mathsfbfsl{\\Psi}", u"\uD7C2": "\\mathsfbfsl{\\Omega}", u"\uD7C3": "\\partial ", u"\uD7C4": "\\in", u"\uD7C5": "\\mathsfbfsl{\\vartheta}", u"\uD7C6": "\\mathsfbfsl{\\varkappa}", u"\uD7C7": "\\mathsfbfsl{\\phi}", u"\uD7C8": "\\mathsfbfsl{\\varrho}", u"\uD7C9": "\\mathsfbfsl{\\varpi}", u"\uD7CE": "\\mathbf{0}", u"\uD7CF": "\\mathbf{1}", u"\uD7D0": "\\mathbf{2}", u"\uD7D1": "\\mathbf{3}", u"\uD7D2": "\\mathbf{4}", u"\uD7D3": "\\mathbf{5}", u"\uD7D4": "\\mathbf{6}", u"\uD7D5": "\\mathbf{7}", u"\uD7D6": "\\mathbf{8}", u"\uD7D7": "\\mathbf{9}", u"\uD7D8": "\\mathbb{0}", u"\uD7D9": "\\mathbb{1}", u"\uD7DA": "\\mathbb{2}", u"\uD7DB": "\\mathbb{3}", u"\uD7DC": "\\mathbb{4}", u"\uD7DD": "\\mathbb{5}", u"\uD7DE": "\\mathbb{6}", u"\uD7DF": "\\mathbb{7}", u"\uD7E0": "\\mathbb{8}", u"\uD7E1": "\\mathbb{9}", u"\uD7E2": "\\mathsf{0}", u"\uD7E3": "\\mathsf{1}", u"\uD7E4": "\\mathsf{2}", u"\uD7E5": "\\mathsf{3}", u"\uD7E6": "\\mathsf{4}", u"\uD7E7": "\\mathsf{5}", u"\uD7E8": "\\mathsf{6}", u"\uD7E9": "\\mathsf{7}", u"\uD7EA": "\\mathsf{8}", u"\uD7EB": "\\mathsf{9}", u"\uD7EC": "\\mathsfbf{0}", u"\uD7ED": "\\mathsfbf{1}", u"\uD7EE": "\\mathsfbf{2}", u"\uD7EF": "\\mathsfbf{3}", u"\uD7F0": "\\mathsfbf{4}", u"\uD7F1": "\\mathsfbf{5}", u"\uD7F2": "\\mathsfbf{6}", u"\uD7F3": "\\mathsfbf{7}", u"\uD7F4": "\\mathsfbf{8}", u"\uD7F5": "\\mathsfbf{9}", u"\uD7F6": "\\mathtt{0}", u"\uD7F7": "\\mathtt{1}", u"\uD7F8": "\\mathtt{2}", u"\uD7F9": "\\mathtt{3}", u"\uD7FA": "\\mathtt{4}", u"\uD7FB": "\\mathtt{5}", u"\uD7FC": "\\mathtt{6}", u"\uD7FD": "\\mathtt{7}", u"\uD7FE": "\\mathtt{8}", u"\uD7FF": "\\mathtt{9}", }
mit
nagyistoce/devide.johannes
extra/soappy-cvp/tests/largeDataTest.py
6
1091
#!/usr/bin/env python # Copyright (c) 2001 actzero, inc. All rights reserved. import sys sys.path.insert(1, "..") from SOAPpy import * from SOAPpy import Parser # Uncomment to see outgoing HTTP headers and SOAP and incoming #Config.debug = 1 if len(sys.argv) > 1 and sys.argv[1] == '-s': server = SOAPProxy("https://localhost:9900") else: server = SOAPProxy("http://localhost:9900") # BIG data: big = repr('.' * (1<<18) ) # ...in an object print "server.echo_ino(big):..", tmp = server.echo_ino(big) print "done" # ...in an object in an object print "server.prop.echo2(big)..", tmp = server.prop.echo2(big) print "done" # ...with keyword arguments print 'server.echo_wkw(third = big, first = "one", second = "two")..', tmp = server.echo_wkw(third = big, first = "one", second = "two") print "done" # ...with a context object print "server.echo_wc(big)..", tmp = server.echo_wc(big) print "done" # ...with a header hd = headerType(data = {"mystring": "Hello World"}) print "server._hd(hd).echo_wc(big)..", tmp = server._hd(hd).echo_wc(big) print "done" server.quit()
bsd-3-clause
pilou-/ansible
lib/ansible/modules/network/f5/bigip_device_httpd.py
14
22262
#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright: (c) 2017, F5 Networks Inc. # GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['stableinterface'], 'supported_by': 'certified'} DOCUMENTATION = r''' --- module: bigip_device_httpd short_description: Manage HTTPD related settings on BIG-IP description: - Manages HTTPD related settings on the BIG-IP. These settings are interesting to change when you want to set GUI timeouts and other TMUI related settings. version_added: 2.5 options: allow: description: - Specifies, if you have enabled HTTPD access, the IP address or address range for other systems that can communicate with this system. - To specify all addresses, use the value C(all). - IP address can be specified, such as 172.27.1.10. - IP rangees can be specified, such as 172.27.*.* or 172.27.0.0/255.255.0.0. type: list auth_name: description: - Sets the BIG-IP authentication realm name. type: str auth_pam_idle_timeout: description: - Sets the GUI timeout for automatic logout, in seconds. type: int auth_pam_validate_ip: description: - Sets the authPamValidateIp setting. type: bool auth_pam_dashboard_timeout: description: - Sets whether or not the BIG-IP dashboard will timeout. type: bool fast_cgi_timeout: description: - Sets the timeout of FastCGI. type: int hostname_lookup: description: - Sets whether or not to display the hostname, if possible. type: bool log_level: description: - Sets the minimum httpd log level. type: str choices: - alert - crit - debug - emerg - error - info - notice - warn max_clients: description: - Sets the maximum number of clients that can connect to the GUI at once. type: int redirect_http_to_https: description: - Whether or not to redirect http requests to the GUI to https. type: bool ssl_port: description: - The HTTPS port to listen on. type: int ssl_cipher_suite: description: - Specifies the ciphers that the system uses. - The values in the suite are separated by colons (:). - Can be specified in either a string or list form. The list form is the recommended way to provide the cipher suite. See examples for usage. - Use the value C(default) to set the cipher suite to the system default. This value is equivalent to specifying a list of C(ECDHE-RSA-AES128-GCM-SHA256, ECDHE-RSA-AES256-GCM-SHA384,ECDHE-RSA-AES128-SHA,ECDHE-RSA-AES256-SHA, ECDHE-RSA-AES128-SHA256,ECDHE-RSA-AES256-SHA384,ECDHE-ECDSA-AES128-GCM-SHA256, ECDHE-ECDSA-AES256-GCM-SHA384,ECDHE-ECDSA-AES128-SHA,ECDHE-ECDSA-AES256-SHA, ECDHE-ECDSA-AES128-SHA256,ECDHE-ECDSA-AES256-SHA384,AES128-GCM-SHA256, AES256-GCM-SHA384,AES128-SHA,AES256-SHA,AES128-SHA256,AES256-SHA256, ECDHE-RSA-DES-CBC3-SHA,ECDHE-ECDSA-DES-CBC3-SHA,DES-CBC3-SHA). type: raw version_added: 2.6 ssl_protocols: description: - The list of SSL protocols to accept on the management console. - A space-separated list of tokens in the format accepted by the Apache mod_ssl SSLProtocol directive. - Can be specified in either a string or list form. The list form is the recommended way to provide the cipher suite. See examples for usage. - Use the value C(default) to set the SSL protocols to the system default. This value is equivalent to specifying a list of C(all,-SSLv2,-SSLv3). type: raw version_added: 2.6 notes: - Requires the requests Python package on the host. This is as easy as C(pip install requests). requirements: - requests extends_documentation_fragment: f5 author: - Joe Reifel (@JoeReifel) - Tim Rupp (@caphrim007) ''' EXAMPLES = r''' - name: Set the BIG-IP authentication realm name bigip_device_httpd: auth_name: BIG-IP provider: password: secret server: lb.mydomain.com user: admin delegate_to: localhost - name: Set the auth pam timeout to 3600 seconds bigip_device_httpd: auth_pam_idle_timeout: 1200 provider: password: secret server: lb.mydomain.com user: admin delegate_to: localhost - name: Set the validate IP settings bigip_device_httpd: auth_pam_validate_ip: on provider: password: secret server: lb.mydomain.com user: admin delegate_to: localhost - name: Set SSL cipher suite by list bigip_device_httpd: ssl_cipher_suite: - ECDHE-RSA-AES128-GCM-SHA256 - ECDHE-RSA-AES256-GCM-SHA384 - ECDHE-RSA-AES128-SHA - AES256-SHA256 provider: password: secret server: lb.mydomain.com user: admin delegate_to: localhost - name: Set SSL cipher suite by string bigip_device_httpd: ssl_cipher_suite: ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-SHA:AES256-SHA256 provider: password: secret server: lb.mydomain.com user: admin delegate_to: localhost - name: Set SSL protocols by list bigip_device_httpd: ssl_protocols: - all - -SSLv2 - -SSLv3 provider: password: secret server: lb.mydomain.com user: admin delegate_to: localhost - name: Set SSL protocols by string bigip_device_httpd: ssl_protocols: all -SSLv2 -SSLv3 provider: password: secret server: lb.mydomain.com user: admin delegate_to: localhost ''' RETURN = r''' auth_pam_idle_timeout: description: The new number of seconds for GUI timeout. returned: changed type: str sample: 1200 auth_name: description: The new authentication realm name. returned: changed type: str sample: 'foo' auth_pam_validate_ip: description: The new authPamValidateIp setting. returned: changed type: bool sample: on auth_pam_dashboard_timeout: description: Whether or not the BIG-IP dashboard will timeout. returned: changed type: bool sample: off fast_cgi_timeout: description: The new timeout of FastCGI. returned: changed type: int sample: 500 hostname_lookup: description: Whether or not to display the hostname, if possible. returned: changed type: bool sample: on log_level: description: The new minimum httpd log level. returned: changed type: str sample: crit max_clients: description: The new maximum number of clients that can connect to the GUI at once. returned: changed type: int sample: 20 redirect_http_to_https: description: Whether or not to redirect http requests to the GUI to https. returned: changed type: bool sample: on ssl_port: description: The new HTTPS port to listen on. returned: changed type: int sample: 10443 ssl_cipher_suite: description: The new ciphers that the system uses. returned: changed type: str sample: ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-SHA ssl_cipher_suite_list: description: List of the new ciphers that the system uses. returned: changed type: str sample: ['ECDHE-RSA-AES256-GCM-SHA384', 'ECDHE-RSA-AES128-SHA'] ssl_protocols: description: The new list of SSL protocols to accept on the management console. returned: changed type: str sample: all -SSLv2 -SSLv3 ''' import time from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.six import string_types try: from library.module_utils.network.f5.bigip import F5RestClient from library.module_utils.network.f5.common import F5ModuleError from library.module_utils.network.f5.common import AnsibleF5Parameters from library.module_utils.network.f5.common import f5_argument_spec except ImportError: from ansible.module_utils.network.f5.bigip import F5RestClient from ansible.module_utils.network.f5.common import F5ModuleError from ansible.module_utils.network.f5.common import AnsibleF5Parameters from ansible.module_utils.network.f5.common import f5_argument_spec class Parameters(AnsibleF5Parameters): api_map = { 'authPamIdleTimeout': 'auth_pam_idle_timeout', 'authPamValidateIp': 'auth_pam_validate_ip', 'authName': 'auth_name', 'authPamDashboardTimeout': 'auth_pam_dashboard_timeout', 'fastcgiTimeout': 'fast_cgi_timeout', 'hostnameLookup': 'hostname_lookup', 'logLevel': 'log_level', 'maxClients': 'max_clients', 'redirectHttpToHttps': 'redirect_http_to_https', 'sslPort': 'ssl_port', 'sslCiphersuite': 'ssl_cipher_suite', 'sslProtocol': 'ssl_protocols' } api_attributes = [ 'authPamIdleTimeout', 'authPamValidateIp', 'authName', 'authPamDashboardTimeout', 'fastcgiTimeout', 'hostnameLookup', 'logLevel', 'maxClients', 'sslPort', 'redirectHttpToHttps', 'allow', 'sslCiphersuite', 'sslProtocol' ] returnables = [ 'auth_pam_idle_timeout', 'auth_pam_validate_ip', 'auth_name', 'auth_pam_dashboard_timeout', 'fast_cgi_timeout', 'hostname_lookup', 'log_level', 'max_clients', 'redirect_http_to_https', 'ssl_port', 'allow', 'ssl_cipher_suite', 'ssl_protocols', 'ssl_cipher_suite_list', ] updatables = [ 'auth_pam_idle_timeout', 'auth_pam_validate_ip', 'auth_name', 'auth_pam_dashboard_timeout', 'fast_cgi_timeout', 'hostname_lookup', 'log_level', 'max_clients', 'redirect_http_to_https', 'ssl_port', 'allow', 'ssl_cipher_suite', 'ssl_protocols' ] _ciphers = "ECDHE-RSA-AES128-GCM-SHA256:" \ "ECDHE-RSA-AES256-GCM-SHA384:" \ "ECDHE-RSA-AES128-SHA:" \ "ECDHE-RSA-AES256-SHA:" \ "ECDHE-RSA-AES128-SHA256:" \ "ECDHE-RSA-AES256-SHA384:" \ "ECDHE-ECDSA-AES128-GCM-SHA256:" \ "ECDHE-ECDSA-AES256-GCM-SHA384:" \ "ECDHE-ECDSA-AES128-SHA:" \ "ECDHE-ECDSA-AES256-SHA:" \ "ECDHE-ECDSA-AES128-SHA256:" \ "ECDHE-ECDSA-AES256-SHA384:" \ "AES128-GCM-SHA256:" \ "AES256-GCM-SHA384:" \ "AES128-SHA:" \ "AES256-SHA:" \ "AES128-SHA256:" \ "AES256-SHA256:" \ "ECDHE-RSA-DES-CBC3-SHA:" \ "ECDHE-ECDSA-DES-CBC3-SHA:" \ "DES-CBC3-SHA" _protocols = 'all -SSLv2 -SSLv3' @property def auth_pam_idle_timeout(self): if self._values['auth_pam_idle_timeout'] is None: return None return int(self._values['auth_pam_idle_timeout']) @property def fast_cgi_timeout(self): if self._values['fast_cgi_timeout'] is None: return None return int(self._values['fast_cgi_timeout']) @property def max_clients(self): if self._values['max_clients'] is None: return None return int(self._values['max_clients']) @property def ssl_port(self): if self._values['ssl_port'] is None: return None return int(self._values['ssl_port']) class ModuleParameters(Parameters): @property def auth_pam_validate_ip(self): if self._values['auth_pam_validate_ip'] is None: return None if self._values['auth_pam_validate_ip']: return "on" return "off" @property def auth_pam_dashboard_timeout(self): if self._values['auth_pam_dashboard_timeout'] is None: return None if self._values['auth_pam_dashboard_timeout']: return "on" return "off" @property def hostname_lookup(self): if self._values['hostname_lookup'] is None: return None if self._values['hostname_lookup']: return "on" return "off" @property def redirect_http_to_https(self): if self._values['redirect_http_to_https'] is None: return None if self._values['redirect_http_to_https']: return "enabled" return "disabled" @property def allow(self): if self._values['allow'] is None: return None if self._values['allow'][0] == 'all': return 'all' if self._values['allow'][0] == '': return '' allow = self._values['allow'] result = list(set([str(x) for x in allow])) result = sorted(result) return result @property def ssl_cipher_suite(self): if self._values['ssl_cipher_suite'] is None: return None if isinstance(self._values['ssl_cipher_suite'], string_types): ciphers = self._values['ssl_cipher_suite'].strip() else: ciphers = self._values['ssl_cipher_suite'] if not ciphers: raise F5ModuleError( "ssl_cipher_suite may not be set to 'none'" ) if ciphers == 'default': ciphers = ':'.join(Parameters._ciphers.split(':')) elif isinstance(self._values['ssl_cipher_suite'], string_types): ciphers = ':'.join(ciphers.split(':')) else: ciphers = ':'.join(ciphers) return ciphers @property def ssl_protocols(self): if self._values['ssl_protocols'] is None: return None if isinstance(self._values['ssl_protocols'], string_types): protocols = self._values['ssl_protocols'].strip() else: protocols = self._values['ssl_protocols'] if not protocols: raise F5ModuleError( "ssl_protocols may not be set to 'none'" ) if protocols == 'default': protocols = ' '.join(sorted(Parameters._protocols.split(' '))) elif isinstance(protocols, string_types): protocols = ' '.join(sorted(protocols.split(' '))) else: protocols = ' '.join(sorted(protocols)) return protocols class ApiParameters(Parameters): @property def allow(self): if self._values['allow'] is None: return '' if self._values['allow'][0] == 'All': return 'all' allow = self._values['allow'] result = list(set([str(x) for x in allow])) result = sorted(result) return result class Changes(Parameters): def to_return(self): result = {} try: for returnable in self.returnables: result[returnable] = getattr(self, returnable) result = self._filter_params(result) except Exception: pass return result class UsableChanges(Changes): pass class ReportableChanges(Changes): @property def ssl_cipher_suite(self): default = ':'.join(Parameters._ciphers.split(':')) if self._values['ssl_cipher_suite'] == default: return 'default' else: return self._values['ssl_cipher_suite'] @property def ssl_cipher_suite_list(self): return self._values['ssl_cipher_suite'].split(':') @property def ssl_protocols(self): default = ' '.join(sorted(Parameters._protocols.split(' '))) if self._values['ssl_protocols'] == default: return 'default' else: return self._values['ssl_protocols'] class Difference(object): def __init__(self, want, have=None): self.want = want self.have = have def compare(self, param): try: result = getattr(self, param) return result except AttributeError: return self.__default(param) def __default(self, param): attr1 = getattr(self.want, param) try: attr2 = getattr(self.have, param) if attr1 != attr2: return attr1 except AttributeError: return attr1 @property def allow(self): if self.want.allow is None: return None if self.want.allow == 'all' and self.have.allow == 'all': return None if self.want.allow == 'all': return ['All'] if self.want.allow == '' and self.have.allow == '': return None if self.want.allow == '': return [] if self.want.allow != self.have.allow: return self.want.allow class ModuleManager(object): def __init__(self, *args, **kwargs): self.module = kwargs.get('module', None) self.client = F5RestClient(**self.module.params) self.want = ModuleParameters(params=self.module.params) self.have = ApiParameters() self.changes = UsableChanges() def _set_changed_options(self): changed = {} for key in Parameters.returnables: if getattr(self.want, key) is not None: changed[key] = getattr(self.want, key) if changed: self.changes = Changes(params=changed) def _update_changed_options(self): diff = Difference(self.want, self.have) updatables = Parameters.updatables changed = dict() for k in updatables: change = diff.compare(k) if change is None: continue else: if isinstance(change, dict): changed.update(change) else: changed[k] = change if changed: self.changes = UsableChanges(params=changed) return True return False def should_update(self): result = self._update_changed_options() if result: return True return False def exec_module(self): result = dict() changed = self.present() reportable = ReportableChanges(params=self.changes.to_return()) changes = reportable.to_return() result.update(**changes) result.update(dict(changed=changed)) self._announce_deprecations(result) return result def _announce_deprecations(self, result): warnings = result.pop('__warnings', []) for warning in warnings: self.module.deprecate( msg=warning['msg'], version=warning['version'] ) def present(self): return self.update() def update(self): self.have = self.read_current_from_device() if not self.should_update(): return False if self.module.check_mode: return True self.update_on_device() return True def update_on_device(self): params = self.changes.api_params() uri = "https://{0}:{1}/mgmt/tm/sys/httpd".format( self.client.provider['server'], self.client.provider['server_port'] ) try: resp = self.client.api.patch(uri, json=params) try: response = resp.json() except ValueError as ex: raise F5ModuleError(str(ex)) if 'code' in response and response['code'] == 400: if 'message' in response: raise F5ModuleError(response['message']) else: raise F5ModuleError(resp.content) except Exception as ex: valid = [ 'Remote end closed connection', 'Connection aborted', ] # BIG-IP will kill your management connection when you change the HTTP # redirect setting. So this catches that and handles it gracefully. if 'redirectHttpToHttps' in params: if any(i for i in valid if i in str(ex)): # Wait for BIG-IP web server to settle after changing this time.sleep(2) return True raise F5ModuleError(str(ex)) def read_current_from_device(self): uri = "https://{0}:{1}/mgmt/tm/sys/httpd".format( self.client.provider['server'], self.client.provider['server_port'] ) resp = self.client.api.get(uri) try: response = resp.json() except ValueError as ex: raise F5ModuleError(str(ex)) if 'code' in response and response['code'] == 400: if 'message' in response: raise F5ModuleError(response['message']) else: raise F5ModuleError(resp.content) return ApiParameters(params=response) class ArgumentSpec(object): def __init__(self): self.supports_check_mode = True argument_spec = dict( allow=dict( type='list' ), auth_name=dict(), auth_pam_idle_timeout=dict( type='int' ), fast_cgi_timeout=dict( type='int' ), max_clients=dict( type='int' ), ssl_port=dict( type='int' ), auth_pam_validate_ip=dict( type='bool' ), auth_pam_dashboard_timeout=dict( type='bool' ), hostname_lookup=dict( type='bool' ), log_level=dict( choices=[ 'alert', 'crit', 'debug', 'emerg', 'error', 'info', 'notice', 'warn' ] ), redirect_http_to_https=dict( type='bool' ), ssl_cipher_suite=dict(type='raw'), ssl_protocols=dict(type='raw') ) self.argument_spec = {} self.argument_spec.update(f5_argument_spec) self.argument_spec.update(argument_spec) def main(): spec = ArgumentSpec() module = AnsibleModule( argument_spec=spec.argument_spec, supports_check_mode=spec.supports_check_mode, ) try: mm = ModuleManager(module=module) results = mm.exec_module() module.exit_json(**results) except F5ModuleError as ex: module.fail_json(msg=str(ex)) if __name__ == '__main__': main()
gpl-3.0
lxl1140989/6291-xl
uboot/u-boot-dm6291-new/tools/patman/project.py
38
1415
# Copyright (c) 2012 The Chromium OS Authors. # # See file CREDITS for list of people who contributed to this # project. # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of # the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, # MA 02111-1307 USA # import os.path import gitutil def DetectProject(): """Autodetect the name of the current project. This looks for signature files/directories that are unlikely to exist except in the given project. Returns: The name of the project, like "linux" or "u-boot". Returns "unknown" if we can't detect the project. """ top_level = gitutil.GetTopLevel() if os.path.exists(os.path.join(top_level, "include", "u-boot")): return "u-boot" elif os.path.exists(os.path.join(top_level, "kernel")): return "linux" return "unknown"
gpl-2.0
openstack/nova
nova/api/openstack/urlmap.py
3
11598
# Copyright 2011 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import re from urllib import request as urllib2 from oslo_log import log as logging import paste.urlmap from nova.api.openstack import wsgi LOG = logging.getLogger(__name__) _quoted_string_re = r'"[^"\\]*(?:\\.[^"\\]*)*"' _option_header_piece_re = re.compile(r';\s*([^\s;=]+|%s)\s*' r'(?:=\s*([^;]+|%s))?\s*' % (_quoted_string_re, _quoted_string_re)) def unquote_header_value(value): """Unquotes a header value. This does not use the real unquoting but what browsers are actually using for quoting. :param value: the header value to unquote. """ if value and value[0] == value[-1] == '"': # this is not the real unquoting, but fixing this so that the # RFC is met will result in bugs with internet explorer and # probably some other browsers as well. IE for example is # uploading files with "C:\foo\bar.txt" as filename value = value[1:-1] return value def parse_list_header(value): """Parse lists as described by RFC 2068 Section 2. In particular, parse comma-separated lists where the elements of the list may include quoted-strings. A quoted-string could contain a comma. A non-quoted string could have quotes in the middle. Quotes are removed automatically after parsing. The return value is a standard :class:`list`: >>> parse_list_header('token, "quoted value"') ['token', 'quoted value'] :param value: a string with a list header. :return: :class:`list` """ result = [] for item in urllib2.parse_http_list(value): if item[:1] == item[-1:] == '"': item = unquote_header_value(item[1:-1]) result.append(item) return result def parse_options_header(value): """Parse a ``Content-Type`` like header into a tuple with the content type and the options: >>> parse_options_header('Content-Type: text/html; mimetype=text/html') ('Content-Type:', {'mimetype': 'text/html'}) :param value: the header to parse. :return: (str, options) """ def _tokenize(string): for match in _option_header_piece_re.finditer(string): key, value = match.groups() key = unquote_header_value(key) if value is not None: value = unquote_header_value(value) yield key, value if not value: return '', {} parts = _tokenize(';' + value) name = next(parts)[0] extra = dict(parts) return name, extra class Accept(object): def __init__(self, value): self._content_types = [parse_options_header(v) for v in parse_list_header(value)] def best_match(self, supported_content_types): # FIXME: Should we have a more sophisticated matching algorithm that # takes into account the version as well? best_quality = -1 best_content_type = None best_params = {} best_match = '*/*' for content_type in supported_content_types: for content_mask, params in self._content_types: try: quality = float(params.get('q', 1)) except ValueError: continue if quality < best_quality: continue elif best_quality == quality: if best_match.count('*') <= content_mask.count('*'): continue if self._match_mask(content_mask, content_type): best_quality = quality best_content_type = content_type best_params = params best_match = content_mask return best_content_type, best_params def _match_mask(self, mask, content_type): if '*' not in mask: return content_type == mask if mask == '*/*': return True mask_major = mask[:-2] content_type_major = content_type.split('/', 1)[0] return content_type_major == mask_major def urlmap_factory(loader, global_conf, **local_conf): if 'not_found_app' in local_conf: not_found_app = local_conf.pop('not_found_app') else: not_found_app = global_conf.get('not_found_app') if not_found_app: not_found_app = loader.get_app(not_found_app, global_conf=global_conf) urlmap = URLMap(not_found_app=not_found_app) for path, app_name in local_conf.items(): path = paste.urlmap.parse_path_expression(path) app = loader.get_app(app_name, global_conf=global_conf) urlmap[path] = app return urlmap class URLMap(paste.urlmap.URLMap): def _match(self, host, port, path_info): """Find longest match for a given URL path.""" for (domain, app_url), app in self.applications: if domain and domain != host and domain != host + ':' + port: continue # Rudimentary "wildcard" support: # By declaring a urlmap path ending in '/+', you're saying the # incoming path must start with everything up to and including the # '/' *and* have something after that as well. For example, path # /foo/bar/+ will match /foo/bar/baz, but not /foo/bar/ or /foo/bar # NOTE(efried): This assumes we'll never need a path URI component # that legitimately starts with '+'. (We could use a # more obscure character/sequence here in that case.) if app_url.endswith('/+'): # Must be requesting at least the path element (including /) if not path_info.startswith(app_url[:-1]): continue # ...but also must be requesting something after that / if len(path_info) < len(app_url): continue # Trim the /+ off the app_url to make it look "normal" for e.g. # proper splitting of SCRIPT_NAME and PATH_INFO. return app, app_url[:-2] # Normal (non-wildcarded) prefix match if (path_info == app_url or path_info.startswith(app_url + '/')): return app, app_url return None, None def _set_script_name(self, app, app_url): def wrap(environ, start_response): environ['SCRIPT_NAME'] += app_url return app(environ, start_response) return wrap def _munge_path(self, app, path_info, app_url): def wrap(environ, start_response): environ['SCRIPT_NAME'] += app_url environ['PATH_INFO'] = path_info[len(app_url):] return app(environ, start_response) return wrap def _path_strategy(self, host, port, path_info): """Check path suffix for MIME type and path prefix for API version.""" mime_type = app = app_url = None parts = path_info.rsplit('.', 1) if len(parts) > 1: possible_type = 'application/' + parts[1] if possible_type in wsgi.get_supported_content_types(): mime_type = possible_type parts = path_info.split('/') if len(parts) > 1: possible_app, possible_app_url = self._match(host, port, path_info) # Don't use prefix if it ends up matching default if possible_app and possible_app_url: app_url = possible_app_url app = self._munge_path(possible_app, path_info, app_url) return mime_type, app, app_url def _content_type_strategy(self, host, port, environ): """Check Content-Type header for API version.""" app = None params = parse_options_header(environ.get('CONTENT_TYPE', ''))[1] if 'version' in params: app, app_url = self._match(host, port, '/v' + params['version']) if app: app = self._set_script_name(app, app_url) return app def _accept_strategy(self, host, port, environ, supported_content_types): """Check Accept header for best matching MIME type and API version.""" accept = Accept(environ.get('HTTP_ACCEPT', '')) app = None # Find the best match in the Accept header mime_type, params = accept.best_match(supported_content_types) if 'version' in params: app, app_url = self._match(host, port, '/v' + params['version']) if app: app = self._set_script_name(app, app_url) return mime_type, app def __call__(self, environ, start_response): host = environ.get('HTTP_HOST', environ.get('SERVER_NAME')).lower() if ':' in host: host, port = host.split(':', 1) else: if environ['wsgi.url_scheme'] == 'http': port = '80' else: port = '443' path_info = environ['PATH_INFO'] path_info = self.normalize_url(path_info, False)[1] # The MIME type for the response is determined in one of two ways: # 1) URL path suffix (eg /servers/detail.json) # 2) Accept header (eg application/json;q=0.8, application/xml;q=0.2) # The API version is determined in one of three ways: # 1) URL path prefix (eg /v1.1/tenant/servers/detail) # 2) Content-Type header (eg application/json;version=1.1) # 3) Accept header (eg application/json;q=0.8;version=1.1) supported_content_types = list(wsgi.get_supported_content_types()) mime_type, app, app_url = self._path_strategy(host, port, path_info) # Accept application/atom+xml for the index query of each API # version mount point as well as the root index if (app_url and app_url + '/' == path_info) or path_info == '/': supported_content_types.append('application/atom+xml') if not app: app = self._content_type_strategy(host, port, environ) if not mime_type or not app: possible_mime_type, possible_app = self._accept_strategy( host, port, environ, supported_content_types) if possible_mime_type and not mime_type: mime_type = possible_mime_type if possible_app and not app: app = possible_app if not mime_type: mime_type = 'application/json' if not app: # Didn't match a particular version, probably matches default app, app_url = self._match(host, port, path_info) if app: app = self._munge_path(app, path_info, app_url) if app: environ['nova.best_content_type'] = mime_type return app(environ, start_response) LOG.debug('Could not find application for %s', environ['PATH_INFO']) environ['paste.urlmap_object'] = self return self.not_found_application(environ, start_response)
apache-2.0
ikalnytskyi/sphinxcontrib-openapi
sphinxcontrib/openapi/renderers/_httpdomain_old.py
1
2292
"""Here lies still breathing and only renderer implementation.""" from docutils.parsers.rst import directives from . import abc from .. import openapi20, openapi30, utils class HttpdomainOldRenderer(abc.RestructuredTextRenderer): option_spec = { # A list of endpoints to be rendered. Endpoints must be whitespace # delimited. "paths": lambda s: s.split(), # Regular expression patterns to includes/excludes endpoints to/from # rendering. Similar to paths, the patterns must be whitespace # delimited. "include": lambda s: s.split(), "exclude": lambda s: s.split(), # Endpoints to be included based on HTTP method names. "methods": lambda s: s.split(), # Render the request body structure when passed. "request": directives.flag, # Render request/response examples when passed. "examples": directives.flag, # render examples when passed # Group endpoints by tags when passed. By default, no grouping is # applied and endpoints are rendered in the order they met in spec. "group": directives.flag, # Markup format to render OpenAPI descriptions. "format": str, } def __init__(self, state, options): self._state = state self._options = options def render_restructuredtext_markup(self, spec): # OpenAPI spec may contain JSON references, common properties, etc. # Trying to render the spec "As Is" will require to put multiple if-s # around the code. In order to simplify rendering flow, let's make it # have only one (expected) schema, i.e. normalize it. utils.normalize_spec(spec, **self._options) # We support both OpenAPI 2.0 (f.k.a. Swagger) and OpenAPI 3.0.0, so # determine which version we are parsing here. spec_version = spec.get("openapi", spec.get("swagger", "2.0")) if spec_version.startswith("2."): openapihttpdomain = openapi20.openapihttpdomain elif spec_version.startswith("3."): openapihttpdomain = openapi30.openapihttpdomain else: raise ValueError("Unsupported OpenAPI version (%s)" % spec_version) yield from openapihttpdomain(spec, **self._options)
bsd-2-clause
wbrefvem/openshift-ansible
roles/lib_openshift/src/class/oc_process.py
45
6679
# pylint: skip-file # flake8: noqa # pylint: disable=too-many-instance-attributes class OCProcess(OpenShiftCLI): ''' Class to wrap the oc command line tools ''' # pylint allows 5. we need 6 # pylint: disable=too-many-arguments def __init__(self, namespace, tname=None, params=None, create=False, kubeconfig='/etc/origin/master/admin.kubeconfig', tdata=None, verbose=False): ''' Constructor for OpenshiftOC ''' super(OCProcess, self).__init__(namespace, kubeconfig=kubeconfig, verbose=verbose) self.name = tname self.data = tdata self.params = params self.create = create self._template = None @property def template(self): '''template property''' if self._template is None: results = self._process(self.name, False, self.params, self.data) if results['returncode'] != 0: raise OpenShiftCLIError('Error processing template [%s]: %s' %(self.name, results)) self._template = results['results']['items'] return self._template def get(self): '''get the template''' results = self._get('template', self.name) if results['returncode'] != 0: # Does the template exist?? if 'not found' in results['stderr']: results['returncode'] = 0 results['exists'] = False results['results'] = [] return results def delete(self, obj): '''delete a resource''' return self._delete(obj['kind'], obj['metadata']['name']) def create_obj(self, obj): '''create a resource''' return self._create_from_content(obj['metadata']['name'], obj) def process(self, create=None): '''process a template''' do_create = False if create != None: do_create = create else: do_create = self.create return self._process(self.name, do_create, self.params, self.data) def exists(self): '''return whether the template exists''' # Always return true if we're being passed template data if self.data: return True t_results = self._get('template', self.name) if t_results['returncode'] != 0: # Does the template exist?? if 'not found' in t_results['stderr']: return False else: raise OpenShiftCLIError('Something went wrong. %s' % t_results) return True def needs_update(self): '''attempt to process the template and return it for comparison with oc objects''' obj_results = [] for obj in self.template: # build a list of types to skip skip = [] if obj['kind'] == 'ServiceAccount': skip.extend(['secrets', 'imagePullSecrets']) if obj['kind'] == 'BuildConfig': skip.extend(['lastTriggeredImageID']) if obj['kind'] == 'ImageStream': skip.extend(['generation']) if obj['kind'] == 'DeploymentConfig': skip.extend(['lastTriggeredImage']) # fetch the current object curr_obj_results = self._get(obj['kind'], obj['metadata']['name']) if curr_obj_results['returncode'] != 0: # Does the template exist?? if 'not found' in curr_obj_results['stderr']: obj_results.append((obj, True)) continue # check the generated object against the existing object if not Utils.check_def_equal(obj, curr_obj_results['results'][0], skip_keys=skip): obj_results.append((obj, True)) continue obj_results.append((obj, False)) return obj_results # pylint: disable=too-many-return-statements @staticmethod def run_ansible(params, check_mode): '''run the ansible idempotent code''' ocprocess = OCProcess(params['namespace'], params['template_name'], params['params'], params['create'], kubeconfig=params['kubeconfig'], tdata=params['content'], verbose=params['debug']) state = params['state'] api_rval = ocprocess.get() if state == 'list': if api_rval['returncode'] != 0: return {"failed": True, "msg" : api_rval} return {"changed" : False, "results": api_rval, "state": state} elif state == 'present': if check_mode and params['create']: return {"changed": True, 'msg': "CHECK_MODE: Would have processed template."} if not ocprocess.exists() or not params['reconcile']: #FIXME: this code will never get run in a way that succeeds when # module.params['reconcile'] is true. Because oc_process doesn't # create the actual template, the check of ocprocess.exists() # is meaningless. Either it's already here and this code # won't be run, or this code will fail because there is no # template available for oc process to use. Have we conflated # the template's existence with the existence of the objects # it describes? # Create it here api_rval = ocprocess.process() if api_rval['returncode'] != 0: return {"failed": True, "msg": api_rval} if params['create']: return {"changed": True, "results": api_rval, "state": state} return {"changed": False, "results": api_rval, "state": state} # verify results update = False rval = [] all_results = ocprocess.needs_update() for obj, status in all_results: if status: ocprocess.delete(obj) results = ocprocess.create_obj(obj) results['kind'] = obj['kind'] rval.append(results) update = True if not update: return {"changed": update, "results": api_rval, "state": state} for cmd in rval: if cmd['returncode'] != 0: return {"failed": True, "changed": update, "msg": rval, "state": state} return {"changed": update, "results": rval, "state": state}
apache-2.0
louishust/mysql5.6.14_tokudb
storage/tokudb/mysql-test/suite/tokudb/t/change_column_char.py
54
1339
#!/usr/bin/env python import sys def gen_test(n): print "CREATE TABLE t (a CHAR(%d));" % (n) for v in [ 'hi', 'there', 'people' ]: print "INSERT INTO t VALUES ('%s');" % (v) for i in range(2,256): if i < n: print "--replace_regex /MariaDB/XYZ/ /MySQL/XYZ/" print "--error ER_UNSUPPORTED_EXTENSION" else: print "CREATE TABLE ti LIKE t;" print "ALTER TABLE ti ENGINE=myisam;" print "INSERT INTO ti SELECT * FROM t;" print "ALTER TABLE ti CHANGE COLUMN a a CHAR(%d);" % (i) print "ALTER TABLE t CHANGE COLUMN a a CHAR(%d);" % (i) if i >= n: print "let $diff_tables=test.t, test.ti;" print "source include/diff_tables.inc;" print "DROP TABLE ti;" print "DROP TABLE t;" def main(): print "# this test is generated by change_char.py" print "# test char expansion" print "--disable_warnings" print "DROP TABLE IF EXISTS t,ti;" print "--enable_warnings" print "SET SESSION DEFAULT_STORAGE_ENGINE=\"TokuDB\";" print "SET SESSION TOKUDB_DISABLE_SLOW_ALTER=1;" # all n takes too long to run, so here is a subset of tests for n in [ 1, 2, 3, 4, 5, 6, 7, 8, 16, 31, 32, 63, 64, 127, 128, 254, 255 ]: gen_test(n) return 0 sys.exit(main())
gpl-2.0
shipci/sympy
sympy/mpmath/functions/expintegrals.py
47
11452
from .functions import defun, defun_wrapped @defun_wrapped def _erf_complex(ctx, z): z2 = ctx.square_exp_arg(z, -1) #z2 = -z**2 v = (2/ctx.sqrt(ctx.pi))*z * ctx.hyp1f1((1,2),(3,2), z2) if not ctx._re(z): v = ctx._im(v)*ctx.j return v @defun_wrapped def _erfc_complex(ctx, z): if ctx.re(z) > 2: z2 = ctx.square_exp_arg(z) nz2 = ctx.fneg(z2, exact=True) v = ctx.exp(nz2)/ctx.sqrt(ctx.pi) * ctx.hyperu((1,2),(1,2), z2) else: v = 1 - ctx._erf_complex(z) if not ctx._re(z): v = 1+ctx._im(v)*ctx.j return v @defun def erf(ctx, z): z = ctx.convert(z) if ctx._is_real_type(z): try: return ctx._erf(z) except NotImplementedError: pass if ctx._is_complex_type(z) and not z.imag: try: return type(z)(ctx._erf(z.real)) except NotImplementedError: pass return ctx._erf_complex(z) @defun def erfc(ctx, z): z = ctx.convert(z) if ctx._is_real_type(z): try: return ctx._erfc(z) except NotImplementedError: pass if ctx._is_complex_type(z) and not z.imag: try: return type(z)(ctx._erfc(z.real)) except NotImplementedError: pass return ctx._erfc_complex(z) @defun def square_exp_arg(ctx, z, mult=1, reciprocal=False): prec = ctx.prec*4+20 if reciprocal: z2 = ctx.fmul(z, z, prec=prec) z2 = ctx.fdiv(ctx.one, z2, prec=prec) else: z2 = ctx.fmul(z, z, prec=prec) if mult != 1: z2 = ctx.fmul(z2, mult, exact=True) return z2 @defun_wrapped def erfi(ctx, z): if not z: return z z2 = ctx.square_exp_arg(z) v = (2/ctx.sqrt(ctx.pi)*z) * ctx.hyp1f1((1,2), (3,2), z2) if not ctx._re(z): v = ctx._im(v)*ctx.j return v @defun_wrapped def erfinv(ctx, x): xre = ctx._re(x) if (xre != x) or (xre < -1) or (xre > 1): return ctx.bad_domain("erfinv(x) is defined only for -1 <= x <= 1") x = xre #if ctx.isnan(x): return x if not x: return x if x == 1: return ctx.inf if x == -1: return ctx.ninf if abs(x) < 0.9: a = 0.53728*x**3 + 0.813198*x else: # An asymptotic formula u = ctx.ln(2/ctx.pi/(abs(x)-1)**2) a = ctx.sign(x) * ctx.sqrt(u - ctx.ln(u))/ctx.sqrt(2) ctx.prec += 10 return ctx.findroot(lambda t: ctx.erf(t)-x, a) @defun_wrapped def npdf(ctx, x, mu=0, sigma=1): sigma = ctx.convert(sigma) return ctx.exp(-(x-mu)**2/(2*sigma**2)) / (sigma*ctx.sqrt(2*ctx.pi)) @defun_wrapped def ncdf(ctx, x, mu=0, sigma=1): a = (x-mu)/(sigma*ctx.sqrt(2)) if a < 0: return ctx.erfc(-a)/2 else: return (1+ctx.erf(a))/2 @defun_wrapped def betainc(ctx, a, b, x1=0, x2=1, regularized=False): if x1 == x2: v = 0 elif not x1: if x1 == 0 and x2 == 1: v = ctx.beta(a, b) else: v = x2**a * ctx.hyp2f1(a, 1-b, a+1, x2) / a else: m, d = ctx.nint_distance(a) if m <= 0: if d < -ctx.prec: h = +ctx.eps ctx.prec *= 2 a += h elif d < -4: ctx.prec -= d s1 = x2**a * ctx.hyp2f1(a,1-b,a+1,x2) s2 = x1**a * ctx.hyp2f1(a,1-b,a+1,x1) v = (s1 - s2) / a if regularized: v /= ctx.beta(a,b) return v @defun def gammainc(ctx, z, a=0, b=None, regularized=False): regularized = bool(regularized) z = ctx.convert(z) if a is None: a = ctx.zero lower_modified = False else: a = ctx.convert(a) lower_modified = a != ctx.zero if b is None: b = ctx.inf upper_modified = False else: b = ctx.convert(b) upper_modified = b != ctx.inf # Complete gamma function if not (upper_modified or lower_modified): if regularized: if ctx.re(z) < 0: return ctx.inf elif ctx.re(z) > 0: return ctx.one else: return ctx.nan return ctx.gamma(z) if a == b: return ctx.zero # Standardize if ctx.re(a) > ctx.re(b): return -ctx.gammainc(z, b, a, regularized) # Generalized gamma if upper_modified and lower_modified: return +ctx._gamma3(z, a, b, regularized) # Upper gamma elif lower_modified: return ctx._upper_gamma(z, a, regularized) # Lower gamma elif upper_modified: return ctx._lower_gamma(z, b, regularized) @defun def _lower_gamma(ctx, z, b, regularized=False): # Pole if ctx.isnpint(z): return type(z)(ctx.inf) G = [z] * regularized negb = ctx.fneg(b, exact=True) def h(z): T1 = [ctx.exp(negb), b, z], [1, z, -1], [], G, [1], [1+z], b return (T1,) return ctx.hypercomb(h, [z]) @defun def _upper_gamma(ctx, z, a, regularized=False): # Fast integer case, when available if ctx.isint(z): try: if regularized: # Gamma pole if ctx.isnpint(z): return type(z)(ctx.zero) orig = ctx.prec try: ctx.prec += 10 return ctx._gamma_upper_int(z, a) / ctx.gamma(z) finally: ctx.prec = orig else: return ctx._gamma_upper_int(z, a) except NotImplementedError: pass nega = ctx.fneg(a, exact=True) G = [z] * regularized # Use 2F0 series when possible; fall back to lower gamma representation try: def h(z): r = z-1 return [([ctx.exp(nega), a], [1, r], [], G, [1, -r], [], 1/nega)] return ctx.hypercomb(h, [z], force_series=True) except ctx.NoConvergence: def h(z): T1 = [], [1, z-1], [z], G, [], [], 0 T2 = [-ctx.exp(nega), a, z], [1, z, -1], [], G, [1], [1+z], a return T1, T2 return ctx.hypercomb(h, [z]) @defun def _gamma3(ctx, z, a, b, regularized=False): pole = ctx.isnpint(z) if regularized and pole: return ctx.zero try: ctx.prec += 15 # We don't know in advance whether it's better to write as a difference # of lower or upper gamma functions, so try both T1 = ctx.gammainc(z, a, regularized=regularized) T2 = ctx.gammainc(z, b, regularized=regularized) R = T1 - T2 if ctx.mag(R) - max(ctx.mag(T1), ctx.mag(T2)) > -10: return R if not pole: T1 = ctx.gammainc(z, 0, b, regularized=regularized) T2 = ctx.gammainc(z, 0, a, regularized=regularized) R = T1 - T2 # May be ok, but should probably at least print a warning # about possible cancellation if 1: #ctx.mag(R) - max(ctx.mag(T1), ctx.mag(T2)) > -10: return R finally: ctx.prec -= 15 raise NotImplementedError @defun_wrapped def expint(ctx, n, z): if ctx.isint(n) and ctx._is_real_type(z): try: return ctx._expint_int(n, z) except NotImplementedError: pass if ctx.isnan(n) or ctx.isnan(z): return z*n if z == ctx.inf: return 1/z if z == 0: # integral from 1 to infinity of t^n if ctx.re(n) <= 1: # TODO: reasonable sign of infinity return type(z)(ctx.inf) else: return ctx.one/(n-1) if n == 0: return ctx.exp(-z)/z if n == -1: return ctx.exp(-z)*(z+1)/z**2 return z**(n-1) * ctx.gammainc(1-n, z) @defun_wrapped def li(ctx, z, offset=False): if offset: if z == 2: return ctx.zero return ctx.ei(ctx.ln(z)) - ctx.ei(ctx.ln2) if not z: return z if z == 1: return ctx.ninf return ctx.ei(ctx.ln(z)) @defun def ei(ctx, z): try: return ctx._ei(z) except NotImplementedError: return ctx._ei_generic(z) @defun_wrapped def _ei_generic(ctx, z): # Note: the following is currently untested because mp and fp # both use special-case ei code if z == ctx.inf: return z if z == ctx.ninf: return ctx.zero if ctx.mag(z) > 1: try: r = ctx.one/z v = ctx.exp(z)*ctx.hyper([1,1],[],r, maxterms=ctx.prec, force_series=True)/z im = ctx._im(z) if im > 0: v += ctx.pi*ctx.j if im < 0: v -= ctx.pi*ctx.j return v except ctx.NoConvergence: pass v = z*ctx.hyp2f2(1,1,2,2,z) + ctx.euler if ctx._im(z): v += 0.5*(ctx.log(z) - ctx.log(ctx.one/z)) else: v += ctx.log(abs(z)) return v @defun def e1(ctx, z): try: return ctx._e1(z) except NotImplementedError: return ctx.expint(1, z) @defun def ci(ctx, z): try: return ctx._ci(z) except NotImplementedError: return ctx._ci_generic(z) @defun_wrapped def _ci_generic(ctx, z): if ctx.isinf(z): if z == ctx.inf: return ctx.zero if z == ctx.ninf: return ctx.pi*1j jz = ctx.fmul(ctx.j,z,exact=True) njz = ctx.fneg(jz,exact=True) v = 0.5*(ctx.ei(jz) + ctx.ei(njz)) zreal = ctx._re(z) zimag = ctx._im(z) if zreal == 0: if zimag > 0: v += ctx.pi*0.5j if zimag < 0: v -= ctx.pi*0.5j if zreal < 0: if zimag >= 0: v += ctx.pi*1j if zimag < 0: v -= ctx.pi*1j if ctx._is_real_type(z) and zreal > 0: v = ctx._re(v) return v @defun def si(ctx, z): try: return ctx._si(z) except NotImplementedError: return ctx._si_generic(z) @defun_wrapped def _si_generic(ctx, z): if ctx.isinf(z): if z == ctx.inf: return 0.5*ctx.pi if z == ctx.ninf: return -0.5*ctx.pi # Suffers from cancellation near 0 if ctx.mag(z) >= -1: jz = ctx.fmul(ctx.j,z,exact=True) njz = ctx.fneg(jz,exact=True) v = (-0.5j)*(ctx.ei(jz) - ctx.ei(njz)) zreal = ctx._re(z) if zreal > 0: v -= 0.5*ctx.pi if zreal < 0: v += 0.5*ctx.pi if ctx._is_real_type(z): v = ctx._re(v) return v else: return z*ctx.hyp1f2((1,2),(3,2),(3,2),-0.25*z*z) @defun_wrapped def chi(ctx, z): nz = ctx.fneg(z, exact=True) v = 0.5*(ctx.ei(z) + ctx.ei(nz)) zreal = ctx._re(z) zimag = ctx._im(z) if zimag > 0: v += ctx.pi*0.5j elif zimag < 0: v -= ctx.pi*0.5j elif zreal < 0: v += ctx.pi*1j return v @defun_wrapped def shi(ctx, z): # Suffers from cancellation near 0 if ctx.mag(z) >= -1: nz = ctx.fneg(z, exact=True) v = 0.5*(ctx.ei(z) - ctx.ei(nz)) zimag = ctx._im(z) if zimag > 0: v -= 0.5j*ctx.pi if zimag < 0: v += 0.5j*ctx.pi return v else: return z * ctx.hyp1f2((1,2),(3,2),(3,2),0.25*z*z) @defun_wrapped def fresnels(ctx, z): if z == ctx.inf: return ctx.mpf(0.5) if z == ctx.ninf: return ctx.mpf(-0.5) return ctx.pi*z**3/6*ctx.hyp1f2((3,4),(3,2),(7,4),-ctx.pi**2*z**4/16) @defun_wrapped def fresnelc(ctx, z): if z == ctx.inf: return ctx.mpf(0.5) if z == ctx.ninf: return ctx.mpf(-0.5) return z*ctx.hyp1f2((1,4),(1,2),(5,4),-ctx.pi**2*z**4/16)
bsd-3-clause
dyoung418/tensorflow
tensorflow/python/keras/_impl/keras/layers/gru_test.py
13
7074
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for GRU layer.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.python.keras._impl import keras from tensorflow.python.keras._impl.keras import testing_utils from tensorflow.python.platform import test class GRULayerTest(test.TestCase): def test_return_sequences_GRU(self): num_samples = 2 timesteps = 3 embedding_dim = 4 units = 2 with self.test_session(): testing_utils.layer_test( keras.layers.GRU, kwargs={'units': units, 'return_sequences': True}, input_shape=(num_samples, timesteps, embedding_dim)) def test_dynamic_behavior_GRU(self): num_samples = 2 timesteps = 3 embedding_dim = 4 units = 2 with self.test_session(): layer = keras.layers.GRU(units, input_shape=(None, embedding_dim)) model = keras.models.Sequential() model.add(layer) model.compile('sgd', 'mse') x = np.random.random((num_samples, timesteps, embedding_dim)) y = np.random.random((num_samples, units)) model.train_on_batch(x, y) def test_dropout_GRU(self): num_samples = 2 timesteps = 3 embedding_dim = 4 units = 2 with self.test_session(): testing_utils.layer_test( keras.layers.GRU, kwargs={'units': units, 'dropout': 0.1, 'recurrent_dropout': 0.1}, input_shape=(num_samples, timesteps, embedding_dim)) def test_implementation_mode_GRU(self): num_samples = 2 timesteps = 3 embedding_dim = 4 units = 2 with self.test_session(): for mode in [0, 1, 2]: testing_utils.layer_test( keras.layers.GRU, kwargs={'units': units, 'implementation': mode}, input_shape=(num_samples, timesteps, embedding_dim)) def test_statefulness_GRU(self): num_samples = 2 timesteps = 3 embedding_dim = 4 units = 2 layer_class = keras.layers.GRU with self.test_session(): model = keras.models.Sequential() model.add( keras.layers.Embedding( 4, embedding_dim, mask_zero=True, input_length=timesteps, batch_input_shape=(num_samples, timesteps))) layer = layer_class( units, return_sequences=False, stateful=True, weights=None) model.add(layer) model.compile(optimizer='sgd', loss='mse') out1 = model.predict(np.ones((num_samples, timesteps))) self.assertEqual(out1.shape, (num_samples, units)) # train once so that the states change model.train_on_batch( np.ones((num_samples, timesteps)), np.ones((num_samples, units))) out2 = model.predict(np.ones((num_samples, timesteps))) # if the state is not reset, output should be different self.assertNotEqual(out1.max(), out2.max()) # check that output changes after states are reset # (even though the model itself didn't change) layer.reset_states() out3 = model.predict(np.ones((num_samples, timesteps))) self.assertNotEqual(out2.max(), out3.max()) # check that container-level reset_states() works model.reset_states() out4 = model.predict(np.ones((num_samples, timesteps))) np.testing.assert_allclose(out3, out4, atol=1e-5) # check that the call to `predict` updated the states out5 = model.predict(np.ones((num_samples, timesteps))) self.assertNotEqual(out4.max(), out5.max()) # Check masking layer.reset_states() left_padded_input = np.ones((num_samples, timesteps)) left_padded_input[0, :1] = 0 left_padded_input[1, :2] = 0 out6 = model.predict(left_padded_input) layer.reset_states() right_padded_input = np.ones((num_samples, timesteps)) right_padded_input[0, -1:] = 0 right_padded_input[1, -2:] = 0 out7 = model.predict(right_padded_input) np.testing.assert_allclose(out7, out6, atol=1e-5) def test_regularizers_GRU(self): embedding_dim = 4 layer_class = keras.layers.GRU with self.test_session(): layer = layer_class( 5, return_sequences=False, weights=None, input_shape=(None, embedding_dim), kernel_regularizer=keras.regularizers.l1(0.01), recurrent_regularizer=keras.regularizers.l1(0.01), bias_regularizer='l2', activity_regularizer='l1') layer.build((None, None, 2)) self.assertEqual(len(layer.losses), 3) layer(keras.backend.variable(np.ones((2, 3, 2)))) self.assertEqual(len(layer.losses), 4) def test_constraints_GRU(self): embedding_dim = 4 layer_class = keras.layers.GRU with self.test_session(): k_constraint = keras.constraints.max_norm(0.01) r_constraint = keras.constraints.max_norm(0.01) b_constraint = keras.constraints.max_norm(0.01) layer = layer_class( 5, return_sequences=False, weights=None, input_shape=(None, embedding_dim), kernel_constraint=k_constraint, recurrent_constraint=r_constraint, bias_constraint=b_constraint) layer.build((None, None, embedding_dim)) self.assertEqual(layer.kernel.constraint, k_constraint) self.assertEqual(layer.recurrent_kernel.constraint, r_constraint) self.assertEqual(layer.bias.constraint, b_constraint) def test_with_masking_layer_GRU(self): layer_class = keras.layers.GRU with self.test_session(): inputs = np.random.random((2, 3, 4)) targets = np.abs(np.random.random((2, 3, 5))) targets /= targets.sum(axis=-1, keepdims=True) model = keras.models.Sequential() model.add(keras.layers.Masking(input_shape=(3, 4))) model.add(layer_class(units=5, return_sequences=True, unroll=False)) model.compile(loss='categorical_crossentropy', optimizer='adam') model.fit(inputs, targets, epochs=1, batch_size=2, verbose=1) def test_from_config_GRU(self): layer_class = keras.layers.GRU for stateful in (False, True): l1 = layer_class(units=1, stateful=stateful) l2 = layer_class.from_config(l1.get_config()) assert l1.get_config() == l2.get_config() if __name__ == '__main__': test.main()
apache-2.0
jxs/servo
tests/wpt/css-tests/tools/pywebsocket/src/setup.py
434
2863
#!/usr/bin/env python # # Copyright 2012, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Set up script for mod_pywebsocket. """ from distutils.core import setup, Extension import sys _PACKAGE_NAME = 'mod_pywebsocket' # Build and use a C++ extension for faster masking. SWIG is required. _USE_FAST_MASKING = False if sys.version < '2.3': print >> sys.stderr, '%s requires Python 2.3 or later.' % _PACKAGE_NAME sys.exit(1) if _USE_FAST_MASKING: setup(ext_modules=[ Extension( 'mod_pywebsocket/_fast_masking', ['mod_pywebsocket/fast_masking.i'], swig_opts=['-c++'])]) setup(author='Yuzo Fujishima', author_email='[email protected]', description='WebSocket extension for Apache HTTP Server.', long_description=( 'mod_pywebsocket is an Apache HTTP Server extension for ' 'the WebSocket Protocol (RFC 6455). ' 'See mod_pywebsocket/__init__.py for more detail.'), license='See COPYING', name=_PACKAGE_NAME, packages=[_PACKAGE_NAME, _PACKAGE_NAME + '.handshake'], url='http://code.google.com/p/pywebsocket/', # See the source of distutils.version, distutils.versionpredicate and # distutils.dist to understand how to name version numbers. version='0.7.9', ) # vi:sts=4 sw=4 et
mpl-2.0
rocky4570/moto
tests/test_ec2/test_general.py
8
1322
from __future__ import unicode_literals # Ensure 'assert_raises' context manager support for Python 2.6 import tests.backport_assert_raises from nose.tools import assert_raises import boto import boto3 from boto.exception import EC2ResponseError import sure # noqa from moto import mock_ec2_deprecated, mock_ec2 @mock_ec2_deprecated def test_console_output(): conn = boto.connect_ec2('the_key', 'the_secret') reservation = conn.run_instances('ami-1234abcd') instance_id = reservation.instances[0].id output = conn.get_console_output(instance_id) output.output.should_not.equal(None) @mock_ec2_deprecated def test_console_output_without_instance(): conn = boto.connect_ec2('the_key', 'the_secret') with assert_raises(EC2ResponseError) as cm: conn.get_console_output('i-1234abcd') cm.exception.code.should.equal('InvalidInstanceID.NotFound') cm.exception.status.should.equal(400) cm.exception.request_id.should_not.be.none @mock_ec2 def test_console_output_boto3(): conn = boto3.resource('ec2', 'us-east-1') instances = conn.create_instances(ImageId='ami-1234abcd', MinCount=1, MaxCount=1) output = instances[0].console_output() output.get('Output').should_not.equal(None)
apache-2.0
hofmannedv/training-python
commandline/fileinfo.py
1
1527
#!/usr/bin/python # ----------------------------------------------------------- # demonstrates how to work with commandline arguments using # docopt (http://docopt.org/) # # example fileinfo program # based my blog post here: # http://www.stackabuse.com/python-list-files-in-a-directory/ #o # (C) 2017 Frank Hofmann, Berlin, Germany # Released under GNU Public License (GPL) # email [email protected] # ----------------------------------------------------------- """Usage: ./fileinfo.py ./fileinfo.py [--help | -h] ./fileinfo.py [--verbose | -v] ./fileinfo.py [--files | -f] ./fileinfo.py [--dirs | -d] Options: --help -h display help information --verbose -v increase the verbosity of output --files -f display files only --dirs -d display directories only """ # include docopt module from docopt import docopt # import other modules import os, fnmatch if __name__ == '__main__': arguments = docopt(__doc__) #print (arguments) # define which information to show showFiles = True showDirectories = True path = "." selection = "*" verbose = False if arguments["--verbose"]: print("enabling verbose output ") verbose = True if arguments["--files"]: showDirectories = False if arguments["--dirs"]: showFiles = False # define display path displayPath = path for root, dirs, files in os.walk(displayPath): if showFiles: for filename in files: print(filename) if showDirectories: for dirname in dirs: print(dirname)
gpl-2.0
bennojoy/ansible
v1/ansible/utils/display_functions.py
147
2184
# (c) 2014, Michael DeHaan <[email protected]> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. import textwrap from ansible import constants as C from ansible import errors from ansible.callbacks import display __all__ = ['deprecated', 'warning', 'system_warning'] # list of all deprecation messages to prevent duplicate display deprecations = {} warns = {} def deprecated(msg, version, removed=False): ''' used to print out a deprecation message.''' if not removed and not C.DEPRECATION_WARNINGS: return if not removed: if version: new_msg = "\n[DEPRECATION WARNING]: %s. This feature will be removed in version %s." % (msg, version) else: new_msg = "\n[DEPRECATION WARNING]: %s. This feature will be removed in a future release." % (msg) new_msg = new_msg + " Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg.\n\n" else: raise errors.AnsibleError("[DEPRECATED]: %s. Please update your playbooks." % msg) wrapped = textwrap.wrap(new_msg, 79) new_msg = "\n".join(wrapped) + "\n" if new_msg not in deprecations: display(new_msg, color='purple', stderr=True) deprecations[new_msg] = 1 def warning(msg): new_msg = "\n[WARNING]: %s" % msg wrapped = textwrap.wrap(new_msg, 79) new_msg = "\n".join(wrapped) + "\n" if new_msg not in warns: display(new_msg, color='bright purple', stderr=True) warns[new_msg] = 1 def system_warning(msg): if C.SYSTEM_WARNINGS: warning(msg)
gpl-3.0
ryanvarley/ExoData
exodata/astroclasses.py
1
42589
""" Contains structural classes ie binary, star, planet etc which mimic the xml structure with objects """ import sys import math from pkg_resources import resource_stream import logging import numpy as np import astropy.coordinates import astropy.units as u from . import equations as eq from . import astroquantities as aq from . import assumptions as assum from . import flags from . import params as ed_params logger = logging.getLogger('') class _BaseObject(object): def __init__(self, params=None): self.children = [] self.parent = False self.classType = 'BaseObject' self.flags = flags.Flags() self.params = {} if params is not None: self._updateParams(params) # TODO value validator? def _addChild(self, child): self.children.append(child) def _updateParams(self, params): """ This method updates parameters allowing for any validation / unit additions in the near future """ self.params.update(params) def _getParentClass(self, startClass, parentClass): """ gets the parent class by calling successive parent classes with .parent until parentclass is matched. """ try: if not startClass: # reached system with no hits raise AttributeError except AttributeError: # i.e calling binary on an object without one raise HierarchyError('This object ({0}) has no {1} as a parent object'.format(self.name, parentClass)) if startClass.classType == parentClass: return startClass else: return self._getParentClass(startClass.parent, parentClass) @property def name(self): # TODO variable for altnames try: return self.params['name'] except KeyError: try: return self.parent.name except AttributeError: return 'Un-named ' + self.classType except AttributeError: return 'Un-named ' + self.classType def __repr__(self): return '{0}({1!r})'.format(self.classType, self.name) def getParam(self, paramKey): """ Fetches a parameter from the params dictionary. If it's not there it will return NaN. This allows the use of list comprehensions over the entire planet set without KeyErrors. NaN was used as unlike False and None, NaN < 1 and NaN > 1 are both False """ try: return self.params[paramKey] except KeyError: return np.NaN def __eq__(self, other): """ check the parameter dictionaries for both clases are the same (and both are of the same class) """ if type(self) == type(other): return self.params == other.params else: return False @property def system(self): return self._getParentClass(self.parent, 'System') class System(_BaseObject): def __init__(self, *args, **kwargs): _BaseObject.__init__(self, *args, **kwargs) self.classType = 'System' @property def ra(self): return self.getParam('rightascension') @ra.setter def ra(self, ra): self.params['rightascension'] = ra @property def dec(self): return self.getParam('declination') @dec.setter def dec(self, dec): self.params['declination'] = dec @property def d(self): return self.getParam('distance') @d.setter def d(self, d): d = d.rescale(aq.pc) self.params['distance'] = d @property def stars(self): return self.children # TODO child could be a binary or planet @property def epoch(self): return self.getParam('epoch') @epoch.setter def epoch(self, epoch): self.params['epoch'] = epoch class PlanetAndBinaryCommon(_BaseObject): def __init__(self, *args, **kwargs): _BaseObject.__init__(self, *args, **kwargs) self.classType = 'PlanetAndBinaryCommon' @property def i(self): return self.getParam('inclination') @i.setter def i(self, i): i = i.rescale(aq.deg) self.params['inclination'] = i @property def e(self): return self.getParam('eccentricity') @e.setter def e(self, e): self.params['eccentricity'] = e @property def P(self): period = self.getParam('period') if period is not np.nan: return period elif ed_params.estimateMissingValues: self.flags.addFlag('Calculated Period') return self.calcPeriod() else: return np.nan @P.setter def P(self, P): P = P.rescale(aq.day) self.params['period'] = P def calcPeriod(self): raise NotImplementedError('Only implemented for Binary and Planet child classes') @property def a(self): sma = self.getParam('semimajoraxis') if sma is np.nan and ed_params.estimateMissingValues: if self.getParam('period') is not np.nan: sma = self.calcSMA() # calc using period self.flags.addFlag('Calculated SMA') return sma else: return np.nan else: return sma @a.setter def a(self, a): a = a.rescale(aq.au) self.params['semimajoraxis'] = a def calcSMA(self): raise NotImplementedError('Only implemented for Binary and Planet child classes') @property def transittime(self): return self.getParam('transittime') @transittime.setter def transittime(self, transittime): self.params['transittime'] = transittime @property def periastron(self): peri = self.getParam('periastron') if math.isnan(peri) and self.e == 0: peri = 0 * aq.deg return peri @periastron.setter def periastron(self, periastron): self.params['periastron'] = periastron @property def longitude(self): return self.getParam('longitude') @longitude.setter def longitude(self, longitude): self.params['longitude'] = longitude @property def ascendingnode(self): return self.getParam('ascendingnode') @ascendingnode.setter def ascendingnode(self, ascendingnode): self.params['ascendingnode'] = ascendingnode @property def separation(self): return self.getParam('separation') @separation.setter def seperation(self, seperation): self.params['seperation'] = seperation class StarAndBinaryCommon(_BaseObject): def __init__(self, *args, **kwargs): _BaseObject.__init__(self, *args, **kwargs) self.classType = 'StarAndBinaryCommon' @property def magU(self): return self.getParam('magU') @magU.setter def magU(self, mag): self.params['magU'] = mag @property def magB(self): return self.getParam('magB') @magB.setter def magB(self, mag): self.params['magB'] = mag @property def magH(self): return self.getParam('magH') @magH.setter def magH(self, mag): self.params['magH'] = mag @property def magI(self): return self.getParam('magI') @magI.setter def magI(self, mag): self.params['magI'] = mag @property def magJ(self): return self.getParam('magJ') @magJ.setter def magJ(self, mag): self.params['magJ'] = mag @property def magK(self): return self.getParam('magK') @magK.setter def magK(self, mag): self.params['magK'] = mag @property def magV(self): return self.getParam('magV') @magV.setter def magV(self, mag): self.params['magV'] = mag @property def magL(self): return self.getParam('magL') @magL.setter def magL(self, mag): self.params['magL'] = mag @property def magM(self): return self.getParam('magM') @magM.setter def magM(self, mag): self.params['magM'] = mag @property def magN(self): return self.getParam('magN') @magN.setter def magN(self, mag): self.params['magN'] = mag class StarAndPlanetCommon(_BaseObject): def __init__(self, *args, **kwargs): _BaseObject.__init__(self, *args, **kwargs) self.classType = 'StarAndPlanetCommon' @property def age(self): return self.getParam('age') @age.setter def age(self, age): age = age.rescale(aq.Gyear) self.params['age'] = age @property # allows stars and planets to access system values by propagating up def ra(self): return self.parent.ra @ra.setter def ra(self, ra): self.parent.ra = ra @property def dec(self): return self.parent.dec @dec.setter def dec(self, dec): self.parent.dec = dec @property def d(self): return self.parent.d @d.setter def d(self, d): self.parent.d = d @property def R(self): return self.getParam('radius') @R.setter def R(self, R): self.params['radius'] = R @property def T(self): """ Looks for the temperature in the catalogue, if absent it calculates it using calcTemperature() :return: planet temperature """ paramTemp = self.getParam('temperature') if not paramTemp is np.nan: return paramTemp elif ed_params.estimateMissingValues: self.flags.addFlag('Calculated Temperature') return self.calcTemperature() else: return np.nan @T.setter def T(self, T): T = T.rescale(aq.K) self.params['temperature'] = T @property def M(self): return self.getParam('mass') @M.setter def M(self, M): M = M.rescale(aq.M_j) self.params['mass'] = M def calcTemperature(self): raise NotImplementedError('Only implemented for Stars and Planet child classes') @property def binary(self): return self._getParentClass(self, 'Binary') def calcSurfaceGravity(self): return eq.SurfaceGravity(self.M, self.R).g def calcLogg(self): return eq.Logg(self.M, self.R).logg def calcDensity(self): if self.M is np.nan or self.R is np.nan: return np.nan else: return eq.Density(self.M, self.R).density class Binary(PlanetAndBinaryCommon, StarAndBinaryCommon): # TODO add binary methods and variables, remove unused one from starcommon def __init__(self, *args, **kwargs): StarAndBinaryCommon.__init__(self, *args, **kwargs) PlanetAndBinaryCommon.__init__(self, *args, **kwargs) self.classType = 'Binary' @property def stars(self): return self.children @property def d(self): return self.parent.d def calcPeriod(self): raise NotImplementedError # TODO def calcSMA(self): raise NotImplementedError # TODO class Star(StarAndPlanetCommon, StarAndBinaryCommon): def __init__(self, *args, **kwargs): StarAndPlanetCommon.__init__(self, *args, **kwargs) self.classType = 'Star' @property def d(self): """ Note this should work from child parents as .d propergates, calculates using the star estimation method estimateDistance and estimateAbsoluteMagnitude """ # TODO this will only work from a star or below. good thing? d = self.parent.d if ed_params.estimateMissingValues: if d is np.nan: d = self.estimateDistance() if d is not np.nan: self.flags.addFlag('Estimated Distance') return d else: return np.nan def calcLuminosity(self): return eq.StellarLuminosity(self.R, self.T).L def calcTemperature(self): """ uses equations.starTemperature to estimate temperature based on main sequence relationship """ return eq.estimateStellarTemperature(self.M) def _get_or_convert_magnitude(self, mag_letter): """ Takes input of the magnitude letter and ouputs the magnitude fetched from the catalogue or a converted value :return: """ allowed_mags = "UBVJIHKLMN" catalogue_mags = 'BVIJHK' if mag_letter not in allowed_mags or not len(mag_letter) == 1: raise ValueError("Magnitude letter must be a single letter in {0}".format(allowed_mags)) mag_str = 'mag'+mag_letter mag_val = self.getParam(mag_str) if isNanOrNone(mag_val) and ed_params.estimateMissingValues: # then we need to estimate it! # old style dict comprehension for python 2.6 mag_dict = dict(('mag'+letter, self.getParam('mag'+letter)) for letter in catalogue_mags) mag_class = Magnitude(self.spectralType, **mag_dict) try: mag_conversion = mag_class.convert(mag_letter) # logger.debug('Star Class: Conversion to {0} successful, got {1}'.format(mag_str, mag_conversion)) self.flags.addFlag('Estimated mag{0}'.format(mag_letter)) return mag_conversion except ValueError as e: # cant convert logger.exception(e) # logger.debug('Cant convert to {0}'.format(mag_letter)) return np.nan else: # logger.debug('returning {0}={1} from catalogue'.format(mag_str, mag_val)) return mag_val @property def magU(self): return self._get_or_convert_magnitude('U') @property def magB(self): return self._get_or_convert_magnitude('B') @property def magV(self): return self._get_or_convert_magnitude('V') @property def magJ(self): return self._get_or_convert_magnitude('J') @property def magI(self): return self._get_or_convert_magnitude('I') @property def magH(self): return self._get_or_convert_magnitude('H') @property def magK(self): return self._get_or_convert_magnitude('K') @property def magL(self): return self._get_or_convert_magnitude('L') @property def magM(self): return self._get_or_convert_magnitude('M') @property def magN(self): return self._get_or_convert_magnitude('N') @property def Z(self): return self.getParam('metallicity') @Z.setter def Z(self, Z): self.params['metallicity'] = Z @property def spectralType(self): return self.getParam('spectraltype') @spectralType.setter def spectralType(self, spectraltype): self.params['spectraltype'] = spectraltype @property def planets(self): return self.children def getLimbdarkeningCoeff(self, wavelength=1.22): # TODO replace with pylightcurve """ Looks up quadratic limb darkening parameter from the star based on T, logg and metalicity. :param wavelength: microns :type wavelength: float :return: limb darkening coefficients 1 and 2 """ # TODO check this returns correct value - im not certain # The intervals of values in the tables tempind = [ 3500., 3750., 4000., 4250., 4500., 4750., 5000., 5250., 5500., 5750., 6000., 6250., 6500., 6750., 7000., 7250., 7500., 7750., 8000., 8250., 8500., 8750., 9000., 9250., 9500., 9750., 10000., 10250., 10500., 10750., 11000., 11250., 11500., 11750., 12000., 12250., 12500., 12750., 13000., 14000., 15000., 16000., 17000., 19000., 20000., 21000., 22000., 23000., 24000., 25000., 26000., 27000., 28000., 29000., 30000., 31000., 32000., 33000., 34000., 35000., 36000., 37000., 38000., 39000., 40000., 41000., 42000., 43000., 44000., 45000., 46000., 47000., 48000., 49000., 50000.] lggind = [0., 0.5, 1., 1.5, 2., 2.5, 3., 3.5, 4., 4.5, 5.] mhind = [-5., -4.5, -4., -3.5, -3., -2.5, -2., -1.5, -1., -0.5, -0.3, -0.2, -0.1, 0., 0.1, 0.2, 0.3, 0.5, 1.] # Choose the values in the table nearest our parameters tempselect = _findNearest(tempind, float(self.T)) lgselect = _findNearest(lggind, float(self.calcLogg())) mhselect = _findNearest(mhind, float(self.Z)) quadratic_filepath = resource_stream(__name__, 'data/quadratic.dat') coeffTable = np.loadtxt(quadratic_filepath) foundValues = False for i in range(len(coeffTable)): if coeffTable[i, 2] == lgselect and coeffTable[i, 3] == tempselect and coeffTable[i, 4] == mhselect: if coeffTable[i, 0] == 1: u1array = coeffTable[i, 8:] # Limb darkening parameter u1 for each wl in waveind u2array = coeffTable[i+1, 8:] foundValues = True break if not foundValues: raise ValueError('No limb darkening values could be found') # TODO replace with better exception waveind = [0.365, 0.445, 0.551, 0.658, 0.806, 1.22, 1.63, 2.19, 3.45] # Wavelengths available in table # Interpolates the value at wavelength from values in the table (waveind) u1AtWavelength = np.interp(wavelength, waveind, u1array, left=0, right=0) u2AtWavelength = np.interp(wavelength, waveind, u2array, left=0, right=0) return u1AtWavelength, u2AtWavelength def estimateAbsoluteMagnitude(self): return eq.estimateAbsoluteMagnitude(self.spectralType) def estimateDistance(self): # TODO handle other mags than V if self.magV is not np.nan: return eq.estimateDistance(self.magV, self.estimateAbsoluteMagnitude()) else: return np.nan class Planet(StarAndPlanetCommon, PlanetAndBinaryCommon): def __init__(self, *args, **kwargs): StarAndPlanetCommon.__init__(self, *args, **kwargs) PlanetAndBinaryCommon.__init__(self, *args, **kwargs) self.classType = 'Planet' @property def isTransiting(self): """ Checks the the istransiting tag to see if the planet transits. Note that this only works as of catalogue version ee12343381ae4106fd2db908e25ffc537a2ee98c (11th March 2014) where the istransiting tag was implemented """ try: isTransiting = self.params['istransiting'] except KeyError: return False if isTransiting == '1': return True else: return False def calcTransitDuration(self, circular=False): """ Estimation of the primary transit time assuming a circular orbit (see :py:func:`equations.transitDuration`) """ try: if circular: return eq.transitDurationCircular(self.P, self.star.R, self.R, self.a, self.i) else: return eq.TransitDuration(self.P, self.a, self.R, self.star.R, self.i, self.e, self.periastron).Td except (ValueError, AttributeError, # caused by trying to rescale nan i.e. missing i value HierarchyError): # i.e. planets that dont orbit stars return np.nan def calcScaleHeight(self): raise NotImplementedError # return eq.scaleHeight(self.T, , self.g) # TODO mu based on assumptions def calcTransitDepth(self): return eq.TransitDepth(self.star.R, self.R).depth def type(self): return assum.planetType(self.T, self.M, self.R) def massType(self): return assum.planetMassType(self.M) def radiusType(self): return assum.planetRadiusType(self.R) def tempType(self): return assum.planetTempType(self.T) @property def mu(self): # TODO make getter look in params first calc if not molweight = self.getParam('molweight') if molweight is np.nan: # Use assumptions if self.M is not np.nan: return assum.planetMu(self.massType()) elif self.R is not np.nan: return assum.planetMu(self.radiusType()) else: return np.nan else: return molweight @mu.setter def mu(self, mu): mu = mu.rescale(aq.atomic_mass_unit) self.params['moleight'] = mu @property def albedo(self): albedo = self.getParam('albedo') if albedo is not np.nan: return albedo elif self.getParam('temperature') is not np.nan: planetClass = self.tempType() elif self.M is not np.nan: planetClass = self.massType() elif self.R is not np.nan: planetClass = self.radiusType() else: return np.nan return assum.planetAlbedo(planetClass) @albedo.setter def albedo(self, albedo): albedo = albedo self.params['albedo'] = albedo def calcTemperature(self): """ Calculates the temperature using which uses equations.MeanPlanetTemp, albedo assumption and potentially equations.starTemperature. issues - you cant get the albedo assumption without temp but you need it to calculate the temp. """ try: return eq.MeanPlanetTemp(self.albedo, self.star.T, self.star.R, self.a).T_p except (ValueError, HierarchyError): # ie missing value (.a) returning nan return np.nan def estimateMass(self): density = assum.planetDensity(self.radiusType()) return eq.Density(None, self.R, density).M def calcSMA(self): """ Calculates the semi-major axis from Keplers Third Law """ try: return eq.KeplersThirdLaw(None, self.star.M, self.P).a except HierarchyError: return np.nan def calcSMAfromT(self, epsilon=0.7): """ Calculates the semi-major axis based on planet temperature """ return eq.MeanPlanetTemp(self.albedo(), self.star.T, self.star.R, epsilon, self.T).a def calcPeriod(self): """ calculates period using a and stellar mass """ return eq.KeplersThirdLaw(self.a, self.star.M).P @property def discoveryMethod(self): return self.getParam('discoverymethod') @discoveryMethod.setter def discoveryMethod(self, discoverymethod): self.params['discoverymethod'] = discoverymethod @property def discoveryYear(self): try: return int(self.getParam('discoveryyear')) except ValueError: # np.nan return self.getParam('discoveryyear') @discoveryYear.setter def discoveryYear(self, discoveryYear): self.params['discoveryyear'] = discoveryYear @property def lastUpdate(self): return self.getParam('lastupdate') @property def description(self): return self.getParam('description') @property def star(self): return self._getParentClass(self.parent, 'Star') class Parameters(object): # TODO would this subclassing dict be more preferable? """ A class to hold parameter dictionaries, the input can be validated, units added and handling of multi valued fields. In future this may be better as a child of dict. """ def __init__(self): self.params = { 'altnames': [], 'list': [], } self._defaultUnits = { # this holds quantities with no current or projected ambiguity about their unit 'age': aq.Gyear, 'distance': aq.pc, # TODO more specific unit handling here or in classes? 'magB': 1, 'magH': 1, 'magI': 1, 'magJ': 1, 'magK': 1, 'magV': 1, 'temperature': aq.K, } self.rejectTags = ('system', 'binary', 'star', 'planet', 'moon') # These are handled in their own classes def addParam(self, key, value, attrib=None): """ Checks the key dosnt already exist, adds alternate names to a seperate list Future - format input and add units - logging """ if key in self.rejectTags: return False # TODO Replace with exception # Temporary code to handle the seperation tag than can occur several times with different units. # TODO code a full multi unit solution (github issue #1) if key == 'separation': if attrib is None: return False # reject seperations without a unit try: if not attrib['unit'] == 'AU': return False # reject for now except KeyError: # a seperation attribute exists but not one for units return False if key in self.params: # if already exists if key == 'name': try: # if flagged as a primary or popular name use this one, an option should be made to use either if attrib['type'] == 'pri': # first names or popular names. oldname = self.params['name'] self.params['altnames'].append(oldname) self.params['name'] = value else: self.params['altnames'].append(value) except (KeyError, TypeError): # KeyError = no type key in attrib dict, TypeError = not a dict self.params['altnames'].append(value) elif key == 'list': self.params['list'].append(value) else: try: name = self.params['name'] except KeyError: name = 'Unnamed' print('rejected duplicate {0}: {1} in {2}'.format(key, value, name)) # TODO: log rejected value return False # TODO Replace with exception else: # If the key doesn't already exist and isn't rejected # Some tags have no value but a upperlimit in the attributes if value is None and attrib is not None: try: value = attrib['upperlimit'] except KeyError: try: value = attrib['lowerlimit'] except KeyError: return False if key == 'rightascension': value = _ra_string_to_unit(value) elif key == 'declination': value = _dec_string_to_unit(value) elif key in self._defaultUnits: try: value = float(value) * self._defaultUnits[key] except: print('caught an error with {0} - {1}'.format(key, value)) self.params[key] = value class BinaryParameters(Parameters): def __init__(self): Parameters.__init__(self) self._defaultUnits.update({ 'separation': aq.au, # TODO there is actually 2 different measurements (other is arcsec) 'periastron': aq.deg, }) class StarParameters(Parameters): def __init__(self): Parameters.__init__(self) self._defaultUnits.update({ 'mass': aq.M_s, 'metallicity': 1, 'radius': aq.R_s, }) class PlanetParameters(Parameters): def __init__(self): Parameters.__init__(self) self._defaultUnits.update({ 'discoveryyear': 1, 'mass': aq.M_j, 'radius': aq.R_j, 'inclination': aq.deg, 'eccentricity': 1, 'periastron': aq.deg, 'period': aq.day, 'semimajoraxis': aq.au, 'transittime': aq.JD, # TODO specific JD, MJF etc 'molweight': aq.atomic_mass_unit, 'separation': aq.au, # TODO there is actually 2 different measurements (other is arcsec) }) def _findNearest(arr, value): """ Finds the value in arr that value is closest to """ arr = np.array(arr) # find nearest value in array idx = (abs(arr-value)).argmin() return arr[idx] class SpectralType(object): """ Takes input of a spectral type as a string and interprets it into the luminosity class and stellar type. .. usage : self.lumType = Luminosity Class self.classLetter = Stellar Class (ie O B A etc) self.classNumber = Stellar Class number self.specClass = ie A8V will be A8 self.specType = ie A*V will be A8V (default for calling the class) self.original = the original string This class ignores spaces, only considers the first class if given multiple options (ie K0/K1V, GIV/V, F8-G0) ignores non-typical star classes (ie ) and ignores extra statements like G8 V+ """ def __init__(self, classString): self.original = classString self.lumType = '' self.classLetter = '' self.classNumber = '' self._parseSpecType(classString) @property def specClass(self): """ Spectral class ie A8V is A8 """ return self.classLetter + str(self.classNumber) @property def roundedSpecClass(self): """ Spectral class with rounded class number ie A8.5V is A9 """ try: classnumber = str(int(np.around(self.classNumber))) except TypeError: classnumber = str(self.classNumber) return self.classLetter + classnumber @property def specType(self): """ Spectral class ie A8V is A8V """ return self.classLetter + str(self.classNumber) + self.lumType @property def roundedSpecType(self): """ Spectral class with rounded class number ie A8.5V is A9V """ return self.roundedSpecClass + self.lumType def __repr__(self): return self.specType def _parseSpecType(self, classString): """ This class attempts to parse the spectral type. It should probably use more advanced matching use regex """ try: classString = str(classString) except UnicodeEncodeError: # This is for the benefit of 1RXS1609 which currently has the spectral type K7\pm 1V # TODO add unicode support and handling for this case / ammend the target return False # some initial cases if classString == '' or classString == 'nan': return False possNumbers = range(10) possLType = ('III', 'II', 'Iab', 'Ia0', 'Ia', 'Ib', 'IV', 'V') # in order of unique matches # remove spaces, remove slashes classString = classString.replace(' ', '') classString = classString.replace('-', '/') classString = classString.replace('\\', '/') classString = classString.split('/')[0] # TODO we do not consider slashed classes yet (intemediates) # check first 3 chars for spectral types stellarClass = classString[:3] if stellarClass in _possSpectralClasses: self.classLetter = stellarClass elif stellarClass[:2] in _possSpectralClasses: # needed because A5V wouldnt match before self.classLetter = stellarClass[:2] elif stellarClass[0] in _possSpectralClasses: self.classLetter = stellarClass[0] else: return False # assume a non standard class and fail # get number try: numIndex = len(self.classLetter) classNum = int(classString[numIndex]) if classNum in possNumbers: self.classNumber = int(classNum) # don't consider decimals here, done at the type check typeString = classString[numIndex+1:] else: return False # invalid number received except IndexError: # reached the end of the string return True except ValueError: # i.e its a letter - fail # TODO multi letter checking typeString = classString[1:] if typeString == '': # ie there is no more information as in 'A8' return True # Now check for a decimal and handle those cases if typeString[0] == '.': # handle decimal cases, we check each number in turn, add them as strings and then convert to float and add # to original number decimalNumbers = '.' for number in typeString[1:]: try: if int(number) in possNumbers: decimalNumbers += number else: print('Something went wrong in decimal checking') # TODO replace with logging return False # somethings gone wrong except ValueError: break # recevied a non-number (probably L class) # add decimal to classNum try: self.classNumber += float(decimalNumbers) except ValueError: # probably trying to convert '.' to a float pass typeString = typeString[len(decimalNumbers):] if len(typeString) is 0: return True # Handle luminosity class for possL in possLType: # match each possible case in turn (in order of uniqueness) Lcase = typeString[:len(possL)] # match from front with length to minimise matching say IV in '<3 CIV' if possL == Lcase: self.lumType = possL return True if not self.classNumber == '': return True else: # if there no number asumme we have a name ie 'Catac. var.' self.classLetter = '' self.classNumber = '' self.lumType = '' return False _ExampleSystemCount = 1 # Used by example.py - put here to enable global # main sequence _possSingleLetterClasses = ('O', 'B', 'A', 'F', 'G', 'K', 'M', 'L', 'T', 'Y', # dwarfs 'C', 'S', 'W', # Wolf-Rayet 'P', 'Q', # Non-stellar spectral types ) # skipped carbon stars with dashes ie C-R _possMultiLetterClasses = ('WNE', 'WNL', 'WCE', 'WCL', 'WO', 'WR', 'WN', 'WC', # Wolf-Rayet stars, WN/C skipped 'MS', 'MC', # intermediary carbon-related classes 'DAB', 'DAO', 'DAZ', 'DBZ', # Extended white dwarf spectral types 'DAV', 'DBV', 'DCV', # Variable star designations, GW Vir (DOV and PNNV) skipped 'DA', 'DB', 'DO', 'DQ', 'DZ', 'DC', 'DX', # white dwarf spectral types ) _possSpectralClasses = _possMultiLetterClasses + _possSingleLetterClasses # multi first class Magnitude(object): """ Holds measured magnitudes and can convert between them given a spectral class. """ def __init__(self, spectral_type, magU=None, magB=None, magV=None, magI=None, magJ=None, magH=None, magK=None, magL=None, magM=None, magN=None): if isinstance(spectral_type, SpectralType): self.spectral_type = spectral_type else: self.spectral_type = SpectralType(spectral_type) self.magU = magU self.magB = magB self.magV = magV self.magI = magI self.magJ = magJ self.magH = magH self.magK = magK self.magL = magL self.magM = magM self.magN = magN # For magDict, these should probably be grouped together self.column_for_V_conversion = { # mag column, sign (most are V-Mag (+1), some are Mag-V (-1)) 'U': (2, -1), 'B': (3, -1), 'J': (8, +1), 'H': (9, +1), 'K': (10, +1), 'L': (11, +1), 'M': (12, +1), 'N': (13, +1), } def convert(self, to_mag, from_mag=None): """ Converts magnitudes using UBVRIJHKLMNQ photometry in Taurus-Auriga (Kenyon+ 1995) ReadMe+ftp1995ApJS..101..117K Colors for main-sequence stars If from_mag isn't specified the program will cycle through provided magnitudes and choose one. Note that all magnitudes are first converted to V, and then to the requested magnitude. :param to_mag: magnitude to convert to :param from_mag: magnitude to convert from :return: """ allowed_mags = "UBVJIHKLMN" if from_mag: if to_mag == 'V': # If V mag is requested (1/3) - from mag specified return self._convert_to_from('V', from_mag) if from_mag == 'V': magV = self.magV else: magV = self._convert_to_from('V', from_mag) return self._convert_to_from(to_mag, 'V', magV) # if we can convert from any magnitude, try V first elif not isNanOrNone(self.magV): if to_mag == 'V': # If V mag is requested (2/3) - no need to convert return self.magV else: return self._convert_to_from(to_mag, 'V', self.magV) else: # Otherwise lets try all other magnitudes in turn order = "UBJHKLMN" # V is the intermediate step from the others, done by default if possible for mag_letter in order: try: magV = self._convert_to_from('V', mag_letter) if to_mag == 'V': # If V mag is requested (3/3) - try all other mags to convert logging.debug('Converted to magV from {0} got {1}'.format(mag_letter, magV)) return magV else: mag_val = self._convert_to_from(to_mag, 'V', magV) logging.debug('Converted to mag{0} from {1} got {2}'.format(to_mag, mag_letter, mag_val)) return mag_val except ValueError: continue # this conversion may not be possible, try another raise ValueError('Could not convert from any provided magnitudes') def _convert_to_from(self, to_mag, from_mag, fromVMag=None): """ Converts from or to V mag using the conversion tables :param to_mag: uppercase magnitude letter i.e. 'V' or 'K' :param from_mag: uppercase magnitude letter i.e. 'V' or 'K' :param fromVMag: MagV if from_mag is 'V' :return: estimated magnitude for to_mag from from_mag """ lumtype = self.spectral_type.lumType # rounds decimal types, TODO perhaps we should interpolate? specClass = self.spectral_type.roundedSpecClass if not specClass: # TODO investigate implications of this raise ValueError('Can not convert when no spectral class is given') if lumtype not in ('V', ''): raise ValueError("Can only convert for main sequence stars. Got {0} type".format(lumtype)) if to_mag == 'V': col, sign = self.column_for_V_conversion[from_mag] try: # TODO replace with pandas table offset = float(magDict[specClass][col]) except KeyError: raise ValueError('No data available to convert those magnitudes for that spectral type') if math.isnan(offset): raise ValueError('No data available to convert those magnitudes for that spectral type') else: from_mag_val = self.__dict__['mag'+from_mag] # safer than eval if isNanOrNone(from_mag_val): # logger.debug('2 '+from_mag) raise ValueError('You cannot convert from a magnitude you have not specified in class') return from_mag_val + (offset*sign) elif from_mag == 'V': if fromVMag is None: # trying to second guess here could mess up a K->B calulation by using the intermediate measured V. While # this would probably be preferable it is not was was asked and therefore could give unexpected results raise ValueError('Must give fromVMag, even if it is self.magV') col, sign = self.column_for_V_conversion[to_mag] try: offset = float(magDict[specClass][col]) except KeyError: raise ValueError('No data available to convert those magnitudes for that spectral type') if math.isnan(offset): raise ValueError('No data available to convert those magnitudes for that spectral type') else: return fromVMag + (offset*sign*-1) # -1 as we are now converting the other way else: raise ValueError('Can only convert from and to V magnitude. Use .convert() instead') def _createMagConversionDict(): """ loads magnitude_conversion.dat which is table A% 1995ApJS..101..117K """ magnitude_conversion_filepath = resource_stream(__name__, 'data/magnitude_conversion.dat') raw_table = np.loadtxt(magnitude_conversion_filepath, '|S5') magDict = {} for row in raw_table: if sys.hexversion >= 0x03000000: starClass = row[1].decode("utf-8") # otherwise we get byte ints or b' caused by 2to3 tableData = [x.decode("utf-8") for x in row[3:]] else: starClass = row[1] tableData = row[3:] magDict[starClass] = tableData return magDict magDict = _createMagConversionDict() def isNanOrNone(val): """ Tests if val is float('nan') or None using math.isnan and is None. Needed as isnan fails if a non float is given. :param val: :return: """ if val is None: return True else: try: return math.isnan(val) except TypeError: # not a float return False def _ra_string_to_unit(ra_string): ra_split = ra_string.split(' ') hour, min, sec = ra_split ra_astropy_format = '{}h{}m{}s'.format(hour, min, sec) ra_unit = astropy.coordinates.Longitude(ra_astropy_format, unit=u.deg) return ra_unit def _dec_string_to_unit(dec_string): deg_split = dec_string.split(' ') deg, arcmin, arcsec = deg_split deg_astropy_format = '{}d{}m{}s'.format(deg, arcmin, arcsec) dec_unit = astropy.coordinates.Latitude(deg_astropy_format, unit=u.deg) return dec_unit class HierarchyError(ed_params.ExoDataError): pass
mit
racker/cloud-init-debian-pkg
doc/rtd/conf.py
6
2327
import os import sys # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath('../../')) sys.path.insert(0, os.path.abspath('../')) sys.path.insert(0, os.path.abspath('./')) sys.path.insert(0, os.path.abspath('.')) from cloudinit import version # Supress warnings for docs that aren't used yet #unused_docs = [ #] # General information about the project. project = 'Cloud-Init' # -- General configuration ---------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ 'sphinx.ext.intersphinx', ] intersphinx_mapping = { 'sphinx': ('http://sphinx.pocoo.org', None) } # The suffix of source filenames. source_suffix = '.rst' # The master toctree document. master_doc = 'index' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. version = version.version_string() release = version # Set the default Pygments syntax highlight_language = 'python' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. show_authors = False # -- Options for HTML output -------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. html_theme_options = { "bodyfont": "Arial, sans-serif", "headfont": "Arial, sans-serif" } # The name of an image file (relative to this directory) to place at the top # of the sidebar. html_logo = 'static/logo.png'
gpl-3.0
4Kaylum/SkyBot
cogs/shitpost_generator.py
1
17584
import random import voxelbotutils as utils arr1 = ["it is dangerous to","cries bc i","do you think shrek would","there is no ethical consumption when you","i must","do NOT","this is a warning: do not","why would you","tfw you","im just small and i","i long for the day when i can finally","why doesnt tumblr allow you to openly","its times like this that i wonder why we cant all just","dont be sad! youre a small leaf and its ok to want to","sometimes i just want to","i will block and unfollow you if you","watch as i","hey everyone!","its 2015 and ppl still","tbh i dont actually","can we have an anime about girls who","its time to ask the real questions: is it ok 2","i think its wrong to","why cant we all just get along and","(flopping around on the mossy forest floor) i dont","(shoveling handfuls of moss into my mouth) i","has anyone ever even tried to","i for one","what im trying to say is that i dont trust anyone who doesnt","ive made a lot of mistakes but at least i dont","what if you were at the grocery store and then you started to","(Sweating a lot)","i wrote a poem about those times where i","my new album is called:","(thinking about my gender)","im warning you not to","i had a nightmare about this guy who wanted to","its impossible to","you cant","you can","(sighing wistfully) its time to","me:","transtrender voice: im gonna","transtrender voice:","i love to","truscum voice:","truscum voice: i hate when people","i dont have any friends because i","i","im going to go back in time and","(sighing sadly) i guess i will","(sighing sadly) i","(sighing sadly) i dont","(looking pensively at the sky) sometimes we forget to","i hate birds because they","i love fish because they","i only know how to","when i wake up i immediately","whats up?","just dont","is it a crime to","rats are the coolest. they are small and furry + they","the lyrics are: take me to church / i'll","the lyrics are: i'm gonna pop some tags /","i dont care about boys who","i dont care about people who","i love to sin and","i saw a bird once and you can't tell me they dont","back in my scene days i tried to","my favorite panic at the disco song is:","do you think i","i dont trust horses bc they are too tall. and they","in times like this i ask myself... what would a dog do? it'd probably","i get pissed sometimes because lizards","isnt being vegan when you dont eat meat and you","the lyrics are: because you know im all about that bass/bout that bass/","the difference between crows and ravens is that crows","slugs dont","when i grow up i want to","it turns out that vaccines actually cause you to","bath and body works RANT!! i went to bath and body works and instead of buying lotion i just went to sit in a corner and","there arent enough people at my school who","on my farm we will","today someone called me ugly and i was like well at least i","is your bed made? is your sweater on? do you want to","ok but consider:","the gays dont accept my support but i literally bought a rainbow packpack. i also","i am not a model i just","for just 5 easy payments of 9.99 you can","This friendly old tree has stood here for as long as Neopia Central has been around- maybe even longer. Neopians come here to","open rp: sonic the hedgehog is walking into the white house. he is looking for obama. it is time to","my neopet would NEVER","i am at peace. i","i am a fox. i break into the farmers chicken coop. i","the goal:","it is not wrong to","˙ ͜>˙","whats my favorite dinosaur? i like any dinosaur that wants to",";) i guess you could say i","in medieval times did people","i guess its irresponsible that i use all of my money to","if i were a furry i would be a squirrel because squirrels","is it ethical to","CIS stands for","never gonna give you up / never gonna let you down / never gonna","This is someone dying while having an MRI scan. Before you die your brain releases tons and tons of endorphins that make you","what do you mean when you say my time management is bad? at 8am i wake up. i have breakfast and brush my teeth. then from 9am to 8pm i","if i were john green i would","I just want to point out one thing here: When was the last time the girl kissed the boy in a teen romance? Ever? Has it happened ever? I seriously","im not good at making friends and i dont","OPEN THIS PIT UP i shout as i","the DEATH MOON HELL RITUAL will begin after we","i am writing a wikihow article about how to","im hiding in plants now because i fear the world and dont want to","*points to a huge boulder* this is my beautiful wife...together we","im gay for anyone who can","lets get down to business to","wake up america...big business is always trying to","performance art in which i","(Asmr) Watch me","what do you mean walmart doesnt accept neopoints? you just lost my business...im going to","do you think that houndoom the pokemon would","a little known fact is that the bush administration tried to","at my sleepover we will engage in fun activities. for example we can","a little known fact is that there is a worm version of every holiday. for example on worm valentines day we celebrate worm love and","i didnt know it was illegal to","tfw ur trying to relax but u cant help but","we need to start a fundraiser in order to","all proceeds from this bake sale will go toward my efforts to","whenever i try to get some relaxation around here those darn kids always come in my yard and","the sjws have taken everything from me. my dignity. my happiness. and they","reblog this if you","i dont feel love or affection for humankind. i only","i think hipsters are those people who go to whole foods and","im not a person. im a bunch of legos and i","im not a person. im a robot and i","if aliens came to earth i think the first thing theyd do would be","You may say I'm a dreamer... But I'm not the only one. I hope someday you'll","it hurts my feelings when people","a little known fact is that there is a worm version of every holiday. for example on worm christmas we give worm related gifts and","i think we take it for granted that i","it is not polite to","i’m a jaded teenage girl. i’ve been through shit that you wouldn’t even dream of. i","whenever i play the sims i","i only use the 'bleeding cowboys' font because i","According to all known laws of aviation there is no way a bee should be able to","if i were stranded on an island i would probably","im going to write a modern version of romeo and juliet where they both","les miserables would be way better if instead of singing they just","one day more... another day another opportunity to","do you hear the people sing? singing the song of angry men? it is the music of me when i","'unleash the crabs!' i bellow. thousands of crabs scurry in and","(leading you on a tour of the house) and here is the room where i","i am not gentle or kind. i","i try to be productive and integrate into society but i always wind up slithering in the street trying to","a dating show like the bachelor but for possums looking to","if there were two guys on the moon and one killed the other with a rock would they","hello this is the popsicle police. yes sir youve been stopped today because you keep trying to use your popsicles to","welcome to the potion store. this potion allows you to","i am trapped in the heinous slime den and i","today we sat down for a talk with god. in the exclusive interview god stated the following: the only thing i regret is making people who","makeup can really transform your appearance. for example i use makeup to","i broke all my limbs trying to","my beak is my most beautiful feature. i use it to","i only feel attraction to boys who have a strong beak and claws and","i got banned from the neopets rp boards because i made my oc","i got a tattoo across my ass that says:","team beck or team beyonce? im team","imagine your otp preparing to","imagine your otp fusing into one more powerful being that will take over the world and","when people hit on me i try to shake them off by talking about how i","i love my cat even though she smells like pee and i know she doesnt","every time you SIN you gain ONE SINPOINT. i SIN frequently and gain SINPOINTS often because i","ill take a potato chip...and","im not like those other guys. i","i always wind up making friends who","im sweating. im glistening. im laying on the floor ready to","yeah sure we have bath bombs that sparkle. bath bombs that change color. we even have ones that have surprises inside. but when will they make a bath bomb that can","light yagami is NOT gay. he is NOt. Lighj t yheeagamia is gonna","god i love eels. god i love eels so much i love eels because they","science is unnecessary. you cant use science to","im horrible. im terrible. i","this post generator is alright i guess but if i could be doing anything right now i would rather","im trapped in a babies r us right now which is substantially worse than a toys r us. i havent seen daylight for months and i cannot escape. please rescue me from baby hell so i can once again","all those rpg video games are the same. leveling up and fighting dragons. i want something different... a video game where u","am i gay? well if it answers your question i","what could be better? the warm sand of the beach... the nice cool ocean water... the ability to","it's finally time. i have completed my collection of 18000 small plastic shrimp and now i can","its hard to relax when all u wanna do is","people always used to tell me that i set goals that would be too hard for me to achieve. however i have finally found my true calling:","i gaze solemnly out of the car window. droplets of rain slide down and collect on its surface. the pigeon-grey sky is clouded over and thick with cold crisp moisture. i turn to you and say what we have both been thinking: i want to","i always try to make a statement with my fashion...thats why today i wore a crop top with a pic of richard nixon captioned:","GGGG GG OTtA GO TO THE SL UG RACe gOt TO GO FAST AT TH e s lug race...","(chewing on a branch) i","(wearing a black and red corset with a black leather skirt and black fishnet stockings with combat boots) i","(leading u on a secluded path thru the woods) yeah i guess you could say i","aaa!! i","aaa!! its time to","aaa!! im late for my appointment to","dear diary....today i am going to","dear diary....im embarrassed bc my crush heard about how i","emotion is not necessary. we do not need fears or dreams or feelings about anything. come with me to a pure and improved world where we only","to-do list: - make bed. - do homework. - wash dishes. - call mom. -","one thing ive learned is that its surprisingly difficult to","one thing ive learned is that its surprisingly easy to","(reclining in an antique armchair and sipping a thick black liquid from a wine glass) ah. i suppose its time to","welcome to the dark sludge pit. down here we","im fun. im flirty and cute. im gonna","god i love the sweet taste of POISON. i love to POISON myself and","welcome to the potion store. this potion makes you","i might bite you but its worth it to be in my presence ... like yeah u might get chomped but its worth it to hang out with me +","you can buy any goods and services with bitcoin and i mean any. you can probably even use bitcoin to","L might be gay to be honest like that scene where he rubs lights feet......a little gay...a little...the only thing that might be gayer would be if he were to","who NEEDS ethically sourced memes when you can just","excuse me but to be TRULY trans you have to","a bumper sticker that reads: ask me about how i","a bumper sticker that reads: honk if you","do not fear for the end is coming soon. we can revel in the thick clotted mud of the fields. we can","i went in the gym and a guy asked me if i even lift. BRO have you SEEN my MASCLES? i will punish him and","i will hand down righteous judgment and smite all who","sometimes i just roll in lotion and","sometimes i just wanna slather lotion all over my body and","we... are the crystal gems! well always save the day! and if you think we cant... we will","i say my dog is perfect but she does have two flaws. 1. her paws smell like doritos and 2. she always tries to","elves are better than gnomes because they","elves are better than dwarves because they","im tired of people thinking that i support superwholock. i am against superwholock. i want to","*sees another human being* time to","*sees another human being*","your fave is problematic: they","like this post if you","evidence bruno mars am gay: he","ive seen geese. i know what they can do to a person. i saw one of them","winter is coming. that means we have to","ive been called many names. but you can just know me as the man who will","to decalcify your pineal gland you have to",""] arr2 = ["romanticize","support","enjoy","destroy","flirt with","be","descend into","reject the agenda of","sink into the warm embrace of","shitpost about","get along with others despite differing opinions about","cry over","hide from","befriend","dream about","smell like","check out","admire","engage in heated debate with","betray","spy on","battle","thrash on the floor and yell about","expect good things out of","trust","disobey","paint a picture of","blog about","endorse","try to cheer up by thinking about","pretend to be","worship","manipulate","understand","prevent","meet","interact with","love","disregard","pretend to care about","pretend to enjoy","pretend to be","pretend to like","have","eat","only eat","live in a cave with","believe in","accuse me of enjoying","accuse me of hating","accuse me of betraying","drown in a sea of","bask in a torrential downpour of","draw","tolerate","reblog","buy perfume that smells like","only care about","hate","look like","regret","purchase","buy","sell","defend","discuss","be as beautiful as","receive","harm","illegally torrent","acquire","gain","be sad and desire","run away from","create","fund","go on a date with","work on my new book about","set fire to","experience","fight and win against","kiss","popularize","feed off of","think about","kinkshame","serenade","growl at","escape","emulate","cuddle","think deeply about","infiltrate","use","fear","fill the void left by","improve","try to ban","savor the rapturous caress of","prepare for","avoid","release","attack","declare war against","roll across the plains and spread the good word about","wriggle in the swamp and love","write an article about","pass the Deadly Five Trials Of Horror And","celebrate","pray to","call me","contour the face of"] arr3 = ["communism","leafs","capitalism","shrek","superwholock","late capitalism","my fursona","bronies","mras","meninism","LEAFS","the bog","mud","the liberal media","fursonas","furries","grass","the swamp","my vast cache of ketchup packets","nge","beyonce","the slime hovel","neurotypicals","popular sitcom 'friends'","a really boring red fox fursona","a realy tiny seashell","anime","my own ass","a lemur","our benevolent lizard overlords","friendship","concentrated fruit juices","hell","the void","anything with more than 4 legs","this loathsome flesh suit","memes","flesh","meat","blood","the president","dragons","gender","genders","a decent person","transtrenders","a transtrender","me","cisgenders","the cis","knives","love","the feelings of others","algae","the pit","truscum","homosexuality","friends","the spider farm","trash","garbage","the sewer","the plague","disease","wolves","birds","happiness","video games","boys","bones","hair","skin","rats","berries","space","fire","reddit","sin","emos","horses","the chunky dunk hydrating lippie","mushrooms","hamburgers","reagan","america","corn","good things","bad things","stuff","humans","liberalism","adam sandler","ham","neopets","nature","the environment","trees","chicken nuggets","elves","my ass","marx","heterosexuality","heterosexuals","john green","the earth","my house","homestuck","civilization","death","life","girls","monsters","werewolves","macklemore","worms","beetles","maggots","bugs","mysterious unsourced meat","worm hell","gamergate","poop emoji","'in the aeroplane over the sea'","aliens","the moon","the sun","beard oil","your crimes","everything good in the world","soap","milk","chaos","my problems","blood worms","sonic the hedgehog","a beak","my beak","the white house","the death note","sweat","eels","crime","science","FULL COMMUNISM","goats","shrimp","every single person on the earth","5000 crickets","20 eagles","165 parakeets","a swarm of fast-moving grubs","my birth","the store 'hot topic'","my aesthetic","two sparkly dolphins","poison","black sludge","the dark sludge pit","snake venom","lizards","bitcoins","light yagami","a sweater made of meat","misery","the end of days","you","a cranky owl","fifty eels","peperony and chease","crime elves","elf criminals","a spicy meatball","taylor swift","it","pearl","garnet","amethyst","peridot","the crystal gems","steven universe","jasper","bruno mars","kylie jenner","contouring","chaotic geese","game of thrones","brumbpo tungus","nipple"] arr4 = ["","","","","","","","?"] class ShitpostGenerator(utils.Cog): @utils.command() async def shitpost(self, ctx:utils.Context): """ Outputs a classic Tumblr-style shitpost. """ text = ' '.join([ random.choice(arr1), random.choice(arr2), random.choice(arr3), random.choice(arr4), ]) if text.endswith(' ?'): text = text[:-2] + '?' return await ctx.send(text.strip()) def setup(bot:utils.Bot): x = ShitpostGenerator(bot) bot.add_cog(x)
gpl-3.0
linjoahow/lego_Automatic-assembly
static/Brython3.1.0-20150301-090019/Lib/unittest/util.py
794
4157
"""Various utility functions.""" from collections import namedtuple, OrderedDict __unittest = True _MAX_LENGTH = 80 def safe_repr(obj, short=False): try: result = repr(obj) except Exception: result = object.__repr__(obj) if not short or len(result) < _MAX_LENGTH: return result return result[:_MAX_LENGTH] + ' [truncated]...' def strclass(cls): return "%s.%s" % (cls.__module__, cls.__name__) def sorted_list_difference(expected, actual): """Finds elements in only one or the other of two, sorted input lists. Returns a two-element tuple of lists. The first list contains those elements in the "expected" list but not in the "actual" list, and the second contains those elements in the "actual" list but not in the "expected" list. Duplicate elements in either input list are ignored. """ i = j = 0 missing = [] unexpected = [] while True: try: e = expected[i] a = actual[j] if e < a: missing.append(e) i += 1 while expected[i] == e: i += 1 elif e > a: unexpected.append(a) j += 1 while actual[j] == a: j += 1 else: i += 1 try: while expected[i] == e: i += 1 finally: j += 1 while actual[j] == a: j += 1 except IndexError: missing.extend(expected[i:]) unexpected.extend(actual[j:]) break return missing, unexpected def unorderable_list_difference(expected, actual): """Same behavior as sorted_list_difference but for lists of unorderable items (like dicts). As it does a linear search per item (remove) it has O(n*n) performance.""" missing = [] while expected: item = expected.pop() try: actual.remove(item) except ValueError: missing.append(item) # anything left in actual is unexpected return missing, actual def three_way_cmp(x, y): """Return -1 if x < y, 0 if x == y and 1 if x > y""" return (x > y) - (x < y) _Mismatch = namedtuple('Mismatch', 'actual expected value') def _count_diff_all_purpose(actual, expected): 'Returns list of (cnt_act, cnt_exp, elem) triples where the counts differ' # elements need not be hashable s, t = list(actual), list(expected) m, n = len(s), len(t) NULL = object() result = [] for i, elem in enumerate(s): if elem is NULL: continue cnt_s = cnt_t = 0 for j in range(i, m): if s[j] == elem: cnt_s += 1 s[j] = NULL for j, other_elem in enumerate(t): if other_elem == elem: cnt_t += 1 t[j] = NULL if cnt_s != cnt_t: diff = _Mismatch(cnt_s, cnt_t, elem) result.append(diff) for i, elem in enumerate(t): if elem is NULL: continue cnt_t = 0 for j in range(i, n): if t[j] == elem: cnt_t += 1 t[j] = NULL diff = _Mismatch(0, cnt_t, elem) result.append(diff) return result def _ordered_count(iterable): 'Return dict of element counts, in the order they were first seen' c = OrderedDict() for elem in iterable: c[elem] = c.get(elem, 0) + 1 return c def _count_diff_hashable(actual, expected): 'Returns list of (cnt_act, cnt_exp, elem) triples where the counts differ' # elements must be hashable s, t = _ordered_count(actual), _ordered_count(expected) result = [] for elem, cnt_s in s.items(): cnt_t = t.get(elem, 0) if cnt_s != cnt_t: diff = _Mismatch(cnt_s, cnt_t, elem) result.append(diff) for elem, cnt_t in t.items(): if elem not in s: diff = _Mismatch(0, cnt_t, elem) result.append(diff) return result
gpl-3.0
xyuanmu/XX-Net
python3.8.2/Lib/unittest/test/test_case.py
6
73098
import contextlib import difflib import pprint import pickle import re import sys import logging import warnings import weakref import inspect from copy import deepcopy from test import support import unittest from unittest.test.support import ( TestEquality, TestHashing, LoggingResult, LegacyLoggingResult, ResultWithNoStartTestRunStopTestRun ) from test.support import captured_stderr log_foo = logging.getLogger('foo') log_foobar = logging.getLogger('foo.bar') log_quux = logging.getLogger('quux') class Test(object): "Keep these TestCase classes out of the main namespace" class Foo(unittest.TestCase): def runTest(self): pass def test1(self): pass class Bar(Foo): def test2(self): pass class LoggingTestCase(unittest.TestCase): """A test case which logs its calls.""" def __init__(self, events): super(Test.LoggingTestCase, self).__init__('test') self.events = events def setUp(self): self.events.append('setUp') def test(self): self.events.append('test') def tearDown(self): self.events.append('tearDown') class Test_TestCase(unittest.TestCase, TestEquality, TestHashing): ### Set up attributes used by inherited tests ################################################################ # Used by TestHashing.test_hash and TestEquality.test_eq eq_pairs = [(Test.Foo('test1'), Test.Foo('test1'))] # Used by TestEquality.test_ne ne_pairs = [(Test.Foo('test1'), Test.Foo('runTest')), (Test.Foo('test1'), Test.Bar('test1')), (Test.Foo('test1'), Test.Bar('test2'))] ################################################################ ### /Set up attributes used by inherited tests # "class TestCase([methodName])" # ... # "Each instance of TestCase will run a single test method: the # method named methodName." # ... # "methodName defaults to "runTest"." # # Make sure it really is optional, and that it defaults to the proper # thing. def test_init__no_test_name(self): class Test(unittest.TestCase): def runTest(self): raise MyException() def test(self): pass self.assertEqual(Test().id()[-13:], '.Test.runTest') # test that TestCase can be instantiated with no args # primarily for use at the interactive interpreter test = unittest.TestCase() test.assertEqual(3, 3) with test.assertRaises(test.failureException): test.assertEqual(3, 2) with self.assertRaises(AttributeError): test.run() # "class TestCase([methodName])" # ... # "Each instance of TestCase will run a single test method: the # method named methodName." def test_init__test_name__valid(self): class Test(unittest.TestCase): def runTest(self): raise MyException() def test(self): pass self.assertEqual(Test('test').id()[-10:], '.Test.test') # "class TestCase([methodName])" # ... # "Each instance of TestCase will run a single test method: the # method named methodName." def test_init__test_name__invalid(self): class Test(unittest.TestCase): def runTest(self): raise MyException() def test(self): pass try: Test('testfoo') except ValueError: pass else: self.fail("Failed to raise ValueError") # "Return the number of tests represented by the this test object. For # TestCase instances, this will always be 1" def test_countTestCases(self): class Foo(unittest.TestCase): def test(self): pass self.assertEqual(Foo('test').countTestCases(), 1) # "Return the default type of test result object to be used to run this # test. For TestCase instances, this will always be # unittest.TestResult; subclasses of TestCase should # override this as necessary." def test_defaultTestResult(self): class Foo(unittest.TestCase): def runTest(self): pass result = Foo().defaultTestResult() self.assertEqual(type(result), unittest.TestResult) # "When a setUp() method is defined, the test runner will run that method # prior to each test. Likewise, if a tearDown() method is defined, the # test runner will invoke that method after each test. In the example, # setUp() was used to create a fresh sequence for each test." # # Make sure the proper call order is maintained, even if setUp() raises # an exception. def test_run_call_order__error_in_setUp(self): events = [] result = LoggingResult(events) class Foo(Test.LoggingTestCase): def setUp(self): super(Foo, self).setUp() raise RuntimeError('raised by Foo.setUp') Foo(events).run(result) expected = ['startTest', 'setUp', 'addError', 'stopTest'] self.assertEqual(events, expected) # "With a temporary result stopTestRun is called when setUp errors. def test_run_call_order__error_in_setUp_default_result(self): events = [] class Foo(Test.LoggingTestCase): def defaultTestResult(self): return LoggingResult(self.events) def setUp(self): super(Foo, self).setUp() raise RuntimeError('raised by Foo.setUp') Foo(events).run() expected = ['startTestRun', 'startTest', 'setUp', 'addError', 'stopTest', 'stopTestRun'] self.assertEqual(events, expected) # "When a setUp() method is defined, the test runner will run that method # prior to each test. Likewise, if a tearDown() method is defined, the # test runner will invoke that method after each test. In the example, # setUp() was used to create a fresh sequence for each test." # # Make sure the proper call order is maintained, even if the test raises # an error (as opposed to a failure). def test_run_call_order__error_in_test(self): events = [] result = LoggingResult(events) class Foo(Test.LoggingTestCase): def test(self): super(Foo, self).test() raise RuntimeError('raised by Foo.test') expected = ['startTest', 'setUp', 'test', 'tearDown', 'addError', 'stopTest'] Foo(events).run(result) self.assertEqual(events, expected) # "With a default result, an error in the test still results in stopTestRun # being called." def test_run_call_order__error_in_test_default_result(self): events = [] class Foo(Test.LoggingTestCase): def defaultTestResult(self): return LoggingResult(self.events) def test(self): super(Foo, self).test() raise RuntimeError('raised by Foo.test') expected = ['startTestRun', 'startTest', 'setUp', 'test', 'tearDown', 'addError', 'stopTest', 'stopTestRun'] Foo(events).run() self.assertEqual(events, expected) # "When a setUp() method is defined, the test runner will run that method # prior to each test. Likewise, if a tearDown() method is defined, the # test runner will invoke that method after each test. In the example, # setUp() was used to create a fresh sequence for each test." # # Make sure the proper call order is maintained, even if the test signals # a failure (as opposed to an error). def test_run_call_order__failure_in_test(self): events = [] result = LoggingResult(events) class Foo(Test.LoggingTestCase): def test(self): super(Foo, self).test() self.fail('raised by Foo.test') expected = ['startTest', 'setUp', 'test', 'tearDown', 'addFailure', 'stopTest'] Foo(events).run(result) self.assertEqual(events, expected) # "When a test fails with a default result stopTestRun is still called." def test_run_call_order__failure_in_test_default_result(self): class Foo(Test.LoggingTestCase): def defaultTestResult(self): return LoggingResult(self.events) def test(self): super(Foo, self).test() self.fail('raised by Foo.test') expected = ['startTestRun', 'startTest', 'setUp', 'test', 'tearDown', 'addFailure', 'stopTest', 'stopTestRun'] events = [] Foo(events).run() self.assertEqual(events, expected) # "When a setUp() method is defined, the test runner will run that method # prior to each test. Likewise, if a tearDown() method is defined, the # test runner will invoke that method after each test. In the example, # setUp() was used to create a fresh sequence for each test." # # Make sure the proper call order is maintained, even if tearDown() raises # an exception. def test_run_call_order__error_in_tearDown(self): events = [] result = LoggingResult(events) class Foo(Test.LoggingTestCase): def tearDown(self): super(Foo, self).tearDown() raise RuntimeError('raised by Foo.tearDown') Foo(events).run(result) expected = ['startTest', 'setUp', 'test', 'tearDown', 'addError', 'stopTest'] self.assertEqual(events, expected) # "When tearDown errors with a default result stopTestRun is still called." def test_run_call_order__error_in_tearDown_default_result(self): class Foo(Test.LoggingTestCase): def defaultTestResult(self): return LoggingResult(self.events) def tearDown(self): super(Foo, self).tearDown() raise RuntimeError('raised by Foo.tearDown') events = [] Foo(events).run() expected = ['startTestRun', 'startTest', 'setUp', 'test', 'tearDown', 'addError', 'stopTest', 'stopTestRun'] self.assertEqual(events, expected) # "TestCase.run() still works when the defaultTestResult is a TestResult # that does not support startTestRun and stopTestRun. def test_run_call_order_default_result(self): class Foo(unittest.TestCase): def defaultTestResult(self): return ResultWithNoStartTestRunStopTestRun() def test(self): pass Foo('test').run() def _check_call_order__subtests(self, result, events, expected_events): class Foo(Test.LoggingTestCase): def test(self): super(Foo, self).test() for i in [1, 2, 3]: with self.subTest(i=i): if i == 1: self.fail('failure') for j in [2, 3]: with self.subTest(j=j): if i * j == 6: raise RuntimeError('raised by Foo.test') 1 / 0 # Order is the following: # i=1 => subtest failure # i=2, j=2 => subtest success # i=2, j=3 => subtest error # i=3, j=2 => subtest error # i=3, j=3 => subtest success # toplevel => error Foo(events).run(result) self.assertEqual(events, expected_events) def test_run_call_order__subtests(self): events = [] result = LoggingResult(events) expected = ['startTest', 'setUp', 'test', 'tearDown', 'addSubTestFailure', 'addSubTestSuccess', 'addSubTestFailure', 'addSubTestFailure', 'addSubTestSuccess', 'addError', 'stopTest'] self._check_call_order__subtests(result, events, expected) def test_run_call_order__subtests_legacy(self): # With a legacy result object (without an addSubTest method), # text execution stops after the first subtest failure. events = [] result = LegacyLoggingResult(events) expected = ['startTest', 'setUp', 'test', 'tearDown', 'addFailure', 'stopTest'] self._check_call_order__subtests(result, events, expected) def _check_call_order__subtests_success(self, result, events, expected_events): class Foo(Test.LoggingTestCase): def test(self): super(Foo, self).test() for i in [1, 2]: with self.subTest(i=i): for j in [2, 3]: with self.subTest(j=j): pass Foo(events).run(result) self.assertEqual(events, expected_events) def test_run_call_order__subtests_success(self): events = [] result = LoggingResult(events) # The 6 subtest successes are individually recorded, in addition # to the whole test success. expected = (['startTest', 'setUp', 'test', 'tearDown'] + 6 * ['addSubTestSuccess'] + ['addSuccess', 'stopTest']) self._check_call_order__subtests_success(result, events, expected) def test_run_call_order__subtests_success_legacy(self): # With a legacy result, only the whole test success is recorded. events = [] result = LegacyLoggingResult(events) expected = ['startTest', 'setUp', 'test', 'tearDown', 'addSuccess', 'stopTest'] self._check_call_order__subtests_success(result, events, expected) def test_run_call_order__subtests_failfast(self): events = [] result = LoggingResult(events) result.failfast = True class Foo(Test.LoggingTestCase): def test(self): super(Foo, self).test() with self.subTest(i=1): self.fail('failure') with self.subTest(i=2): self.fail('failure') self.fail('failure') expected = ['startTest', 'setUp', 'test', 'tearDown', 'addSubTestFailure', 'stopTest'] Foo(events).run(result) self.assertEqual(events, expected) def test_subtests_failfast(self): # Ensure proper test flow with subtests and failfast (issue #22894) events = [] class Foo(unittest.TestCase): def test_a(self): with self.subTest(): events.append('a1') events.append('a2') def test_b(self): with self.subTest(): events.append('b1') with self.subTest(): self.fail('failure') events.append('b2') def test_c(self): events.append('c') result = unittest.TestResult() result.failfast = True suite = unittest.makeSuite(Foo) suite.run(result) expected = ['a1', 'a2', 'b1'] self.assertEqual(events, expected) def test_subtests_debug(self): # Test debug() with a test that uses subTest() (bpo-34900) events = [] class Foo(unittest.TestCase): def test_a(self): events.append('test case') with self.subTest(): events.append('subtest 1') Foo('test_a').debug() self.assertEqual(events, ['test case', 'subtest 1']) # "This class attribute gives the exception raised by the test() method. # If a test framework needs to use a specialized exception, possibly to # carry additional information, it must subclass this exception in # order to ``play fair'' with the framework. The initial value of this # attribute is AssertionError" def test_failureException__default(self): class Foo(unittest.TestCase): def test(self): pass self.assertIs(Foo('test').failureException, AssertionError) # "This class attribute gives the exception raised by the test() method. # If a test framework needs to use a specialized exception, possibly to # carry additional information, it must subclass this exception in # order to ``play fair'' with the framework." # # Make sure TestCase.run() respects the designated failureException def test_failureException__subclassing__explicit_raise(self): events = [] result = LoggingResult(events) class Foo(unittest.TestCase): def test(self): raise RuntimeError() failureException = RuntimeError self.assertIs(Foo('test').failureException, RuntimeError) Foo('test').run(result) expected = ['startTest', 'addFailure', 'stopTest'] self.assertEqual(events, expected) # "This class attribute gives the exception raised by the test() method. # If a test framework needs to use a specialized exception, possibly to # carry additional information, it must subclass this exception in # order to ``play fair'' with the framework." # # Make sure TestCase.run() respects the designated failureException def test_failureException__subclassing__implicit_raise(self): events = [] result = LoggingResult(events) class Foo(unittest.TestCase): def test(self): self.fail("foo") failureException = RuntimeError self.assertIs(Foo('test').failureException, RuntimeError) Foo('test').run(result) expected = ['startTest', 'addFailure', 'stopTest'] self.assertEqual(events, expected) # "The default implementation does nothing." def test_setUp(self): class Foo(unittest.TestCase): def runTest(self): pass # ... and nothing should happen Foo().setUp() # "The default implementation does nothing." def test_tearDown(self): class Foo(unittest.TestCase): def runTest(self): pass # ... and nothing should happen Foo().tearDown() # "Return a string identifying the specific test case." # # Because of the vague nature of the docs, I'm not going to lock this # test down too much. Really all that can be asserted is that the id() # will be a string (either 8-byte or unicode -- again, because the docs # just say "string") def test_id(self): class Foo(unittest.TestCase): def runTest(self): pass self.assertIsInstance(Foo().id(), str) # "If result is omitted or None, a temporary result object is created, # used, and is made available to the caller. As TestCase owns the # temporary result startTestRun and stopTestRun are called. def test_run__uses_defaultTestResult(self): events = [] defaultResult = LoggingResult(events) class Foo(unittest.TestCase): def test(self): events.append('test') def defaultTestResult(self): return defaultResult # Make run() find a result object on its own result = Foo('test').run() self.assertIs(result, defaultResult) expected = ['startTestRun', 'startTest', 'test', 'addSuccess', 'stopTest', 'stopTestRun'] self.assertEqual(events, expected) # "The result object is returned to run's caller" def test_run__returns_given_result(self): class Foo(unittest.TestCase): def test(self): pass result = unittest.TestResult() retval = Foo('test').run(result) self.assertIs(retval, result) # "The same effect [as method run] may be had by simply calling the # TestCase instance." def test_call__invoking_an_instance_delegates_to_run(self): resultIn = unittest.TestResult() resultOut = unittest.TestResult() class Foo(unittest.TestCase): def test(self): pass def run(self, result): self.assertIs(result, resultIn) return resultOut retval = Foo('test')(resultIn) self.assertIs(retval, resultOut) def testShortDescriptionWithoutDocstring(self): self.assertIsNone(self.shortDescription()) @unittest.skipIf(sys.flags.optimize >= 2, "Docstrings are omitted with -O2 and above") def testShortDescriptionWithOneLineDocstring(self): """Tests shortDescription() for a method with a docstring.""" self.assertEqual( self.shortDescription(), 'Tests shortDescription() for a method with a docstring.') @unittest.skipIf(sys.flags.optimize >= 2, "Docstrings are omitted with -O2 and above") def testShortDescriptionWithMultiLineDocstring(self): """Tests shortDescription() for a method with a longer docstring. This method ensures that only the first line of a docstring is returned used in the short description, no matter how long the whole thing is. """ self.assertEqual( self.shortDescription(), 'Tests shortDescription() for a method with a longer ' 'docstring.') def testShortDescriptionWhitespaceTrimming(self): """ Tests shortDescription() whitespace is trimmed, so that the first line of nonwhite-space text becomes the docstring. """ self.assertEqual( self.shortDescription(), 'Tests shortDescription() whitespace is trimmed, so that the first') def testAddTypeEqualityFunc(self): class SadSnake(object): """Dummy class for test_addTypeEqualityFunc.""" s1, s2 = SadSnake(), SadSnake() self.assertFalse(s1 == s2) def AllSnakesCreatedEqual(a, b, msg=None): return type(a) == type(b) == SadSnake self.addTypeEqualityFunc(SadSnake, AllSnakesCreatedEqual) self.assertEqual(s1, s2) # No this doesn't clean up and remove the SadSnake equality func # from this TestCase instance but since it's local nothing else # will ever notice that. def testAssertIs(self): thing = object() self.assertIs(thing, thing) self.assertRaises(self.failureException, self.assertIs, thing, object()) def testAssertIsNot(self): thing = object() self.assertIsNot(thing, object()) self.assertRaises(self.failureException, self.assertIsNot, thing, thing) def testAssertIsInstance(self): thing = [] self.assertIsInstance(thing, list) self.assertRaises(self.failureException, self.assertIsInstance, thing, dict) def testAssertNotIsInstance(self): thing = [] self.assertNotIsInstance(thing, dict) self.assertRaises(self.failureException, self.assertNotIsInstance, thing, list) def testAssertIn(self): animals = {'monkey': 'banana', 'cow': 'grass', 'seal': 'fish'} self.assertIn('a', 'abc') self.assertIn(2, [1, 2, 3]) self.assertIn('monkey', animals) self.assertNotIn('d', 'abc') self.assertNotIn(0, [1, 2, 3]) self.assertNotIn('otter', animals) self.assertRaises(self.failureException, self.assertIn, 'x', 'abc') self.assertRaises(self.failureException, self.assertIn, 4, [1, 2, 3]) self.assertRaises(self.failureException, self.assertIn, 'elephant', animals) self.assertRaises(self.failureException, self.assertNotIn, 'c', 'abc') self.assertRaises(self.failureException, self.assertNotIn, 1, [1, 2, 3]) self.assertRaises(self.failureException, self.assertNotIn, 'cow', animals) def testAssertDictContainsSubset(self): with warnings.catch_warnings(): warnings.simplefilter("ignore", DeprecationWarning) self.assertDictContainsSubset({}, {}) self.assertDictContainsSubset({}, {'a': 1}) self.assertDictContainsSubset({'a': 1}, {'a': 1}) self.assertDictContainsSubset({'a': 1}, {'a': 1, 'b': 2}) self.assertDictContainsSubset({'a': 1, 'b': 2}, {'a': 1, 'b': 2}) with self.assertRaises(self.failureException): self.assertDictContainsSubset({1: "one"}, {}) with self.assertRaises(self.failureException): self.assertDictContainsSubset({'a': 2}, {'a': 1}) with self.assertRaises(self.failureException): self.assertDictContainsSubset({'c': 1}, {'a': 1}) with self.assertRaises(self.failureException): self.assertDictContainsSubset({'a': 1, 'c': 1}, {'a': 1}) with self.assertRaises(self.failureException): self.assertDictContainsSubset({'a': 1, 'c': 1}, {'a': 1}) one = ''.join(chr(i) for i in range(255)) # this used to cause a UnicodeDecodeError constructing the failure msg with self.assertRaises(self.failureException): self.assertDictContainsSubset({'foo': one}, {'foo': '\uFFFD'}) def testAssertEqual(self): equal_pairs = [ ((), ()), ({}, {}), ([], []), (set(), set()), (frozenset(), frozenset())] for a, b in equal_pairs: # This mess of try excepts is to test the assertEqual behavior # itself. try: self.assertEqual(a, b) except self.failureException: self.fail('assertEqual(%r, %r) failed' % (a, b)) try: self.assertEqual(a, b, msg='foo') except self.failureException: self.fail('assertEqual(%r, %r) with msg= failed' % (a, b)) try: self.assertEqual(a, b, 'foo') except self.failureException: self.fail('assertEqual(%r, %r) with third parameter failed' % (a, b)) unequal_pairs = [ ((), []), ({}, set()), (set([4,1]), frozenset([4,2])), (frozenset([4,5]), set([2,3])), (set([3,4]), set([5,4]))] for a, b in unequal_pairs: self.assertRaises(self.failureException, self.assertEqual, a, b) self.assertRaises(self.failureException, self.assertEqual, a, b, 'foo') self.assertRaises(self.failureException, self.assertEqual, a, b, msg='foo') def testEquality(self): self.assertListEqual([], []) self.assertTupleEqual((), ()) self.assertSequenceEqual([], ()) a = [0, 'a', []] b = [] self.assertRaises(unittest.TestCase.failureException, self.assertListEqual, a, b) self.assertRaises(unittest.TestCase.failureException, self.assertListEqual, tuple(a), tuple(b)) self.assertRaises(unittest.TestCase.failureException, self.assertSequenceEqual, a, tuple(b)) b.extend(a) self.assertListEqual(a, b) self.assertTupleEqual(tuple(a), tuple(b)) self.assertSequenceEqual(a, tuple(b)) self.assertSequenceEqual(tuple(a), b) self.assertRaises(self.failureException, self.assertListEqual, a, tuple(b)) self.assertRaises(self.failureException, self.assertTupleEqual, tuple(a), b) self.assertRaises(self.failureException, self.assertListEqual, None, b) self.assertRaises(self.failureException, self.assertTupleEqual, None, tuple(b)) self.assertRaises(self.failureException, self.assertSequenceEqual, None, tuple(b)) self.assertRaises(self.failureException, self.assertListEqual, 1, 1) self.assertRaises(self.failureException, self.assertTupleEqual, 1, 1) self.assertRaises(self.failureException, self.assertSequenceEqual, 1, 1) self.assertDictEqual({}, {}) c = { 'x': 1 } d = {} self.assertRaises(unittest.TestCase.failureException, self.assertDictEqual, c, d) d.update(c) self.assertDictEqual(c, d) d['x'] = 0 self.assertRaises(unittest.TestCase.failureException, self.assertDictEqual, c, d, 'These are unequal') self.assertRaises(self.failureException, self.assertDictEqual, None, d) self.assertRaises(self.failureException, self.assertDictEqual, [], d) self.assertRaises(self.failureException, self.assertDictEqual, 1, 1) def testAssertSequenceEqualMaxDiff(self): self.assertEqual(self.maxDiff, 80*8) seq1 = 'a' + 'x' * 80**2 seq2 = 'b' + 'x' * 80**2 diff = '\n'.join(difflib.ndiff(pprint.pformat(seq1).splitlines(), pprint.pformat(seq2).splitlines())) # the +1 is the leading \n added by assertSequenceEqual omitted = unittest.case.DIFF_OMITTED % (len(diff) + 1,) self.maxDiff = len(diff)//2 try: self.assertSequenceEqual(seq1, seq2) except self.failureException as e: msg = e.args[0] else: self.fail('assertSequenceEqual did not fail.') self.assertLess(len(msg), len(diff)) self.assertIn(omitted, msg) self.maxDiff = len(diff) * 2 try: self.assertSequenceEqual(seq1, seq2) except self.failureException as e: msg = e.args[0] else: self.fail('assertSequenceEqual did not fail.') self.assertGreater(len(msg), len(diff)) self.assertNotIn(omitted, msg) self.maxDiff = None try: self.assertSequenceEqual(seq1, seq2) except self.failureException as e: msg = e.args[0] else: self.fail('assertSequenceEqual did not fail.') self.assertGreater(len(msg), len(diff)) self.assertNotIn(omitted, msg) def testTruncateMessage(self): self.maxDiff = 1 message = self._truncateMessage('foo', 'bar') omitted = unittest.case.DIFF_OMITTED % len('bar') self.assertEqual(message, 'foo' + omitted) self.maxDiff = None message = self._truncateMessage('foo', 'bar') self.assertEqual(message, 'foobar') self.maxDiff = 4 message = self._truncateMessage('foo', 'bar') self.assertEqual(message, 'foobar') def testAssertDictEqualTruncates(self): test = unittest.TestCase('assertEqual') def truncate(msg, diff): return 'foo' test._truncateMessage = truncate try: test.assertDictEqual({}, {1: 0}) except self.failureException as e: self.assertEqual(str(e), 'foo') else: self.fail('assertDictEqual did not fail') def testAssertMultiLineEqualTruncates(self): test = unittest.TestCase('assertEqual') def truncate(msg, diff): return 'foo' test._truncateMessage = truncate try: test.assertMultiLineEqual('foo', 'bar') except self.failureException as e: self.assertEqual(str(e), 'foo') else: self.fail('assertMultiLineEqual did not fail') def testAssertEqual_diffThreshold(self): # check threshold value self.assertEqual(self._diffThreshold, 2**16) # disable madDiff to get diff markers self.maxDiff = None # set a lower threshold value and add a cleanup to restore it old_threshold = self._diffThreshold self._diffThreshold = 2**5 self.addCleanup(lambda: setattr(self, '_diffThreshold', old_threshold)) # under the threshold: diff marker (^) in error message s = 'x' * (2**4) with self.assertRaises(self.failureException) as cm: self.assertEqual(s + 'a', s + 'b') self.assertIn('^', str(cm.exception)) self.assertEqual(s + 'a', s + 'a') # over the threshold: diff not used and marker (^) not in error message s = 'x' * (2**6) # if the path that uses difflib is taken, _truncateMessage will be # called -- replace it with explodingTruncation to verify that this # doesn't happen def explodingTruncation(message, diff): raise SystemError('this should not be raised') old_truncate = self._truncateMessage self._truncateMessage = explodingTruncation self.addCleanup(lambda: setattr(self, '_truncateMessage', old_truncate)) s1, s2 = s + 'a', s + 'b' with self.assertRaises(self.failureException) as cm: self.assertEqual(s1, s2) self.assertNotIn('^', str(cm.exception)) self.assertEqual(str(cm.exception), '%r != %r' % (s1, s2)) self.assertEqual(s + 'a', s + 'a') def testAssertEqual_shorten(self): # set a lower threshold value and add a cleanup to restore it old_threshold = self._diffThreshold self._diffThreshold = 0 self.addCleanup(lambda: setattr(self, '_diffThreshold', old_threshold)) s = 'x' * 100 s1, s2 = s + 'a', s + 'b' with self.assertRaises(self.failureException) as cm: self.assertEqual(s1, s2) c = 'xxxx[35 chars]' + 'x' * 61 self.assertEqual(str(cm.exception), "'%sa' != '%sb'" % (c, c)) self.assertEqual(s + 'a', s + 'a') p = 'y' * 50 s1, s2 = s + 'a' + p, s + 'b' + p with self.assertRaises(self.failureException) as cm: self.assertEqual(s1, s2) c = 'xxxx[85 chars]xxxxxxxxxxx' self.assertEqual(str(cm.exception), "'%sa%s' != '%sb%s'" % (c, p, c, p)) p = 'y' * 100 s1, s2 = s + 'a' + p, s + 'b' + p with self.assertRaises(self.failureException) as cm: self.assertEqual(s1, s2) c = 'xxxx[91 chars]xxxxx' d = 'y' * 40 + '[56 chars]yyyy' self.assertEqual(str(cm.exception), "'%sa%s' != '%sb%s'" % (c, d, c, d)) def testAssertCountEqual(self): a = object() self.assertCountEqual([1, 2, 3], [3, 2, 1]) self.assertCountEqual(['foo', 'bar', 'baz'], ['bar', 'baz', 'foo']) self.assertCountEqual([a, a, 2, 2, 3], (a, 2, 3, a, 2)) self.assertCountEqual([1, "2", "a", "a"], ["a", "2", True, "a"]) self.assertRaises(self.failureException, self.assertCountEqual, [1, 2] + [3] * 100, [1] * 100 + [2, 3]) self.assertRaises(self.failureException, self.assertCountEqual, [1, "2", "a", "a"], ["a", "2", True, 1]) self.assertRaises(self.failureException, self.assertCountEqual, [10], [10, 11]) self.assertRaises(self.failureException, self.assertCountEqual, [10, 11], [10]) self.assertRaises(self.failureException, self.assertCountEqual, [10, 11, 10], [10, 11]) # Test that sequences of unhashable objects can be tested for sameness: self.assertCountEqual([[1, 2], [3, 4], 0], [False, [3, 4], [1, 2]]) # Test that iterator of unhashable objects can be tested for sameness: self.assertCountEqual(iter([1, 2, [], 3, 4]), iter([1, 2, [], 3, 4])) # hashable types, but not orderable self.assertRaises(self.failureException, self.assertCountEqual, [], [divmod, 'x', 1, 5j, 2j, frozenset()]) # comparing dicts self.assertCountEqual([{'a': 1}, {'b': 2}], [{'b': 2}, {'a': 1}]) # comparing heterogeneous non-hashable sequences self.assertCountEqual([1, 'x', divmod, []], [divmod, [], 'x', 1]) self.assertRaises(self.failureException, self.assertCountEqual, [], [divmod, [], 'x', 1, 5j, 2j, set()]) self.assertRaises(self.failureException, self.assertCountEqual, [[1]], [[2]]) # Same elements, but not same sequence length self.assertRaises(self.failureException, self.assertCountEqual, [1, 1, 2], [2, 1]) self.assertRaises(self.failureException, self.assertCountEqual, [1, 1, "2", "a", "a"], ["2", "2", True, "a"]) self.assertRaises(self.failureException, self.assertCountEqual, [1, {'b': 2}, None, True], [{'b': 2}, True, None]) # Same elements which don't reliably compare, in # different order, see issue 10242 a = [{2,4}, {1,2}] b = a[::-1] self.assertCountEqual(a, b) # test utility functions supporting assertCountEqual() diffs = set(unittest.util._count_diff_all_purpose('aaabccd', 'abbbcce')) expected = {(3,1,'a'), (1,3,'b'), (1,0,'d'), (0,1,'e')} self.assertEqual(diffs, expected) diffs = unittest.util._count_diff_all_purpose([[]], []) self.assertEqual(diffs, [(1, 0, [])]) diffs = set(unittest.util._count_diff_hashable('aaabccd', 'abbbcce')) expected = {(3,1,'a'), (1,3,'b'), (1,0,'d'), (0,1,'e')} self.assertEqual(diffs, expected) def testAssertSetEqual(self): set1 = set() set2 = set() self.assertSetEqual(set1, set2) self.assertRaises(self.failureException, self.assertSetEqual, None, set2) self.assertRaises(self.failureException, self.assertSetEqual, [], set2) self.assertRaises(self.failureException, self.assertSetEqual, set1, None) self.assertRaises(self.failureException, self.assertSetEqual, set1, []) set1 = set(['a']) set2 = set() self.assertRaises(self.failureException, self.assertSetEqual, set1, set2) set1 = set(['a']) set2 = set(['a']) self.assertSetEqual(set1, set2) set1 = set(['a']) set2 = set(['a', 'b']) self.assertRaises(self.failureException, self.assertSetEqual, set1, set2) set1 = set(['a']) set2 = frozenset(['a', 'b']) self.assertRaises(self.failureException, self.assertSetEqual, set1, set2) set1 = set(['a', 'b']) set2 = frozenset(['a', 'b']) self.assertSetEqual(set1, set2) set1 = set() set2 = "foo" self.assertRaises(self.failureException, self.assertSetEqual, set1, set2) self.assertRaises(self.failureException, self.assertSetEqual, set2, set1) # make sure any string formatting is tuple-safe set1 = set([(0, 1), (2, 3)]) set2 = set([(4, 5)]) self.assertRaises(self.failureException, self.assertSetEqual, set1, set2) def testInequality(self): # Try ints self.assertGreater(2, 1) self.assertGreaterEqual(2, 1) self.assertGreaterEqual(1, 1) self.assertLess(1, 2) self.assertLessEqual(1, 2) self.assertLessEqual(1, 1) self.assertRaises(self.failureException, self.assertGreater, 1, 2) self.assertRaises(self.failureException, self.assertGreater, 1, 1) self.assertRaises(self.failureException, self.assertGreaterEqual, 1, 2) self.assertRaises(self.failureException, self.assertLess, 2, 1) self.assertRaises(self.failureException, self.assertLess, 1, 1) self.assertRaises(self.failureException, self.assertLessEqual, 2, 1) # Try Floats self.assertGreater(1.1, 1.0) self.assertGreaterEqual(1.1, 1.0) self.assertGreaterEqual(1.0, 1.0) self.assertLess(1.0, 1.1) self.assertLessEqual(1.0, 1.1) self.assertLessEqual(1.0, 1.0) self.assertRaises(self.failureException, self.assertGreater, 1.0, 1.1) self.assertRaises(self.failureException, self.assertGreater, 1.0, 1.0) self.assertRaises(self.failureException, self.assertGreaterEqual, 1.0, 1.1) self.assertRaises(self.failureException, self.assertLess, 1.1, 1.0) self.assertRaises(self.failureException, self.assertLess, 1.0, 1.0) self.assertRaises(self.failureException, self.assertLessEqual, 1.1, 1.0) # Try Strings self.assertGreater('bug', 'ant') self.assertGreaterEqual('bug', 'ant') self.assertGreaterEqual('ant', 'ant') self.assertLess('ant', 'bug') self.assertLessEqual('ant', 'bug') self.assertLessEqual('ant', 'ant') self.assertRaises(self.failureException, self.assertGreater, 'ant', 'bug') self.assertRaises(self.failureException, self.assertGreater, 'ant', 'ant') self.assertRaises(self.failureException, self.assertGreaterEqual, 'ant', 'bug') self.assertRaises(self.failureException, self.assertLess, 'bug', 'ant') self.assertRaises(self.failureException, self.assertLess, 'ant', 'ant') self.assertRaises(self.failureException, self.assertLessEqual, 'bug', 'ant') # Try bytes self.assertGreater(b'bug', b'ant') self.assertGreaterEqual(b'bug', b'ant') self.assertGreaterEqual(b'ant', b'ant') self.assertLess(b'ant', b'bug') self.assertLessEqual(b'ant', b'bug') self.assertLessEqual(b'ant', b'ant') self.assertRaises(self.failureException, self.assertGreater, b'ant', b'bug') self.assertRaises(self.failureException, self.assertGreater, b'ant', b'ant') self.assertRaises(self.failureException, self.assertGreaterEqual, b'ant', b'bug') self.assertRaises(self.failureException, self.assertLess, b'bug', b'ant') self.assertRaises(self.failureException, self.assertLess, b'ant', b'ant') self.assertRaises(self.failureException, self.assertLessEqual, b'bug', b'ant') def testAssertMultiLineEqual(self): sample_text = """\ http://www.python.org/doc/2.3/lib/module-unittest.html test case A test case is the smallest unit of testing. [...] """ revised_sample_text = """\ http://www.python.org/doc/2.4.1/lib/module-unittest.html test case A test case is the smallest unit of testing. [...] You may provide your own implementation that does not subclass from TestCase, of course. """ sample_text_error = """\ - http://www.python.org/doc/2.3/lib/module-unittest.html ? ^ + http://www.python.org/doc/2.4.1/lib/module-unittest.html ? ^^^ test case - A test case is the smallest unit of testing. [...] + A test case is the smallest unit of testing. [...] You may provide your ? +++++++++++++++++++++ + own implementation that does not subclass from TestCase, of course. """ self.maxDiff = None try: self.assertMultiLineEqual(sample_text, revised_sample_text) except self.failureException as e: # need to remove the first line of the error message error = str(e).split('\n', 1)[1] self.assertEqual(sample_text_error, error) def testAssertEqualSingleLine(self): sample_text = "laden swallows fly slowly" revised_sample_text = "unladen swallows fly quickly" sample_text_error = """\ - laden swallows fly slowly ? ^^^^ + unladen swallows fly quickly ? ++ ^^^^^ """ try: self.assertEqual(sample_text, revised_sample_text) except self.failureException as e: # need to remove the first line of the error message error = str(e).split('\n', 1)[1] self.assertEqual(sample_text_error, error) def testEqualityBytesWarning(self): if sys.flags.bytes_warning: def bytes_warning(): return self.assertWarnsRegex(BytesWarning, 'Comparison between bytes and string') else: def bytes_warning(): return contextlib.ExitStack() with bytes_warning(), self.assertRaises(self.failureException): self.assertEqual('a', b'a') with bytes_warning(): self.assertNotEqual('a', b'a') a = [0, 'a'] b = [0, b'a'] with bytes_warning(), self.assertRaises(self.failureException): self.assertListEqual(a, b) with bytes_warning(), self.assertRaises(self.failureException): self.assertTupleEqual(tuple(a), tuple(b)) with bytes_warning(), self.assertRaises(self.failureException): self.assertSequenceEqual(a, tuple(b)) with bytes_warning(), self.assertRaises(self.failureException): self.assertSequenceEqual(tuple(a), b) with bytes_warning(), self.assertRaises(self.failureException): self.assertSequenceEqual('a', b'a') with bytes_warning(), self.assertRaises(self.failureException): self.assertSetEqual(set(a), set(b)) with self.assertRaises(self.failureException): self.assertListEqual(a, tuple(b)) with self.assertRaises(self.failureException): self.assertTupleEqual(tuple(a), b) a = [0, b'a'] b = [0] with self.assertRaises(self.failureException): self.assertListEqual(a, b) with self.assertRaises(self.failureException): self.assertTupleEqual(tuple(a), tuple(b)) with self.assertRaises(self.failureException): self.assertSequenceEqual(a, tuple(b)) with self.assertRaises(self.failureException): self.assertSequenceEqual(tuple(a), b) with self.assertRaises(self.failureException): self.assertSetEqual(set(a), set(b)) a = [0] b = [0, b'a'] with self.assertRaises(self.failureException): self.assertListEqual(a, b) with self.assertRaises(self.failureException): self.assertTupleEqual(tuple(a), tuple(b)) with self.assertRaises(self.failureException): self.assertSequenceEqual(a, tuple(b)) with self.assertRaises(self.failureException): self.assertSequenceEqual(tuple(a), b) with self.assertRaises(self.failureException): self.assertSetEqual(set(a), set(b)) with bytes_warning(), self.assertRaises(self.failureException): self.assertDictEqual({'a': 0}, {b'a': 0}) with self.assertRaises(self.failureException): self.assertDictEqual({}, {b'a': 0}) with self.assertRaises(self.failureException): self.assertDictEqual({b'a': 0}, {}) with self.assertRaises(self.failureException): self.assertCountEqual([b'a', b'a'], [b'a', b'a', b'a']) with bytes_warning(): self.assertCountEqual(['a', b'a'], ['a', b'a']) with bytes_warning(), self.assertRaises(self.failureException): self.assertCountEqual(['a', 'a'], [b'a', b'a']) with bytes_warning(), self.assertRaises(self.failureException): self.assertCountEqual(['a', 'a', []], [b'a', b'a', []]) def testAssertIsNone(self): self.assertIsNone(None) self.assertRaises(self.failureException, self.assertIsNone, False) self.assertIsNotNone('DjZoPloGears on Rails') self.assertRaises(self.failureException, self.assertIsNotNone, None) def testAssertRegex(self): self.assertRegex('asdfabasdf', r'ab+') self.assertRaises(self.failureException, self.assertRegex, 'saaas', r'aaaa') def testAssertRaisesCallable(self): class ExceptionMock(Exception): pass def Stub(): raise ExceptionMock('We expect') self.assertRaises(ExceptionMock, Stub) # A tuple of exception classes is accepted self.assertRaises((ValueError, ExceptionMock), Stub) # *args and **kwargs also work self.assertRaises(ValueError, int, '19', base=8) # Failure when no exception is raised with self.assertRaises(self.failureException): self.assertRaises(ExceptionMock, lambda: 0) # Failure when the function is None with self.assertRaises(TypeError): self.assertRaises(ExceptionMock, None) # Failure when another exception is raised with self.assertRaises(ExceptionMock): self.assertRaises(ValueError, Stub) def testAssertRaisesContext(self): class ExceptionMock(Exception): pass def Stub(): raise ExceptionMock('We expect') with self.assertRaises(ExceptionMock): Stub() # A tuple of exception classes is accepted with self.assertRaises((ValueError, ExceptionMock)) as cm: Stub() # The context manager exposes caught exception self.assertIsInstance(cm.exception, ExceptionMock) self.assertEqual(cm.exception.args[0], 'We expect') # *args and **kwargs also work with self.assertRaises(ValueError): int('19', base=8) # Failure when no exception is raised with self.assertRaises(self.failureException): with self.assertRaises(ExceptionMock): pass # Custom message with self.assertRaisesRegex(self.failureException, 'foobar'): with self.assertRaises(ExceptionMock, msg='foobar'): pass # Invalid keyword argument with self.assertRaisesRegex(TypeError, 'foobar'): with self.assertRaises(ExceptionMock, foobar=42): pass # Failure when another exception is raised with self.assertRaises(ExceptionMock): self.assertRaises(ValueError, Stub) def testAssertRaisesNoExceptionType(self): with self.assertRaises(TypeError): self.assertRaises() with self.assertRaises(TypeError): self.assertRaises(1) with self.assertRaises(TypeError): self.assertRaises(object) with self.assertRaises(TypeError): self.assertRaises((ValueError, 1)) with self.assertRaises(TypeError): self.assertRaises((ValueError, object)) def testAssertRaisesRefcount(self): # bpo-23890: assertRaises() must not keep objects alive longer # than expected def func() : try: raise ValueError except ValueError: raise ValueError refcount = sys.getrefcount(func) self.assertRaises(ValueError, func) self.assertEqual(refcount, sys.getrefcount(func)) def testAssertRaisesRegex(self): class ExceptionMock(Exception): pass def Stub(): raise ExceptionMock('We expect') self.assertRaisesRegex(ExceptionMock, re.compile('expect$'), Stub) self.assertRaisesRegex(ExceptionMock, 'expect$', Stub) with self.assertRaises(TypeError): self.assertRaisesRegex(ExceptionMock, 'expect$', None) def testAssertNotRaisesRegex(self): self.assertRaisesRegex( self.failureException, '^Exception not raised by <lambda>$', self.assertRaisesRegex, Exception, re.compile('x'), lambda: None) self.assertRaisesRegex( self.failureException, '^Exception not raised by <lambda>$', self.assertRaisesRegex, Exception, 'x', lambda: None) # Custom message with self.assertRaisesRegex(self.failureException, 'foobar'): with self.assertRaisesRegex(Exception, 'expect', msg='foobar'): pass # Invalid keyword argument with self.assertRaisesRegex(TypeError, 'foobar'): with self.assertRaisesRegex(Exception, 'expect', foobar=42): pass def testAssertRaisesRegexInvalidRegex(self): # Issue 20145. class MyExc(Exception): pass self.assertRaises(TypeError, self.assertRaisesRegex, MyExc, lambda: True) def testAssertWarnsRegexInvalidRegex(self): # Issue 20145. class MyWarn(Warning): pass self.assertRaises(TypeError, self.assertWarnsRegex, MyWarn, lambda: True) def testAssertRaisesRegexMismatch(self): def Stub(): raise Exception('Unexpected') self.assertRaisesRegex( self.failureException, r'"\^Expected\$" does not match "Unexpected"', self.assertRaisesRegex, Exception, '^Expected$', Stub) self.assertRaisesRegex( self.failureException, r'"\^Expected\$" does not match "Unexpected"', self.assertRaisesRegex, Exception, re.compile('^Expected$'), Stub) def testAssertRaisesExcValue(self): class ExceptionMock(Exception): pass def Stub(foo): raise ExceptionMock(foo) v = "particular value" ctx = self.assertRaises(ExceptionMock) with ctx: Stub(v) e = ctx.exception self.assertIsInstance(e, ExceptionMock) self.assertEqual(e.args[0], v) def testAssertRaisesRegexNoExceptionType(self): with self.assertRaises(TypeError): self.assertRaisesRegex() with self.assertRaises(TypeError): self.assertRaisesRegex(ValueError) with self.assertRaises(TypeError): self.assertRaisesRegex(1, 'expect') with self.assertRaises(TypeError): self.assertRaisesRegex(object, 'expect') with self.assertRaises(TypeError): self.assertRaisesRegex((ValueError, 1), 'expect') with self.assertRaises(TypeError): self.assertRaisesRegex((ValueError, object), 'expect') def testAssertWarnsCallable(self): def _runtime_warn(): warnings.warn("foo", RuntimeWarning) # Success when the right warning is triggered, even several times self.assertWarns(RuntimeWarning, _runtime_warn) self.assertWarns(RuntimeWarning, _runtime_warn) # A tuple of warning classes is accepted self.assertWarns((DeprecationWarning, RuntimeWarning), _runtime_warn) # *args and **kwargs also work self.assertWarns(RuntimeWarning, warnings.warn, "foo", category=RuntimeWarning) # Failure when no warning is triggered with self.assertRaises(self.failureException): self.assertWarns(RuntimeWarning, lambda: 0) # Failure when the function is None with self.assertRaises(TypeError): self.assertWarns(RuntimeWarning, None) # Failure when another warning is triggered with warnings.catch_warnings(): # Force default filter (in case tests are run with -We) warnings.simplefilter("default", RuntimeWarning) with self.assertRaises(self.failureException): self.assertWarns(DeprecationWarning, _runtime_warn) # Filters for other warnings are not modified with warnings.catch_warnings(): warnings.simplefilter("error", RuntimeWarning) with self.assertRaises(RuntimeWarning): self.assertWarns(DeprecationWarning, _runtime_warn) def testAssertWarnsContext(self): # Believe it or not, it is preferable to duplicate all tests above, # to make sure the __warningregistry__ $@ is circumvented correctly. def _runtime_warn(): warnings.warn("foo", RuntimeWarning) _runtime_warn_lineno = inspect.getsourcelines(_runtime_warn)[1] with self.assertWarns(RuntimeWarning) as cm: _runtime_warn() # A tuple of warning classes is accepted with self.assertWarns((DeprecationWarning, RuntimeWarning)) as cm: _runtime_warn() # The context manager exposes various useful attributes self.assertIsInstance(cm.warning, RuntimeWarning) self.assertEqual(cm.warning.args[0], "foo") self.assertIn("test_case.py", cm.filename) self.assertEqual(cm.lineno, _runtime_warn_lineno + 1) # Same with several warnings with self.assertWarns(RuntimeWarning): _runtime_warn() _runtime_warn() with self.assertWarns(RuntimeWarning): warnings.warn("foo", category=RuntimeWarning) # Failure when no warning is triggered with self.assertRaises(self.failureException): with self.assertWarns(RuntimeWarning): pass # Custom message with self.assertRaisesRegex(self.failureException, 'foobar'): with self.assertWarns(RuntimeWarning, msg='foobar'): pass # Invalid keyword argument with self.assertRaisesRegex(TypeError, 'foobar'): with self.assertWarns(RuntimeWarning, foobar=42): pass # Failure when another warning is triggered with warnings.catch_warnings(): # Force default filter (in case tests are run with -We) warnings.simplefilter("default", RuntimeWarning) with self.assertRaises(self.failureException): with self.assertWarns(DeprecationWarning): _runtime_warn() # Filters for other warnings are not modified with warnings.catch_warnings(): warnings.simplefilter("error", RuntimeWarning) with self.assertRaises(RuntimeWarning): with self.assertWarns(DeprecationWarning): _runtime_warn() def testAssertWarnsNoExceptionType(self): with self.assertRaises(TypeError): self.assertWarns() with self.assertRaises(TypeError): self.assertWarns(1) with self.assertRaises(TypeError): self.assertWarns(object) with self.assertRaises(TypeError): self.assertWarns((UserWarning, 1)) with self.assertRaises(TypeError): self.assertWarns((UserWarning, object)) with self.assertRaises(TypeError): self.assertWarns((UserWarning, Exception)) def testAssertWarnsRegexCallable(self): def _runtime_warn(msg): warnings.warn(msg, RuntimeWarning) self.assertWarnsRegex(RuntimeWarning, "o+", _runtime_warn, "foox") # Failure when no warning is triggered with self.assertRaises(self.failureException): self.assertWarnsRegex(RuntimeWarning, "o+", lambda: 0) # Failure when the function is None with self.assertRaises(TypeError): self.assertWarnsRegex(RuntimeWarning, "o+", None) # Failure when another warning is triggered with warnings.catch_warnings(): # Force default filter (in case tests are run with -We) warnings.simplefilter("default", RuntimeWarning) with self.assertRaises(self.failureException): self.assertWarnsRegex(DeprecationWarning, "o+", _runtime_warn, "foox") # Failure when message doesn't match with self.assertRaises(self.failureException): self.assertWarnsRegex(RuntimeWarning, "o+", _runtime_warn, "barz") # A little trickier: we ask RuntimeWarnings to be raised, and then # check for some of them. It is implementation-defined whether # non-matching RuntimeWarnings are simply re-raised, or produce a # failureException. with warnings.catch_warnings(): warnings.simplefilter("error", RuntimeWarning) with self.assertRaises((RuntimeWarning, self.failureException)): self.assertWarnsRegex(RuntimeWarning, "o+", _runtime_warn, "barz") def testAssertWarnsRegexContext(self): # Same as above, but with assertWarnsRegex as a context manager def _runtime_warn(msg): warnings.warn(msg, RuntimeWarning) _runtime_warn_lineno = inspect.getsourcelines(_runtime_warn)[1] with self.assertWarnsRegex(RuntimeWarning, "o+") as cm: _runtime_warn("foox") self.assertIsInstance(cm.warning, RuntimeWarning) self.assertEqual(cm.warning.args[0], "foox") self.assertIn("test_case.py", cm.filename) self.assertEqual(cm.lineno, _runtime_warn_lineno + 1) # Failure when no warning is triggered with self.assertRaises(self.failureException): with self.assertWarnsRegex(RuntimeWarning, "o+"): pass # Custom message with self.assertRaisesRegex(self.failureException, 'foobar'): with self.assertWarnsRegex(RuntimeWarning, 'o+', msg='foobar'): pass # Invalid keyword argument with self.assertRaisesRegex(TypeError, 'foobar'): with self.assertWarnsRegex(RuntimeWarning, 'o+', foobar=42): pass # Failure when another warning is triggered with warnings.catch_warnings(): # Force default filter (in case tests are run with -We) warnings.simplefilter("default", RuntimeWarning) with self.assertRaises(self.failureException): with self.assertWarnsRegex(DeprecationWarning, "o+"): _runtime_warn("foox") # Failure when message doesn't match with self.assertRaises(self.failureException): with self.assertWarnsRegex(RuntimeWarning, "o+"): _runtime_warn("barz") # A little trickier: we ask RuntimeWarnings to be raised, and then # check for some of them. It is implementation-defined whether # non-matching RuntimeWarnings are simply re-raised, or produce a # failureException. with warnings.catch_warnings(): warnings.simplefilter("error", RuntimeWarning) with self.assertRaises((RuntimeWarning, self.failureException)): with self.assertWarnsRegex(RuntimeWarning, "o+"): _runtime_warn("barz") def testAssertWarnsRegexNoExceptionType(self): with self.assertRaises(TypeError): self.assertWarnsRegex() with self.assertRaises(TypeError): self.assertWarnsRegex(UserWarning) with self.assertRaises(TypeError): self.assertWarnsRegex(1, 'expect') with self.assertRaises(TypeError): self.assertWarnsRegex(object, 'expect') with self.assertRaises(TypeError): self.assertWarnsRegex((UserWarning, 1), 'expect') with self.assertRaises(TypeError): self.assertWarnsRegex((UserWarning, object), 'expect') with self.assertRaises(TypeError): self.assertWarnsRegex((UserWarning, Exception), 'expect') @contextlib.contextmanager def assertNoStderr(self): with captured_stderr() as buf: yield self.assertEqual(buf.getvalue(), "") def assertLogRecords(self, records, matches): self.assertEqual(len(records), len(matches)) for rec, match in zip(records, matches): self.assertIsInstance(rec, logging.LogRecord) for k, v in match.items(): self.assertEqual(getattr(rec, k), v) def testAssertLogsDefaults(self): # defaults: root logger, level INFO with self.assertNoStderr(): with self.assertLogs() as cm: log_foo.info("1") log_foobar.debug("2") self.assertEqual(cm.output, ["INFO:foo:1"]) self.assertLogRecords(cm.records, [{'name': 'foo'}]) def testAssertLogsTwoMatchingMessages(self): # Same, but with two matching log messages with self.assertNoStderr(): with self.assertLogs() as cm: log_foo.info("1") log_foobar.debug("2") log_quux.warning("3") self.assertEqual(cm.output, ["INFO:foo:1", "WARNING:quux:3"]) self.assertLogRecords(cm.records, [{'name': 'foo'}, {'name': 'quux'}]) def checkAssertLogsPerLevel(self, level): # Check level filtering with self.assertNoStderr(): with self.assertLogs(level=level) as cm: log_foo.warning("1") log_foobar.error("2") log_quux.critical("3") self.assertEqual(cm.output, ["ERROR:foo.bar:2", "CRITICAL:quux:3"]) self.assertLogRecords(cm.records, [{'name': 'foo.bar'}, {'name': 'quux'}]) def testAssertLogsPerLevel(self): self.checkAssertLogsPerLevel(logging.ERROR) self.checkAssertLogsPerLevel('ERROR') def checkAssertLogsPerLogger(self, logger): # Check per-logger filtering with self.assertNoStderr(): with self.assertLogs(level='DEBUG') as outer_cm: with self.assertLogs(logger, level='DEBUG') as cm: log_foo.info("1") log_foobar.debug("2") log_quux.warning("3") self.assertEqual(cm.output, ["INFO:foo:1", "DEBUG:foo.bar:2"]) self.assertLogRecords(cm.records, [{'name': 'foo'}, {'name': 'foo.bar'}]) # The outer catchall caught the quux log self.assertEqual(outer_cm.output, ["WARNING:quux:3"]) def testAssertLogsPerLogger(self): self.checkAssertLogsPerLogger(logging.getLogger('foo')) self.checkAssertLogsPerLogger('foo') def testAssertLogsFailureNoLogs(self): # Failure due to no logs with self.assertNoStderr(): with self.assertRaises(self.failureException): with self.assertLogs(): pass def testAssertLogsFailureLevelTooHigh(self): # Failure due to level too high with self.assertNoStderr(): with self.assertRaises(self.failureException): with self.assertLogs(level='WARNING'): log_foo.info("1") def testAssertLogsFailureMismatchingLogger(self): # Failure due to mismatching logger (and the logged message is # passed through) with self.assertLogs('quux', level='ERROR'): with self.assertRaises(self.failureException): with self.assertLogs('foo'): log_quux.error("1") def testDeprecatedMethodNames(self): """ Test that the deprecated methods raise a DeprecationWarning. See #9424. """ old = ( (self.failIfEqual, (3, 5)), (self.assertNotEquals, (3, 5)), (self.failUnlessEqual, (3, 3)), (self.assertEquals, (3, 3)), (self.failUnlessAlmostEqual, (2.0, 2.0)), (self.assertAlmostEquals, (2.0, 2.0)), (self.failIfAlmostEqual, (3.0, 5.0)), (self.assertNotAlmostEquals, (3.0, 5.0)), (self.failUnless, (True,)), (self.assert_, (True,)), (self.failUnlessRaises, (TypeError, lambda _: 3.14 + 'spam')), (self.failIf, (False,)), (self.assertDictContainsSubset, (dict(a=1, b=2), dict(a=1, b=2, c=3))), (self.assertRaisesRegexp, (KeyError, 'foo', lambda: {}['foo'])), (self.assertRegexpMatches, ('bar', 'bar')), ) for meth, args in old: with self.assertWarns(DeprecationWarning): meth(*args) # disable this test for now. When the version where the fail* methods will # be removed is decided, re-enable it and update the version def _testDeprecatedFailMethods(self): """Test that the deprecated fail* methods get removed in 3.x""" if sys.version_info[:2] < (3, 3): return deprecated_names = [ 'failIfEqual', 'failUnlessEqual', 'failUnlessAlmostEqual', 'failIfAlmostEqual', 'failUnless', 'failUnlessRaises', 'failIf', 'assertDictContainsSubset', ] for deprecated_name in deprecated_names: with self.assertRaises(AttributeError): getattr(self, deprecated_name) # remove these in 3.x def testDeepcopy(self): # Issue: 5660 class TestableTest(unittest.TestCase): def testNothing(self): pass test = TestableTest('testNothing') # This shouldn't blow up deepcopy(test) def testPickle(self): # Issue 10326 # Can't use TestCase classes defined in Test class as # pickle does not work with inner classes test = unittest.TestCase('run') for protocol in range(pickle.HIGHEST_PROTOCOL + 1): # blew up prior to fix pickled_test = pickle.dumps(test, protocol=protocol) unpickled_test = pickle.loads(pickled_test) self.assertEqual(test, unpickled_test) # exercise the TestCase instance in a way that will invoke # the type equality lookup mechanism unpickled_test.assertEqual(set(), set()) def testKeyboardInterrupt(self): def _raise(self=None): raise KeyboardInterrupt def nothing(self): pass class Test1(unittest.TestCase): test_something = _raise class Test2(unittest.TestCase): setUp = _raise test_something = nothing class Test3(unittest.TestCase): test_something = nothing tearDown = _raise class Test4(unittest.TestCase): def test_something(self): self.addCleanup(_raise) for klass in (Test1, Test2, Test3, Test4): with self.assertRaises(KeyboardInterrupt): klass('test_something').run() def testSkippingEverywhere(self): def _skip(self=None): raise unittest.SkipTest('some reason') def nothing(self): pass class Test1(unittest.TestCase): test_something = _skip class Test2(unittest.TestCase): setUp = _skip test_something = nothing class Test3(unittest.TestCase): test_something = nothing tearDown = _skip class Test4(unittest.TestCase): def test_something(self): self.addCleanup(_skip) for klass in (Test1, Test2, Test3, Test4): result = unittest.TestResult() klass('test_something').run(result) self.assertEqual(len(result.skipped), 1) self.assertEqual(result.testsRun, 1) def testSystemExit(self): def _raise(self=None): raise SystemExit def nothing(self): pass class Test1(unittest.TestCase): test_something = _raise class Test2(unittest.TestCase): setUp = _raise test_something = nothing class Test3(unittest.TestCase): test_something = nothing tearDown = _raise class Test4(unittest.TestCase): def test_something(self): self.addCleanup(_raise) for klass in (Test1, Test2, Test3, Test4): result = unittest.TestResult() klass('test_something').run(result) self.assertEqual(len(result.errors), 1) self.assertEqual(result.testsRun, 1) @support.cpython_only def testNoCycles(self): case = unittest.TestCase() wr = weakref.ref(case) with support.disable_gc(): del case self.assertFalse(wr()) def test_no_exception_leak(self): # Issue #19880: TestCase.run() should not keep a reference # to the exception class MyException(Exception): ninstance = 0 def __init__(self): MyException.ninstance += 1 Exception.__init__(self) def __del__(self): MyException.ninstance -= 1 class TestCase(unittest.TestCase): def test1(self): raise MyException() @unittest.expectedFailure def test2(self): raise MyException() for method_name in ('test1', 'test2'): testcase = TestCase(method_name) testcase.run() self.assertEqual(MyException.ninstance, 0) if __name__ == "__main__": unittest.main()
bsd-2-clause
EdDev/vdsm
vdsm_hooks/vmfex/before_vm_migrate_destination.py
1
4071
#!/usr/bin/python2 import os import sys import traceback import fcntl from xml.dom import minidom try: # 3.0 compat import libvirtconnection libvirtconnection except ImportError: # 3.1 compat from vdsm import libvirtconnection ''' Placed in before_vm_migrate_destination vmfex hook on migration destination: Set up a dynamic NIC pool for incoming migrations to use <network> <name>direct-pool</name> <forward mode="passthrough"> <interface dev="eth3"/> <interface dev="eth4"/> <interface dev="eth5"/> <interface dev="eth6"/> <interface dev="eth7"/> <interface dev="eth8"/> <interface dev="eth9"/> <interface dev="eth10"/> <interface dev="eth11"/> </forward> </network> Using libvirt, the network is defined like this: virsh net-define /tmp/direct-pool.xml virsh net-start direct-pool virsh net-autostart direct-pool (where /tmp/direct-pool.xml contains the xml above) (everything else is autogenerated, and shouldn't be specified when defining a guest (but whatever is there after definition should be left in place, e.g. the PCI address)). Note that these interface definitions are completely static - you never need to modify them due to migration, or starting up/shutting down the guest. ''' def getUsableNics(): # Scan localhost for physical NICs and return list of physical nics # that have all zeroes MAC. These NICs are the ones that can be used # with VMFEX. # Example ['eth0','eth1'] nics = [] for root, dirs, names in os.walk('/sys/devices/'): if 'address' in names and 'pci' in root: with open(root + '/address', 'r') as f: mac = f.readlines()[0].strip() if mac == '00:00:00:00:00:00': eth = root.split('/')[-1] nics.append(eth) return nics def createDirectPool(conn): if 'direct-pool' in conn.listNetworks(): dpool = conn.networkLookupByName('direct-pool') # destroy and undefine direct-pool dpool.destroy() dpool.undefine() sys.stderr.write('vmfex: removed direct-pool \n') # create a new direct-pool xmlstr = '''<network> <name>direct-pool</name> <forward mode="passthrough"> ''' for i in getUsableNics(): xmlstr += '<interface dev="' + i + '"/> \n' xmlstr += ' </forward> \n </network> ' conn.networkDefineXML(xmlstr) dpool = conn.networkLookupByName('direct-pool') dpool.setAutostart(1) dpool.create() sys.stderr.write('vmfex: created Direct-Pool Net \n') sys.stderr.write(xmlstr + '\n') def qbhInUse(conn): for vm in conn.listDomainsID(): domxml = minidom.parseString(conn.lookupByID(vm).XMLDesc(0)) for vport in domxml.getElementsByTagName('virtualport'): if vport.getAttribute('type') == '802.1Qbh': return True return False def validateDPool(conn): # Compare direct-pool to the list of available NICs dpool = conn.networkLookupByName('direct-pool') definedNics = [] dpoolxml = minidom.parseString(dpool.XMLDesc(0)) for iface in dpoolxml.getElementsByTagName('interface'): definedNics.append(iface.getAttribute('dev')) if set(definedNics) == set(getUsableNics()): return True else: return False def handleDirectPool(conn): with open('/var/run/vdsm/hook-vmfex.lock', 'w') as f: fcntl.flock(f.fileno(), fcntl.LOCK_EX) try: if 'direct-pool' not in conn.listNetworks(): createDirectPool(conn) elif not qbhInUse(conn) and not validateDPool(conn): createDirectPool(conn) finally: fcntl.flock(f.fileno(), fcntl.LOCK_UN) if 'vmfex' in os.environ: try: # connect to libvirtd conn = libvirtconnection.get() handleDirectPool(conn) except: sys.stderr.write('vmfex: ERROR: %s\n' % traceback.format_exc()) sys.exit(2)
gpl-2.0
Shanec132006/Lab4
lib/flask/testsuite/__init__.py
564
7022
# -*- coding: utf-8 -*- """ flask.testsuite ~~~~~~~~~~~~~~~ Tests Flask itself. The majority of Flask is already tested as part of Werkzeug. :copyright: (c) 2011 by Armin Ronacher. :license: BSD, see LICENSE for more details. """ from __future__ import print_function import os import sys import flask import warnings import unittest from functools import update_wrapper from contextlib import contextmanager from werkzeug.utils import import_string, find_modules from flask._compat import reraise, StringIO def add_to_path(path): """Adds an entry to sys.path if it's not already there. This does not append it but moves it to the front so that we can be sure it is loaded. """ if not os.path.isdir(path): raise RuntimeError('Tried to add nonexisting path') def _samefile(x, y): if x == y: return True try: return os.path.samefile(x, y) except (IOError, OSError, AttributeError): # Windows has no samefile return False sys.path[:] = [x for x in sys.path if not _samefile(path, x)] sys.path.insert(0, path) def iter_suites(): """Yields all testsuites.""" for module in find_modules(__name__): mod = import_string(module) if hasattr(mod, 'suite'): yield mod.suite() def find_all_tests(suite): """Yields all the tests and their names from a given suite.""" suites = [suite] while suites: s = suites.pop() try: suites.extend(s) except TypeError: yield s, '%s.%s.%s' % ( s.__class__.__module__, s.__class__.__name__, s._testMethodName ) @contextmanager def catch_warnings(): """Catch warnings in a with block in a list""" # make sure deprecation warnings are active in tests warnings.simplefilter('default', category=DeprecationWarning) filters = warnings.filters warnings.filters = filters[:] old_showwarning = warnings.showwarning log = [] def showwarning(message, category, filename, lineno, file=None, line=None): log.append(locals()) try: warnings.showwarning = showwarning yield log finally: warnings.filters = filters warnings.showwarning = old_showwarning @contextmanager def catch_stderr(): """Catch stderr in a StringIO""" old_stderr = sys.stderr sys.stderr = rv = StringIO() try: yield rv finally: sys.stderr = old_stderr def emits_module_deprecation_warning(f): def new_f(self, *args, **kwargs): with catch_warnings() as log: f(self, *args, **kwargs) self.assert_true(log, 'expected deprecation warning') for entry in log: self.assert_in('Modules are deprecated', str(entry['message'])) return update_wrapper(new_f, f) class FlaskTestCase(unittest.TestCase): """Baseclass for all the tests that Flask uses. Use these methods for testing instead of the camelcased ones in the baseclass for consistency. """ def ensure_clean_request_context(self): # make sure we're not leaking a request context since we are # testing flask internally in debug mode in a few cases leaks = [] while flask._request_ctx_stack.top is not None: leaks.append(flask._request_ctx_stack.pop()) self.assert_equal(leaks, []) def setup(self): pass def teardown(self): pass def setUp(self): self.setup() def tearDown(self): unittest.TestCase.tearDown(self) self.ensure_clean_request_context() self.teardown() def assert_equal(self, x, y): return self.assertEqual(x, y) def assert_raises(self, exc_type, callable=None, *args, **kwargs): catcher = _ExceptionCatcher(self, exc_type) if callable is None: return catcher with catcher: callable(*args, **kwargs) def assert_true(self, x, msg=None): self.assertTrue(x, msg) def assert_false(self, x, msg=None): self.assertFalse(x, msg) def assert_in(self, x, y): self.assertIn(x, y) def assert_not_in(self, x, y): self.assertNotIn(x, y) if sys.version_info[:2] == (2, 6): def assertIn(self, x, y): assert x in y, "%r unexpectedly not in %r" % (x, y) def assertNotIn(self, x, y): assert x not in y, "%r unexpectedly in %r" % (x, y) class _ExceptionCatcher(object): def __init__(self, test_case, exc_type): self.test_case = test_case self.exc_type = exc_type def __enter__(self): return self def __exit__(self, exc_type, exc_value, tb): exception_name = self.exc_type.__name__ if exc_type is None: self.test_case.fail('Expected exception of type %r' % exception_name) elif not issubclass(exc_type, self.exc_type): reraise(exc_type, exc_value, tb) return True class BetterLoader(unittest.TestLoader): """A nicer loader that solves two problems. First of all we are setting up tests from different sources and we're doing this programmatically which breaks the default loading logic so this is required anyways. Secondly this loader has a nicer interpolation for test names than the default one so you can just do ``run-tests.py ViewTestCase`` and it will work. """ def getRootSuite(self): return suite() def loadTestsFromName(self, name, module=None): root = self.getRootSuite() if name == 'suite': return root all_tests = [] for testcase, testname in find_all_tests(root): if testname == name or \ testname.endswith('.' + name) or \ ('.' + name + '.') in testname or \ testname.startswith(name + '.'): all_tests.append(testcase) if not all_tests: raise LookupError('could not find test case for "%s"' % name) if len(all_tests) == 1: return all_tests[0] rv = unittest.TestSuite() for test in all_tests: rv.addTest(test) return rv def setup_path(): add_to_path(os.path.abspath(os.path.join( os.path.dirname(__file__), 'test_apps'))) def suite(): """A testsuite that has all the Flask tests. You can use this function to integrate the Flask tests into your own testsuite in case you want to test that monkeypatches to Flask do not break it. """ setup_path() suite = unittest.TestSuite() for other_suite in iter_suites(): suite.addTest(other_suite) return suite def main(): """Runs the testsuite as command line application.""" try: unittest.main(testLoader=BetterLoader(), defaultTest='suite') except Exception as e: print('Error: %s' % e)
apache-2.0
Bad-ptr/parser.py
utils.py
1
2870
#!/usr/bin/env python # coding UTF-8 # Copyright 2013 Constantin Kulikov # # Author: Constantin Kulikov (Bad_ptr) <[email protected]> # Date: 2013/06/21 11:21:11 # License: GPL either version 2 or any later version import sys def SetRecursionLimit(n=5000): sys.setrecursionlimit(n) def _or(smth, defv): if None is smth: return defv else: return smth def get_from_dictstack(name="", *dict_stack, dstack=None): stack = _or(dstack, dict_stack) for d in stack: try: return d[name] except KeyError: continue return None def merge_nested_dicts(a, b): if(isinstance(a, dict) and isinstance(b, dict)): merged = {} merged.update(a) for (k,v) in b.items(): merged[k] = merge_nested_dicts(merged.get(k,{}), v) return merged else: return b def get_from_nested_dict(di, *path): if path == (): return di if not isinstance(di, dict): return None return get_from_nested_dict(di.get(path[0]), *path[1:]) def set_to_nested_dict(di, value, *path): if path == (): return value elif not isinstance(di, dict): return di else: cd = di.get(path[0]) rest = path[1:] if None is not cd: di[path[0]] = set_to_nested_dict(cd, value, rest) return di def add_hook(hook_type="", hook_name="", hook=None, hook_dict=None): if None is not hook: hook_dict = _or(hook_dict, {}) if None is hook_dict.get(hook_type): hook_dict[hook_type] = {} hook_type_dict = hook_dict[hook_type] oldhs = hook_type_dict.get(hook_name, []) if not isinstance(oldhs, list): oldhs = [oldh] oldhs.append(hook) hook_type_dict[hook_name] = oldhs return hook_dict def run_hooks(hook_type="", hook_name="", hook_dict=None, vargs=None, kwargs=None): if None is not hook_dict: hook_type_dict = hook_dict.get(hook_type) if None is not hook_type_dict: if hook_name == "*": for hooks in hook_type_dict.values(): for hook in hooks: hook(*_or(vargs,()), **_or(kwargs,{})) else: hooks = hook_type_dict.get(hook_name) if None is not hooks: for hook in hooks: hook(*_or(vargs,()), **_or(kwargs,{})) def run_pre_hooks(hook_name="", hook_dict=None, vargs=None, kwargs=None): return run_hooks(hook_type="pre", hook_name=hook_name, hook_dict=hook_dict , vargs=vargs, kwargs=kwargs) def run_post_hooks(hook_name="", hook_dict=None, vargs=None, kwargs=None): return run_hooks(hook_type="post", hook_name=hook_name, hook_dict=hook_dict , vargs=vargs, kwargs=kwargs)
gpl-2.0
nickpack/django-oscar
tests/integration/auth_tests.py
48
2558
import unittest from django.test import TestCase from django.contrib.auth import authenticate from django.core import mail from oscar.core.compat import get_user_model User = get_user_model() class TestEmailAuthBackend(TestCase): def test_authenticates_multiple_users(self): password = 'lookmanohands' users = [ User.objects.create_user(email, email, password=password) for email in ['[email protected]', '[email protected]']] for created_user in users: user = authenticate(username=created_user.email, password=password) self.assertEqual(user, created_user) def test_authenticates_different_email_spelling(self): email = password = '[email protected]' created_user = User.objects.create_user( 'user1', email, password=password) for email_variation in [ '[email protected]', '[email protected]', '[email protected]']: user = authenticate(username=email_variation, password=password) self.assertEqual(user, created_user) # Skip these tests for now as they only make sense when there isn't a unique # index on the user class. The test suite currently uses a custom model that # *does* have a unique index on email. When I figure out how to swap the user # model per test, we can re-enable this testcase. @unittest.skip class TestEmailAuthBackendWhenUsersShareAnEmail(TestCase): def test_authenticates_when_passwords_are_different(self): # Create two users with the same email address email = '[email protected]' for username in ['user1', 'user2']: User.objects.create_user(username, email, password=username) user = authenticate(username=email, password='user1') self.assertTrue(user is not None) def test_rejects_when_passwords_match(self): # Create two users with the same email address email = '[email protected]' for username in ['user1', 'user2']: User.objects.create_user(username, email, password='password') user = authenticate(username=email, password='password') self.assertTrue(user is None) def test_mails_admins_when_passwords_match(self): # Create two users with the same email address email = '[email protected]' for username in ['user1', 'user2']: User.objects.create_user(username, email, password='password') authenticate(username=email, password='password') self.assertEqual(1, len(mail.outbox))
bsd-3-clause
kjs73/pele
pele/systems/ljcluster.py
3
4106
import tempfile from pele.systems import AtomicCluster from pele.potentials import LJ from pele.utils.xyz import write_xyz __all__ = ["LJCluster"] class LJCluster(AtomicCluster): """ define the System class for a Lennard-Jones cluster Parameters ---------- natoms : int See Also -------- BaseSystem, AtomicCluster """ def __init__(self, natoms): super(LJCluster, self).__init__() self.natoms = natoms self.params.database.accuracy = 1e-3 self.params.basinhopping["temperature"] = 1.0 # self.params.double_ended_connect.NEBparams.reinterpolate = 1 def get_permlist(self): return [range(self.natoms)] def get_potential(self): return LJ() def get_system_properties(self): return dict(natoms=int(self.natoms), potential="LJ cluster", ) # # below here is stuff only for the gui # def draw(self, coordslinear, index, subtract_com=True): # pragma: no cover """ tell the gui how to represent your system using openGL objects Parameters ---------- coords : array index : int we can have more than one molecule on the screen at one time. index tells which one to draw. They are viewed at the same time, so they should be visually distinct, e.g. different colors. accepted values are 1 or 2 """ from _opengl_tools import draw_atomic_single_atomtype draw_atomic_single_atomtype(coordslinear, index, subtract_com=subtract_com) def load_coords_pymol(self, coordslist, oname, index=1): # pragma: no cover """load the coords into pymol the new object must be named oname so we can manipulate it later Parameters ---------- coordslist : list of arrays oname : str the new pymol object must be named oname so it can be manipulated later index : int we can have more than one molecule on the screen at one time. index tells which one to draw. They are viewed at the same time, so should be visually distinct, e.g. different colors. accepted values are 1 or 2 Notes ----- the implementation here is a bit hacky. we create a temporary xyz file from coords and load the molecule in pymol from this file. """ # pymol is imported here so you can do, e.g. basinhopping without installing pymol import pymol # create the temporary file suffix = ".xyz" f = tempfile.NamedTemporaryFile(mode="w", suffix=suffix) fname = f.name # write the coords into the xyz file from pele.mindist import CoMToOrigin for coords in coordslist: coords = CoMToOrigin(coords.copy()) write_xyz(f, coords, title=oname, atomtypes=["LA"]) f.flush() # load the molecule from the temporary file pymol.cmd.load(fname) # get name of the object just create and change it to oname objects = pymol.cmd.get_object_list() objectname = objects[-1] pymol.cmd.set_name(objectname, oname) # set the representation pymol.cmd.hide("everything", oname) pymol.cmd.show("spheres", oname) # set the color according to index if index == 1: pymol.cmd.color("red", oname) else: pymol.cmd.color("gray", oname) # # only for testing below here # def run(): # pragma: no cover # create the system object sys = LJCluster(15) # create a database db = sys.create_database() # do a short basinhopping run bh = sys.get_basinhopping(database=db, outstream=None) while len(db.minima()) < 2: bh.run(100) # try to connect the lowest two minima min1, min2 = db.minima()[:2] connect = sys.get_double_ended_connect(min1, min2, db) connect.connect() if __name__ == "__main__": run()
gpl-3.0
abhattad4/Digi-Menu
digimenu2/tests/template_tests/filter_tests/test_floatformat.py
345
4480
# -*- coding: utf-8 -*- from __future__ import unicode_literals from decimal import Decimal, localcontext from unittest import expectedFailure from django.template.defaultfilters import floatformat from django.test import SimpleTestCase from django.utils import six from django.utils.safestring import mark_safe from ..utils import setup class FloatformatTests(SimpleTestCase): @setup({'floatformat01': '{% autoescape off %}{{ a|floatformat }} {{ b|floatformat }}{% endautoescape %}'}) def test_floatformat01(self): output = self.engine.render_to_string('floatformat01', {"a": "1.42", "b": mark_safe("1.42")}) self.assertEqual(output, "1.4 1.4") @setup({'floatformat02': '{{ a|floatformat }} {{ b|floatformat }}'}) def test_floatformat02(self): output = self.engine.render_to_string('floatformat02', {"a": "1.42", "b": mark_safe("1.42")}) self.assertEqual(output, "1.4 1.4") class FunctionTests(SimpleTestCase): def test_inputs(self): self.assertEqual(floatformat(7.7), '7.7') self.assertEqual(floatformat(7.0), '7') self.assertEqual(floatformat(0.7), '0.7') self.assertEqual(floatformat(0.07), '0.1') self.assertEqual(floatformat(0.007), '0.0') self.assertEqual(floatformat(0.0), '0') self.assertEqual(floatformat(7.7, 3), '7.700') self.assertEqual(floatformat(6.000000, 3), '6.000') self.assertEqual(floatformat(6.200000, 3), '6.200') self.assertEqual(floatformat(6.200000, -3), '6.200') self.assertEqual(floatformat(13.1031, -3), '13.103') self.assertEqual(floatformat(11.1197, -2), '11.12') self.assertEqual(floatformat(11.0000, -2), '11') self.assertEqual(floatformat(11.000001, -2), '11.00') self.assertEqual(floatformat(8.2798, 3), '8.280') self.assertEqual(floatformat(5555.555, 2), '5555.56') self.assertEqual(floatformat(001.3000, 2), '1.30') self.assertEqual(floatformat(0.12345, 2), '0.12') self.assertEqual(floatformat(Decimal('555.555'), 2), '555.56') self.assertEqual(floatformat(Decimal('09.000')), '9') self.assertEqual(floatformat('foo'), '') self.assertEqual(floatformat(13.1031, 'bar'), '13.1031') self.assertEqual(floatformat(18.125, 2), '18.13') self.assertEqual(floatformat('foo', 'bar'), '') self.assertEqual(floatformat('¿Cómo esta usted?'), '') self.assertEqual(floatformat(None), '') def test_zero_values(self): """ Check that we're not converting to scientific notation. """ self.assertEqual(floatformat(0, 6), '0.000000') self.assertEqual(floatformat(0, 7), '0.0000000') self.assertEqual(floatformat(0, 10), '0.0000000000') self.assertEqual(floatformat(0.000000000000000000015, 20), '0.00000000000000000002') def test_infinity(self): pos_inf = float(1e30000) self.assertEqual(floatformat(pos_inf), six.text_type(pos_inf)) neg_inf = float(-1e30000) self.assertEqual(floatformat(neg_inf), six.text_type(neg_inf)) nan = pos_inf / pos_inf self.assertEqual(floatformat(nan), six.text_type(nan)) def test_float_dunder_method(self): class FloatWrapper(object): def __init__(self, value): self.value = value def __float__(self): return self.value self.assertEqual(floatformat(FloatWrapper(11.000001), -2), '11.00') def test_low_decimal_precision(self): """ #15789 """ with localcontext() as ctx: ctx.prec = 2 self.assertEqual(floatformat(1.2345, 2), '1.23') self.assertEqual(floatformat(15.2042, -3), '15.204') self.assertEqual(floatformat(1.2345, '2'), '1.23') self.assertEqual(floatformat(15.2042, '-3'), '15.204') self.assertEqual(floatformat(Decimal('1.2345'), 2), '1.23') self.assertEqual(floatformat(Decimal('15.2042'), -3), '15.204') def test_many_zeroes(self): self.assertEqual(floatformat(1.00000000000000015, 16), '1.0000000000000002') if six.PY2: # The above test fails because of Python 2's float handling. Floats # with many zeroes after the decimal point should be passed in as # another type such as unicode or Decimal. test_many_zeroes = expectedFailure(test_many_zeroes)
bsd-3-clause
Matthie456/Bon_DenDuijn
SpatialDecision/external/networkx/drawing/layout.py
11
18390
""" ****** Layout ****** Node positioning algorithms for graph drawing. """ # Copyright (C) 2004-2015 by # Aric Hagberg <[email protected]> # Dan Schult <[email protected]> # Pieter Swart <[email protected]> # All rights reserved. # BSD license. import collections import networkx as nx __author__ = """Aric Hagberg ([email protected])\nDan Schult([email protected])""" __all__ = ['circular_layout', 'random_layout', 'shell_layout', 'spring_layout', 'spectral_layout', 'fruchterman_reingold_layout'] def process_params(G, center, dim): # Some boilerplate code. import numpy as np if not isinstance(G, nx.Graph): empty_graph = nx.Graph() empty_graph.add_nodes_from(G) G = empty_graph if center is None: center = np.zeros(dim) else: center = np.asarray(center) if len(center) != dim: msg = "length of center coordinates must match dimension of layout" raise ValueError(msg) return G, center def random_layout(G, dim=2, center=None): """Position nodes uniformly at random in the unit square. For every node, a position is generated by choosing each of dim coordinates uniformly at random on the interval [0.0, 1.0). NumPy (http://scipy.org) is required for this function. Parameters ---------- G : NetworkX graph or list of nodes A position will be assigned to every node in G. dim : int Dimension of layout. center : array-like or None Coordinate pair around which to center the layout. Returns ------- pos : dict A dictionary of positions keyed by node Examples -------- >>> G = nx.lollipop_graph(4, 3) >>> pos = nx.random_layout(G) """ import numpy as np G, center = process_params(G, center, dim) shape = (len(G), dim) pos = np.random.random(shape) + center pos = pos.astype(np.float32) pos = dict(zip(G, pos)) return pos def circular_layout(G, dim=2, scale=1, center=None): # dim=2 only """Position nodes on a circle. Parameters ---------- G : NetworkX graph or list of nodes dim : int Dimension of layout, currently only dim=2 is supported scale : float Scale factor for positions center : array-like or None Coordinate pair around which to center the layout. Returns ------- dict : A dictionary of positions keyed by node Examples -------- >>> G=nx.path_graph(4) >>> pos=nx.circular_layout(G) Notes ------ This algorithm currently only works in two dimensions and does not try to minimize edge crossings. """ import numpy as np G, center = process_params(G, center, dim) if len(G) == 0: pos = {} elif len(G) == 1: pos = {G.nodes()[0]: center} else: # Discard the extra angle since it matches 0 radians. theta = np.linspace(0, 1, len(G) + 1)[:-1] * 2 * np.pi theta = theta.astype(np.float32) pos = np.column_stack([np.cos(theta), np.sin(theta)]) pos = _rescale_layout(pos, scale=scale) + center pos = dict(zip(G, pos)) return pos def shell_layout(G, nlist=None, dim=2, scale=1, center=None): """Position nodes in concentric circles. Parameters ---------- G : NetworkX graph or list of nodes nlist : list of lists List of node lists for each shell. dim : int Dimension of layout, currently only dim=2 is supported scale : float Scale factor for positions center : array-like or None Coordinate pair around which to center the layout. Returns ------- dict : A dictionary of positions keyed by node Examples -------- >>> G = nx.path_graph(4) >>> shells = [[0], [1,2,3]] >>> pos = nx.shell_layout(G, shells) Notes ------ This algorithm currently only works in two dimensions and does not try to minimize edge crossings. """ import numpy as np G, center = process_params(G, center, dim) if len(G) == 0: return {} elif len(G) == 1: return {G.nodes()[0]: center} if nlist is None: # draw the whole graph in one shell nlist = [list(G.nodes())] if len(nlist[0]) == 1: # single node at center radius = 0.0 else: # else start at r=1 radius = 1.0 npos={} for nodes in nlist: # Discard the extra angle since it matches 0 radians. theta = np.linspace(0, 1, len(nodes) + 1)[:-1] * 2 * np.pi theta = theta.astype(np.float32) pos = np.column_stack([np.cos(theta), np.sin(theta)]) pos = _rescale_layout(pos, scale=scale * radius / len(nlist)) + center npos.update(zip(nodes, pos)) radius += 1.0 return npos def fruchterman_reingold_layout(G,dim=2,k=None, pos=None, fixed=None, iterations=50, weight='weight', scale=1.0, center=None): """Position nodes using Fruchterman-Reingold force-directed algorithm. Parameters ---------- G : NetworkX graph or list of nodes dim : int Dimension of layout k : float (default=None) Optimal distance between nodes. If None the distance is set to 1/sqrt(n) where n is the number of nodes. Increase this value to move nodes farther apart. pos : dict or None optional (default=None) Initial positions for nodes as a dictionary with node as keys and values as a list or tuple. If None, then use random initial positions. fixed : list or None optional (default=None) Nodes to keep fixed at initial position. iterations : int optional (default=50) Number of iterations of spring-force relaxation weight : string or None optional (default='weight') The edge attribute that holds the numerical value used for the edge weight. If None, then all edge weights are 1. scale : float (default=1.0) Scale factor for positions. The nodes are positioned in a box of size [0,scale] x [0,scale]. center : array-like or None Coordinate pair around which to center the layout. Returns ------- dict : A dictionary of positions keyed by node Examples -------- >>> G=nx.path_graph(4) >>> pos=nx.spring_layout(G) # The same using longer function name >>> pos=nx.fruchterman_reingold_layout(G) """ import numpy as np G, center = process_params(G, center, dim) if fixed is not None: nfixed = dict(zip(G, range(len(G)))) fixed = np.asarray([nfixed[v] for v in fixed]) if pos is not None: # Determine size of existing domain to adjust initial positions dom_size = max(flatten(pos.values())) shape = (len(G), dim) pos_arr = np.random.random(shape) * dom_size + center for i,n in enumerate(G): if n in pos: pos_arr[i] = np.asarray(pos[n]) else: pos_arr=None if len(G) == 0: return {} if len(G) == 1: return {G.nodes()[0]: center} try: # Sparse matrix if len(G) < 500: # sparse solver for large graphs raise ValueError A = nx.to_scipy_sparse_matrix(G,weight=weight,dtype='f') if k is None and fixed is not None: # We must adjust k by domain size for layouts that are not near 1x1 nnodes,_ = A.shape k = dom_size / np.sqrt(nnodes) pos = _sparse_fruchterman_reingold(A, dim, k, pos_arr, fixed, iterations) except: A = nx.to_numpy_matrix(G,weight=weight) if k is None and fixed is not None: # We must adjust k by domain size for layouts that are not near 1x1 nnodes,_ = A.shape k = dom_size / np.sqrt(nnodes) pos = _fruchterman_reingold(A, dim, k, pos_arr, fixed, iterations) if fixed is None: pos = _rescale_layout(pos, scale=scale) + center pos = dict(zip(G,pos)) return pos spring_layout=fruchterman_reingold_layout def _fruchterman_reingold(A, dim=2, k=None, pos=None, fixed=None, iterations=50): # Position nodes in adjacency matrix A using Fruchterman-Reingold # Entry point for NetworkX graph is fruchterman_reingold_layout() try: import numpy as np except ImportError: raise ImportError("_fruchterman_reingold() requires numpy: http://scipy.org/ ") try: nnodes,_=A.shape except AttributeError: raise nx.NetworkXError( "fruchterman_reingold() takes an adjacency matrix as input") A=np.asarray(A) # make sure we have an array instead of a matrix if pos==None: # random initial positions pos=np.asarray(np.random.random((nnodes,dim)),dtype=A.dtype) else: # make sure positions are of same type as matrix pos=pos.astype(A.dtype) # optimal distance between nodes if k is None: k=np.sqrt(1.0/nnodes) # the initial "temperature" is about .1 of domain area (=1x1) # this is the largest step allowed in the dynamics. # We need to calculate this in case our fixed positions force our domain # to be much bigger than 1x1 t = max(max(pos.T[0]) - min(pos.T[0]), max(pos.T[1]) - min(pos.T[1]))*0.1 # simple cooling scheme. # linearly step down by dt on each iteration so last iteration is size dt. dt=t/float(iterations+1) delta = np.zeros((pos.shape[0],pos.shape[0],pos.shape[1]),dtype=A.dtype) # the inscrutable (but fast) version # this is still O(V^2) # could use multilevel methods to speed this up significantly for iteration in range(iterations): # matrix of difference between points for i in range(pos.shape[1]): delta[:,:,i]= pos[:,i,None]-pos[:,i] # distance between points distance=np.sqrt((delta**2).sum(axis=-1)) # enforce minimum distance of 0.01 distance=np.where(distance<0.01,0.01,distance) # displacement "force" displacement=np.transpose(np.transpose(delta)*\ (k*k/distance**2-A*distance/k))\ .sum(axis=1) # update positions length=np.sqrt((displacement**2).sum(axis=1)) length=np.where(length<0.01,0.1,length) delta_pos=np.transpose(np.transpose(displacement)*t/length) if fixed is not None: # don't change positions of fixed nodes delta_pos[fixed]=0.0 pos+=delta_pos # cool temperature t-=dt return pos def _sparse_fruchterman_reingold(A, dim=2, k=None, pos=None, fixed=None, iterations=50): # Position nodes in adjacency matrix A using Fruchterman-Reingold # Entry point for NetworkX graph is fruchterman_reingold_layout() # Sparse version try: import numpy as np except ImportError: raise ImportError("_sparse_fruchterman_reingold() requires numpy: http://scipy.org/ ") try: nnodes,_=A.shape except AttributeError: raise nx.NetworkXError( "fruchterman_reingold() takes an adjacency matrix as input") try: from scipy.sparse import spdiags,coo_matrix except ImportError: raise ImportError("_sparse_fruchterman_reingold() scipy numpy: http://scipy.org/ ") # make sure we have a LIst of Lists representation try: A=A.tolil() except: A=(coo_matrix(A)).tolil() if pos==None: # random initial positions pos=np.asarray(np.random.random((nnodes,dim)),dtype=A.dtype) else: # make sure positions are of same type as matrix pos=pos.astype(A.dtype) # no fixed nodes if fixed==None: fixed=[] # optimal distance between nodes if k is None: k=np.sqrt(1.0/nnodes) # the initial "temperature" is about .1 of domain area (=1x1) # this is the largest step allowed in the dynamics. t=0.1 # simple cooling scheme. # linearly step down by dt on each iteration so last iteration is size dt. dt=t/float(iterations+1) displacement=np.zeros((dim,nnodes)) for iteration in range(iterations): displacement*=0 # loop over rows for i in range(A.shape[0]): if i in fixed: continue # difference between this row's node position and all others delta=(pos[i]-pos).T # distance between points distance=np.sqrt((delta**2).sum(axis=0)) # enforce minimum distance of 0.01 distance=np.where(distance<0.01,0.01,distance) # the adjacency matrix row Ai=np.asarray(A.getrowview(i).toarray()) # displacement "force" displacement[:,i]+=\ (delta*(k*k/distance**2-Ai*distance/k)).sum(axis=1) # update positions length=np.sqrt((displacement**2).sum(axis=0)) length=np.where(length<0.01,0.1,length) pos+=(displacement*t/length).T # cool temperature t-=dt return pos def spectral_layout(G, dim=2, weight='weight', scale=1, center=None): """Position nodes using the eigenvectors of the graph Laplacian. Parameters ---------- G : NetworkX graph or list of nodes dim : int Dimension of layout weight : string or None optional (default='weight') The edge attribute that holds the numerical value used for the edge weight. If None, then all edge weights are 1. scale : float Scale factor for positions center : array-like or None Coordinate pair around which to center the layout. Returns ------- dict : A dictionary of positions keyed by node Examples -------- >>> G=nx.path_graph(4) >>> pos=nx.spectral_layout(G) Notes ----- Directed graphs will be considered as undirected graphs when positioning the nodes. For larger graphs (>500 nodes) this will use the SciPy sparse eigenvalue solver (ARPACK). """ # handle some special cases that break the eigensolvers import numpy as np G, center = process_params(G, center, dim) if len(G) <= 2: if len(G) == 0: pos = np.array([]) elif len(G) == 1: pos = np.array([center]) else: pos = np.array([np.zeros(dim), np.array(center)*2.0]) return dict(zip(G,pos)) try: # Sparse matrix if len(G)< 500: # dense solver is faster for small graphs raise ValueError A = nx.to_scipy_sparse_matrix(G, weight=weight, dtype='d') # Symmetrize directed graphs if G.is_directed(): A = A + np.transpose(A) pos = _sparse_spectral(A,dim) except (ImportError, ValueError): # Dense matrix A = nx.to_numpy_matrix(G, weight=weight) # Symmetrize directed graphs if G.is_directed(): A = A + np.transpose(A) pos = _spectral(A, dim) pos = _rescale_layout(pos, scale) + center pos = dict(zip(G,pos)) return pos def _spectral(A, dim=2): # Input adjacency matrix A # Uses dense eigenvalue solver from numpy try: import numpy as np except ImportError: raise ImportError("spectral_layout() requires numpy: http://scipy.org/ ") try: nnodes,_=A.shape except AttributeError: raise nx.NetworkXError(\ "spectral() takes an adjacency matrix as input") # form Laplacian matrix # make sure we have an array instead of a matrix A=np.asarray(A) I=np.identity(nnodes,dtype=A.dtype) D=I*np.sum(A,axis=1) # diagonal of degrees L=D-A eigenvalues,eigenvectors=np.linalg.eig(L) # sort and keep smallest nonzero index=np.argsort(eigenvalues)[1:dim+1] # 0 index is zero eigenvalue return np.real(eigenvectors[:,index]) def _sparse_spectral(A,dim=2): # Input adjacency matrix A # Uses sparse eigenvalue solver from scipy # Could use multilevel methods here, see Koren "On spectral graph drawing" try: import numpy as np from scipy.sparse import spdiags except ImportError: raise ImportError("_sparse_spectral() requires scipy & numpy: http://scipy.org/ ") try: from scipy.sparse.linalg.eigen import eigsh except ImportError: # scipy <0.9.0 names eigsh differently from scipy.sparse.linalg import eigen_symmetric as eigsh try: nnodes,_=A.shape except AttributeError: raise nx.NetworkXError(\ "sparse_spectral() takes an adjacency matrix as input") # form Laplacian matrix data=np.asarray(A.sum(axis=1).T) D=spdiags(data,0,nnodes,nnodes) L=D-A k=dim+1 # number of Lanczos vectors for ARPACK solver.What is the right scaling? ncv=max(2*k+1,int(np.sqrt(nnodes))) # return smallest k eigenvalues and eigenvectors eigenvalues,eigenvectors=eigsh(L,k,which='SM',ncv=ncv) index=np.argsort(eigenvalues)[1:k] # 0 index is zero eigenvalue return np.real(eigenvectors[:,index]) def _rescale_layout(pos,scale=1): # rescale to (-scale,scale) in all axes # shift origin to (0,0) lim=0 # max coordinate for all axes for i in range(pos.shape[1]): pos[:,i]-=pos[:,i].mean() lim=max(pos[:,i].max(),lim) # rescale to (-scale,scale) in all directions, preserves aspect for i in range(pos.shape[1]): pos[:,i]*=scale/lim return pos # fixture for nose tests def setup_module(module): from nose import SkipTest try: import numpy except: raise SkipTest("NumPy not available") try: import scipy except: raise SkipTest("SciPy not available") def flatten(l): try: bs = basestring except NameError: # Py3k bs = str for el in l: if isinstance(el, collections.Iterable) and not isinstance(el, bs): for sub in flatten(el): yield sub else: yield el
gpl-2.0
Crystalnix/house-of-life-chromium
third_party/mesa/MesaLib/src/gallium/tests/python/retrace/model.py
32
5631
#!/usr/bin/env python ########################################################################## # # Copyright 2008 Tungsten Graphics, Inc., Cedar Park, Texas. # All Rights Reserved. # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sub license, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice (including the # next paragraph) shall be included in all copies or substantial portions # of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. # IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR # ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # ########################################################################## '''Trace data model.''' import sys import string import format try: from cStringIO import StringIO except ImportError: from StringIO import StringIO class Node: def visit(self, visitor): raise NotImplementedError def __str__(self): stream = StringIO() formatter = format.DefaultFormatter(stream) pretty_printer = PrettyPrinter(formatter) self.visit(pretty_printer) return stream.getvalue() class Literal(Node): def __init__(self, value): self.value = value def visit(self, visitor): visitor.visit_literal(self) class NamedConstant(Node): def __init__(self, name): self.name = name def visit(self, visitor): visitor.visit_named_constant(self) class Array(Node): def __init__(self, elements): self.elements = elements def visit(self, visitor): visitor.visit_array(self) class Struct(Node): def __init__(self, name, members): self.name = name self.members = members def visit(self, visitor): visitor.visit_struct(self) class Pointer(Node): def __init__(self, address): self.address = address def visit(self, visitor): visitor.visit_pointer(self) class Call: def __init__(self, no, klass, method, args, ret): self.no = no self.klass = klass self.method = method self.args = args self.ret = ret def visit(self, visitor): visitor.visit_call(self) class Trace: def __init__(self, calls): self.calls = calls def visit(self, visitor): visitor.visit_trace(self) class Visitor: def visit_literal(self, node): raise NotImplementedError def visit_named_constant(self, node): raise NotImplementedError def visit_array(self, node): raise NotImplementedError def visit_struct(self, node): raise NotImplementedError def visit_pointer(self, node): raise NotImplementedError def visit_call(self, node): raise NotImplementedError def visit_trace(self, node): raise NotImplementedError class PrettyPrinter: def __init__(self, formatter): self.formatter = formatter def visit_literal(self, node): if isinstance(node.value, basestring): if len(node.value) >= 4096 or node.value.strip(string.printable): self.formatter.text('...') return self.formatter.literal('"' + node.value + '"') return self.formatter.literal(repr(node.value)) def visit_named_constant(self, node): self.formatter.literal(node.name) def visit_array(self, node): self.formatter.text('{') sep = '' for value in node.elements: self.formatter.text(sep) value.visit(self) sep = ', ' self.formatter.text('}') def visit_struct(self, node): self.formatter.text('{') sep = '' for name, value in node.members: self.formatter.text(sep) self.formatter.variable(name) self.formatter.text(' = ') value.visit(self) sep = ', ' self.formatter.text('}') def visit_pointer(self, node): self.formatter.address(node.address) def visit_call(self, node): self.formatter.text('%s ' % node.no) if node.klass is not None: self.formatter.function(node.klass + '::' + node.method) else: self.formatter.function(node.method) self.formatter.text('(') sep = '' for name, value in node.args: self.formatter.text(sep) self.formatter.variable(name) self.formatter.text(' = ') value.visit(self) sep = ', ' self.formatter.text(')') if node.ret is not None: self.formatter.text(' = ') node.ret.visit(self) def visit_trace(self, node): for call in node.calls: call.visit(self) self.formatter.newline()
bsd-3-clause
XuQiufeng/kernel_msm
Documentation/target/tcm_mod_builder.py
4981
41422
#!/usr/bin/python # The TCM v4 multi-protocol fabric module generation script for drivers/target/$NEW_MOD # # Copyright (c) 2010 Rising Tide Systems # Copyright (c) 2010 Linux-iSCSI.org # # Author: [email protected] # import os, sys import subprocess as sub import string import re import optparse tcm_dir = "" fabric_ops = [] fabric_mod_dir = "" fabric_mod_port = "" fabric_mod_init_port = "" def tcm_mod_err(msg): print msg sys.exit(1) def tcm_mod_create_module_subdir(fabric_mod_dir_var): if os.path.isdir(fabric_mod_dir_var) == True: return 1 print "Creating fabric_mod_dir: " + fabric_mod_dir_var ret = os.mkdir(fabric_mod_dir_var) if ret: tcm_mod_err("Unable to mkdir " + fabric_mod_dir_var) return def tcm_mod_build_FC_include(fabric_mod_dir_var, fabric_mod_name): global fabric_mod_port global fabric_mod_init_port buf = "" f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h" print "Writing file: " + f p = open(f, 'w'); if not p: tcm_mod_err("Unable to open file: " + f) buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n" buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n" buf += "\n" buf += "struct " + fabric_mod_name + "_nacl {\n" buf += " /* Binary World Wide unique Port Name for FC Initiator Nport */\n" buf += " u64 nport_wwpn;\n" buf += " /* ASCII formatted WWPN for FC Initiator Nport */\n" buf += " char nport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n" buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n" buf += " struct se_node_acl se_node_acl;\n" buf += "};\n" buf += "\n" buf += "struct " + fabric_mod_name + "_tpg {\n" buf += " /* FC lport target portal group tag for TCM */\n" buf += " u16 lport_tpgt;\n" buf += " /* Pointer back to " + fabric_mod_name + "_lport */\n" buf += " struct " + fabric_mod_name + "_lport *lport;\n" buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n" buf += " struct se_portal_group se_tpg;\n" buf += "};\n" buf += "\n" buf += "struct " + fabric_mod_name + "_lport {\n" buf += " /* SCSI protocol the lport is providing */\n" buf += " u8 lport_proto_id;\n" buf += " /* Binary World Wide unique Port Name for FC Target Lport */\n" buf += " u64 lport_wwpn;\n" buf += " /* ASCII formatted WWPN for FC Target Lport */\n" buf += " char lport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n" buf += " /* Returned by " + fabric_mod_name + "_make_lport() */\n" buf += " struct se_wwn lport_wwn;\n" buf += "};\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() fabric_mod_port = "lport" fabric_mod_init_port = "nport" return def tcm_mod_build_SAS_include(fabric_mod_dir_var, fabric_mod_name): global fabric_mod_port global fabric_mod_init_port buf = "" f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h" print "Writing file: " + f p = open(f, 'w'); if not p: tcm_mod_err("Unable to open file: " + f) buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n" buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n" buf += "\n" buf += "struct " + fabric_mod_name + "_nacl {\n" buf += " /* Binary World Wide unique Port Name for SAS Initiator port */\n" buf += " u64 iport_wwpn;\n" buf += " /* ASCII formatted WWPN for Sas Initiator port */\n" buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n" buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n" buf += " struct se_node_acl se_node_acl;\n" buf += "};\n\n" buf += "struct " + fabric_mod_name + "_tpg {\n" buf += " /* SAS port target portal group tag for TCM */\n" buf += " u16 tport_tpgt;\n" buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n" buf += " struct " + fabric_mod_name + "_tport *tport;\n" buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n" buf += " struct se_portal_group se_tpg;\n" buf += "};\n\n" buf += "struct " + fabric_mod_name + "_tport {\n" buf += " /* SCSI protocol the tport is providing */\n" buf += " u8 tport_proto_id;\n" buf += " /* Binary World Wide unique Port Name for SAS Target port */\n" buf += " u64 tport_wwpn;\n" buf += " /* ASCII formatted WWPN for SAS Target port */\n" buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n" buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n" buf += " struct se_wwn tport_wwn;\n" buf += "};\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() fabric_mod_port = "tport" fabric_mod_init_port = "iport" return def tcm_mod_build_iSCSI_include(fabric_mod_dir_var, fabric_mod_name): global fabric_mod_port global fabric_mod_init_port buf = "" f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h" print "Writing file: " + f p = open(f, 'w'); if not p: tcm_mod_err("Unable to open file: " + f) buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n" buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n" buf += "\n" buf += "struct " + fabric_mod_name + "_nacl {\n" buf += " /* ASCII formatted InitiatorName */\n" buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n" buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n" buf += " struct se_node_acl se_node_acl;\n" buf += "};\n\n" buf += "struct " + fabric_mod_name + "_tpg {\n" buf += " /* iSCSI target portal group tag for TCM */\n" buf += " u16 tport_tpgt;\n" buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n" buf += " struct " + fabric_mod_name + "_tport *tport;\n" buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n" buf += " struct se_portal_group se_tpg;\n" buf += "};\n\n" buf += "struct " + fabric_mod_name + "_tport {\n" buf += " /* SCSI protocol the tport is providing */\n" buf += " u8 tport_proto_id;\n" buf += " /* ASCII formatted TargetName for IQN */\n" buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n" buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n" buf += " struct se_wwn tport_wwn;\n" buf += "};\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() fabric_mod_port = "tport" fabric_mod_init_port = "iport" return def tcm_mod_build_base_includes(proto_ident, fabric_mod_dir_val, fabric_mod_name): if proto_ident == "FC": tcm_mod_build_FC_include(fabric_mod_dir_val, fabric_mod_name) elif proto_ident == "SAS": tcm_mod_build_SAS_include(fabric_mod_dir_val, fabric_mod_name) elif proto_ident == "iSCSI": tcm_mod_build_iSCSI_include(fabric_mod_dir_val, fabric_mod_name) else: print "Unsupported proto_ident: " + proto_ident sys.exit(1) return def tcm_mod_build_configfs(proto_ident, fabric_mod_dir_var, fabric_mod_name): buf = "" f = fabric_mod_dir_var + "/" + fabric_mod_name + "_configfs.c" print "Writing file: " + f p = open(f, 'w'); if not p: tcm_mod_err("Unable to open file: " + f) buf = "#include <linux/module.h>\n" buf += "#include <linux/moduleparam.h>\n" buf += "#include <linux/version.h>\n" buf += "#include <generated/utsrelease.h>\n" buf += "#include <linux/utsname.h>\n" buf += "#include <linux/init.h>\n" buf += "#include <linux/slab.h>\n" buf += "#include <linux/kthread.h>\n" buf += "#include <linux/types.h>\n" buf += "#include <linux/string.h>\n" buf += "#include <linux/configfs.h>\n" buf += "#include <linux/ctype.h>\n" buf += "#include <asm/unaligned.h>\n\n" buf += "#include <target/target_core_base.h>\n" buf += "#include <target/target_core_fabric.h>\n" buf += "#include <target/target_core_fabric_configfs.h>\n" buf += "#include <target/target_core_configfs.h>\n" buf += "#include <target/configfs_macros.h>\n\n" buf += "#include \"" + fabric_mod_name + "_base.h\"\n" buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n" buf += "/* Local pointer to allocated TCM configfs fabric module */\n" buf += "struct target_fabric_configfs *" + fabric_mod_name + "_fabric_configfs;\n\n" buf += "static struct se_node_acl *" + fabric_mod_name + "_make_nodeacl(\n" buf += " struct se_portal_group *se_tpg,\n" buf += " struct config_group *group,\n" buf += " const char *name)\n" buf += "{\n" buf += " struct se_node_acl *se_nacl, *se_nacl_new;\n" buf += " struct " + fabric_mod_name + "_nacl *nacl;\n" if proto_ident == "FC" or proto_ident == "SAS": buf += " u64 wwpn = 0;\n" buf += " u32 nexus_depth;\n\n" buf += " /* " + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n" buf += " return ERR_PTR(-EINVAL); */\n" buf += " se_nacl_new = " + fabric_mod_name + "_alloc_fabric_acl(se_tpg);\n" buf += " if (!se_nacl_new)\n" buf += " return ERR_PTR(-ENOMEM);\n" buf += "//#warning FIXME: Hardcoded nexus depth in " + fabric_mod_name + "_make_nodeacl()\n" buf += " nexus_depth = 1;\n" buf += " /*\n" buf += " * se_nacl_new may be released by core_tpg_add_initiator_node_acl()\n" buf += " * when converting a NodeACL from demo mode -> explict\n" buf += " */\n" buf += " se_nacl = core_tpg_add_initiator_node_acl(se_tpg, se_nacl_new,\n" buf += " name, nexus_depth);\n" buf += " if (IS_ERR(se_nacl)) {\n" buf += " " + fabric_mod_name + "_release_fabric_acl(se_tpg, se_nacl_new);\n" buf += " return se_nacl;\n" buf += " }\n" buf += " /*\n" buf += " * Locate our struct " + fabric_mod_name + "_nacl and set the FC Nport WWPN\n" buf += " */\n" buf += " nacl = container_of(se_nacl, struct " + fabric_mod_name + "_nacl, se_node_acl);\n" if proto_ident == "FC" or proto_ident == "SAS": buf += " nacl->" + fabric_mod_init_port + "_wwpn = wwpn;\n" buf += " /* " + fabric_mod_name + "_format_wwn(&nacl->" + fabric_mod_init_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n" buf += " return se_nacl;\n" buf += "}\n\n" buf += "static void " + fabric_mod_name + "_drop_nodeacl(struct se_node_acl *se_acl)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_acl,\n" buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n" buf += " core_tpg_del_initiator_node_acl(se_acl->se_tpg, se_acl, 1);\n" buf += " kfree(nacl);\n" buf += "}\n\n" buf += "static struct se_portal_group *" + fabric_mod_name + "_make_tpg(\n" buf += " struct se_wwn *wwn,\n" buf += " struct config_group *group,\n" buf += " const char *name)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + "*" + fabric_mod_port + " = container_of(wwn,\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n\n" buf += " struct " + fabric_mod_name + "_tpg *tpg;\n" buf += " unsigned long tpgt;\n" buf += " int ret;\n\n" buf += " if (strstr(name, \"tpgt_\") != name)\n" buf += " return ERR_PTR(-EINVAL);\n" buf += " if (strict_strtoul(name + 5, 10, &tpgt) || tpgt > UINT_MAX)\n" buf += " return ERR_PTR(-EINVAL);\n\n" buf += " tpg = kzalloc(sizeof(struct " + fabric_mod_name + "_tpg), GFP_KERNEL);\n" buf += " if (!tpg) {\n" buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_tpg\");\n" buf += " return ERR_PTR(-ENOMEM);\n" buf += " }\n" buf += " tpg->" + fabric_mod_port + " = " + fabric_mod_port + ";\n" buf += " tpg->" + fabric_mod_port + "_tpgt = tpgt;\n\n" buf += " ret = core_tpg_register(&" + fabric_mod_name + "_fabric_configfs->tf_ops, wwn,\n" buf += " &tpg->se_tpg, (void *)tpg,\n" buf += " TRANSPORT_TPG_TYPE_NORMAL);\n" buf += " if (ret < 0) {\n" buf += " kfree(tpg);\n" buf += " return NULL;\n" buf += " }\n" buf += " return &tpg->se_tpg;\n" buf += "}\n\n" buf += "static void " + fabric_mod_name + "_drop_tpg(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n\n" buf += " core_tpg_deregister(se_tpg);\n" buf += " kfree(tpg);\n" buf += "}\n\n" buf += "static struct se_wwn *" + fabric_mod_name + "_make_" + fabric_mod_port + "(\n" buf += " struct target_fabric_configfs *tf,\n" buf += " struct config_group *group,\n" buf += " const char *name)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + ";\n" if proto_ident == "FC" or proto_ident == "SAS": buf += " u64 wwpn = 0;\n\n" buf += " /* if (" + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n" buf += " return ERR_PTR(-EINVAL); */\n\n" buf += " " + fabric_mod_port + " = kzalloc(sizeof(struct " + fabric_mod_name + "_" + fabric_mod_port + "), GFP_KERNEL);\n" buf += " if (!" + fabric_mod_port + ") {\n" buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_" + fabric_mod_port + "\");\n" buf += " return ERR_PTR(-ENOMEM);\n" buf += " }\n" if proto_ident == "FC" or proto_ident == "SAS": buf += " " + fabric_mod_port + "->" + fabric_mod_port + "_wwpn = wwpn;\n" buf += " /* " + fabric_mod_name + "_format_wwn(&" + fabric_mod_port + "->" + fabric_mod_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n" buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_wwn;\n" buf += "}\n\n" buf += "static void " + fabric_mod_name + "_drop_" + fabric_mod_port + "(struct se_wwn *wwn)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = container_of(wwn,\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n" buf += " kfree(" + fabric_mod_port + ");\n" buf += "}\n\n" buf += "static ssize_t " + fabric_mod_name + "_wwn_show_attr_version(\n" buf += " struct target_fabric_configfs *tf,\n" buf += " char *page)\n" buf += "{\n" buf += " return sprintf(page, \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n" buf += " \"on \"UTS_RELEASE\"\\n\", " + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n" buf += " utsname()->machine);\n" buf += "}\n\n" buf += "TF_WWN_ATTR_RO(" + fabric_mod_name + ", version);\n\n" buf += "static struct configfs_attribute *" + fabric_mod_name + "_wwn_attrs[] = {\n" buf += " &" + fabric_mod_name + "_wwn_version.attr,\n" buf += " NULL,\n" buf += "};\n\n" buf += "static struct target_core_fabric_ops " + fabric_mod_name + "_ops = {\n" buf += " .get_fabric_name = " + fabric_mod_name + "_get_fabric_name,\n" buf += " .get_fabric_proto_ident = " + fabric_mod_name + "_get_fabric_proto_ident,\n" buf += " .tpg_get_wwn = " + fabric_mod_name + "_get_fabric_wwn,\n" buf += " .tpg_get_tag = " + fabric_mod_name + "_get_tag,\n" buf += " .tpg_get_default_depth = " + fabric_mod_name + "_get_default_depth,\n" buf += " .tpg_get_pr_transport_id = " + fabric_mod_name + "_get_pr_transport_id,\n" buf += " .tpg_get_pr_transport_id_len = " + fabric_mod_name + "_get_pr_transport_id_len,\n" buf += " .tpg_parse_pr_out_transport_id = " + fabric_mod_name + "_parse_pr_out_transport_id,\n" buf += " .tpg_check_demo_mode = " + fabric_mod_name + "_check_false,\n" buf += " .tpg_check_demo_mode_cache = " + fabric_mod_name + "_check_true,\n" buf += " .tpg_check_demo_mode_write_protect = " + fabric_mod_name + "_check_true,\n" buf += " .tpg_check_prod_mode_write_protect = " + fabric_mod_name + "_check_false,\n" buf += " .tpg_alloc_fabric_acl = " + fabric_mod_name + "_alloc_fabric_acl,\n" buf += " .tpg_release_fabric_acl = " + fabric_mod_name + "_release_fabric_acl,\n" buf += " .tpg_get_inst_index = " + fabric_mod_name + "_tpg_get_inst_index,\n" buf += " .release_cmd = " + fabric_mod_name + "_release_cmd,\n" buf += " .shutdown_session = " + fabric_mod_name + "_shutdown_session,\n" buf += " .close_session = " + fabric_mod_name + "_close_session,\n" buf += " .stop_session = " + fabric_mod_name + "_stop_session,\n" buf += " .fall_back_to_erl0 = " + fabric_mod_name + "_reset_nexus,\n" buf += " .sess_logged_in = " + fabric_mod_name + "_sess_logged_in,\n" buf += " .sess_get_index = " + fabric_mod_name + "_sess_get_index,\n" buf += " .sess_get_initiator_sid = NULL,\n" buf += " .write_pending = " + fabric_mod_name + "_write_pending,\n" buf += " .write_pending_status = " + fabric_mod_name + "_write_pending_status,\n" buf += " .set_default_node_attributes = " + fabric_mod_name + "_set_default_node_attrs,\n" buf += " .get_task_tag = " + fabric_mod_name + "_get_task_tag,\n" buf += " .get_cmd_state = " + fabric_mod_name + "_get_cmd_state,\n" buf += " .queue_data_in = " + fabric_mod_name + "_queue_data_in,\n" buf += " .queue_status = " + fabric_mod_name + "_queue_status,\n" buf += " .queue_tm_rsp = " + fabric_mod_name + "_queue_tm_rsp,\n" buf += " .get_fabric_sense_len = " + fabric_mod_name + "_get_fabric_sense_len,\n" buf += " .set_fabric_sense_len = " + fabric_mod_name + "_set_fabric_sense_len,\n" buf += " .is_state_remove = " + fabric_mod_name + "_is_state_remove,\n" buf += " /*\n" buf += " * Setup function pointers for generic logic in target_core_fabric_configfs.c\n" buf += " */\n" buf += " .fabric_make_wwn = " + fabric_mod_name + "_make_" + fabric_mod_port + ",\n" buf += " .fabric_drop_wwn = " + fabric_mod_name + "_drop_" + fabric_mod_port + ",\n" buf += " .fabric_make_tpg = " + fabric_mod_name + "_make_tpg,\n" buf += " .fabric_drop_tpg = " + fabric_mod_name + "_drop_tpg,\n" buf += " .fabric_post_link = NULL,\n" buf += " .fabric_pre_unlink = NULL,\n" buf += " .fabric_make_np = NULL,\n" buf += " .fabric_drop_np = NULL,\n" buf += " .fabric_make_nodeacl = " + fabric_mod_name + "_make_nodeacl,\n" buf += " .fabric_drop_nodeacl = " + fabric_mod_name + "_drop_nodeacl,\n" buf += "};\n\n" buf += "static int " + fabric_mod_name + "_register_configfs(void)\n" buf += "{\n" buf += " struct target_fabric_configfs *fabric;\n" buf += " int ret;\n\n" buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n" buf += " \" on \"UTS_RELEASE\"\\n\"," + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n" buf += " utsname()->machine);\n" buf += " /*\n" buf += " * Register the top level struct config_item_type with TCM core\n" buf += " */\n" buf += " fabric = target_fabric_configfs_init(THIS_MODULE, \"" + fabric_mod_name[4:] + "\");\n" buf += " if (IS_ERR(fabric)) {\n" buf += " printk(KERN_ERR \"target_fabric_configfs_init() failed\\n\");\n" buf += " return PTR_ERR(fabric);\n" buf += " }\n" buf += " /*\n" buf += " * Setup fabric->tf_ops from our local " + fabric_mod_name + "_ops\n" buf += " */\n" buf += " fabric->tf_ops = " + fabric_mod_name + "_ops;\n" buf += " /*\n" buf += " * Setup default attribute lists for various fabric->tf_cit_tmpl\n" buf += " */\n" buf += " TF_CIT_TMPL(fabric)->tfc_wwn_cit.ct_attrs = " + fabric_mod_name + "_wwn_attrs;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_base_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_attrib_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_param_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_np_base_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_base_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_attrib_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_auth_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_param_cit.ct_attrs = NULL;\n" buf += " /*\n" buf += " * Register the fabric for use within TCM\n" buf += " */\n" buf += " ret = target_fabric_configfs_register(fabric);\n" buf += " if (ret < 0) {\n" buf += " printk(KERN_ERR \"target_fabric_configfs_register() failed\"\n" buf += " \" for " + fabric_mod_name.upper() + "\\n\");\n" buf += " return ret;\n" buf += " }\n" buf += " /*\n" buf += " * Setup our local pointer to *fabric\n" buf += " */\n" buf += " " + fabric_mod_name + "_fabric_configfs = fabric;\n" buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Set fabric -> " + fabric_mod_name + "_fabric_configfs\\n\");\n" buf += " return 0;\n" buf += "};\n\n" buf += "static void __exit " + fabric_mod_name + "_deregister_configfs(void)\n" buf += "{\n" buf += " if (!" + fabric_mod_name + "_fabric_configfs)\n" buf += " return;\n\n" buf += " target_fabric_configfs_deregister(" + fabric_mod_name + "_fabric_configfs);\n" buf += " " + fabric_mod_name + "_fabric_configfs = NULL;\n" buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Cleared " + fabric_mod_name + "_fabric_configfs\\n\");\n" buf += "};\n\n" buf += "static int __init " + fabric_mod_name + "_init(void)\n" buf += "{\n" buf += " int ret;\n\n" buf += " ret = " + fabric_mod_name + "_register_configfs();\n" buf += " if (ret < 0)\n" buf += " return ret;\n\n" buf += " return 0;\n" buf += "};\n\n" buf += "static void __exit " + fabric_mod_name + "_exit(void)\n" buf += "{\n" buf += " " + fabric_mod_name + "_deregister_configfs();\n" buf += "};\n\n" buf += "MODULE_DESCRIPTION(\"" + fabric_mod_name.upper() + " series fabric driver\");\n" buf += "MODULE_LICENSE(\"GPL\");\n" buf += "module_init(" + fabric_mod_name + "_init);\n" buf += "module_exit(" + fabric_mod_name + "_exit);\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() return def tcm_mod_scan_fabric_ops(tcm_dir): fabric_ops_api = tcm_dir + "include/target/target_core_fabric.h" print "Using tcm_mod_scan_fabric_ops: " + fabric_ops_api process_fo = 0; p = open(fabric_ops_api, 'r') line = p.readline() while line: if process_fo == 0 and re.search('struct target_core_fabric_ops {', line): line = p.readline() continue if process_fo == 0: process_fo = 1; line = p.readline() # Search for function pointer if not re.search('\(\*', line): continue fabric_ops.append(line.rstrip()) continue line = p.readline() # Search for function pointer if not re.search('\(\*', line): continue fabric_ops.append(line.rstrip()) p.close() return def tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir_var, fabric_mod_name): buf = "" bufi = "" f = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.c" print "Writing file: " + f p = open(f, 'w') if not p: tcm_mod_err("Unable to open file: " + f) fi = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.h" print "Writing file: " + fi pi = open(fi, 'w') if not pi: tcm_mod_err("Unable to open file: " + fi) buf = "#include <linux/slab.h>\n" buf += "#include <linux/kthread.h>\n" buf += "#include <linux/types.h>\n" buf += "#include <linux/list.h>\n" buf += "#include <linux/types.h>\n" buf += "#include <linux/string.h>\n" buf += "#include <linux/ctype.h>\n" buf += "#include <asm/unaligned.h>\n" buf += "#include <scsi/scsi.h>\n" buf += "#include <scsi/scsi_host.h>\n" buf += "#include <scsi/scsi_device.h>\n" buf += "#include <scsi/scsi_cmnd.h>\n" buf += "#include <scsi/libfc.h>\n\n" buf += "#include <target/target_core_base.h>\n" buf += "#include <target/target_core_fabric.h>\n" buf += "#include <target/target_core_configfs.h>\n\n" buf += "#include \"" + fabric_mod_name + "_base.h\"\n" buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n" buf += "int " + fabric_mod_name + "_check_true(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " return 1;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_check_true(struct se_portal_group *);\n" buf += "int " + fabric_mod_name + "_check_false(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_check_false(struct se_portal_group *);\n" total_fabric_ops = len(fabric_ops) i = 0 while i < total_fabric_ops: fo = fabric_ops[i] i += 1 # print "fabric_ops: " + fo if re.search('get_fabric_name', fo): buf += "char *" + fabric_mod_name + "_get_fabric_name(void)\n" buf += "{\n" buf += " return \"" + fabric_mod_name[4:] + "\";\n" buf += "}\n\n" bufi += "char *" + fabric_mod_name + "_get_fabric_name(void);\n" continue if re.search('get_fabric_proto_ident', fo): buf += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n" buf += " u8 proto_id;\n\n" buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n" if proto_ident == "FC": buf += " case SCSI_PROTOCOL_FCP:\n" buf += " default:\n" buf += " proto_id = fc_get_fabric_proto_ident(se_tpg);\n" buf += " break;\n" elif proto_ident == "SAS": buf += " case SCSI_PROTOCOL_SAS:\n" buf += " default:\n" buf += " proto_id = sas_get_fabric_proto_ident(se_tpg);\n" buf += " break;\n" elif proto_ident == "iSCSI": buf += " case SCSI_PROTOCOL_ISCSI:\n" buf += " default:\n" buf += " proto_id = iscsi_get_fabric_proto_ident(se_tpg);\n" buf += " break;\n" buf += " }\n\n" buf += " return proto_id;\n" buf += "}\n\n" bufi += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *);\n" if re.search('get_wwn', fo): buf += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n\n" buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_name[0];\n" buf += "}\n\n" bufi += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *);\n" if re.search('get_tag', fo): buf += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n" buf += " return tpg->" + fabric_mod_port + "_tpgt;\n" buf += "}\n\n" bufi += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *);\n" if re.search('get_default_depth', fo): buf += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " return 1;\n" buf += "}\n\n" bufi += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *);\n" if re.search('get_pr_transport_id\)\(', fo): buf += "u32 " + fabric_mod_name + "_get_pr_transport_id(\n" buf += " struct se_portal_group *se_tpg,\n" buf += " struct se_node_acl *se_nacl,\n" buf += " struct t10_pr_registration *pr_reg,\n" buf += " int *format_code,\n" buf += " unsigned char *buf)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n" buf += " int ret = 0;\n\n" buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n" if proto_ident == "FC": buf += " case SCSI_PROTOCOL_FCP:\n" buf += " default:\n" buf += " ret = fc_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n" buf += " format_code, buf);\n" buf += " break;\n" elif proto_ident == "SAS": buf += " case SCSI_PROTOCOL_SAS:\n" buf += " default:\n" buf += " ret = sas_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n" buf += " format_code, buf);\n" buf += " break;\n" elif proto_ident == "iSCSI": buf += " case SCSI_PROTOCOL_ISCSI:\n" buf += " default:\n" buf += " ret = iscsi_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n" buf += " format_code, buf);\n" buf += " break;\n" buf += " }\n\n" buf += " return ret;\n" buf += "}\n\n" bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id(struct se_portal_group *,\n" bufi += " struct se_node_acl *, struct t10_pr_registration *,\n" bufi += " int *, unsigned char *);\n" if re.search('get_pr_transport_id_len\)\(', fo): buf += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(\n" buf += " struct se_portal_group *se_tpg,\n" buf += " struct se_node_acl *se_nacl,\n" buf += " struct t10_pr_registration *pr_reg,\n" buf += " int *format_code)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n" buf += " int ret = 0;\n\n" buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n" if proto_ident == "FC": buf += " case SCSI_PROTOCOL_FCP:\n" buf += " default:\n" buf += " ret = fc_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n" buf += " format_code);\n" buf += " break;\n" elif proto_ident == "SAS": buf += " case SCSI_PROTOCOL_SAS:\n" buf += " default:\n" buf += " ret = sas_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n" buf += " format_code);\n" buf += " break;\n" elif proto_ident == "iSCSI": buf += " case SCSI_PROTOCOL_ISCSI:\n" buf += " default:\n" buf += " ret = iscsi_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n" buf += " format_code);\n" buf += " break;\n" buf += " }\n\n" buf += " return ret;\n" buf += "}\n\n" bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(struct se_portal_group *,\n" bufi += " struct se_node_acl *, struct t10_pr_registration *,\n" bufi += " int *);\n" if re.search('parse_pr_out_transport_id\)\(', fo): buf += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(\n" buf += " struct se_portal_group *se_tpg,\n" buf += " const char *buf,\n" buf += " u32 *out_tid_len,\n" buf += " char **port_nexus_ptr)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n" buf += " char *tid = NULL;\n\n" buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n" if proto_ident == "FC": buf += " case SCSI_PROTOCOL_FCP:\n" buf += " default:\n" buf += " tid = fc_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n" buf += " port_nexus_ptr);\n" elif proto_ident == "SAS": buf += " case SCSI_PROTOCOL_SAS:\n" buf += " default:\n" buf += " tid = sas_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n" buf += " port_nexus_ptr);\n" elif proto_ident == "iSCSI": buf += " case SCSI_PROTOCOL_ISCSI:\n" buf += " default:\n" buf += " tid = iscsi_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n" buf += " port_nexus_ptr);\n" buf += " }\n\n" buf += " return tid;\n" buf += "}\n\n" bufi += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(struct se_portal_group *,\n" bufi += " const char *, u32 *, char **);\n" if re.search('alloc_fabric_acl\)\(', fo): buf += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_nacl *nacl;\n\n" buf += " nacl = kzalloc(sizeof(struct " + fabric_mod_name + "_nacl), GFP_KERNEL);\n" buf += " if (!nacl) {\n" buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_nacl\\n\");\n" buf += " return NULL;\n" buf += " }\n\n" buf += " return &nacl->se_node_acl;\n" buf += "}\n\n" bufi += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *);\n" if re.search('release_fabric_acl\)\(', fo): buf += "void " + fabric_mod_name + "_release_fabric_acl(\n" buf += " struct se_portal_group *se_tpg,\n" buf += " struct se_node_acl *se_nacl)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_nacl,\n" buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n" buf += " kfree(nacl);\n" buf += "}\n\n" bufi += "void " + fabric_mod_name + "_release_fabric_acl(struct se_portal_group *,\n" bufi += " struct se_node_acl *);\n" if re.search('tpg_get_inst_index\)\(', fo): buf += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " return 1;\n" buf += "}\n\n" bufi += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *);\n" if re.search('\*release_cmd\)\(', fo): buf += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return;\n" buf += "}\n\n" bufi += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *);\n" if re.search('shutdown_session\)\(', fo): buf += "int " + fabric_mod_name + "_shutdown_session(struct se_session *se_sess)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_shutdown_session(struct se_session *);\n" if re.search('close_session\)\(', fo): buf += "void " + fabric_mod_name + "_close_session(struct se_session *se_sess)\n" buf += "{\n" buf += " return;\n" buf += "}\n\n" bufi += "void " + fabric_mod_name + "_close_session(struct se_session *);\n" if re.search('stop_session\)\(', fo): buf += "void " + fabric_mod_name + "_stop_session(struct se_session *se_sess, int sess_sleep , int conn_sleep)\n" buf += "{\n" buf += " return;\n" buf += "}\n\n" bufi += "void " + fabric_mod_name + "_stop_session(struct se_session *, int, int);\n" if re.search('fall_back_to_erl0\)\(', fo): buf += "void " + fabric_mod_name + "_reset_nexus(struct se_session *se_sess)\n" buf += "{\n" buf += " return;\n" buf += "}\n\n" bufi += "void " + fabric_mod_name + "_reset_nexus(struct se_session *);\n" if re.search('sess_logged_in\)\(', fo): buf += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *se_sess)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *);\n" if re.search('sess_get_index\)\(', fo): buf += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *se_sess)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *);\n" if re.search('write_pending\)\(', fo): buf += "int " + fabric_mod_name + "_write_pending(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_write_pending(struct se_cmd *);\n" if re.search('write_pending_status\)\(', fo): buf += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *);\n" if re.search('set_default_node_attributes\)\(', fo): buf += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *nacl)\n" buf += "{\n" buf += " return;\n" buf += "}\n\n" bufi += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *);\n" if re.search('get_task_tag\)\(', fo): buf += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *);\n" if re.search('get_cmd_state\)\(', fo): buf += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *);\n" if re.search('queue_data_in\)\(', fo): buf += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *);\n" if re.search('queue_status\)\(', fo): buf += "int " + fabric_mod_name + "_queue_status(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_queue_status(struct se_cmd *);\n" if re.search('queue_tm_rsp\)\(', fo): buf += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *);\n" if re.search('get_fabric_sense_len\)\(', fo): buf += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void);\n" if re.search('set_fabric_sense_len\)\(', fo): buf += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *se_cmd, u32 sense_length)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *, u32);\n" if re.search('is_state_remove\)\(', fo): buf += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *);\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() ret = pi.write(bufi) if ret: tcm_mod_err("Unable to write fi: " + fi) pi.close() return def tcm_mod_build_kbuild(fabric_mod_dir_var, fabric_mod_name): buf = "" f = fabric_mod_dir_var + "/Makefile" print "Writing file: " + f p = open(f, 'w') if not p: tcm_mod_err("Unable to open file: " + f) buf += fabric_mod_name + "-objs := " + fabric_mod_name + "_fabric.o \\\n" buf += " " + fabric_mod_name + "_configfs.o\n" buf += "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name + ".o\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() return def tcm_mod_build_kconfig(fabric_mod_dir_var, fabric_mod_name): buf = "" f = fabric_mod_dir_var + "/Kconfig" print "Writing file: " + f p = open(f, 'w') if not p: tcm_mod_err("Unable to open file: " + f) buf = "config " + fabric_mod_name.upper() + "\n" buf += " tristate \"" + fabric_mod_name.upper() + " fabric module\"\n" buf += " depends on TARGET_CORE && CONFIGFS_FS\n" buf += " default n\n" buf += " ---help---\n" buf += " Say Y here to enable the " + fabric_mod_name.upper() + " fabric module\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() return def tcm_mod_add_kbuild(tcm_dir, fabric_mod_name): buf = "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name.lower() + "/\n" kbuild = tcm_dir + "/drivers/target/Makefile" f = open(kbuild, 'a') f.write(buf) f.close() return def tcm_mod_add_kconfig(tcm_dir, fabric_mod_name): buf = "source \"drivers/target/" + fabric_mod_name.lower() + "/Kconfig\"\n" kconfig = tcm_dir + "/drivers/target/Kconfig" f = open(kconfig, 'a') f.write(buf) f.close() return def main(modname, proto_ident): # proto_ident = "FC" # proto_ident = "SAS" # proto_ident = "iSCSI" tcm_dir = os.getcwd(); tcm_dir += "/../../" print "tcm_dir: " + tcm_dir fabric_mod_name = modname fabric_mod_dir = tcm_dir + "drivers/target/" + fabric_mod_name print "Set fabric_mod_name: " + fabric_mod_name print "Set fabric_mod_dir: " + fabric_mod_dir print "Using proto_ident: " + proto_ident if proto_ident != "FC" and proto_ident != "SAS" and proto_ident != "iSCSI": print "Unsupported proto_ident: " + proto_ident sys.exit(1) ret = tcm_mod_create_module_subdir(fabric_mod_dir) if ret: print "tcm_mod_create_module_subdir() failed because module already exists!" sys.exit(1) tcm_mod_build_base_includes(proto_ident, fabric_mod_dir, fabric_mod_name) tcm_mod_scan_fabric_ops(tcm_dir) tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir, fabric_mod_name) tcm_mod_build_configfs(proto_ident, fabric_mod_dir, fabric_mod_name) tcm_mod_build_kbuild(fabric_mod_dir, fabric_mod_name) tcm_mod_build_kconfig(fabric_mod_dir, fabric_mod_name) input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Makefile..? [yes,no]: ") if input == "yes" or input == "y": tcm_mod_add_kbuild(tcm_dir, fabric_mod_name) input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Kconfig..? [yes,no]: ") if input == "yes" or input == "y": tcm_mod_add_kconfig(tcm_dir, fabric_mod_name) return parser = optparse.OptionParser() parser.add_option('-m', '--modulename', help='Module name', dest='modname', action='store', nargs=1, type='string') parser.add_option('-p', '--protoident', help='Protocol Ident', dest='protoident', action='store', nargs=1, type='string') (opts, args) = parser.parse_args() mandatories = ['modname', 'protoident'] for m in mandatories: if not opts.__dict__[m]: print "mandatory option is missing\n" parser.print_help() exit(-1) if __name__ == "__main__": main(str(opts.modname), opts.protoident)
gpl-2.0
kifcaliph/odoo
openerp/report/render/rml2pdf/__init__.py
381
1101
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from trml2pdf import parseString, parseNode # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
zefie/kernel_moto_shamu
tools/perf/scripts/python/check-perf-trace.py
11214
2503
# perf script event handlers, generated by perf script -g python # (c) 2010, Tom Zanussi <[email protected]> # Licensed under the terms of the GNU GPL License version 2 # # This script tests basic functionality such as flag and symbol # strings, common_xxx() calls back into perf, begin, end, unhandled # events, etc. Basically, if this script runs successfully and # displays expected results, Python scripting support should be ok. import os import sys sys.path.append(os.environ['PERF_EXEC_PATH'] + \ '/scripts/python/Perf-Trace-Util/lib/Perf/Trace') from Core import * from perf_trace_context import * unhandled = autodict() def trace_begin(): print "trace_begin" pass def trace_end(): print_unhandled() def irq__softirq_entry(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, vec): print_header(event_name, common_cpu, common_secs, common_nsecs, common_pid, common_comm) print_uncommon(context) print "vec=%s\n" % \ (symbol_str("irq__softirq_entry", "vec", vec)), def kmem__kmalloc(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, call_site, ptr, bytes_req, bytes_alloc, gfp_flags): print_header(event_name, common_cpu, common_secs, common_nsecs, common_pid, common_comm) print_uncommon(context) print "call_site=%u, ptr=%u, bytes_req=%u, " \ "bytes_alloc=%u, gfp_flags=%s\n" % \ (call_site, ptr, bytes_req, bytes_alloc, flag_str("kmem__kmalloc", "gfp_flags", gfp_flags)), def trace_unhandled(event_name, context, event_fields_dict): try: unhandled[event_name] += 1 except TypeError: unhandled[event_name] = 1 def print_header(event_name, cpu, secs, nsecs, pid, comm): print "%-20s %5u %05u.%09u %8u %-20s " % \ (event_name, cpu, secs, nsecs, pid, comm), # print trace fields not included in handler args def print_uncommon(context): print "common_preempt_count=%d, common_flags=%s, common_lock_depth=%d, " \ % (common_pc(context), trace_flag_str(common_flags(context)), \ common_lock_depth(context)) def print_unhandled(): keys = unhandled.keys() if not keys: return print "\nunhandled events:\n\n", print "%-40s %10s\n" % ("event", "count"), print "%-40s %10s\n" % ("----------------------------------------", \ "-----------"), for event_name in keys: print "%-40s %10d\n" % (event_name, unhandled[event_name])
gpl-2.0
shaistaansari/django
django/db/backends/oracle/features.py
356
2345
from django.db.backends.base.features import BaseDatabaseFeatures from django.db.utils import InterfaceError try: import pytz except ImportError: pytz = None class DatabaseFeatures(BaseDatabaseFeatures): empty_fetchmany_value = () interprets_empty_strings_as_nulls = True uses_savepoints = True has_select_for_update = True has_select_for_update_nowait = True can_return_id_from_insert = True allow_sliced_subqueries = False supports_subqueries_in_group_by = False supports_transactions = True supports_timezones = False has_zoneinfo_database = pytz is not None supports_bitwise_or = False has_native_duration_field = True can_defer_constraint_checks = True supports_partially_nullable_unique_constraints = False truncates_names = True has_bulk_insert = True supports_tablespaces = True supports_sequence_reset = False can_introspect_default = False # Pending implementation by an interested person. can_introspect_max_length = False can_introspect_time_field = False atomic_transactions = False supports_combined_alters = False nulls_order_largest = True requires_literal_defaults = True closed_cursor_error_class = InterfaceError bare_select_suffix = " FROM DUAL" uppercases_column_names = True # select for update with limit can be achieved on Oracle, but not with the current backend. supports_select_for_update_with_limit = False def introspected_boolean_field_type(self, field=None, created_separately=False): """ Some versions of Oracle -- we've seen this on 11.2.0.1 and suspect it goes back -- have a weird bug where, when an integer column is added to an existing table with a default, its precision is later reported on introspection as 0, regardless of the real precision. For Django introspection, this means that such columns are reported as IntegerField even if they are really BigIntegerField or BooleanField. The bug is solved in Oracle 11.2.0.2 and up. """ if self.connection.oracle_full_version < '11.2.0.2' and field and field.has_default() and created_separately: return 'IntegerField' return super(DatabaseFeatures, self).introspected_boolean_field_type(field, created_separately)
bsd-3-clause
hernandito/SickRage
lib/hachoir_parser/misc/msoffice.py
74
31094
""" Parsers for the different streams and fragments found in an OLE2 file. Documents: - goffice source code - Microsoft Office PowerPoint 97-2007 Binary File Format (.ppt) Specification http://download.microsoft.com/download/0/B/E/0BE8BDD7-E5E8-422A-ABFD-4342ED7AD886/PowerPoint97-2007BinaryFileFormat(ppt)Specification.pdf Author: Robert Xiao, Victor Stinner Creation: 8 january 2005 """ from hachoir_core.field import (SubFile, FieldSet, UInt8, UInt16, Int32, UInt32, Enum, String, CString, Bits, RawBytes) from hachoir_core.text_handler import textHandler, hexadecimal from hachoir_parser.misc.ole2_util import OLE2FragmentParser, RawParser from hachoir_core.stream import StringInputStream from hachoir_parser.misc.msoffice_summary import Summary, CompObj from hachoir_parser.misc.word_doc import WordDocumentParser, WordTableParser class RootEntry(OLE2FragmentParser): ENDIAN_CHECK=False def createFields(self): for index, property in enumerate(self.ole2.properties): if index == 0: continue try: name,parser = PROPERTY_NAME[property["name"].value] except LookupError: name = property.name+"content" parser = RawParser for field in self.parseProperty(property, name, parser): yield field def seekSBlock(self, block): self.seekBit(block * self.ole2.ss_size) def parseProperty(self, property, name_prefix, parser=RawParser): ole2 = self.ole2 if not property["size"].value: return if property["size"].value >= ole2["header/threshold"].value: return name = "%s[]" % name_prefix first = None previous = None size = 0 fragment_group = None chain = ole2.getChain(property["start"].value, ole2.ss_fat) while True: try: block = chain.next() contiguous = False if first is None: first = block contiguous = True if previous is not None and block == (previous+1): contiguous = True if contiguous: previous = block size += ole2.ss_size continue except StopIteration: block = None if first is None: break self.seekSBlock(first) desc = "Small blocks %s..%s (%s)" % (first, previous, previous-first+1) desc += " of %s bytes" % (ole2.ss_size//8) field = CustomFragment(self, name, size, parser, desc, fragment_group) yield field if not fragment_group: fragment_group = field.group fragment_group.args["datasize"] = property["size"].value fragment_group.args["ole2name"] = property["name"].value if block is None: break first = block previous = block size = ole2.ss_size class FragmentGroup: def __init__(self, parser): self.items = [] self.parser = parser self.args = {} def add(self, item): self.items.append(item) def createInputStream(self): # FIXME: Use lazy stream creation data = [] for item in self.items: data.append( item["rawdata"].value ) data = "".join(data) # FIXME: Use smarter code to send arguments self.args["ole2"] = self.items[0].root tags = {"class": self.parser, "args": self.args} tags = tags.iteritems() return StringInputStream(data, "<fragment group>", tags=tags) class CustomFragment(FieldSet): def __init__(self, parent, name, size, parser, description=None, group=None): FieldSet.__init__(self, parent, name, description, size=size) if not group: group = FragmentGroup(parser) self.group = group self.group.add(self) def createFields(self): yield RawBytes(self, "rawdata", self.size//8) def _createInputStream(self, **args): return self.group.createInputStream() class Pictures(OLE2FragmentParser): class Picture(FieldSet): def createFields(self): yield RawBytes(self, "identifier", 4, "some kind of marker (A0461DF0)") yield UInt32(self, "size") yield RawBytes(self, "unknown[]", 16) yield RawBytes(self, "unknown[]", 1) yield SubFile(self, "image", self["size"].value-17, "Image Data") ENDIAN_CHECK=False def createFields(self): pos=0 while pos//8 < self.datasize: newpic=Pictures.Picture(self, "picture[]") yield newpic pos+=newpic.size class PowerPointDocument(OLE2FragmentParser): OBJ_TYPES={ 0:"Unknown", 1000:"Document", 1001:"DocumentAtom", 1002:"EndDocument", 1003:"SlidePersist", 1004:"SlideBase", 1005:"SlideBaseAtom", 1006:"Slide", 1007:"SlideAtom", 1008:"Notes", 1009:"NotesAtom", 1010:"Environment", 1011:"SlidePersistAtom", 1012:"Scheme", 1013:"SchemeAtom", 1014:"DocViewInfo", 1015:"SSlideLayoutAtom", 1016:"MainMaster", 1017:"SSSlideInfoAtom", 1018:"SlideViewInfo", 1019:"GuideAtom", 1020:"ViewInfo", 1021:"ViewInfoAtom", 1022:"SlideViewInfoAtom", 1023:"VBAInfo", 1024:"VBAInfoAtom", 1025:"SSDocInfoAtom", 1026:"Summary", 1027:"Texture", 1028:"VBASlideInfo", 1029:"VBASlideInfoAtom", 1030:"DocRoutingSlip", 1031:"OutlineViewInfo", 1032:"SorterViewInfo", 1033:"ExObjList", 1034:"ExObjListAtom", 1035:"PPDrawingGroup", #FIXME: Office Art File Format Docu 1036:"PPDrawing", #FIXME: Office Art File Format Docu 1038:"Theme", 1039:"ColorMapping", 1040:"NamedShows", # don't know if container 1041:"NamedShow", 1042:"NamedShowSlides", # don't know if container 1052:"OriginalMainMasterId", 1053:"CompositeMasterId", 1054:"RoundTripContentMasterInfo12", 1055:"RoundTripShapeId12", 1056:"RoundTripHFPlaceholder12", 1058:"RoundTripContentMasterId12", 1059:"RoundTripOArtTextStyles12", 1060:"HeaderFooterDefaults12", 1061:"DocFlags12", 1062:"RoundTripShapeCheckSumForCustomLayouts12", 1063:"RoundTripNotesMasterTextStyles12", 1064:"RoundTripCustomTableStyles12", 2000:"List", 2005:"FontCollection", 2017:"ListPlaceholder", 2019:"BookmarkCollection", 2020:"SoundCollection", 2021:"SoundCollAtom", 2022:"Sound", 2023:"SoundData", 2025:"BookmarkSeedAtom", 2026:"GuideList", 2028:"RunArray", 2029:"RunArrayAtom", 2030:"ArrayElementAtom", 2031:"Int4ArrayAtom", 2032:"ColorSchemeAtom", 3008:"OEShape", 3009:"ExObjRefAtom", 3011:"OEPlaceholderAtom", 3020:"GrColor", 3024:"GPointAtom", 3025:"GrectAtom", 3031:"GRatioAtom", 3032:"Gscaling", 3034:"GpointAtom", 3035:"OEShapeAtom", 3037:"OEPlaceholderNewPlaceholderId12", 3998:"OutlineTextRefAtom", 3999:"TextHeaderAtom", 4000:"TextCharsAtom", 4001:"StyleTextPropAtom", 4002:"BaseTextPropAtom", 4003:"TxMasterStyleAtom", 4004:"TxCFStyleAtom", 4005:"TxPFStyleAtom", 4006:"TextRulerAtom", 4007:"TextBookmarkAtom", 4008:"TextBytesAtom", 4009:"TxSIStyleAtom", 4010:"TextSpecInfoAtom", 4011:"DefaultRulerAtom", 4023:"FontEntityAtom", 4024:"FontEmbeddedData", 4025:"TypeFace", 4026:"CString", 4027:"ExternalObject", 4033:"MetaFile", 4034:"ExOleObj", 4035:"ExOleObjAtom", 4036:"ExPlainLinkAtom", 4037:"CorePict", 4038:"CorePictAtom", 4039:"ExPlainAtom", 4040:"SrKinsoku", 4041:"HandOut", 4044:"ExEmbed", 4045:"ExEmbedAtom", 4046:"ExLink", 4047:"ExLinkAtom_old", 4048:"BookmarkEntityAtom", 4049:"ExLinkAtom", 4050:"SrKinsokuAtom", 4051:"ExHyperlinkAtom", 4053:"ExPlain", 4054:"ExPlainLink", 4055:"ExHyperlink", 4056:"SlideNumberMCAtom", 4057:"HeadersFooters", 4058:"HeadersFootersAtom", 4062:"RecolorEntryAtom", 4063:"TxInteractiveInfoAtom", 4065:"EmFormatAtom", 4066:"CharFormatAtom", 4067:"ParaFormatAtom", 4068:"MasterText", 4071:"RecolorInfoAtom", 4073:"ExQuickTime", 4074:"ExQuickTimeMovie", 4075:"ExQuickTimeMovieData", 4076:"ExSubscription", 4077:"ExSubscriptionSection", 4078:"ExControl", 4080:"SlideListWithText", 4081:"AnimationInfoAtom", 4082:"InteractiveInfo", 4083:"InteractiveInfoAtom", 4084:"SlideList", 4085:"UserEditAtom", 4086:"CurrentUserAtom", 4087:"DateTimeMCAtom", 4088:"GenericDateMCAtom", 4090:"FooterMCAtom", 4091:"ExControlAtom", 4100:"ExMediaAtom", 4101:"ExVideo", 4102:"ExAviMovie", 4103:"ExMCIMovie", 4109:"ExMIDIAudio", 4110:"ExCDAudio", 4111:"ExWAVAudioEmbedded", 4112:"ExWAVAudioLink", 4113:"ExOleObjStg", 4114:"ExCDAudioAtom", 4115:"ExWAVAudioEmbeddedAtom", 4116:"AnimationInfoAtom", 4117:"RTFDateTimeMCAtom", 5000:"ProgTags", # don't know if container 5001:"ProgStringTag", 5002:"ProgBinaryTag", 5003:"BinaryTagData", 6000:"PrintOptions", 6001:"PersistPtrFullBlock", # don't know if container 6002:"PersistPtrIncrementalBlock", # don't know if container 10000:"RulerIndentAtom", 10001:"GScalingAtom", 10002:"GRColorAtom", 10003:"GLPointAtom", 10004:"GlineAtom", 11019:"AnimationAtom12", 11021:"AnimationHashAtom12", 14100:"SlideSyncInfo12", 14101:"SlideSyncInfoAtom12", 0xf000:"EscherDggContainer", # Drawing Group Container 0xf006:"EscherDgg", 0xf016:"EscherCLSID", 0xf00b:"EscherOPT", 0xf001:"EscherBStoreContainer", 0xf007:"EscherBSE", 0xf018:"EscherBlip_START", # Blip types are between 0xf117:"EscherBlip_END", # these two values 0xf002:"EscherDgContainer", # Drawing Container 0xf008:"EscherDg", 0xf118:"EscherRegroupItems", 0xf120:"EscherColorScheme", # bug in docs 0xf003:"EscherSpgrContainer", 0xf004:"EscherSpContainer", 0xf009:"EscherSpgr", 0xf00a:"EscherSp", 0xf00c:"EscherTextbox", 0xf00d:"EscherClientTextbox", 0xf00e:"EscherAnchor", 0xf00f:"EscherChildAnchor", 0xf010:"EscherClientAnchor", 0xf011:"EscherClientData", 0xf005:"EscherSolverContainer", 0xf012:"EscherConnectorRule", # bug in docs 0xf013:"EscherAlignRule", 0xf014:"EscherArcRule", 0xf015:"EscherClientRule", 0xf017:"EscherCalloutRule", 0xf119:"EscherSelection", 0xf11a:"EscherColorMRU", 0xf11d:"EscherDeletedPspl", # bug in docs 0xf11e:"EscherSplitMenuColors", 0xf11f:"EscherOleObject", 0xf122:"EscherUserDefined"} class CurrentUserAtom(FieldSet): def createFields(self): yield UInt32(self, "size") yield textHandler(UInt32(self, "magic", "0xe391c05f for normal PPT, 0xf3d1c4df for encrypted PPT"), hexadecimal) yield UInt32(self, "offsetToCurrentEdit", "Offset in main stream to current edit field") yield UInt16(self, "lenUserName", "Length of user name") yield UInt16(self, "docFileVersion", "1012 for PP97+") yield UInt8(self, "majorVersion", "3 for PP97+") yield UInt8(self, "minorVersion", "0 for PP97+") yield UInt16(self, "unknown") yield String(self, "userName", self["lenUserName"].value, "ANSI version of the username") yield UInt32(self, "relVersion", "Release version: 8 for regular PPT file, 9 for multiple-master PPT file") class PowerPointObject(FieldSet): def createFields(self): yield Bits(self, "version", 4) yield Bits(self, "instance", 12) yield Enum(UInt16(self, "type"),PowerPointDocument.OBJ_TYPES) yield UInt32(self, "length") self._size = self["length"].value * 8 + 64 obj_type = self["type"].display obj_len = self["length"].value # type 1064 (RoundTripCustomTableStyles12) may appear to be a container, but it is not. if self["version"].value==0xF and self["type"].value != 1064: while (self.current_size)//8 < obj_len+8: yield PowerPointDocument.PowerPointObject(self, "object[]") elif obj_len: if obj_type=="FontEntityAtom": yield String(self, "data", obj_len, charset="UTF-16-LE", truncate="\0", strip="\0") elif obj_type=="TextCharsAtom": yield String(self, "data", obj_len, charset="UTF-16-LE") elif obj_type=="TextBytesAtom": yield String(self, "data", obj_len, charset="ASCII") elif hasattr(PowerPointDocument, obj_type): field = getattr(PowerPointDocument, obj_type)(self, "data") field._size = obj_len * 8 yield field else: yield RawBytes(self, "data", obj_len) def createDescription(self): if self["version"].value==0xF: return "PowerPoint Object Container; instance %s, type %s"%(self["instance"].value,self["type"].display) return "PowerPoint Object; version %s, instance %s, type %s"%(self["version"].value,self["instance"].value,self["type"].display) ENDIAN_CHECK=False OS_CHECK=False def createFields(self): pos=0 while pos//8 < self.datasize: newobj=PowerPointDocument.PowerPointObject(self, "object[]") yield newobj pos+=newobj.size class CurrentUser(OLE2FragmentParser): def createFields(self): yield PowerPointDocument.PowerPointObject(self, "current_user") if self.current_size < self.size: yield String(self, "unicode_name", self["current_user/data/lenUserName"].value * 2, charset="UTF-16-LE") class ExcelWorkbook(OLE2FragmentParser): BIFF_TYPES={0x000:"DIMENSIONS_v0", 0x200:"DIMENSIONS_v2", 0x001:"BLANK_v0", 0x201:"BLANK_v2", 0x002:"INTEGER", 0x003:"NUMBER_v0", 0x203:"NUMBER_v2", 0x004:"LABEL_v0", 0x204:"LABEL_v2", 0x005:"BOOLERR_v0", 0x205:"BOOLERR_v2", 0x006:"FORMULA_v0", 0x206:"FORMULA_v2", 0x406:"FORMULA_v4", 0x007:"STRING_v0", 0x207:"STRING_v2", 0x008:"ROW_v0", 0x208:"ROW_v2", 0x009:"BOF_v0", 0x209:"BOF_v2", 0x409:"BOF_v4", 0x809:"BOF_v8", 0x00a:"EOF", 0x00b:"INDEX_v0", 0x20b:"INDEX_v2", 0x00c:"CALCCOUNT", 0x00d:"CALCMODE", 0x00e:"PRECISION", 0x00f:"REFMODE", 0x010:"DELTA", 0x011:"ITERATION", 0x012:"PROTECT", 0x013:"PASSWORD", 0x014:"HEADER", 0x015:"FOOTER", 0x016:"EXTERNCOUNT", 0x017:"EXTERNSHEET", 0x018:"NAME_v0", 0x218:"NAME_v2", 0x019:"WINDOWPROTECT", 0x01a:"VERTICALPAGEBREAKS", 0x01b:"HORIZONTALPAGEBREAKS", 0x01c:"NOTE", 0x01d:"SELECTION", 0x01e:"FORMAT_v0", 0x41e:"FORMAT_v4", 0x01f:"FORMATCOUNT", # Undocumented 0x020:"COLUMNDEFAULT", # Undocumented 0x021:"ARRAY_v0", 0x221:"ARRAY_v2", 0x022:"1904", 0x023:"EXTERNNAME_v0", 0x223:"EXTERNNAME_v2", 0x024:"COLWIDTH", # Undocumented 0x025:"DEFAULTROWHEIGHT_v0", 0x225:"DEFAULTROWHEIGHT_v2", 0x026:"LEFT_MARGIN", 0x027:"RIGHT_MARGIN", 0x028:"TOP_MARGIN", 0x029:"BOTTOM_MARGIN", 0x02a:"PRINTHEADERS", 0x02b:"PRINTGRIDLINES", 0x02f:"FILEPASS", 0x031:"FONT_v0", 0x231:"FONT_v2", 0x032:"FONTCOUNT", # Undocumented 0x033:"PRINTSIZE", # Undocumented 0x036:"TABLE_v0", 0x236:"TABLE_v2", 0x037:"TABLE2", # OOo has docs 0x038:"WNDESK", # Undocumented 0x039:"ZOOM", # Undocumented 0x03a:"BEGINPREF", # Undocumented 0x03b:"ENDPREF", # Undocumented 0x03c:"CONTINUE", 0x03d:"WINDOW1", 0x03e:"WINDOW2_v0", 0x23e:"WINDOW2_v2", 0x03f:"PANE_V2", # Undocumented 0x040:"BACKUP", 0x041:"PANE", 0x042:"CODEPAGE", 0x043:"XF_OLD_v0", 0x243:"XF_OLD_v2", 0x443:"XF_OLD_v4", 0x044:"XF_INDEX", 0x045:"FONT_COLOR", 0x04d:"PLS", 0x050:"DCON", 0x051:"DCONREF", 0x052:"DCONNAME", 0x055:"DEFCOLWIDTH", 0x059:"XCT", 0x05a:"CRN", 0x05b:"FILESHARING", 0x05c:"WRITEACCESS", 0x05d:"OBJ", 0x05e:"UNCALCED", 0x05f:"SAVERECALC", 0x060:"TEMPLATE", 0x061:"INTL", # Undocumented 0x862:"TAB_COLOR", # Undocumented, OO calls it SHEETLAYOUT 0x063:"OBJPROTECT", 0x07d:"COLINFO", 0x27e:"RK", # Odd that there is no 0x7e 0x07f:"IMDATA", 0x080:"GUTS", 0x081:"WSBOOL", 0x082:"GRIDSET", 0x083:"HCENTER", 0x084:"VCENTER", 0x085:"BOUNDSHEET", 0x086:"WRITEPROT", 0x087:"ADDIN", 0x088:"EDG", 0x089:"PUB", 0x08c:"COUNTRY", 0x08d:"HIDEOBJ", 0x08e:"BUNDLESOFFSET", # Undocumented 0x08f:"BUNDLEHEADER", # Undocumented 0x090:"SORT", 0x091:"SUB", 0x092:"PALETTE", 0x293:"STYLE", # Odd that there is no 0x93 0x094:"LHRECORD", 0x095:"LHNGRAPH", 0x096:"SOUND", 0x097:"SYNC", # Undocumented 0x098:"LPR", 0x099:"STANDARDWIDTH", 0x09a:"FNGROUPNAME", 0x09b:"FILTERMODE", 0x09c:"FNGROUPCOUNT", 0x09d:"AUTOFILTERINFO", 0x09e:"AUTOFILTER", 0x0a0:"SCL", 0x0a1:"SETUP", 0x0a4:"TOOLBARVER", # Undocumented 0x0a9:"COORDLIST", 0x0ab:"GCW", 0x0ae:"SCENMAN", 0x0af:"SCENARIO", 0x0b0:"SXVIEW", 0x0b1:"SXVD", 0x0b2:"SXVI", 0x0b3:"SXSI", # Undocumented 0x0b4:"SXIVD", 0x0b5:"SXLI", 0x0b6:"SXPI", 0x0b7:"FACENUM", # Undocumented 0x0b8:"DOCROUTE", 0x0b9:"RECIPNAME", 0x0ba:"SSLIST", # Undocumented 0x0bb:"MASKIMDATA", # Undocumented 0x4bc:"SHRFMLA", 0x0bd:"MULRK", 0x0be:"MULBLANK", 0x0bf:"TOOLBARHDR", # Undocumented 0x0c0:"TOOLBAREND", # Undocumented 0x0c1:"MMS", 0x0c2:"ADDMENU", 0x0c3:"DELMENU", 0x0c4:"TIPHISTORY", # Undocumented 0x0c5:"SXDI", 0x0c6:"SXDB", 0x0c7:"SXFDB", # guessed 0x0c8:"SXDDB", # guessed 0x0c9:"SXNUM", # guessed 0x0ca:"SXBOOL", # guessed 0x0cb:"SXERR", # guessed 0x0cc:"SXINT", # guessed 0x0cd:"SXSTRING", 0x0ce:"SXDTR", # guessed 0x0cf:"SXNIL", # guessed 0x0d0:"SXTBL", 0x0d1:"SXTBRGIITM", 0x0d2:"SXTBPG", 0x0d3:"OBPROJ", 0x0d5:"SXIDSTM", 0x0d6:"RSTRING", 0x0d7:"DBCELL", 0x0d8:"SXNUMGROUP", # from OO : numerical grouping in pivot cache field 0x0da:"BOOKBOOL", 0x0dc:"PARAMQRY", # DUPLICATE dc 0x0dc:"SXEXT", # DUPLICATE dc 0x0dd:"SCENPROTECT", 0x0de:"OLESIZE", 0x0df:"UDDESC", 0x0e0:"XF", 0x0e1:"INTERFACEHDR", 0x0e2:"INTERFACEEND", 0x0e3:"SXVS", 0x0e5:"MERGECELLS", # guessed 0x0e9:"BG_PIC", # Undocumented 0x0ea:"TABIDCONF", 0x0eb:"MS_O_DRAWING_GROUP", 0x0ec:"MS_O_DRAWING", 0x0ed:"MS_O_DRAWING_SELECTION", 0x0ef:"PHONETIC", # semi-Undocumented 0x0f0:"SXRULE", 0x0f1:"SXEX", 0x0f2:"SXFILT", 0x0f6:"SXNAME", 0x0f7:"SXSELECT", 0x0f8:"SXPAIR", 0x0f9:"SXFMLA", 0x0fb:"SXFORMAT", 0x0fc:"SST", 0x0fd:"LABELSST", 0x0ff:"EXTSST", 0x100:"SXVDEX", 0x103:"SXFORMULA", 0x122:"SXDBEX", 0x137:"CHTRINSERT", 0x138:"CHTRINFO", 0x13B:"CHTRCELLCONTENT", 0x13d:"TABID", 0x140:"CHTRMOVERANGE", 0x14D:"CHTRINSERTTAB", 0x15F:"LABELRANGES", 0x160:"USESELFS", 0x161:"DSF", 0x162:"XL5MODIFY", 0x196:"CHTRHEADER", 0x1a5:"FILESHARING2", 0x1a9:"USERDBVIEW", 0x1aa:"USERSVIEWBEGIN", 0x1ab:"USERSVIEWEND", 0x1ad:"QSI", 0x1ae:"SUPBOOK", 0x1af:"PROT4REV", 0x1b0:"CONDFMT", 0x1b1:"CF", 0x1b2:"DVAL", 0x1b5:"DCONBIN", 0x1b6:"TXO", 0x1b7:"REFRESHALL", 0x1b8:"HLINK", 0x1ba:"CODENAME", # TYPO in MS Docs 0x1bb:"SXFDBTYPE", 0x1bc:"PROT4REVPASS", 0x1be:"DV", 0x1c0:"XL9FILE", 0x1c1:"RECALCID", 0x800:"LINK_TIP", # follows an hlink 0x802:"UNKNOWN_802", # OO exports it but has not name or docs 0x803:"WQSETT", # OO named it and can export it, but does not include it in the docs 0x804:"WQTABLES", # OO named it and can export it, but does not include it in the docs 0x805:"UNKNOWN_805", # No name or docs, seems related to web query see #153260 for sample 0x810:"PIVOT_AUTOFORMAT", # Seems to contain pivot table autoformat indicies, plus ?? 0x864:"UNKNOWN_864", # seems related to pivot tables 0x867:"SHEETPROTECTION", # OO named it, and has docs 0x868:"RANGEPROTECTION", # OO named it, no docs yet 0x1001:"CHART_units", 0x1002:"CHART_chart", 0x1003:"CHART_series", 0x1006:"CHART_dataformat", 0x1007:"CHART_lineformat", 0x1009:"CHART_markerformat", 0x100a:"CHART_areaformat", 0x100b:"CHART_pieformat", 0x100c:"CHART_attachedlabel", 0x100d:"CHART_seriestext", 0x1014:"CHART_chartformat", 0x1015:"CHART_legend", 0x1016:"CHART_serieslist", 0x1017:"CHART_bar", 0x1018:"CHART_line", 0x1019:"CHART_pie", 0x101a:"CHART_area", 0x101b:"CHART_scatter", 0x101c:"CHART_chartline", 0x101d:"CHART_axis", 0x101e:"CHART_tick", 0x101f:"CHART_valuerange", 0x1020:"CHART_catserrange", 0x1021:"CHART_axislineformat", 0x1022:"CHART_chartformatlink", 0x1024:"CHART_defaulttext", 0x1025:"CHART_text", 0x1026:"CHART_fontx", 0x1027:"CHART_objectlink", 0x1032:"CHART_frame", 0x1033:"CHART_begin", 0x1034:"CHART_end", 0x1035:"CHART_plotarea", 0x103a:"CHART_3d", 0x103c:"CHART_picf", 0x103d:"CHART_dropbar", 0x103e:"CHART_radar", 0x103f:"CHART_surf", 0x1040:"CHART_radararea", 0x1041:"CHART_axisparent", 0x1043:"CHART_legendxn", 0x1044:"CHART_shtprops", 0x1045:"CHART_sertocrt", 0x1046:"CHART_axesused", 0x1048:"CHART_sbaseref", 0x104a:"CHART_serparent", 0x104b:"CHART_serauxtrend", 0x104e:"CHART_ifmt", 0x104f:"CHART_pos", 0x1050:"CHART_alruns", 0x1051:"CHART_ai", 0x105b:"CHART_serauxerrbar", 0x105c:"CHART_clrtclient", # Undocumented 0x105d:"CHART_serfmt", 0x105f:"CHART_3dbarshape", # Undocumented 0x1060:"CHART_fbi", 0x1061:"CHART_boppop", 0x1062:"CHART_axcext", 0x1063:"CHART_dat", 0x1064:"CHART_plotgrowth", 0x1065:"CHART_siindex", 0x1066:"CHART_gelframe", 0x1067:"CHART_boppopcustom",} class BIFF(FieldSet): def createFields(self): yield Enum(UInt16(self, "type"),ExcelWorkbook.BIFF_TYPES) yield UInt16(self, "length") if self["length"].value: yield RawBytes(self, "data", self["length"].value) def createDescription(self): return "Excel BIFF; type %s"%self["type"].display def createFields(self): pos=0 while pos//8 < self.datasize: newobj=ExcelWorkbook.BIFF(self, "BIFF[]") yield newobj pos+=newobj.size class ThumbsCatalog(OLE2FragmentParser): class ThumbsEntry(FieldSet): def createFields(self): yield UInt32(self, "size") yield UInt32(self, "index") yield Bits(self, "flags", 8) yield RawBytes(self, "unknown[]", 5) yield UInt16(self, "unknown[]") yield CString(self, "name", charset="UTF-16-LE") if self.current_size // 8 != self['size'].value: yield RawBytes(self, "padding", self['size'].value - self.current_size // 8) def createDescription(self): return "Thumbnail entry for %s"%self["name"].display def createFields(self): yield UInt16(self, "unknown[]") yield UInt16(self, "unknown[]") yield UInt32(self, "count") yield UInt32(self, "unknown[]") yield UInt32(self, "unknown[]") for i in xrange(self['count'].value): yield ThumbsCatalog.ThumbsEntry(self, "entry[]") PROPERTY_NAME = { u"Root Entry": ("root",RootEntry), u"\5DocumentSummaryInformation": ("doc_summary",Summary), u"\5SummaryInformation": ("summary",Summary), u"\1CompObj": ("compobj",CompObj), u"Pictures": ("pictures",Pictures), u"PowerPoint Document": ("powerpointdoc",PowerPointDocument), u"Current User": ("current_user",CurrentUser), u"Workbook": ("workbook",ExcelWorkbook), u"Catalog": ("catalog",ThumbsCatalog), u"WordDocument": ("word_doc",WordDocumentParser), u"0Table": ("table0",WordTableParser), u"1Table": ("table1",WordTableParser), }
gpl-3.0
rhdedgar/openshift-tools
scripts/monitoring/cron-send-create-app.py
5
12893
#!/usr/bin/env python """ Create application check for v3 """ # We just want to see any exception that happens # don't want the script to die under any cicumstances # script must try to clean itself up # pylint: disable=broad-except # main() function has a lot of setup and error handling # pylint: disable=too-many-statements # main() function raises a captured exception if there is one # pylint: disable=raising-bad-type # Adding the ignore because it does not like the naming of the script # to be different than the class name # pylint: disable=invalid-name import argparse import datetime import random import string import time import urllib2 # Our jenkins server does not include these rpms. # In the future we might move this to a container where these # libs might exist #pylint: disable=import-error from openshift_tools.monitoring.ocutil import OCUtil from openshift_tools.monitoring.metric_sender import MetricSender import logging logging.basicConfig( format='%(asctime)s - %(relativeCreated)6d - %(levelname)-8s - %(message)s', ) logger = logging.getLogger() logger.setLevel(logging.INFO) ocutil = OCUtil() commandDelay = 5 # seconds # use parsed arg instead #testLoopCountMax = 180 # * commandDelay = 15min testCurlCountMax = 18 # * commandDelay = 1min30s testNoPodCountMax = 18 # * commandDelay = 1min30s def runOCcmd(cmd, base_cmd='oc'): """ log commands through ocutil """ logger.info(base_cmd + " " + cmd) oc_time = time.time() oc_result = ocutil.run_user_cmd(cmd, base_cmd=base_cmd, ) logger.info("oc command took %s seconds", str(time.time() - oc_time)) return oc_result def runOCcmd_yaml(cmd, base_cmd='oc'): """ log commands through ocutil """ logger.info(base_cmd + " " + cmd) ocy_time = time.time() ocy_result = ocutil.run_user_cmd_yaml(cmd, base_cmd=base_cmd, ) logger.info("oc command took %s seconds", str(time.time() - ocy_time)) return ocy_result def parse_args(): """ parse the args from the cli """ logger.debug("parse_args()") parser = argparse.ArgumentParser(description='OpenShift app create end-to-end test') parser.add_argument('-v', '--verbose', action='store_true', default=None, help='Verbose?') parser.add_argument('--source', default="openshift/hello-openshift:v1.0.6", help='source application to use') parser.add_argument('--basename', default="test", help='base name, added to via openshift') parser.add_argument('--namespace', default="ops-health-monitoring", help='namespace (be careful of using existing namespaces)') parser.add_argument('--loopcount', default="36", help="how many 5 second loops before giving up on app creation") return parser.parse_args() def send_metrics(build_ran, create_app, http_code, run_time): """ send data to MetricSender""" logger.debug("send_metrics()") ms_time = time.time() ms = MetricSender() logger.info("Send data to MetricSender") if build_ran == 1: ms.add_metric({'openshift.master.app.build.create': create_app}) ms.add_metric({'openshift.master.app.build.create.code': http_code}) ms.add_metric({'openshift.master.app.build.create.time': run_time}) else: ms.add_metric({'openshift.master.app.create': create_app}) ms.add_metric({'openshift.master.app.create.code': http_code}) ms.add_metric({'openshift.master.app.create.time': run_time}) ms.send_metrics() logger.info("Data sent to Zagg in %s seconds", str(time.time() - ms_time)) def writeTmpFile(data, filename=None, outdir="/tmp"): """ write string to file """ filename = ''.join([ outdir, '/', filename, ]) with open(filename, 'w') as f: f.write(data) logger.info("wrote file: %s", filename) def curl(ip_addr, port, timeout=30): """ Open an http connection to the url and read """ url = 'http://%s:%s' % (ip_addr, port) logger.debug("curl(%s timeout=%ss)", url, timeout) try: return urllib2.urlopen(url, timeout=timeout).getcode() except urllib2.HTTPError, e: return e.fp.getcode() except Exception as e: logger.exception("Unknown error") return 0 def getPodStatus(pod): """ get pod status for display """ #logger.debug("getPodStatus()") if not pod: return "no pod" if not pod['status']: return "no pod status" return "%s %s" % (pod['metadata']['name'], pod['status']['phase']) def getPod(name): """ get Pod from all possible pods """ pods = ocutil.get_pods() result = None for pod in pods['items']: if pod and pod['metadata']['name'] and pod['metadata']['name'].startswith(name): # if we have a pod already, and this one is a build or deploy pod, don't worry about it # we want podname-xyz12 to be priority # if we dont already have a pod, then this one will do if result: if pod['metadata']['name'].endswith("build"): continue if pod['metadata']['name'].endswith("deploy"): continue result = pod return result def setup(config): """ global setup for tests """ logger.info('setup()') logger.debug(config) project = None try: project = runOCcmd_yaml("get project {}".format(config.namespace)) logger.debug(project) except Exception: pass # don't want exception if project not found if not project: try: runOCcmd("new-project {}".format(config.namespace), base_cmd='oadm') time.sleep(commandDelay) except Exception: logger.exception('error creating new project') runOCcmd("new-app {} --name={} -n {}".format( config.source, config.podname, config.namespace, )) def testCurl(config): """ run curl and return http_code, have retries """ logger.info('testCurl()') logger.debug(config) http_code = 0 # attempt retries for curlCount in range(testCurlCountMax): # introduce small delay to give time for route to establish time.sleep(commandDelay) service = ocutil.get_service(config.podname) if service: logger.debug("service") logger.debug(service) http_code = curl( service['spec']['clusterIP'], service['spec']['ports'][0]['port'] ) logger.debug("http code %s", http_code) if http_code == 200: logger.debug("curl completed in %d tries", curlCount) break return http_code def test(config): """ run tests """ logger.info('test()') logger.debug(config) build_ran = 0 pod = None noPodCount = 0 http_code = 0 for _ in range(int(config.loopcount)): time.sleep(commandDelay) pod = getPod(config.podname) if not pod: noPodCount = noPodCount + 1 if noPodCount > testNoPodCountMax: logger.critical("cannot find pod, fail early") break logger.debug("cannot find pod") continue # cannot test pod further noPodCount = 0 if not pod['status']: logger.error("no pod status") continue # cannot test pod further logger.info(getPodStatus(pod)) if pod['status']['phase']: if pod['status']['phase'] == "Failed": logger.error("Pod Failed") break if pod['status']['phase'] == "Error": logger.error("Pod Error") break if pod['metadata']['name'].endswith("build"): build_ran = 1 continue if pod['metadata']['name'].endswith("deploy"): continue if pod['status']['phase'] == 'Running' \ and pod['status'].has_key('podIP') \ and not pod['metadata']['name'].endswith("build"): http_code = testCurl(config) return { 'build_ran': build_ran, 'create_app': 0, # app create succeeded 'http_code': http_code, 'failed': (http_code != 200), # must be 200 to succeed 'pod': pod, } if build_ran: logger.critical("build timed out, please check build log for last messages") else: logger.critical("app create timed out, please check event log for information") return { 'build_ran': build_ran, 'create_app': 1, # app create failed 'http_code': http_code, 'failed': True, 'pod': pod, } def teardown(config): """ clean up after testing """ logger.info('teardown()') logger.debug(config) time.sleep(commandDelay) runOCcmd("delete all -l app={} -n {}".format( config.podname, config.namespace, )) def main(): """ setup / test / teardown with exceptions to ensure teardown """ exception = None logger.info('################################################################################') logger.info(' Starting App Create - %s', datetime.datetime.now().strftime("%Y-%m-%d %H:%M")) logger.info('################################################################################') logger.debug("main()") args = parse_args() if args.verbose: logger.setLevel(logging.DEBUG) ocutil.namespace = args.namespace ############# generate unique podname ############# args.uid = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(2)) args.timestamp = datetime.datetime.utcnow().strftime("%m%d%H%Mz") args.podname = '-'.join([args.basename, args.timestamp, args.uid]).lower() if len(args.podname) > 24: raise ValueError("len(args.podname) cannot exceed 24, currently {}: {}".format( len(args.podname), args.podname )) ############# setup() ############# try: setup(args) except Exception as e: logger.exception("error during setup()") exception = e ############# test() ############# if not exception: # start time tracking start_time = time.time() try: test_response = test(args) logger.debug(test_response) except Exception as e: logger.exception("error during test()") exception = e test_response = { 'build_ran': 0, 'create_app': 1, # app create failed 'http_code': 0, 'failed': True, 'pod': None, } # finish time tracking run_time = str(time.time() - start_time) logger.info('Test finished. Time to complete test only: %s', run_time) ############# send data to zabbix ############# try: send_metrics( test_response['build_ran'], test_response['create_app'], test_response['http_code'], run_time ) except Exception as e: logger.exception("error sending zabbix data") exception = e ############# collect more information if failed ############# if test_response['failed']: try: ocutil.verbose = True logger.setLevel(logging.DEBUG) logger.critical('Deploy State: Fail') if test_response['pod']: logger.info('Fetching Pod:') logger.info(test_response['pod']) logger.info('Fetching Logs: showing last 20, see file for full data') logs = ocutil.get_log(test_response['pod']['metadata']['name']) writeTmpFile(logs, filename=args.podname+".logs") logger.info("\n".join( logs.split("\n")[-20:] )) logger.info('Fetching Events: see file for full data') events = "\n".join( [event for event in runOCcmd('get events').split("\n") if args.podname in event] ) writeTmpFile(events, filename=args.podname+".events") logger.info(events) except Exception as e: logger.exception("problem fetching additional error data") exception = e else: logger.info('Deploy State: Success') logger.info('Service HTTP response code: %s', test_response['http_code']) ############# teardown ############# teardown(args) ############# raise any exceptions discovered ############# if exception: raise exception if __name__ == "__main__": main()
apache-2.0
Ichag/odoo
openerp/addons/base/tests/test_misc.py
393
1111
import unittest2 from openerp.tools import misc class test_countingstream(unittest2.TestCase): def test_empty_stream(self): s = misc.CountingStream(iter([])) self.assertEqual(s.index, -1) self.assertIsNone(next(s, None)) self.assertEqual(s.index, 0) def test_single(self): s = misc.CountingStream(xrange(1)) self.assertEqual(s.index, -1) self.assertEqual(next(s, None), 0) self.assertIsNone(next(s, None)) self.assertEqual(s.index, 1) def test_full(self): s = misc.CountingStream(xrange(42)) for _ in s: pass self.assertEqual(s.index, 42) def test_repeated(self): """ Once the CountingStream has stopped iterating, the index should not increase anymore (the internal state should not be allowed to change) """ s = misc.CountingStream(iter([])) self.assertIsNone(next(s, None)) self.assertEqual(s.index, 0) self.assertIsNone(next(s, None)) self.assertEqual(s.index, 0) if __name__ == '__main__': unittest2.main()
agpl-3.0
toshywoshy/ansible
lib/ansible/modules/cloud/rackspace/rax_files.py
57
11783
#!/usr/bin/python # (c) 2013, Paul Durivage <[email protected]> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: rax_files short_description: Manipulate Rackspace Cloud Files Containers description: - Manipulate Rackspace Cloud Files Containers version_added: "1.5" options: clear_meta: description: - Optionally clear existing metadata when applying metadata to existing containers. Selecting this option is only appropriate when setting type=meta type: bool default: "no" container: description: - The container to use for container or metadata operations. required: true meta: description: - A hash of items to set as metadata values on a container private: description: - Used to set a container as private, removing it from the CDN. B(Warning!) Private containers, if previously made public, can have live objects available until the TTL on cached objects expires type: bool public: description: - Used to set a container as public, available via the Cloud Files CDN type: bool region: description: - Region to create an instance in default: DFW state: description: - Indicate desired state of the resource choices: ['present', 'absent'] default: present ttl: description: - In seconds, set a container-wide TTL for all objects cached on CDN edge nodes. Setting a TTL is only appropriate for containers that are public type: description: - Type of object to do work on, i.e. metadata object or a container object choices: - file - meta default: file web_error: description: - Sets an object to be presented as the HTTP error page when accessed by the CDN URL web_index: description: - Sets an object to be presented as the HTTP index page when accessed by the CDN URL author: "Paul Durivage (@angstwad)" extends_documentation_fragment: - rackspace - rackspace.openstack ''' EXAMPLES = ''' - name: "Test Cloud Files Containers" hosts: local gather_facts: no tasks: - name: "List all containers" rax_files: state: list - name: "Create container called 'mycontainer'" rax_files: container: mycontainer - name: "Create container 'mycontainer2' with metadata" rax_files: container: mycontainer2 meta: key: value file_for: [email protected] - name: "Set a container's web index page" rax_files: container: mycontainer web_index: index.html - name: "Set a container's web error page" rax_files: container: mycontainer web_error: error.html - name: "Make container public" rax_files: container: mycontainer public: yes - name: "Make container public with a 24 hour TTL" rax_files: container: mycontainer public: yes ttl: 86400 - name: "Make container private" rax_files: container: mycontainer private: yes - name: "Test Cloud Files Containers Metadata Storage" hosts: local gather_facts: no tasks: - name: "Get mycontainer2 metadata" rax_files: container: mycontainer2 type: meta - name: "Set mycontainer2 metadata" rax_files: container: mycontainer2 type: meta meta: uploaded_by: [email protected] - name: "Remove mycontainer2 metadata" rax_files: container: "mycontainer2" type: meta state: absent meta: key: "" file_for: "" ''' try: import pyrax HAS_PYRAX = True except ImportError as e: HAS_PYRAX = False from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.rax import rax_argument_spec, rax_required_together, setup_rax_module EXIT_DICT = dict(success=True) META_PREFIX = 'x-container-meta-' def _get_container(module, cf, container): try: return cf.get_container(container) except pyrax.exc.NoSuchContainer as e: module.fail_json(msg=e.message) def _fetch_meta(module, container): EXIT_DICT['meta'] = dict() try: for k, v in container.get_metadata().items(): split_key = k.split(META_PREFIX)[-1] EXIT_DICT['meta'][split_key] = v except Exception as e: module.fail_json(msg=e.message) def meta(cf, module, container_, state, meta_, clear_meta): c = _get_container(module, cf, container_) if meta_ and state == 'present': try: meta_set = c.set_metadata(meta_, clear=clear_meta) except Exception as e: module.fail_json(msg=e.message) elif meta_ and state == 'absent': remove_results = [] for k, v in meta_.items(): c.remove_metadata_key(k) remove_results.append(k) EXIT_DICT['deleted_meta_keys'] = remove_results elif state == 'absent': remove_results = [] for k, v in c.get_metadata().items(): c.remove_metadata_key(k) remove_results.append(k) EXIT_DICT['deleted_meta_keys'] = remove_results _fetch_meta(module, c) _locals = locals().keys() EXIT_DICT['container'] = c.name if 'meta_set' in _locals or 'remove_results' in _locals: EXIT_DICT['changed'] = True module.exit_json(**EXIT_DICT) def container(cf, module, container_, state, meta_, clear_meta, ttl, public, private, web_index, web_error): if public and private: module.fail_json(msg='container cannot be simultaneously ' 'set to public and private') if state == 'absent' and (meta_ or clear_meta or public or private or web_index or web_error): module.fail_json(msg='state cannot be omitted when setting/removing ' 'attributes on a container') if state == 'list': # We don't care if attributes are specified, let's list containers EXIT_DICT['containers'] = cf.list_containers() module.exit_json(**EXIT_DICT) try: c = cf.get_container(container_) except pyrax.exc.NoSuchContainer as e: # Make the container if state=present, otherwise bomb out if state == 'present': try: c = cf.create_container(container_) except Exception as e: module.fail_json(msg=e.message) else: EXIT_DICT['changed'] = True EXIT_DICT['created'] = True else: module.fail_json(msg=e.message) else: # Successfully grabbed a container object # Delete if state is absent if state == 'absent': try: cont_deleted = c.delete() except Exception as e: module.fail_json(msg=e.message) else: EXIT_DICT['deleted'] = True if meta_: try: meta_set = c.set_metadata(meta_, clear=clear_meta) except Exception as e: module.fail_json(msg=e.message) finally: _fetch_meta(module, c) if ttl: try: c.cdn_ttl = ttl except Exception as e: module.fail_json(msg=e.message) else: EXIT_DICT['ttl'] = c.cdn_ttl if public: try: cont_public = c.make_public() except Exception as e: module.fail_json(msg=e.message) else: EXIT_DICT['container_urls'] = dict(url=c.cdn_uri, ssl_url=c.cdn_ssl_uri, streaming_url=c.cdn_streaming_uri, ios_uri=c.cdn_ios_uri) if private: try: cont_private = c.make_private() except Exception as e: module.fail_json(msg=e.message) else: EXIT_DICT['set_private'] = True if web_index: try: cont_web_index = c.set_web_index_page(web_index) except Exception as e: module.fail_json(msg=e.message) else: EXIT_DICT['set_index'] = True finally: _fetch_meta(module, c) if web_error: try: cont_err_index = c.set_web_error_page(web_error) except Exception as e: module.fail_json(msg=e.message) else: EXIT_DICT['set_error'] = True finally: _fetch_meta(module, c) EXIT_DICT['container'] = c.name EXIT_DICT['objs_in_container'] = c.object_count EXIT_DICT['total_bytes'] = c.total_bytes _locals = locals().keys() if ('cont_deleted' in _locals or 'meta_set' in _locals or 'cont_public' in _locals or 'cont_private' in _locals or 'cont_web_index' in _locals or 'cont_err_index' in _locals): EXIT_DICT['changed'] = True module.exit_json(**EXIT_DICT) def cloudfiles(module, container_, state, meta_, clear_meta, typ, ttl, public, private, web_index, web_error): """ Dispatch from here to work with metadata or file objects """ cf = pyrax.cloudfiles if cf is None: module.fail_json(msg='Failed to instantiate client. This ' 'typically indicates an invalid region or an ' 'incorrectly capitalized region name.') if typ == "container": container(cf, module, container_, state, meta_, clear_meta, ttl, public, private, web_index, web_error) else: meta(cf, module, container_, state, meta_, clear_meta) def main(): argument_spec = rax_argument_spec() argument_spec.update( dict( container=dict(), state=dict(choices=['present', 'absent', 'list'], default='present'), meta=dict(type='dict', default=dict()), clear_meta=dict(default=False, type='bool'), type=dict(choices=['container', 'meta'], default='container'), ttl=dict(type='int'), public=dict(default=False, type='bool'), private=dict(default=False, type='bool'), web_index=dict(), web_error=dict() ) ) module = AnsibleModule( argument_spec=argument_spec, required_together=rax_required_together() ) if not HAS_PYRAX: module.fail_json(msg='pyrax is required for this module') container_ = module.params.get('container') state = module.params.get('state') meta_ = module.params.get('meta') clear_meta = module.params.get('clear_meta') typ = module.params.get('type') ttl = module.params.get('ttl') public = module.params.get('public') private = module.params.get('private') web_index = module.params.get('web_index') web_error = module.params.get('web_error') if state in ['present', 'absent'] and not container_: module.fail_json(msg='please specify a container name') if clear_meta and not typ == 'meta': module.fail_json(msg='clear_meta can only be used when setting ' 'metadata') setup_rax_module(module, pyrax) cloudfiles(module, container_, state, meta_, clear_meta, typ, ttl, public, private, web_index, web_error) if __name__ == '__main__': main()
gpl-3.0
ema/conpaas
conpaas-services/contrib/libcloud/compute/drivers/ktucloud.py
5
3393
# Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from libcloud.compute.providers import Provider from libcloud.compute.base import Node, NodeDriver, NodeImage, NodeLocation, \ NodeSize from libcloud.compute.types import NodeState from libcloud.compute.drivers.cloudstack import CloudStackNodeDriver class KTUCloudNodeDriver(CloudStackNodeDriver): "Driver for KTUCloud Compute platform." type = Provider.KTUCLOUD name = 'KTUCloud' website = 'https://ucloudbiz.olleh.com/' def list_images(self, location=None): args = { 'templatefilter': 'executable' } if location is not None: args['zoneid'] = location.id imgs = self._sync_request('listAvailableProductTypes') images = [] for img in imgs['producttypes']: images.append( NodeImage( img['serviceofferingid'], img['serviceofferingdesc'], self, {'hypervisor': '', 'format': '', 'os': img['templatedesc'], 'templateid': img['templateid'], 'zoneid': img['zoneid']} ) ) return images def list_sizes(self, location=None): szs = self._sync_request('listAvailableProductTypes') sizes = [] for sz in szs['producttypes']: sizes.append(NodeSize( sz['diskofferingid'], sz['diskofferingdesc'], 0, 0, 0, 0, self) ) return sizes def create_node(self, name, size, image, location=None, **kwargs): extra_args = {} usageplantype = kwargs.pop('usageplantype', None) if usageplantype is None: extra_args['usageplantype'] = 'hourly' else: extra_args['usageplantype'] = usageplantype result = self._async_request( 'deployVirtualMachine', displayname=name, serviceofferingid=image.id, diskofferingid=size.id, templateid=str(image.extra['templateid']), zoneid=str(image.extra['zoneid']), **extra_args ) node = result['virtualmachine'] return Node( id=node['id'], name=node['displayname'], state=self.NODE_STATE_MAP[node['state']], public_ips=[], private_ips=[], driver=self, extra={ 'zoneid': image.extra['zoneid'], 'ip_addresses': [], 'forwarding_rules': [], } )
bsd-3-clause
Huyuwei/tvm
tests/python/contrib/test_sort.py
2
3267
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import tvm import numpy as np def test_sort(): n = 2 l = 5 m = 3 data = tvm.placeholder((n, l, m), name='data') sort_num = tvm.placeholder((n, m), name="sort_num", dtype="int32") axis = 1 is_ascend = False out = tvm.extern(data.shape, [data, sort_num], lambda ins, outs: tvm.call_packed( "tvm.contrib.sort.argsort_nms", ins[0], ins[1], outs[0], axis, is_ascend), dtype='int32', name="sort_tensor") input = [[[1, 2, 3], [2, 4.5, 3.5], [1.1, 0.5, 1], [3.2, -5, 0.5], [1.5, 0, 0]], [[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12], [13, 14, 15]]] sort_num_input = [[1, 2, 3], [4, 5, 5]] sorted_index = [[[0, 1, 1], [1, 0, 0], [2, 2, 2], [3, 3, 3], [4, 4, 4]], [[3, 4, 4], [2, 3, 3], [1, 2, 2], [0, 1, 1], [4, 0, 0]]] ctx = tvm.cpu(0) target = "llvm" s = tvm.create_schedule(out.op) f = tvm.build(s, [data, sort_num, out], target) a = tvm.nd.array(np.array(input).astype(data.dtype), ctx) b = tvm.nd.array(np.array(sort_num_input).astype(sort_num.dtype), ctx) c = tvm.nd.array(np.zeros(a.shape, dtype=out.dtype), ctx) f(a, b, c) tvm.testing.assert_allclose(c.asnumpy(), np.array(sorted_index).astype(out.dtype), rtol=1e-5) def test_sort_np(): dshape = (1, 2, 3, 4, 5, 6) axis = 4 reduced_shape = (1, 2, 3, 4, 6) is_ascend = True data = tvm.placeholder(dshape, name='data') sort_num = tvm.placeholder(reduced_shape, name="sort_num", dtype="int32") out = tvm.extern(data.shape, [data, sort_num], lambda ins, outs: tvm.call_packed( "tvm.contrib.sort.argsort_nms", ins[0], ins[1], outs[0], axis, is_ascend), dtype='int32', name="sort_tensor") ctx = tvm.cpu(0) target = "llvm" s = tvm.create_schedule(out.op) f = tvm.build(s, [data, sort_num, out], target) np_data = np.random.uniform(size=dshape) np_out = np.argsort(np_data, axis=axis) sort_num_input = np.full(reduced_shape, dshape[axis]) a = tvm.nd.array(np.array(np_data).astype(data.dtype), ctx) b = tvm.nd.array(np.array(sort_num_input).astype(sort_num.dtype), ctx) c = tvm.nd.array(np.zeros(a.shape, dtype=out.dtype), ctx) f(a, b, c) tvm.testing.assert_allclose(c.asnumpy(), np_out, rtol=1e-5) if __name__ == "__main__": test_sort() test_sort_np()
apache-2.0
liaorubei/depot_tools
third_party/pylint/pyreverse/main.py
67
5143
# # Copyright (c) 2000-2013 LOGILAB S.A. (Paris, FRANCE). # http://www.logilab.fr/ -- mailto:[email protected] # # This program is free software; you can redistribute it and/or modify it under # the terms of the GNU General Public License as published by the Free Software # Foundation; either version 2 of the License, or (at your option) any later # version. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """ %prog [options] <packages> create UML diagrams for classes and modules in <packages> """ from __future__ import print_function import sys, os from logilab.common.configuration import ConfigurationMixIn from astroid.manager import AstroidManager from astroid.inspector import Linker from pylint.pyreverse.diadefslib import DiadefsHandler from pylint.pyreverse import writer from pylint.pyreverse.utils import insert_default_options OPTIONS = ( ("filter-mode", dict(short='f', default='PUB_ONLY', dest='mode', type='string', action='store', metavar='<mode>', help="""filter attributes and functions according to <mode>. Correct modes are : 'PUB_ONLY' filter all non public attributes [DEFAULT], equivalent to PRIVATE+SPECIAL_A 'ALL' no filter 'SPECIAL' filter Python special functions except constructor 'OTHER' filter protected and private attributes""")), ("class", dict(short='c', action="append", metavar="<class>", dest="classes", default=[], help="create a class diagram with all classes related to <class>;\ this uses by default the options -ASmy")), ("show-ancestors", dict(short="a", action="store", metavar='<ancestor>', type='int', help='show <ancestor> generations of ancestor classes not in <projects>')), ("all-ancestors", dict(short="A", default=None, help="show all ancestors off all classes in <projects>")), ("show-associated", dict(short='s', action="store", metavar='<ass_level>', type='int', help='show <ass_level> levels of associated classes not in <projects>')), ("all-associated", dict(short='S', default=None, help='show recursively all associated off all associated classes')), ("show-builtin", dict(short="b", action="store_true", default=False, help='include builtin objects in representation of classes')), ("module-names", dict(short="m", default=None, type='yn', metavar='[yn]', help='include module name in representation of classes')), # TODO : generate dependencies like in pylint # ("package-dependencies", # dict(short="M", action="store", metavar='<package_depth>', type='int', # help='show <package_depth> module dependencies beyond modules in \ # <projects> (for the package diagram)')), ("only-classnames", dict(short='k', action="store_true", default=False, help="don't show attributes and methods in the class boxes; \ this disables -f values")), ("output", dict(short="o", dest="output_format", action="store", default="dot", metavar="<format>", help="create a *.<format> output file if format available.")), ) # FIXME : quiet mode #( ('quiet', #dict(help='run quietly', action='store_true', short='q')), ) class Run(ConfigurationMixIn): """base class providing common behaviour for pyreverse commands""" options = OPTIONS def __init__(self, args): ConfigurationMixIn.__init__(self, usage=__doc__) insert_default_options() self.manager = AstroidManager() self.register_options_provider(self.manager) args = self.load_command_line_configuration() sys.exit(self.run(args)) def run(self, args): """checking arguments and run project""" if not args: print(self.help()) return 1 # insert current working directory to the python path to recognize # dependencies to local modules even if cwd is not in the PYTHONPATH sys.path.insert(0, os.getcwd()) try: project = self.manager.project_from_files(args) linker = Linker(project, tag=True) handler = DiadefsHandler(self.config) diadefs = handler.get_diadefs(project, linker) finally: sys.path.pop(0) if self.config.output_format == "vcg": writer.VCGWriter(self.config).write(diadefs) else: writer.DotWriter(self.config).write(diadefs) return 0 if __name__ == '__main__': Run(sys.argv[1:])
bsd-3-clause
hobarrera/django
django/core/checks/security/base.py
45
6645
from django.conf import settings from .. import Tags, Warning, register from ..utils import patch_middleware_message SECRET_KEY_MIN_LENGTH = 50 SECRET_KEY_MIN_UNIQUE_CHARACTERS = 5 W001 = Warning( "You do not have 'django.middleware.security.SecurityMiddleware' " "in your MIDDLEWARE so the SECURE_HSTS_SECONDS, " "SECURE_CONTENT_TYPE_NOSNIFF, " "SECURE_BROWSER_XSS_FILTER, and SECURE_SSL_REDIRECT settings " "will have no effect.", id='security.W001', ) W002 = Warning( "You do not have " "'django.middleware.clickjacking.XFrameOptionsMiddleware' in your " "MIDDLEWARE, so your pages will not be served with an " "'x-frame-options' header. Unless there is a good reason for your " "site to be served in a frame, you should consider enabling this " "header to help prevent clickjacking attacks.", id='security.W002', ) W004 = Warning( "You have not set a value for the SECURE_HSTS_SECONDS setting. " "If your entire site is served only over SSL, you may want to consider " "setting a value and enabling HTTP Strict Transport Security. " "Be sure to read the documentation first; enabling HSTS carelessly " "can cause serious, irreversible problems.", id='security.W004', ) W005 = Warning( "You have not set the SECURE_HSTS_INCLUDE_SUBDOMAINS setting to True. " "Without this, your site is potentially vulnerable to attack " "via an insecure connection to a subdomain. Only set this to True if " "you are certain that all subdomains of your domain should be served " "exclusively via SSL.", id='security.W005', ) W006 = Warning( "Your SECURE_CONTENT_TYPE_NOSNIFF setting is not set to True, " "so your pages will not be served with an " "'x-content-type-options: nosniff' header. " "You should consider enabling this header to prevent the " "browser from identifying content types incorrectly.", id='security.W006', ) W007 = Warning( "Your SECURE_BROWSER_XSS_FILTER setting is not set to True, " "so your pages will not be served with an " "'x-xss-protection: 1; mode=block' header. " "You should consider enabling this header to activate the " "browser's XSS filtering and help prevent XSS attacks.", id='security.W007', ) W008 = Warning( "Your SECURE_SSL_REDIRECT setting is not set to True. " "Unless your site should be available over both SSL and non-SSL " "connections, you may want to either set this setting True " "or configure a load balancer or reverse-proxy server " "to redirect all connections to HTTPS.", id='security.W008', ) W009 = Warning( "Your SECRET_KEY has less than %(min_length)s characters or less than " "%(min_unique_chars)s unique characters. Please generate a long and random " "SECRET_KEY, otherwise many of Django's security-critical features will be " "vulnerable to attack." % { 'min_length': SECRET_KEY_MIN_LENGTH, 'min_unique_chars': SECRET_KEY_MIN_UNIQUE_CHARACTERS, }, id='security.W009', ) W018 = Warning( "You should not have DEBUG set to True in deployment.", id='security.W018', ) W019 = Warning( "You have " "'django.middleware.clickjacking.XFrameOptionsMiddleware' in your " "MIDDLEWARE, but X_FRAME_OPTIONS is not set to 'DENY'. " "The default is 'SAMEORIGIN', but unless there is a good reason for " "your site to serve other parts of itself in a frame, you should " "change it to 'DENY'.", id='security.W019', ) W020 = Warning( "ALLOWED_HOSTS must not be empty in deployment.", id='security.W020', ) def _security_middleware(): return ("django.middleware.security.SecurityMiddleware" in settings.MIDDLEWARE_CLASSES or settings.MIDDLEWARE and "django.middleware.security.SecurityMiddleware" in settings.MIDDLEWARE) def _xframe_middleware(): return ("django.middleware.clickjacking.XFrameOptionsMiddleware" in settings.MIDDLEWARE_CLASSES or settings.MIDDLEWARE and "django.middleware.clickjacking.XFrameOptionsMiddleware" in settings.MIDDLEWARE) @register(Tags.security, deploy=True) def check_security_middleware(app_configs, **kwargs): passed_check = _security_middleware() return [] if passed_check else [patch_middleware_message(W001)] @register(Tags.security, deploy=True) def check_xframe_options_middleware(app_configs, **kwargs): passed_check = _xframe_middleware() return [] if passed_check else [patch_middleware_message(W002)] @register(Tags.security, deploy=True) def check_sts(app_configs, **kwargs): passed_check = not _security_middleware() or settings.SECURE_HSTS_SECONDS return [] if passed_check else [W004] @register(Tags.security, deploy=True) def check_sts_include_subdomains(app_configs, **kwargs): passed_check = ( not _security_middleware() or not settings.SECURE_HSTS_SECONDS or settings.SECURE_HSTS_INCLUDE_SUBDOMAINS is True ) return [] if passed_check else [W005] @register(Tags.security, deploy=True) def check_content_type_nosniff(app_configs, **kwargs): passed_check = ( not _security_middleware() or settings.SECURE_CONTENT_TYPE_NOSNIFF is True ) return [] if passed_check else [W006] @register(Tags.security, deploy=True) def check_xss_filter(app_configs, **kwargs): passed_check = ( not _security_middleware() or settings.SECURE_BROWSER_XSS_FILTER is True ) return [] if passed_check else [W007] @register(Tags.security, deploy=True) def check_ssl_redirect(app_configs, **kwargs): passed_check = ( not _security_middleware() or settings.SECURE_SSL_REDIRECT is True ) return [] if passed_check else [W008] @register(Tags.security, deploy=True) def check_secret_key(app_configs, **kwargs): passed_check = ( getattr(settings, 'SECRET_KEY', None) and len(set(settings.SECRET_KEY)) >= SECRET_KEY_MIN_UNIQUE_CHARACTERS and len(settings.SECRET_KEY) >= SECRET_KEY_MIN_LENGTH ) return [] if passed_check else [W009] @register(Tags.security, deploy=True) def check_debug(app_configs, **kwargs): passed_check = not settings.DEBUG return [] if passed_check else [W018] @register(Tags.security, deploy=True) def check_xframe_deny(app_configs, **kwargs): passed_check = ( not _xframe_middleware() or settings.X_FRAME_OPTIONS == 'DENY' ) return [] if passed_check else [patch_middleware_message(W019)] @register(Tags.security, deploy=True) def check_allowed_hosts(app_configs, **kwargs): return [] if settings.ALLOWED_HOSTS else [W020]
bsd-3-clause
mmardini/django
django/contrib/messages/storage/base.py
113
6286
from __future__ import unicode_literals from django.conf import settings from django.utils.encoding import force_text, python_2_unicode_compatible from django.contrib.messages import constants, utils LEVEL_TAGS = utils.get_level_tags() @python_2_unicode_compatible class Message(object): """ Represents an actual message that can be stored in any of the supported storage classes (typically session- or cookie-based) and rendered in a view or template. """ def __init__(self, level, message, extra_tags=None): self.level = int(level) self.message = message self.extra_tags = extra_tags def _prepare(self): """ Prepares the message for serialization by forcing the ``message`` and ``extra_tags`` to unicode in case they are lazy translations. Known "safe" types (None, int, etc.) are not converted (see Django's ``force_text`` implementation for details). """ self.message = force_text(self.message, strings_only=True) self.extra_tags = force_text(self.extra_tags, strings_only=True) def __eq__(self, other): return isinstance(other, Message) and self.level == other.level and \ self.message == other.message def __str__(self): return force_text(self.message) def _get_tags(self): extra_tags = force_text(self.extra_tags, strings_only=True) if extra_tags and self.level_tag: return ' '.join([extra_tags, self.level_tag]) elif extra_tags: return extra_tags elif self.level_tag: return self.level_tag return '' tags = property(_get_tags) @property def level_tag(self): return force_text(LEVEL_TAGS.get(self.level, ''), strings_only=True) class BaseStorage(object): """ This is the base backend for temporary message storage. This is not a complete class; to be a usable storage backend, it must be subclassed and the two methods ``_get`` and ``_store`` overridden. """ def __init__(self, request, *args, **kwargs): self.request = request self._queued_messages = [] self.used = False self.added_new = False super(BaseStorage, self).__init__(*args, **kwargs) def __len__(self): return len(self._loaded_messages) + len(self._queued_messages) def __iter__(self): self.used = True if self._queued_messages: self._loaded_messages.extend(self._queued_messages) self._queued_messages = [] return iter(self._loaded_messages) def __contains__(self, item): return item in self._loaded_messages or item in self._queued_messages @property def _loaded_messages(self): """ Returns a list of loaded messages, retrieving them first if they have not been loaded yet. """ if not hasattr(self, '_loaded_data'): messages, all_retrieved = self._get() self._loaded_data = messages or [] return self._loaded_data def _get(self, *args, **kwargs): """ Retrieves a list of stored messages. Returns a tuple of the messages and a flag indicating whether or not all the messages originally intended to be stored in this storage were, in fact, stored and retrieved; e.g., ``(messages, all_retrieved)``. **This method must be implemented by a subclass.** If it is possible to tell if the backend was not used (as opposed to just containing no messages) then ``None`` should be returned in place of ``messages``. """ raise NotImplementedError('subclasses of BaseStorage must provide a _get() method') def _store(self, messages, response, *args, **kwargs): """ Stores a list of messages, returning a list of any messages which could not be stored. One type of object must be able to be stored, ``Message``. **This method must be implemented by a subclass.** """ raise NotImplementedError('subclasses of BaseStorage must provide a _store() method') def _prepare_messages(self, messages): """ Prepares a list of messages for storage. """ for message in messages: message._prepare() def update(self, response): """ Stores all unread messages. If the backend has yet to be iterated, previously stored messages will be stored again. Otherwise, only messages added after the last iteration will be stored. """ self._prepare_messages(self._queued_messages) if self.used: return self._store(self._queued_messages, response) elif self.added_new: messages = self._loaded_messages + self._queued_messages return self._store(messages, response) def add(self, level, message, extra_tags=''): """ Queues a message to be stored. The message is only queued if it contained something and its level is not less than the recording level (``self.level``). """ if not message: return # Check that the message level is not less than the recording level. level = int(level) if level < self.level: return # Add the message. self.added_new = True message = Message(level, message, extra_tags=extra_tags) self._queued_messages.append(message) def _get_level(self): """ Returns the minimum recorded level. The default level is the ``MESSAGE_LEVEL`` setting. If this is not found, the ``INFO`` level is used. """ if not hasattr(self, '_level'): self._level = getattr(settings, 'MESSAGE_LEVEL', constants.INFO) return self._level def _set_level(self, value=None): """ Sets a custom minimum recorded level. If set to ``None``, the default level will be used (see the ``_get_level`` method). """ if value is None and hasattr(self, '_level'): del self._level else: self._level = int(value) level = property(_get_level, _set_level, _set_level)
bsd-3-clause
andrecunha/idd3
idd3/rules/universal/misc_rulesets.py
1
10997
# -*- coding: utf-8 -*- # IDD3 - Propositional Idea Density from Dependency Trees # Copyright (C) 2014-2015 Andre Luiz Verucci da Cunha # # This program is free software: you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by the Free # Software Foundation, either version 3 of the License, or (at your option) # any later version. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for # more details. # # You should have received a copy of the GNU General Public License along with # this program. If not, see <http://www.gnu.org/licenses/>. from __future__ import print_function, unicode_literals, division from idd3 import Relation, Ruleset, config from idd3.rules.universal.np_rulesets import NounPhraseRuleset from idd3.rules.universal.vp_rulesets import VerbPhraseRuleset from idd3.rules.universal.adjp_rulesets import AdjectivalPhraseRuleset import logging logger = logging.getLogger(__name__) class TopRuleset(Ruleset): """A dummy ruleset that starts the analysis process.""" rel = 'TOP' def extract(self, relations, index, context, engine, info={}): return engine.analyze(relations, relations[index].deps[0], [index]) class ConjRuleset(NounPhraseRuleset, VerbPhraseRuleset): """A ruleset that processes the 'conj' relation.""" rel = 'conj' def extract(self, relations, index, context, engine, info={}): """extract(relations, index, context, engine, info) -> list(str) OUTDATED This ruleset returns a list of strings, corresponding to the return_list value of NounPhraseRuleset. Examples: * Mary and John conj(Mary, John) -> return ['John'] """ if info['class'] == 'NP': logger.debug('ConjRuleset is processing node as NP') # TODO: Maybe just return the first element in the list. d = NounPhraseRuleset.extract(self, relations, index, context, engine) if d['ids_for_preconj'] == []: return d['return_list'] elif info['class'] == 'VP': logger.debug('ConjRuleset is processing node as VP') d = VerbPhraseRuleset.extract(self, relations, index, context, engine, info) return d class CompmodJoinRuleset(Ruleset): """A ruleset that processes the 'compmod-join' relation.""" rel = 'compmod-join' def extract(self, relations, index, context, engine, info={}): """extract(relations, index, context, engine, info) -> str | list(str) An nn can be a single word or multiple words connected by cc/conj. Examples: * Oil prices nn(prices, Oil) -> return "Oil" * East and West Germany nn(Germany, East) cc(East, and) conj(East, West) -> return ["East", "West"] """ conj_indices = Relation.get_children_with_dep('conj', relations, index) if conj_indices != []: # Consume the conjunction. cc_indices = Relation.get_children_with_dep('cc', relations, index) for i in cc_indices: engine.analyze(relations, cc_indices[0], context + [index]) conjs = [engine.analyze(relations, i, context + [index], info={'class': 'NP'}) for i in conj_indices] conjs = [c[0] for c in conjs] # TODO: check if this makes sense. return [relations[index].word] + conjs else: return relations[index].word class DetRuleset(Ruleset): """A ruleset that processes the 'det' relation.""" rel = 'det' def extract(self, relations, index, context, engine, info={}): """extract(relations, index, context, engine, info) -> str | None A determiner may or may not emit a new proposition. Determiners like the, a, an, this, and these get joined to the noun they precede; others, like some and any, generate their own proposition. Examples: * The apple det(apple, The) -> return "The" * Some apples det(apple, some) -> emit((apple, some)) -> return None """ if relations[index].word.lower() in config['NON_EMITTED_DETS']: return relations[index].word else: # TODO: maybe get the subject from info. engine.emit((relations[context[-1]].word, relations[index].word), 'M') return None class AdpmodRuleset(Ruleset): """A ruleset that processes the 'adpmod' relation.""" rel = 'adpmod' def extract(self, relations, index, context, engine, info={}): """extract(relations, index, context, engine, info) -> None Prepositional phrases always generate new propositions, according to Chand et al.'s manual. Examples: * to the city pobj(to, city) det(city, the) -> emit((to the city,)) * to both East and West Germany pobj(to, Germany) preconj(Germany, both) nn(Germany, East) cc(East, and) conj(East, West) -> emit((to East Germany, )) # Proposition x -> emit((to West Germany, )) # Proposition y -> emit((both, x, y)) * TODO: insert example with PCOMP. """ # adpobj pobj_index = Relation.get_children_with_dep('adpobj', relations, index) if pobj_index != []: pobjs = engine.analyze(relations, pobj_index[0], context + [index]) emitted_prop_ids = [] for pobj in pobjs['return_list']: prop_id = engine.emit((relations[index].word + ' ' + pobj,), 'M') emitted_prop_ids.append(prop_id) if pobjs['ids_for_preconj'] != []: indices = [j for i, j in enumerate(emitted_prop_ids) if i in pobjs['ids_for_preconj']] proposition = tuple([pobjs['preconj']] + indices) engine.emit(proposition, 'C') # adpcomp pcomp_index = Relation.get_children_with_dep('adpcomp', relations, index) if pcomp_index != []: pcomp = engine.analyze(relations, pcomp_index[0], context + [index])['return_value'] if pcomp is not None: engine.emit((relations[index].word + ' ' + pcomp,), 'M') # TODO: check the 'else' condition. class NumRuleset(Ruleset): """A ruleset that processes the 'num' relation.""" rel = 'num' def extract(self, relations, index, context, engine, info={}): """extract(relations, index, context, engine, info) -> str Nummerical modifiers are treated in the same way as adjectives. This ruleset assembles and returns the number, and it's up to the calling NounPhraseRuleset to emit the propositions. This ruleset also emits propositions for quantifier phrase modifiers. Examples: * About 200 people num(people, 200) quantmod(200, About) -> emit((200, about)) # by calling QuantmodRuleset -> return "200" """ number_indices = Relation.get_children_with_dep('num', relations, index) cc_indices = Relation.get_children_with_dep('cc', relations, index) conj_indices = Relation.get_children_with_dep('conj', relations, index) indices = sorted([index] + number_indices + cc_indices + conj_indices) words = [] for n in indices: if n != index: word = engine.analyze(relations, n, context + [index], info={'class': 'NP'}) else: word = relations[index].word if isinstance(word, str): words.append(word) elif isinstance(word, list): words += word this_number = ' '.join(words) # Process advmods advmod_indices = Relation.get_children_with_dep('advmod', relations, index) for q in advmod_indices: engine.analyze(relations, q, context + [index], {'num': this_number}) return this_number class WhatRuleset(NounPhraseRuleset, AdjectivalPhraseRuleset): """A ruleset that processes the 'what' relation.""" rel = 'what' def extract(self, relations, index, context, engine, info={}): # if relations[index].tag in ('NN', 'NNS', 'NNP', 'NNPS'): if relations[index].ctag == 'NOUN': this = NounPhraseRuleset.extract(self, relations, index, context, engine, info) for noun in this['return_list']: engine.emit((noun,), 'WHAT') # elif relations[index].tag == 'JJ': elif relations[index].ctag == 'ADJ': this = AdjectivalPhraseRuleset.extract(self, relations, index, context, engine, info) for adj in this: engine.emit((adj,), 'WHAT') else: # In case something weird happens, we just emit the word. engine.emit((relations[index].word,), 'WHAT') class CompmodRuleset(Ruleset): """A ruleset that processes the 'compmod' relation.""" rel = 'compmod' def extract(self, relations, index, context, engine, info={}): cc_indices = Relation.get_children_with_dep('cc', relations, index) if cc_indices != []: engine.analyze(relations, cc_indices[0], context + [index]) conj_indices = Relation.get_children_with_dep('conj', relations, index) conjs = [engine.analyze(relations, i, context + [index], info={'class': 'NP'}) for i in conj_indices] conjs = [c[0] for c in conjs] # TODO: check if this makes sense. return [relations[index].word] + conjs else: return [relations[index].word]
gpl-3.0
Gamebasis/3DGamebasisServer
GameData/blender-2.71-windows64/2.71/scripts/modules/bl_i18n_utils/merge_po.py
1
6180
#!/usr/bin/env python3 # ***** BEGIN GPL LICENSE BLOCK ***** # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software Foundation, # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # # ***** END GPL LICENSE BLOCK ***** # <pep8 compliant> # Merge one or more .po files into the first dest one. # If a msgkey is present in more than one merged po, the one in the first file wins, unless # it’s marked as fuzzy and one later is not. # The fuzzy flag is removed if necessary. # All other comments are never modified. # However, commented messages in dst will always remain commented, and commented messages are # never merged from sources. import sys try: import settings import utils except: from . import (settings, utils) # XXX This is a quick hack to make it work with new I18n... objects! To be reworked! def main(): import argparse parser = argparse.ArgumentParser(description="" "Merge one or more .po files into the first dest one.\n" "If a msgkey (msgctxt, msgid) is present in more than one merged po, the one in the first file " "wins, unless it’s marked as fuzzy and one later is not.\n" "The fuzzy flag is removed if necessary.\n" "All other comments are never modified.\n" "Commented messages in dst will always remain commented, and commented messages are never merged " "from sources.") parser.add_argument('-s', '--stats', action="store_true", help="Show statistics info.") parser.add_argument('-r', '--replace', action="store_true", help="Replace existing messages of same \"level\" already in dest po.") parser.add_argument('dst', metavar='dst.po', help="The dest po into which merge the others.") parser.add_argument('src', metavar='src.po', nargs='+', help="The po's to merge into the dst.po one.") args = parser.parse_args() ret = 0 done_msgkeys = set() done_fuzzy_msgkeys = set() nbr_merged = 0 nbr_replaced = 0 nbr_added = 0 nbr_unfuzzied = 0 dst_msgs = utils.I18nMessages(kind='PO', src=args.dst) if dst_msgs.parsing_errors: print("Dest po is BROKEN, aborting.") return 1 if args.stats: print("Dest po, before merging:") dst_msgs.print_stats(prefix="\t") # If we don’t want to replace existing valid translations, pre-populate done_msgkeys and done_fuzzy_msgkeys. if not args.replace: done_msgkeys = dst_msgs.trans_msgs.copy() done_fuzzy_msgkeys = dst_msgs.fuzzy_msgs.copy() for po in args.src: msgs = utils.I18nMessages(kind='PO', src=po) if msgs.parsing_errors: print("\tSrc po {} is BROKEN, skipping.".format(po)) ret = 1 continue print("\tMerging {}...".format(po)) if args.stats: print("\t\tMerged po stats:") msgs.print_stats(prefix="\t\t\t") for msgkey, msg in msgs.msgs.items(): msgctxt, msgid = msgkey # This msgkey has already been completely merged, or is a commented one, # or the new message is commented, skip it. if msgkey in (done_msgkeys | dst_msgs.comm_msgs | msgs.comm_msgs): continue is_ttip = msg.is_tooltip # New messages does not yet exists in dest. if msgkey not in dst_msgs.msgs: dst_msgs[msgkey] = msgs.msgs[msgkey] if msgkey in msgs.fuzzy_msgs: done_fuzzy_msgkeys.add(msgkey) dst_msgs.fuzzy_msgs.add(msgkey) elif msgkey in msgs.trans_msgs: done_msgkeys.add(msgkey) dst_msgs.trans_msgs.add(msgkey) nbr_added += 1 # From now on, the new messages is already in dst. # New message is neither translated nor fuzzy, skip it. elif msgkey not in (msgs.trans_msgs | msgs.fuzzy_msgs): continue # From now on, the new message is either translated or fuzzy! # The new message is translated. elif msgkey in msgs.trans_msgs: dst_msgs.msgs[msgkey].msgstr = msg.msgstr done_msgkeys.add(msgkey) done_fuzzy_msgkeys.discard(msgkey) if msgkey in dst_msgs.fuzzy_msgs: dst_msgs.fuzzy_msgs.remove(msgkey) nbr_unfuzzied += 1 if msgkey not in dst_msgs.trans_msgs: dst_msgs.trans_msgs.add(msgkey) else: nbr_replaced += 1 nbr_merged += 1 # The new message is fuzzy, org one is fuzzy too, and this msgkey has not yet been merged. elif msgkey not in (dst_msgs.trans_msgs | done_fuzzy_msgkeys): dst_msgs[msgkey].msgstr = msg.msgstr done_fuzzy_msgkeys.add(msgkey) dst_msgs.fuzzy_msgs.add(msgkey) nbr_merged += 1 nbr_replaced += 1 dst_msgs.write(kind='PO', dest=args.dst) print("Merged completed. {} messages were merged (among which {} were replaced), {} were added, " "{} were \"un-fuzzied\".".format(nbr_merged, nbr_replaced, nbr_added, nbr_unfuzzied)) if args.stats: dst_msgs.update_info() print("Final merged po stats:") dst_msgs.print_stats(prefix="\t") return ret if __name__ == "__main__": print("\n\n *** Running {} *** \n".format(__file__)) sys.exit(main())
gpl-3.0
saurabh6790/-aimobilize-lib
core/page/permission_manager/permission_manager.py
32
3461
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors # MIT License. See license.txt from __future__ import unicode_literals import webnotes import webnotes.defaults @webnotes.whitelist() def get_roles_and_doctypes(): webnotes.only_for(("System Manager", "Administrator")) return { "doctypes": [d[0] for d in webnotes.conn.sql("""select name from `tabDocType` dt where ifnull(istable,0)=0 and name not in ('DocType', 'Control Panel') and exists(select * from `tabDocField` where parent=dt.name)""")], "roles": [d[0] for d in webnotes.conn.sql("""select name from tabRole where name not in ('Guest', 'Administrator')""")] } @webnotes.whitelist() def get_permissions(doctype=None, role=None): webnotes.only_for(("System Manager", "Administrator")) return webnotes.conn.sql("""select * from tabDocPerm where %s%s order by parent, permlevel, role""" % (\ doctype and (" parent='%s'" % doctype) or "", role and ((doctype and " and " or "") + " role='%s'" % role) or "", ), as_dict=True) @webnotes.whitelist() def remove(doctype, name): webnotes.only_for(("System Manager", "Administrator")) match = webnotes.conn.get_value("DocPerm", name, "`match`") webnotes.conn.sql("""delete from tabDocPerm where name=%s""", name) validate_and_reset(doctype, for_remove=True) if match: webnotes.defaults.clear_cache() @webnotes.whitelist() def add(parent, role, permlevel): webnotes.only_for(("System Manager", "Administrator")) webnotes.doc(fielddata={ "doctype":"DocPerm", "__islocal": 1, "parent": parent, "parenttype": "DocType", "parentfield": "permissions", "role": role, "permlevel": permlevel, "read": 1 }).save() validate_and_reset(parent) @webnotes.whitelist() def update(name, doctype, ptype, value=0): webnotes.only_for(("System Manager", "Administrator")) webnotes.conn.sql("""update tabDocPerm set `%s`=%s where name=%s"""\ % (ptype, '%s', '%s'), (value, name)) validate_and_reset(doctype) if ptype == "read" and webnotes.conn.get_value("DocPerm", name, "`match`"): webnotes.defaults.clear_cache() @webnotes.whitelist() def update_match(name, doctype, match=""): webnotes.only_for(("System Manager", "Administrator")) webnotes.conn.sql("""update tabDocPerm set `match`=%s where name=%s""", (match, name)) validate_and_reset(doctype) webnotes.defaults.clear_cache() def validate_and_reset(doctype, for_remove=False): from core.doctype.doctype.doctype import validate_permissions_for_doctype validate_permissions_for_doctype(doctype, for_remove) clear_doctype_cache(doctype) @webnotes.whitelist() def reset(doctype): webnotes.only_for(("System Manager", "Administrator")) webnotes.reset_perms(doctype) clear_doctype_cache(doctype) webnotes.defaults.clear_cache() def clear_doctype_cache(doctype): webnotes.clear_cache(doctype=doctype) for user in webnotes.conn.sql_list("""select distinct tabUserRole.parent from tabUserRole, tabDocPerm where tabDocPerm.parent = %s and tabDocPerm.role = tabUserRole.role""", doctype): webnotes.clear_cache(user=user) @webnotes.whitelist() def get_users_with_role(role): webnotes.only_for(("System Manager", "Administrator")) return [p[0] for p in webnotes.conn.sql("""select distinct tabProfile.name from tabUserRole, tabProfile where tabUserRole.role=%s and tabProfile.name != "Administrator" and tabUserRole.parent = tabProfile.name and ifnull(tabProfile.enabled,0)=1""", role)]
mit
apache/airflow
tests/providers/odbc/hooks/test_odbc.py
2
7642
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # import json from unittest import mock from urllib.parse import quote_plus, urlparse import pyodbc from airflow.models import Connection from airflow.providers.odbc.hooks.odbc import OdbcHook class TestOdbcHook: def get_hook(self=None, hook_params=None, conn_params=None): hook_params = hook_params or {} conn_params = conn_params or {} connection = Connection( **{ **dict(login='login', password='password', host='host', schema='schema', port=1234), **conn_params, } ) hook = OdbcHook(**hook_params) hook.get_connection = mock.Mock() hook.get_connection.return_value = connection return hook def test_driver_in_extra(self): conn_params = dict(extra=json.dumps(dict(Driver='Fake Driver', Fake_Param='Fake Param'))) hook = self.get_hook(conn_params=conn_params) expected = ( 'DRIVER={Fake Driver};' 'SERVER=host;' 'DATABASE=schema;' 'UID=login;' 'PWD=password;' 'PORT=1234;' 'Fake_Param=Fake Param;' ) assert hook.odbc_connection_string == expected def test_driver_in_both(self): conn_params = dict(extra=json.dumps(dict(Driver='Fake Driver', Fake_Param='Fake Param'))) hook_params = dict(driver='ParamDriver') hook = self.get_hook(hook_params=hook_params, conn_params=conn_params) expected = ( 'DRIVER={ParamDriver};' 'SERVER=host;' 'DATABASE=schema;' 'UID=login;' 'PWD=password;' 'PORT=1234;' 'Fake_Param=Fake Param;' ) assert hook.odbc_connection_string == expected def test_dsn_in_extra(self): conn_params = dict(extra=json.dumps(dict(DSN='MyDSN', Fake_Param='Fake Param'))) hook = self.get_hook(conn_params=conn_params) expected = ( 'DSN=MyDSN;SERVER=host;DATABASE=schema;UID=login;PWD=password;PORT=1234;Fake_Param=Fake Param;' ) assert hook.odbc_connection_string == expected def test_dsn_in_both(self): conn_params = dict(extra=json.dumps(dict(DSN='MyDSN', Fake_Param='Fake Param'))) hook_params = dict(driver='ParamDriver', dsn='ParamDSN') hook = self.get_hook(hook_params=hook_params, conn_params=conn_params) expected = ( 'DRIVER={ParamDriver};' 'DSN=ParamDSN;' 'SERVER=host;' 'DATABASE=schema;' 'UID=login;' 'PWD=password;' 'PORT=1234;' 'Fake_Param=Fake Param;' ) assert hook.odbc_connection_string == expected def test_get_uri(self): conn_params = dict(extra=json.dumps(dict(DSN='MyDSN', Fake_Param='Fake Param'))) hook_params = dict(dsn='ParamDSN') hook = self.get_hook(hook_params=hook_params, conn_params=conn_params) uri_param = quote_plus( 'DSN=ParamDSN;SERVER=host;DATABASE=schema;UID=login;PWD=password;PORT=1234;Fake_Param=Fake Param;' ) expected = 'mssql+pyodbc:///?odbc_connect=' + uri_param assert hook.get_uri() == expected def test_connect_kwargs_from_hook(self): hook = self.get_hook( hook_params=dict( connect_kwargs={ 'attrs_before': { 1: 2, pyodbc.SQL_TXN_ISOLATION: pyodbc.SQL_TXN_READ_UNCOMMITTED, }, 'readonly': True, 'autocommit': False, } ), ) assert hook.connect_kwargs == { 'attrs_before': {1: 2, pyodbc.SQL_TXN_ISOLATION: pyodbc.SQL_TXN_READ_UNCOMMITTED}, 'readonly': True, 'autocommit': False, } def test_connect_kwargs_from_conn(self): extra = json.dumps( dict( connect_kwargs={ 'attrs_before': { 1: 2, pyodbc.SQL_TXN_ISOLATION: pyodbc.SQL_TXN_READ_UNCOMMITTED, }, 'readonly': True, 'autocommit': True, } ) ) hook = self.get_hook(conn_params=dict(extra=extra)) assert hook.connect_kwargs == { 'attrs_before': {1: 2, pyodbc.SQL_TXN_ISOLATION: pyodbc.SQL_TXN_READ_UNCOMMITTED}, 'readonly': True, 'autocommit': True, } def test_connect_kwargs_from_conn_and_hook(self): """ When connect_kwargs in both hook and conn, should be merged properly. Hook beats conn. """ conn_extra = json.dumps(dict(connect_kwargs={'attrs_before': {1: 2, 3: 4}, 'readonly': False})) hook_params = dict( connect_kwargs={'attrs_before': {3: 5, pyodbc.SQL_TXN_ISOLATION: 0}, 'readonly': True} ) hook = self.get_hook(conn_params=dict(extra=conn_extra), hook_params=hook_params) assert hook.connect_kwargs == { 'attrs_before': {1: 2, 3: 5, pyodbc.SQL_TXN_ISOLATION: 0}, 'readonly': True, } def test_connect_kwargs_bool_from_uri(self): """ Bools will be parsed from uri as strings """ conn_extra = json.dumps(dict(connect_kwargs={'ansi': True})) hook = self.get_hook(conn_params=dict(extra=conn_extra)) assert hook.connect_kwargs == { 'ansi': True, } def test_driver(self): hook = self.get_hook(hook_params=dict(driver='Blah driver')) assert hook.driver == 'Blah driver' hook = self.get_hook(hook_params=dict(driver='{Blah driver}')) assert hook.driver == 'Blah driver' hook = self.get_hook(conn_params=dict(extra='{"driver": "Blah driver"}')) assert hook.driver == 'Blah driver' hook = self.get_hook(conn_params=dict(extra='{"driver": "{Blah driver}"}')) assert hook.driver == 'Blah driver' def test_database(self): hook = self.get_hook(hook_params=dict(database='abc')) assert hook.database == 'abc' hook = self.get_hook() assert hook.database == 'schema' def test_sqlalchemy_scheme_default(self): hook = self.get_hook() uri = hook.get_uri() assert urlparse(uri).scheme == 'mssql+pyodbc' def test_sqlalchemy_scheme_param(self): hook = self.get_hook(hook_params=dict(sqlalchemy_scheme='my-scheme')) uri = hook.get_uri() assert urlparse(uri).scheme == 'my-scheme' def test_sqlalchemy_scheme_extra(self): hook = self.get_hook(conn_params=dict(extra=json.dumps(dict(sqlalchemy_scheme='my-scheme')))) uri = hook.get_uri() assert urlparse(uri).scheme == 'my-scheme'
apache-2.0
psaavedra/matrix-bot
matrixbot/plugins/wkbotsfeeder.py
1
9174
import json import logging import os import pytz import requests import sys import urllib.request, urllib.parse, urllib.error import time if os.path.dirname(__file__) == "matrixbot/plugins": sys.path.append(os.path.abspath(".")) from matrixbot import utils pp, puts, set_property = utils.pp, utils.puts, utils.set_property class WKBotsFeederPlugin: def __init__(self, bot, settings): self.name = "WKBotsFeederPlugin" self.logger = utils.get_logger() self.bot = bot self.load(settings) def load(self, settings): self.settings = settings self.logger.info("WKBotsFeederPlugin loaded (%(name)s)" % settings) for builder_name, builder in list(self.settings["builders"].items()): if 'builder_name' not in builder: builder['builder_name'] = builder_name builder['last_buildjob'] = -1 set_property(self.settings, builder, "last_buildjob_url_schema") set_property(self.settings, builder, "builds_url_schema") set_property(self.settings, builder, "only_failures", default=True) set_property(self.settings, builder, "notify_recoveries", default=True) self.logger.info("WKBotsFeederPlugin loaded (%(name)s) builder: " % settings + json.dumps(builder, indent = 4)) self.lasttime = time.time() self.period = self.settings.get('period', 60) def pretty_entry(self, builder): url = self.last_build_url(builder) res = "%(builder_name)s " % builder res += "(<a href='%s'>" % url res += "%(last_buildjob)s </a>): " % builder if builder['recovery']: res += pp("recovery", color="green", strong="") elif builder['failed']: res += pp("failed", color="red", strong="") else: res += pp("success", color="green", strong="") return res def last_build_url(self, builder): builderid = int(builder['builderid']) build_number = int(builder['last_buildjob']) return builder['last_buildjob_url_schema'] % (builderid, build_number) def send(self, message): for room_id in self.settings["rooms"]: room_id = self.bot.get_real_room_id(room_id) self.bot.send_html(room_id, message, msgtype="m.notice") def should_send_message(self, builder, failed): if "mute" in builder: return False return failed or (not builder['only_failures']) or (builder['notify_recoveries'] and builder['recovery']) def failed(self, build, value = "build successful"): return not self.succeeded(build, value) def succeeded(self, build, value = "build successful"): return 'state_string' in build and build['state_string'] == value def get_step(self, builder, build, stepname): builderid = int(builder['builderid']) buildNumber = int(build['number']) url = "https://build.webkit.org/api/v2/builders/%d/builds/%d/steps?name=%s" % (builderid, buildNumber, stepname) ret = requests.get(url).json() return ret['steps'][0] def get_last_build(self, builder): url = builder['builds_url_schema'] % builder['builderid'] ret = requests.get(url).json() return ret['builds'][0] def dispatch(self, handler=None): self.logger.debug("WKBotsFeederPlugin dispatch") now = time.time() if now < self.lasttime + self.period: return # Feeder is only updated each 'period' time self.lasttime = now res = [] for builder_name, builder in list(self.settings["builders"].items()): self.logger.debug("WKBotsFeederPlugin dispatch: Fetching %s ..." % builder_name) try: build = self.get_last_build(builder) if builder['last_buildjob'] >= build['number']: continue if 'target_step' in builder: target_step = builder['target_step'] step = self.get_step(builder, build, target_step['name']) failed = self.failed(step, target_step['text']) else: failed = self.failed(build) builder.update({ 'failed': failed, 'last_buildjob': int(build['number']), 'recovery': 'failed' in builder and builder['failed'] and not failed }) if self.should_send_message(builder, failed): self.logger.debug("WKBotsFeederPlugin: Should send message") message = self.pretty_entry(builder) self.send(message) except Exception as e: self.logger.error("WKBotsFeederPlugin got error in builder %s: %s" % (builder_name,e)) def command(self, sender, room_id, body, handler = None): self.logger.debug("WKBotsFeederPlugin command: %s" % body) if len(body) == 0: return command = body.split() if command[0] != self.settings["name"]: return if len(command) > 1 and command[1].lower() == "mute": self.command_mute(sender, room_id, command[2:], handler) else: self.bot.send_html(room_id, "<p>Unknown command: %s</p>" % body) def command_mute(self, sender, room_id, command, handler): # Command: mute <builder-name> [ON|OFF]. if len(command) == 0: self.help(sender, room_id, handler) return builders = self.settings["builders"] builderName = command[0] if builderName not in builders.keys(): self.help(sender, room_id, handler) return value = (command[1] if command[1:] else "on").lower() if value not in ["on", "off"]: self.help(sender, room_id, handler) return if value == "on": builders[builderName]["mute"] = True else: builders[builderName].pop("mute") self.bot.send_html(room_id, "<p>Builder '%s' was set to mute %s</p>" % (builderName, value)) def help(self, sender, room_id, handler): if not handler: return self.logger.debug("WKBotsFeederPlugin help") message = "mute <builder-name> [ON|OFF]" handler(room_id, message) def selftest(): print("selftest: " + os.path.basename(__file__)) def webkitBuilderSettings(): return { "name": "WKBotsFeederPlugin", "last_buildjob_url_schema": "https://build.webkit.org/#/builders/%d/builds/%d", "builds_url_schema": "https://build.webkit.org/api/v2/builders/%d/builds?complete=true&order=-number&limit=1", "only_failures": False, "rooms": ["0"], "builders": { "GTK-Linux-64-bit-Release-Ubuntu-LTS-Build": { "builderid": 68, }, }, } def jsCoreBuilderSettings(): ret = webkitBuilderSettings() ret["builders"] = { "JSCOnly-Linux-MIPS32el-Release": { "builderid": 31, "target_step": { "name": "compile-webkit", "text": "compiled" } } } return ret plugin = WKBotsFeederPlugin(utils.MockBot(), webkitBuilderSettings()) test_dispatch(plugin) test_can_fetch_last_build(plugin) plugin.load(jsCoreBuilderSettings()) test_dispatch(plugin) plugin.load(webkitBuilderSettings()) test_mute_command(plugin) def test_dispatch(plugin): print("test_dispatch: ") logging.basicConfig(level = logging.DEBUG) plugin.lasttime = 0 plugin.period = 0 plugin.dispatch() print("") print("Ok") def test_can_fetch_last_build(plugin): puts("test_can_fetch_last_build: ") builder = plugin.settings['builders']["GTK-Linux-64-bit-Release-Ubuntu-LTS-Build"] build = plugin.get_last_build(builder) assert(build) print("Ok") def test_mute_command(plugin): puts("test_mute_command: ") logging.basicConfig(level = logging.DEBUG) builder = plugin.settings['builders']["GTK-Linux-64-bit-Release-Ubuntu-LTS-Build"] sender = "user" room_id = plugin.settings["rooms"][0] # mute <builder-name>. body = "WKBotsFeederPlugin mute GTK-Linux-64-bit-Release-Ubuntu-LTS-Build" plugin.command(sender, room_id, body) assert(plugin.settings['builders']["GTK-Linux-64-bit-Release-Ubuntu-LTS-Build"]["mute"]) print("") # mute <builder-name> off. body = "WKBotsFeederPlugin mute GTK-Linux-64-bit-Release-Ubuntu-LTS-Build off" plugin.command(sender, room_id, body) assert("mute" not in plugin.settings['builders']["GTK-Linux-64-bit-Release-Ubuntu-LTS-Build"]) print("") # mute <builder-name>. body = "WKBotsFeederPlugin mute GTK-Linux-64-bit-Release-Ubuntu-LTS-Build on" plugin.command(sender, room_id, body) assert(plugin.settings['builders']["GTK-Linux-64-bit-Release-Ubuntu-LTS-Build"]["mute"]) print("") print("Ok") if __name__ == '__main__': selftest()
mit
PaulcyMboungou/django-messaging
django_messaging/models.py
1
2827
from django.core.exceptions import ObjectDoesNotExist from django.db import models from django.contrib.auth.models import User class DmUser(models.Model): user=models.ForeignKey(User,unique=True,related_name='dm_user') last_activity=models.DateTimeField(auto_now_add=True) contacts=models.ManyToManyField(User,related_name='dm_contact') def __unicode__(self): return 'profile> '+self.user.username def print_contacts(self): for contact in self.contacts.all(): print str(contact.id)+' - '+contact.username return def get_messages(self): return self.dmmessage_set.all() def get_message(self,message_id): try: return self.get_messages().filter(id=message_id,to_user=self)[0] except ObjectDoesNotExist: #~ this guy is trying to read other user's pm return None def get_first_unreaded_message(self): try: return self.get_messages().order_by('-date').filter(to_user=self,readed=False)[0] except ObjectDoesNotExist: return None def print_messages(self): msgs=self.get_messages() readed_msgs=0 unreaded_msgs=0 for msg in msgs: readed='' if msg.readed: readed=' - readed' readed_msgs=readed_msgs+1 else: unreaded_msgs=unreaded_msgs+1 print 'id: '+str(msg.id)+' - '+str(msg)+readed if list(msgs)==[]: print 'No messages' else: print str(readed_msgs)+' readed messages' print str(unreaded_msgs)+' unreaded messages' return def delete_message(self,message_id): try: msg=self.get_messages().filter(id=message_id) except ObjectDoesNotExists: return False print 'Deleting message '+str(msg) msg.delete() return True def delete_all_messages(self): msgs=self.get_messages() i=0 for msg in msgs: self.delete_message(msg.id) i=i+1 print str(i)+' messages deleted' return def send_message(self,to_user,message): msg=DmMessage(from_user=self,to_user=to_user,message=message) msg.save() return True def has_message(self): return self.count_unreaded_messages()>0 def count_messages(self): return self.get_messages().count() def count_unreaded_messages(self): readed=0 for message in self.get_messages(): if message.readed==False: readed=readed+1 return readed class Admin: pass class DmMessage(models.Model): to_user=models.ForeignKey(DmUser) from_user=models.ForeignKey(DmUser,related_name='from_user') date=models.DateTimeField(auto_now_add=True) message=models.CharField(max_length=255) readed=models.BooleanField(default=False) def __unicode__(self): return self.from_user.user.username+' ('+str(self.from_user.id)+') -> '+self.to_user.user.username+'('+str(self.to_user.id)+')' class Admin: pass
bsd-3-clause
sodexis/odoo
openerp/addons/base/ir/ir_model.py
4
61629
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Business Applications # Copyright (C) 2004-2014 OpenERP S.A. (<http://openerp.com>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from collections import defaultdict import logging import re import time import types import openerp from openerp import SUPERUSER_ID from openerp import models, tools, api from openerp.modules.registry import RegistryManager from openerp.osv import fields, osv from openerp.osv.orm import BaseModel, Model, MAGIC_COLUMNS, except_orm from openerp.tools import config from openerp.tools.safe_eval import safe_eval as eval from openerp.tools.translate import _ _logger = logging.getLogger(__name__) MODULE_UNINSTALL_FLAG = '_force_unlink' def _get_fields_type(self, cr, uid, context=None): # Avoid too many nested `if`s below, as RedHat's Python 2.6 # break on it. See bug 939653. return sorted([(k,k) for k,v in fields.__dict__.iteritems() if type(v) == types.TypeType and \ issubclass(v, fields._column) and \ v != fields._column and \ not v._deprecated and \ not issubclass(v, fields.function)]) def _in_modules(self, cr, uid, ids, field_name, arg, context=None): #pseudo-method used by fields.function in ir.model/ir.model.fields module_pool = self.pool["ir.module.module"] installed_module_ids = module_pool.search(cr, uid, [('state','=','installed')]) installed_module_names = module_pool.read(cr, uid, installed_module_ids, ['name'], context=context) installed_modules = set(x['name'] for x in installed_module_names) result = {} xml_ids = osv.osv._get_xml_ids(self, cr, uid, ids) for k,v in xml_ids.iteritems(): result[k] = ', '.join(sorted(installed_modules & set(xml_id.split('.')[0] for xml_id in v))) return result class unknown(models.AbstractModel): """ Abstract model used as a substitute for relational fields with an unknown comodel. """ _name = '_unknown' class ir_model(osv.osv): _name = 'ir.model' _description = "Models" _order = 'model' def _is_osv_memory(self, cr, uid, ids, field_name, arg, context=None): models = self.browse(cr, uid, ids, context=context) res = dict.fromkeys(ids) for model in models: if model.model in self.pool: res[model.id] = self.pool[model.model].is_transient() else: _logger.error('Missing model %s' % (model.model, )) return res def _search_osv_memory(self, cr, uid, model, name, domain, context=None): if not domain: return [] __, operator, value = domain[0] if operator not in ['=', '!=']: raise osv.except_osv(_("Invalid Search Criteria"), _('The osv_memory field can only be compared with = and != operator.')) value = bool(value) if operator == '=' else not bool(value) all_model_ids = self.search(cr, uid, [], context=context) is_osv_mem = self._is_osv_memory(cr, uid, all_model_ids, 'osv_memory', arg=None, context=context) return [('id', 'in', [id for id in is_osv_mem if bool(is_osv_mem[id]) == value])] def _view_ids(self, cr, uid, ids, field_name, arg, context=None): models = self.browse(cr, uid, ids) res = {} for model in models: res[model.id] = self.pool["ir.ui.view"].search(cr, uid, [('model', '=', model.model)]) return res def _inherited_models(self, cr, uid, ids, field_name, arg, context=None): res = {} for model in self.browse(cr, uid, ids, context=context): res[model.id] = [] inherited_models = [model_name for model_name in self.pool[model.model]._inherits] if inherited_models: res[model.id] = self.search(cr, uid, [('model', 'in', inherited_models)], context=context) return res _columns = { 'name': fields.char('Model Description', translate=True, required=True), 'model': fields.char('Model', required=True, select=1), 'info': fields.text('Information'), 'field_id': fields.one2many('ir.model.fields', 'model_id', 'Fields', required=True, copy=True), 'inherited_model_ids': fields.function(_inherited_models, type="many2many", obj="ir.model", string="Inherited models", help="The list of models that extends the current model."), 'state': fields.selection([('manual','Custom Object'),('base','Base Object')],'Type', readonly=True), 'access_ids': fields.one2many('ir.model.access', 'model_id', 'Access'), 'osv_memory': fields.function(_is_osv_memory, string='Transient Model', type='boolean', fnct_search=_search_osv_memory, help="This field specifies whether the model is transient or not (i.e. if records are automatically deleted from the database or not)"), 'modules': fields.function(_in_modules, type='char', string='In Modules', help='List of modules in which the object is defined or inherited'), 'view_ids': fields.function(_view_ids, type='one2many', obj='ir.ui.view', string='Views'), } _defaults = { 'model': 'x_', 'state': 'manual', } def _check_model_name(self, cr, uid, ids, context=None): for model in self.browse(cr, uid, ids, context=context): if model.state=='manual': if not model.model.startswith('x_'): return False if not re.match('^[a-z_A-Z0-9.]+$',model.model): return False return True def _model_name_msg(self, cr, uid, ids, context=None): return _('The Object name must start with x_ and not contain any special character !') _constraints = [ (_check_model_name, _model_name_msg, ['model']), ] _sql_constraints = [ ('obj_name_uniq', 'unique (model)', 'Each model must be unique!'), ] # overridden to allow searching both on model name (model field) # and model description (name field) def _name_search(self, cr, uid, name='', args=None, operator='ilike', context=None, limit=100, name_get_uid=None): if args is None: args = [] domain = args + ['|', ('model', operator, name), ('name', operator, name)] return self.name_get(cr, name_get_uid or uid, super(ir_model, self).search(cr, uid, domain, limit=limit, context=context), context=context) def _drop_table(self, cr, uid, ids, context=None): for model in self.browse(cr, uid, ids, context): model_pool = self.pool[model.model] cr.execute('select relkind from pg_class where relname=%s', (model_pool._table,)) result = cr.fetchone() if result and result[0] == 'v': cr.execute('DROP view %s' % (model_pool._table,)) elif result and result[0] == 'r': cr.execute('DROP TABLE %s CASCADE' % (model_pool._table,)) return True def unlink(self, cr, user, ids, context=None): # Prevent manual deletion of module tables if context is None: context = {} if isinstance(ids, (int, long)): ids = [ids] if not context.get(MODULE_UNINSTALL_FLAG): for model in self.browse(cr, user, ids, context): if model.state != 'manual': raise except_orm(_('Error'), _("Model '%s' contains module data and cannot be removed!") % (model.name,)) self._drop_table(cr, user, ids, context) res = super(ir_model, self).unlink(cr, user, ids, context) if not context.get(MODULE_UNINSTALL_FLAG): # only reload pool for normal unlink. For module uninstall the # reload is done independently in openerp.modules.loading cr.commit() # must be committed before reloading registry in new cursor api.Environment.reset() RegistryManager.new(cr.dbname) RegistryManager.signal_registry_change(cr.dbname) return res def write(self, cr, user, ids, vals, context=None): if context: context = dict(context) context.pop('__last_update', None) # Filter out operations 4 link from field id, because openerp-web # always write (4,id,False) even for non dirty items if 'field_id' in vals: vals['field_id'] = [op for op in vals['field_id'] if op[0] != 4] return super(ir_model,self).write(cr, user, ids, vals, context) def create(self, cr, user, vals, context=None): if context is None: context = {} res = super(ir_model,self).create(cr, user, vals, context) if vals.get('state','manual')=='manual': # add model in registry self.instanciate(cr, user, vals['model'], context) self.pool.setup_models(cr, partial=(not self.pool.ready)) # update database schema model = self.pool[vals['model']] ctx = dict(context, field_name=vals['name'], field_state='manual', select=vals.get('select_level', '0'), update_custom_fields=True) model._auto_init(cr, ctx) model._auto_end(cr, ctx) # actually create FKs! RegistryManager.signal_registry_change(cr.dbname) return res def instanciate(self, cr, user, model, context=None): if isinstance(model, unicode): model = model.encode('utf-8') class CustomModel(models.Model): _name = model _module = False _custom = True CustomModel._build_model(self.pool, cr) class ir_model_fields(osv.osv): _name = 'ir.model.fields' _description = "Fields" _rec_name = 'field_description' _columns = { 'name': fields.char('Name', required=True, select=1), 'complete_name': fields.char('Complete Name', select=1), 'model': fields.char('Object Name', required=True, select=1, help="The technical name of the model this field belongs to"), 'relation': fields.char('Object Relation', help="For relationship fields, the technical name of the target model"), 'relation_field': fields.char('Relation Field', help="For one2many fields, the field on the target model that implement the opposite many2one relationship"), 'model_id': fields.many2one('ir.model', 'Model', required=True, select=True, ondelete='cascade', help="The model this field belongs to"), 'field_description': fields.char('Field Label', required=True), 'ttype': fields.selection(_get_fields_type, 'Field Type', required=True), 'selection': fields.char('Selection Options', help="List of options for a selection field, " "specified as a Python expression defining a list of (key, label) pairs. " "For example: [('blue','Blue'),('yellow','Yellow')]"), 'required': fields.boolean('Required'), 'readonly': fields.boolean('Readonly'), 'select_level': fields.selection([('0','Not Searchable'),('1','Always Searchable'),('2','Advanced Search (deprecated)')],'Searchable', required=True), 'translate': fields.boolean('Translatable', help="Whether values for this field can be translated (enables the translation mechanism for that field)"), 'size': fields.integer('Size'), 'state': fields.selection([('manual','Custom Field'),('base','Base Field')],'Type', required=True, readonly=True, select=1), 'on_delete': fields.selection([('cascade', 'Cascade'), ('set null', 'Set NULL'), ('restrict', 'Restrict')], 'On Delete', help='On delete property for many2one fields'), 'domain': fields.char('Domain', help="The optional domain to restrict possible values for relationship fields, " "specified as a Python expression defining a list of triplets. " "For example: [('color','=','red')]"), 'groups': fields.many2many('res.groups', 'ir_model_fields_group_rel', 'field_id', 'group_id', 'Groups'), 'selectable': fields.boolean('Selectable'), 'modules': fields.function(_in_modules, type='char', string='In Modules', help='List of modules in which the field is defined'), 'serialization_field_id': fields.many2one('ir.model.fields', 'Serialization Field', domain = "[('ttype','=','serialized')]", ondelete='cascade', help="If set, this field will be stored in the sparse " "structure of the serialization field, instead " "of having its own database column. This cannot be " "changed after creation."), } _rec_name='field_description' _defaults = { 'selection': "", 'domain': "[]", 'name': 'x_', 'state': 'manual', 'on_delete': 'set null', 'select_level': '0', 'field_description': '', 'selectable': 1, } _order = "name" def _check_selection(self, cr, uid, selection, context=None): try: selection_list = eval(selection) except Exception: _logger.warning('Invalid selection list definition for fields.selection', exc_info=True) raise except_orm(_('Error'), _("The Selection Options expression is not a valid Pythonic expression." "Please provide an expression in the [('key','Label'), ...] format.")) check = True if not (isinstance(selection_list, list) and selection_list): check = False else: for item in selection_list: if not (isinstance(item, (tuple,list)) and len(item) == 2): check = False break if not check: raise except_orm(_('Error'), _("The Selection Options expression is must be in the [('key','Label'), ...] format!")) return True def _size_gt_zero_msg(self, cr, user, ids, context=None): return _('Size of the field can never be less than 0 !') _sql_constraints = [ ('size_gt_zero', 'CHECK (size>=0)',_size_gt_zero_msg ), ] def _drop_column(self, cr, uid, ids, context=None): for field in self.browse(cr, uid, ids, context): if field.name in MAGIC_COLUMNS: continue model = self.pool[field.model] cr.execute('select relkind from pg_class where relname=%s', (model._table,)) result = cr.fetchone() cr.execute("SELECT column_name FROM information_schema.columns WHERE table_name ='%s' and column_name='%s'" %(model._table, field.name)) column_name = cr.fetchone() if column_name and (result and result[0] == 'r'): cr.execute('ALTER table "%s" DROP column "%s" cascade' % (model._table, field.name)) # remove m2m relation table for custom fields # we consider the m2m relation is only one way as it's not possible # to specify the relation table in the interface for custom fields # TODO master: maybe use ir.model.relations for custom fields if field.state == 'manual' and field.ttype == 'many2many': rel_name = model._fields[field.name].relation cr.execute('DROP table "%s"' % (rel_name)) model._pop_field(field.name) return True def unlink(self, cr, user, ids, context=None): # Prevent manual deletion of module columns if context is None: context = {} if isinstance(ids, (int, long)): ids = [ids] if not context.get(MODULE_UNINSTALL_FLAG) and \ any(field.state != 'manual' for field in self.browse(cr, user, ids, context)): raise except_orm(_('Error'), _("This column contains module data and cannot be removed!")) self._drop_column(cr, user, ids, context) res = super(ir_model_fields, self).unlink(cr, user, ids, context) if not context.get(MODULE_UNINSTALL_FLAG): # The field we just deleted might have be inherited, and registry is # inconsistent in this case; therefore we reload the registry. cr.commit() api.Environment.reset() RegistryManager.new(cr.dbname) RegistryManager.signal_registry_change(cr.dbname) return res def create(self, cr, user, vals, context=None): if 'model_id' in vals: model_data = self.pool['ir.model'].browse(cr, user, vals['model_id']) vals['model'] = model_data.model if context is None: context = {} if vals.get('ttype', False) == 'selection': if not vals.get('selection',False): raise except_orm(_('Error'), _('For selection fields, the Selection Options must be given!')) self._check_selection(cr, user, vals['selection'], context=context) res = super(ir_model_fields,self).create(cr, user, vals, context) if vals.get('state','manual') == 'manual': if not vals['name'].startswith('x_'): raise except_orm(_('Error'), _("Custom fields must have a name that starts with 'x_' !")) if vals.get('relation',False) and not self.pool['ir.model'].search(cr, user, [('model','=',vals['relation'])]): raise except_orm(_('Error'), _("Model %s does not exist!") % vals['relation']) self.pool.clear_manual_fields() if vals['model'] in self.pool: model = self.pool[vals['model']] if vals['model'].startswith('x_') and vals['name'] == 'x_name': model._rec_name = 'x_name' # re-initialize model in registry model.__init__(self.pool, cr) self.pool.setup_models(cr, partial=(not self.pool.ready)) # update database schema model = self.pool[vals['model']] ctx = dict(context, field_name=vals['name'], field_state='manual', select=vals.get('select_level', '0'), update_custom_fields=True) model._auto_init(cr, ctx) model._auto_end(cr, ctx) # actually create FKs! RegistryManager.signal_registry_change(cr.dbname) return res def write(self, cr, user, ids, vals, context=None): if context is None: context = {} #For the moment renaming a sparse field or changing the storing system is not allowed. This may be done later if 'serialization_field_id' in vals or 'name' in vals: for field in self.browse(cr, user, ids, context=context): if 'serialization_field_id' in vals and field.serialization_field_id.id != vals['serialization_field_id']: raise except_orm(_('Error!'), _('Changing the storing system for field "%s" is not allowed.')%field.name) if field.serialization_field_id and (field.name != vals['name']): raise except_orm(_('Error!'), _('Renaming sparse field "%s" is not allowed')%field.name) # if set, *one* column can be renamed here column_rename = None # names of the models to patch patched_models = set() if vals and ids: checked_selection = False # need only check it once, so defer for item in self.browse(cr, user, ids, context=context): obj = self.pool.get(item.model) field = getattr(obj, '_fields', {}).get(item.name) if item.state != 'manual': raise except_orm(_('Error!'), _('Properties of base fields cannot be altered in this manner! ' 'Please modify them through Python code, ' 'preferably through a custom addon!')) if item.ttype == 'selection' and 'selection' in vals \ and not checked_selection: self._check_selection(cr, user, vals['selection'], context=context) checked_selection = True final_name = item.name if 'name' in vals and vals['name'] != item.name: # We need to rename the column if column_rename: raise except_orm(_('Error!'), _('Can only rename one column at a time!')) if vals['name'] in obj._columns: raise except_orm(_('Error!'), _('Cannot rename column to %s, because that column already exists!') % vals['name']) if vals.get('state', 'manual') == 'manual' and not vals['name'].startswith('x_'): raise except_orm(_('Error!'), _('New column name must still start with x_ , because it is a custom field!')) if '\'' in vals['name'] or '"' in vals['name'] or ';' in vals['name']: raise ValueError('Invalid character in column name') column_rename = (obj, (obj._table, item.name, vals['name'])) final_name = vals['name'] if 'model_id' in vals and vals['model_id'] != item.model_id.id: raise except_orm(_("Error!"), _("Changing the model of a field is forbidden!")) if 'ttype' in vals and vals['ttype'] != item.ttype: raise except_orm(_("Error!"), _("Changing the type of a column is not yet supported. " "Please drop it and create it again!")) # We don't check the 'state', because it might come from the context # (thus be set for multiple fields) and will be ignored anyway. if obj is not None and field is not None: patched_models.add(obj._name) # These shall never be written (modified) for column_name in ('model_id', 'model', 'state'): if column_name in vals: del vals[column_name] res = super(ir_model_fields,self).write(cr, user, ids, vals, context=context) self.pool.clear_manual_fields() if column_rename: obj, rename = column_rename cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % rename) if column_rename or patched_models: # setup models, this will reload all manual fields in registry self.pool.setup_models(cr, partial=(not self.pool.ready)) if patched_models: # We have to update _columns of the model(s) and then call their # _auto_init to sync the db with the model. Hopefully, since write() # was called earlier, they will be in-sync before the _auto_init. # Anything we don't update in _columns now will be reset from # the model into ir.model.fields (db). ctx = dict(context, select=vals.get('select_level', '0'), update_custom_fields=True, ) for model_name in patched_models: obj = self.pool[model_name] obj._auto_init(cr, ctx) obj._auto_end(cr, ctx) # actually create FKs! if column_rename or patched_models: RegistryManager.signal_registry_change(cr.dbname) return res class ir_model_constraint(Model): """ This model tracks PostgreSQL foreign keys and constraints used by OpenERP models. """ _name = 'ir.model.constraint' _columns = { 'name': fields.char('Constraint', required=True, select=1, help="PostgreSQL constraint or foreign key name."), 'model': fields.many2one('ir.model', string='Model', required=True, select=1), 'module': fields.many2one('ir.module.module', string='Module', required=True, select=1), 'type': fields.char('Constraint Type', required=True, size=1, select=1, help="Type of the constraint: `f` for a foreign key, " "`u` for other constraints."), 'date_update': fields.datetime('Update Date'), 'date_init': fields.datetime('Initialization Date') } _sql_constraints = [ ('module_name_uniq', 'unique(name, module)', 'Constraints with the same name are unique per module.'), ] def _module_data_uninstall(self, cr, uid, ids, context=None): """ Delete PostgreSQL foreign keys and constraints tracked by this model. """ if uid != SUPERUSER_ID and not self.pool['ir.model.access'].check_groups(cr, uid, "base.group_system"): raise except_orm(_('Permission Denied'), (_('Administrator access is required to uninstall a module'))) context = dict(context or {}) ids_set = set(ids) ids.sort() ids.reverse() for data in self.browse(cr, uid, ids, context): model = data.model.model model_obj = self.pool[model] name = openerp.tools.ustr(data.name) typ = data.type # double-check we are really going to delete all the owners of this schema element cr.execute("""SELECT id from ir_model_constraint where name=%s""", (data.name,)) external_ids = [x[0] for x in cr.fetchall()] if set(external_ids)-ids_set: # as installed modules have defined this element we must not delete it! continue if typ == 'f': # test if FK exists on this table (it could be on a related m2m table, in which case we ignore it) cr.execute("""SELECT 1 from pg_constraint cs JOIN pg_class cl ON (cs.conrelid = cl.oid) WHERE cs.contype=%s and cs.conname=%s and cl.relname=%s""", ('f', name, model_obj._table)) if cr.fetchone(): cr.execute('ALTER TABLE "%s" DROP CONSTRAINT "%s"' % (model_obj._table, name),) _logger.info('Dropped FK CONSTRAINT %s@%s', name, model) if typ == 'u': # test if constraint exists cr.execute("""SELECT 1 from pg_constraint cs JOIN pg_class cl ON (cs.conrelid = cl.oid) WHERE cs.contype=%s and cs.conname=%s and cl.relname=%s""", ('u', name, model_obj._table)) if cr.fetchone(): cr.execute('ALTER TABLE "%s" DROP CONSTRAINT "%s"' % (model_obj._table, name),) _logger.info('Dropped CONSTRAINT %s@%s', name, model) self.unlink(cr, uid, ids, context) class ir_model_relation(Model): """ This model tracks PostgreSQL tables used to implement OpenERP many2many relations. """ _name = 'ir.model.relation' _columns = { 'name': fields.char('Relation Name', required=True, select=1, help="PostgreSQL table name implementing a many2many relation."), 'model': fields.many2one('ir.model', string='Model', required=True, select=1), 'module': fields.many2one('ir.module.module', string='Module', required=True, select=1), 'date_update': fields.datetime('Update Date'), 'date_init': fields.datetime('Initialization Date') } def _module_data_uninstall(self, cr, uid, ids, context=None): """ Delete PostgreSQL many2many relations tracked by this model. """ if uid != SUPERUSER_ID and not self.pool['ir.model.access'].check_groups(cr, uid, "base.group_system"): raise except_orm(_('Permission Denied'), (_('Administrator access is required to uninstall a module'))) ids_set = set(ids) to_drop_table = [] ids.sort() ids.reverse() for data in self.browse(cr, uid, ids, context): model = data.model name = openerp.tools.ustr(data.name) # double-check we are really going to delete all the owners of this schema element cr.execute("""SELECT id from ir_model_relation where name = %s""", (data.name,)) external_ids = [x[0] for x in cr.fetchall()] if set(external_ids)-ids_set: # as installed modules have defined this element we must not delete it! continue cr.execute("SELECT 1 FROM information_schema.tables WHERE table_name=%s", (name,)) if cr.fetchone() and not name in to_drop_table: to_drop_table.append(name) self.unlink(cr, uid, ids, context) # drop m2m relation tables for table in to_drop_table: cr.execute('DROP TABLE %s CASCADE'% table,) _logger.info('Dropped table %s', table) cr.commit() class ir_model_access(osv.osv): _name = 'ir.model.access' _columns = { 'name': fields.char('Name', required=True, select=True), 'active': fields.boolean('Active', help='If you uncheck the active field, it will disable the ACL without deleting it (if you delete a native ACL, it will be re-created when you reload the module.'), 'model_id': fields.many2one('ir.model', 'Object', required=True, domain=[('osv_memory','=', False)], select=True, ondelete='cascade'), 'group_id': fields.many2one('res.groups', 'Group', ondelete='cascade', select=True), 'perm_read': fields.boolean('Read Access'), 'perm_write': fields.boolean('Write Access'), 'perm_create': fields.boolean('Create Access'), 'perm_unlink': fields.boolean('Delete Access'), } _defaults = { 'active': True, } def check_groups(self, cr, uid, group): grouparr = group.split('.') if not grouparr: return False cr.execute("select 1 from res_groups_users_rel where uid=%s and gid IN (select res_id from ir_model_data where module=%s and name=%s)", (uid, grouparr[0], grouparr[1],)) return bool(cr.fetchone()) def check_group(self, cr, uid, model, mode, group_ids): """ Check if a specific group has the access mode to the specified model""" assert mode in ['read','write','create','unlink'], 'Invalid access mode' if isinstance(model, BaseModel): assert model._name == 'ir.model', 'Invalid model object' model_name = model.name else: model_name = model if isinstance(group_ids, (int, long)): group_ids = [group_ids] for group_id in group_ids: cr.execute("SELECT perm_" + mode + " " " FROM ir_model_access a " " JOIN ir_model m ON (m.id = a.model_id) " " WHERE m.model = %s AND a.active IS True " " AND a.group_id = %s", (model_name, group_id) ) r = cr.fetchone() if r is None: cr.execute("SELECT perm_" + mode + " " " FROM ir_model_access a " " JOIN ir_model m ON (m.id = a.model_id) " " WHERE m.model = %s AND a.active IS True " " AND a.group_id IS NULL", (model_name, ) ) r = cr.fetchone() access = bool(r and r[0]) if access: return True # pass no groups -> no access return False def group_names_with_access(self, cr, model_name, access_mode): """Returns the names of visible groups which have been granted ``access_mode`` on the model ``model_name``. :rtype: list """ assert access_mode in ['read','write','create','unlink'], 'Invalid access mode: %s' % access_mode cr.execute('''SELECT c.name, g.name FROM ir_model_access a JOIN ir_model m ON (a.model_id=m.id) JOIN res_groups g ON (a.group_id=g.id) LEFT JOIN ir_module_category c ON (c.id=g.category_id) WHERE m.model=%s AND a.active IS True AND a.perm_''' + access_mode, (model_name,)) return [('%s/%s' % x) if x[0] else x[1] for x in cr.fetchall()] # The context parameter is useful when the method translates error messages. # But as the method raises an exception in that case, the key 'lang' might # not be really necessary as a cache key, unless the `ormcache_context` # decorator catches the exception (it does not at the moment.) @tools.ormcache_context(accepted_keys=('lang',)) def check(self, cr, uid, model, mode='read', raise_exception=True, context=None): if uid==1: # User root have all accesses # TODO: exclude xml-rpc requests return True assert mode in ['read','write','create','unlink'], 'Invalid access mode' if isinstance(model, BaseModel): assert model._name == 'ir.model', 'Invalid model object' model_name = model.model else: model_name = model # TransientModel records have no access rights, only an implicit access rule if model_name not in self.pool: _logger.error('Missing model %s' % (model_name, )) elif self.pool[model_name].is_transient(): return True # We check if a specific rule exists cr.execute('SELECT MAX(CASE WHEN perm_' + mode + ' THEN 1 ELSE 0 END) ' ' FROM ir_model_access a ' ' JOIN ir_model m ON (m.id = a.model_id) ' ' JOIN res_groups_users_rel gu ON (gu.gid = a.group_id) ' ' WHERE m.model = %s ' ' AND gu.uid = %s ' ' AND a.active IS True ' , (model_name, uid,) ) r = cr.fetchone()[0] if r is None: # there is no specific rule. We check the generic rule cr.execute('SELECT MAX(CASE WHEN perm_' + mode + ' THEN 1 ELSE 0 END) ' ' FROM ir_model_access a ' ' JOIN ir_model m ON (m.id = a.model_id) ' ' WHERE a.group_id IS NULL ' ' AND m.model = %s ' ' AND a.active IS True ' , (model_name,) ) r = cr.fetchone()[0] if not r and raise_exception: groups = '\n\t'.join('- %s' % g for g in self.group_names_with_access(cr, model_name, mode)) msg_heads = { # Messages are declared in extenso so they are properly exported in translation terms 'read': _("Sorry, you are not allowed to access this document."), 'write': _("Sorry, you are not allowed to modify this document."), 'create': _("Sorry, you are not allowed to create this kind of document."), 'unlink': _("Sorry, you are not allowed to delete this document."), } if groups: msg_tail = _("Only users with the following access level are currently allowed to do that") + ":\n%s\n\n(" + _("Document model") + ": %s)" msg_params = (groups, model_name) else: msg_tail = _("Please contact your system administrator if you think this is an error.") + "\n\n(" + _("Document model") + ": %s)" msg_params = (model_name,) _logger.warning('Access Denied by ACLs for operation: %s, uid: %s, model: %s', mode, uid, model_name) msg = '%s %s' % (msg_heads[mode], msg_tail) raise openerp.exceptions.AccessError(msg % msg_params) return bool(r) __cache_clearing_methods = [] def register_cache_clearing_method(self, model, method): self.__cache_clearing_methods.append((model, method)) def unregister_cache_clearing_method(self, model, method): try: i = self.__cache_clearing_methods.index((model, method)) del self.__cache_clearing_methods[i] except ValueError: pass def call_cache_clearing_methods(self, cr): self.invalidate_cache(cr, SUPERUSER_ID) self.check.clear_cache(self) # clear the cache of check function for model, method in self.__cache_clearing_methods: if model in self.pool: getattr(self.pool[model], method)() # # Check rights on actions # def write(self, cr, uid, ids, values, context=None): self.call_cache_clearing_methods(cr) res = super(ir_model_access, self).write(cr, uid, ids, values, context=context) return res def create(self, cr, uid, values, context=None): self.call_cache_clearing_methods(cr) res = super(ir_model_access, self).create(cr, uid, values, context=context) return res def unlink(self, cr, uid, ids, context=None): self.call_cache_clearing_methods(cr) res = super(ir_model_access, self).unlink(cr, uid, ids, context=context) return res class ir_model_data(osv.osv): """Holds external identifier keys for records in the database. This has two main uses: * allows easy data integration with third-party systems, making import/export/sync of data possible, as records can be uniquely identified across multiple systems * allows tracking the origin of data installed by OpenERP modules themselves, thus making it possible to later update them seamlessly. """ _name = 'ir.model.data' _order = 'module,model,name' def name_get(self, cr, uid, ids, context=None): bymodel = defaultdict(dict) names = {} for res in self.browse(cr, uid, ids, context=context): bymodel[res.model][res.res_id] = res names[res.id] = res.complete_name #result[res.model][res.res_id] = res.id for model, id_map in bymodel.iteritems(): try: ng = dict(self.pool[model].name_get(cr, uid, id_map.keys(), context=context)) except Exception: pass else: for r in id_map.itervalues(): names[r.id] = ng.get(r.res_id, r.complete_name) return [(i, names[i]) for i in ids] def _complete_name_get(self, cr, uid, ids, prop, unknow_none, context=None): result = {} for res in self.browse(cr, uid, ids, context=context): result[res.id] = (res.module and (res.module + '.') or '')+res.name return result _columns = { 'name': fields.char('External Identifier', required=True, select=1, help="External Key/Identifier that can be used for " "data integration with third-party systems"), 'complete_name': fields.function(_complete_name_get, type='char', string='Complete ID'), 'model': fields.char('Model Name', required=True, select=1), 'module': fields.char('Module', required=True, select=1), 'res_id': fields.integer('Record ID', select=1, help="ID of the target record in the database"), 'noupdate': fields.boolean('Non Updatable'), 'date_update': fields.datetime('Update Date'), 'date_init': fields.datetime('Init Date') } _defaults = { 'date_init': fields.datetime.now, 'date_update': fields.datetime.now, 'noupdate': False, 'module': '' } _sql_constraints = [ ('module_name_uniq', 'unique(name, module)', 'You cannot have multiple records with the same external ID in the same module!'), ] def __init__(self, pool, cr): osv.osv.__init__(self, pool, cr) # also stored in pool to avoid being discarded along with this osv instance if getattr(pool, 'model_data_reference_ids', None) is None: self.pool.model_data_reference_ids = {} # put loads on the class, in order to share it among all instances type(self).loads = self.pool.model_data_reference_ids def _auto_init(self, cr, context=None): super(ir_model_data, self)._auto_init(cr, context) cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'ir_model_data_module_name_index\'') if not cr.fetchone(): cr.execute('CREATE INDEX ir_model_data_module_name_index ON ir_model_data (module, name)') # NEW V8 API @tools.ormcache(skiparg=3) def xmlid_lookup(self, cr, uid, xmlid): """Low level xmlid lookup Return (id, res_model, res_id) or raise ValueError if not found """ module, name = xmlid.split('.', 1) ids = self.search(cr, uid, [('module','=',module), ('name','=', name)]) if not ids: raise ValueError('External ID not found in the system: %s' % (xmlid)) # the sql constraints ensure us we have only one result res = self.read(cr, uid, ids[0], ['model', 'res_id']) if not res['res_id']: raise ValueError('External ID not found in the system: %s' % (xmlid)) return ids[0], res['model'], res['res_id'] def xmlid_to_res_model_res_id(self, cr, uid, xmlid, raise_if_not_found=False): """ Return (res_model, res_id)""" try: return self.xmlid_lookup(cr, uid, xmlid)[1:3] except ValueError: if raise_if_not_found: raise return (False, False) def xmlid_to_res_id(self, cr, uid, xmlid, raise_if_not_found=False): """ Returns res_id """ return self.xmlid_to_res_model_res_id(cr, uid, xmlid, raise_if_not_found)[1] def xmlid_to_object(self, cr, uid, xmlid, raise_if_not_found=False, context=None): """ Return a browse_record if not found and raise_if_not_found is True return None """ t = self.xmlid_to_res_model_res_id(cr, uid, xmlid, raise_if_not_found) res_model, res_id = t if res_model and res_id: record = self.pool[res_model].browse(cr, uid, res_id, context=context) if record.exists(): return record if raise_if_not_found: raise ValueError('No record found for unique ID %s. It may have been deleted.' % (xmlid)) return None # OLD API def _get_id(self, cr, uid, module, xml_id): """Returns the id of the ir.model.data record corresponding to a given module and xml_id (cached) or raise a ValueError if not found""" return self.xmlid_lookup(cr, uid, "%s.%s" % (module, xml_id))[0] def get_object_reference(self, cr, uid, module, xml_id): """Returns (model, res_id) corresponding to a given module and xml_id (cached) or raise ValueError if not found""" return self.xmlid_lookup(cr, uid, "%s.%s" % (module, xml_id))[1:3] def check_object_reference(self, cr, uid, module, xml_id, raise_on_access_error=False): """Returns (model, res_id) corresponding to a given module and xml_id (cached), if and only if the user has the necessary access rights to see that object, otherwise raise a ValueError if raise_on_access_error is True or returns a tuple (model found, False)""" model, res_id = self.get_object_reference(cr, uid, module, xml_id) #search on id found in result to check if current user has read access right check_right = self.pool.get(model).search(cr, uid, [('id', '=', res_id)]) if check_right: return model, res_id if raise_on_access_error: raise ValueError('Not enough access rights on the external ID: %s.%s' % (module, xml_id)) return model, False def get_object(self, cr, uid, module, xml_id, context=None): """ Returns a browsable record for the given module name and xml_id. If not found, raise a ValueError or return None, depending on the value of `raise_exception`. """ return self.xmlid_to_object(cr, uid, "%s.%s" % (module, xml_id), raise_if_not_found=True, context=context) def _update_dummy(self,cr, uid, model, module, xml_id=False, store=True): if not xml_id: return False id = False try: # One step to check the ID is defined and the record actually exists record = self.get_object(cr, uid, module, xml_id) if record: id = record.id self.loads[(module,xml_id)] = (model,id) for table, inherit_field in self.pool[model]._inherits.iteritems(): parent_id = record[inherit_field].id parent_xid = '%s_%s' % (xml_id, table.replace('.', '_')) self.loads[(module, parent_xid)] = (table, parent_id) except Exception: pass return id def clear_caches(self): """ Clears all orm caches on the object's methods :returns: itself """ self.xmlid_lookup.clear_cache(self) return self def unlink(self, cr, uid, ids, context=None): """ Regular unlink method, but make sure to clear the caches. """ self.clear_caches() return super(ir_model_data,self).unlink(cr, uid, ids, context=context) def _update(self,cr, uid, model, module, values, xml_id=False, store=True, noupdate=False, mode='init', res_id=False, context=None): model_obj = self.pool[model] if not context: context = {} # records created during module install should not display the messages of OpenChatter context = dict(context, install_mode=True) if xml_id and ('.' in xml_id): assert len(xml_id.split('.'))==2, _("'%s' contains too many dots. XML ids should not contain dots ! These are used to refer to other modules data, as in module.reference_id") % xml_id module, xml_id = xml_id.split('.') action_id = False if xml_id: cr.execute('''SELECT imd.id, imd.res_id, md.id, imd.model, imd.noupdate FROM ir_model_data imd LEFT JOIN %s md ON (imd.res_id = md.id) WHERE imd.module=%%s AND imd.name=%%s''' % model_obj._table, (module, xml_id)) results = cr.fetchall() for imd_id2,res_id2,real_id2,real_model,noupdate_imd in results: # In update mode, do not update a record if it's ir.model.data is flagged as noupdate if mode == 'update' and noupdate_imd: return res_id2 if not real_id2: self.clear_caches() cr.execute('delete from ir_model_data where id=%s', (imd_id2,)) res_id = False else: assert model == real_model, "External ID conflict, %s already refers to a `%s` record,"\ " you can't define a `%s` record with this ID." % (xml_id, real_model, model) res_id,action_id = res_id2,imd_id2 if action_id and res_id: model_obj.write(cr, uid, [res_id], values, context=context) self.write(cr, SUPERUSER_ID, [action_id], { 'date_update': time.strftime('%Y-%m-%d %H:%M:%S'), },context=context) elif res_id: model_obj.write(cr, uid, [res_id], values, context=context) if xml_id: if model_obj._inherits: for table in model_obj._inherits: inherit_id = model_obj.browse(cr, uid, res_id,context=context)[model_obj._inherits[table]] self.create(cr, SUPERUSER_ID, { 'name': xml_id + '_' + table.replace('.', '_'), 'model': table, 'module': module, 'res_id': inherit_id.id, 'noupdate': noupdate, },context=context) self.create(cr, SUPERUSER_ID, { 'name': xml_id, 'model': model, 'module':module, 'res_id':res_id, 'noupdate': noupdate, },context=context) else: if mode=='init' or (mode=='update' and xml_id): inherit_xml_ids = [] for table, field_name in model_obj._inherits.items(): xml_ids = self.pool['ir.model.data'].search(cr, uid, [ ('module', '=', module), ('name', '=', xml_id + '_' + table.replace('.', '_')), ], context=context) # XML ID found in the database, try to recover an existing record if xml_ids: found_xml_id = self.pool['ir.model.data'].browse(cr, uid, xml_ids[0], context=context) record = self.pool[found_xml_id.model].browse(cr, uid, [found_xml_id.res_id], context=context)[0] # The record exists, store the id and don't recreate the XML ID if record.exists(): inherit_xml_ids.append(found_xml_id.model) values[field_name] = found_xml_id.res_id # Orphan XML ID, delete it else: found_xml_id.unlink() res_id = model_obj.create(cr, uid, values, context=context) if xml_id: if model_obj._inherits: for table in model_obj._inherits: if table in inherit_xml_ids: continue inherit_id = model_obj.browse(cr, uid, res_id,context=context)[model_obj._inherits[table]] self.create(cr, SUPERUSER_ID, { 'name': xml_id + '_' + table.replace('.', '_'), 'model': table, 'module': module, 'res_id': inherit_id.id, 'noupdate': noupdate, },context=context) self.create(cr, SUPERUSER_ID, { 'name': xml_id, 'model': model, 'module': module, 'res_id': res_id, 'noupdate': noupdate },context=context) if xml_id and res_id: self.loads[(module, xml_id)] = (model, res_id) for table, inherit_field in model_obj._inherits.iteritems(): inherit_id = model_obj.read(cr, uid, [res_id], [inherit_field])[0][inherit_field] self.loads[(module, xml_id + '_' + table.replace('.', '_'))] = (table, inherit_id) return res_id def ir_set(self, cr, uid, key, key2, name, models, value, replace=True, isobject=False, meta=None, xml_id=False): if isinstance(models[0], (list, tuple)): model,res_id = models[0] else: res_id=None model = models[0] if res_id: where = ' and res_id=%s' % (res_id,) else: where = ' and (res_id is null)' if key2: where += ' and key2=\'%s\'' % (key2,) else: where += ' and (key2 is null)' cr.execute('select * from ir_values where model=%s and key=%s and name=%s'+where,(model, key, name)) res = cr.fetchone() ir_values_obj = openerp.registry(cr.dbname)['ir.values'] if not res: ir_values_obj.set(cr, uid, key, key2, name, models, value, replace, isobject, meta) elif xml_id: cr.execute('UPDATE ir_values set value=%s WHERE model=%s and key=%s and name=%s'+where,(value, model, key, name)) ir_values_obj.invalidate_cache(cr, uid, ['value']) return True def _module_data_uninstall(self, cr, uid, modules_to_remove, context=None): """Deletes all the records referenced by the ir.model.data entries ``ids`` along with their corresponding database backed (including dropping tables, columns, FKs, etc, as long as there is no other ir.model.data entry holding a reference to them (which indicates that they are still owned by another module). Attempts to perform the deletion in an appropriate order to maximize the chance of gracefully deleting all records. This step is performed as part of the full uninstallation of a module. """ ids = self.search(cr, uid, [('module', 'in', modules_to_remove)]) if uid != 1 and not self.pool['ir.model.access'].check_groups(cr, uid, "base.group_system"): raise except_orm(_('Permission Denied'), (_('Administrator access is required to uninstall a module'))) context = dict(context or {}) context[MODULE_UNINSTALL_FLAG] = True # enable model/field deletion ids_set = set(ids) wkf_todo = [] to_unlink = [] ids.sort() ids.reverse() for data in self.browse(cr, uid, ids, context): model = data.model res_id = data.res_id pair_to_unlink = (model, res_id) if pair_to_unlink not in to_unlink: to_unlink.append(pair_to_unlink) if model == 'workflow.activity': # Special treatment for workflow activities: temporarily revert their # incoming transition and trigger an update to force all workflow items # to move out before deleting them cr.execute('select res_type,res_id from wkf_instance where id IN (select inst_id from wkf_workitem where act_id=%s)', (res_id,)) wkf_todo.extend(cr.fetchall()) cr.execute("update wkf_transition set condition='True', group_id=NULL, signal=NULL,act_to=act_from,act_from=%s where act_to=%s", (res_id,res_id)) self.invalidate_cache(cr, uid, context=context) for model,res_id in wkf_todo: try: openerp.workflow.trg_write(uid, model, res_id, cr) except Exception: _logger.info('Unable to force processing of workflow for item %s@%s in order to leave activity to be deleted', res_id, model, exc_info=True) def unlink_if_refcount(to_unlink): for model, res_id in to_unlink: external_ids = self.search(cr, uid, [('model', '=', model),('res_id', '=', res_id)]) if set(external_ids)-ids_set: # if other modules have defined this record, we must not delete it continue if model == 'ir.model.fields': # Don't remove the LOG_ACCESS_COLUMNS unless _log_access # has been turned off on the model. field = self.pool[model].browse(cr, uid, [res_id], context=context)[0] if not field.exists(): _logger.info('Deleting orphan external_ids %s', external_ids) self.unlink(cr, uid, external_ids) continue if field.name in openerp.models.LOG_ACCESS_COLUMNS and self.pool[field.model]._log_access: continue if field.name == 'id': continue _logger.info('Deleting %s@%s', res_id, model) try: cr.execute('SAVEPOINT record_unlink_save') self.pool[model].unlink(cr, uid, [res_id], context=context) except Exception: _logger.info('Unable to delete %s@%s', res_id, model, exc_info=True) cr.execute('ROLLBACK TO SAVEPOINT record_unlink_save') else: cr.execute('RELEASE SAVEPOINT record_unlink_save') # Remove non-model records first, then model fields, and finish with models unlink_if_refcount((model, res_id) for model, res_id in to_unlink if model not in ('ir.model','ir.model.fields','ir.model.constraint')) unlink_if_refcount((model, res_id) for model, res_id in to_unlink if model == 'ir.model.constraint') ir_module_module = self.pool['ir.module.module'] ir_model_constraint = self.pool['ir.model.constraint'] modules_to_remove_ids = ir_module_module.search(cr, uid, [('name', 'in', modules_to_remove)], context=context) constraint_ids = ir_model_constraint.search(cr, uid, [('module', 'in', modules_to_remove_ids)], context=context) ir_model_constraint._module_data_uninstall(cr, uid, constraint_ids, context) unlink_if_refcount((model, res_id) for model, res_id in to_unlink if model == 'ir.model.fields') ir_model_relation = self.pool['ir.model.relation'] relation_ids = ir_model_relation.search(cr, uid, [('module', 'in', modules_to_remove_ids)]) ir_model_relation._module_data_uninstall(cr, uid, relation_ids, context) unlink_if_refcount((model, res_id) for model, res_id in to_unlink if model == 'ir.model') cr.commit() self.unlink(cr, uid, ids, context) def _process_end(self, cr, uid, modules): """ Clear records removed from updated module data. This method is called at the end of the module loading process. It is meant to removed records that are no longer present in the updated data. Such records are recognised as the one with an xml id and a module in ir_model_data and noupdate set to false, but not present in self.loads. """ if not modules or config.get('import_partial'): return True bad_imd_ids = [] context = {MODULE_UNINSTALL_FLAG: True} cr.execute("""SELECT id,name,model,res_id,module FROM ir_model_data WHERE module IN %s AND res_id IS NOT NULL AND noupdate=%s ORDER BY id DESC """, (tuple(modules), False)) for (id, name, model, res_id, module) in cr.fetchall(): if (module, name) not in self.loads: if model in self.pool: _logger.info('Deleting %s@%s (%s.%s)', res_id, model, module, name) if self.pool[model].exists(cr, uid, [res_id], context=context): self.pool[model].unlink(cr, uid, [res_id], context=context) else: bad_imd_ids.append(id) if bad_imd_ids: self.unlink(cr, uid, bad_imd_ids, context=context) self.loads.clear() class wizard_model_menu(osv.osv_memory): _name = 'wizard.ir.model.menu.create' _columns = { 'menu_id': fields.many2one('ir.ui.menu', 'Parent Menu', required=True), 'name': fields.char('Menu Name', required=True), } def menu_create(self, cr, uid, ids, context=None): if not context: context = {} model_pool = self.pool.get('ir.model') for menu in self.browse(cr, uid, ids, context): model = model_pool.browse(cr, uid, context.get('model_id'), context=context) val = { 'name': menu.name, 'res_model': model.model, 'view_type': 'form', 'view_mode': 'tree,form' } action_id = self.pool.get('ir.actions.act_window').create(cr, uid, val) self.pool.get('ir.ui.menu').create(cr, uid, { 'name': menu.name, 'parent_id': menu.menu_id.id, 'action': 'ir.actions.act_window,%d' % (action_id,), 'icon': 'STOCK_INDENT' }, context) return {'type':'ir.actions.act_window_close'} # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
TOC-Shard/moul-scripts
Python/system/encodings/cp037.py
593
13377
""" Python Character Mapping Codec cp037 generated from 'MAPPINGS/VENDORS/MICSFT/EBCDIC/CP037.TXT' with gencodec.py. """#" import codecs ### Codec APIs class Codec(codecs.Codec): def encode(self,input,errors='strict'): return codecs.charmap_encode(input,errors,encoding_table) def decode(self,input,errors='strict'): return codecs.charmap_decode(input,errors,decoding_table) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): return codecs.charmap_encode(input,self.errors,encoding_table)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): return codecs.charmap_decode(input,self.errors,decoding_table)[0] class StreamWriter(Codec,codecs.StreamWriter): pass class StreamReader(Codec,codecs.StreamReader): pass ### encodings module API def getregentry(): return codecs.CodecInfo( name='cp037', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, ) ### Decoding Table decoding_table = ( u'\x00' # 0x00 -> NULL u'\x01' # 0x01 -> START OF HEADING u'\x02' # 0x02 -> START OF TEXT u'\x03' # 0x03 -> END OF TEXT u'\x9c' # 0x04 -> CONTROL u'\t' # 0x05 -> HORIZONTAL TABULATION u'\x86' # 0x06 -> CONTROL u'\x7f' # 0x07 -> DELETE u'\x97' # 0x08 -> CONTROL u'\x8d' # 0x09 -> CONTROL u'\x8e' # 0x0A -> CONTROL u'\x0b' # 0x0B -> VERTICAL TABULATION u'\x0c' # 0x0C -> FORM FEED u'\r' # 0x0D -> CARRIAGE RETURN u'\x0e' # 0x0E -> SHIFT OUT u'\x0f' # 0x0F -> SHIFT IN u'\x10' # 0x10 -> DATA LINK ESCAPE u'\x11' # 0x11 -> DEVICE CONTROL ONE u'\x12' # 0x12 -> DEVICE CONTROL TWO u'\x13' # 0x13 -> DEVICE CONTROL THREE u'\x9d' # 0x14 -> CONTROL u'\x85' # 0x15 -> CONTROL u'\x08' # 0x16 -> BACKSPACE u'\x87' # 0x17 -> CONTROL u'\x18' # 0x18 -> CANCEL u'\x19' # 0x19 -> END OF MEDIUM u'\x92' # 0x1A -> CONTROL u'\x8f' # 0x1B -> CONTROL u'\x1c' # 0x1C -> FILE SEPARATOR u'\x1d' # 0x1D -> GROUP SEPARATOR u'\x1e' # 0x1E -> RECORD SEPARATOR u'\x1f' # 0x1F -> UNIT SEPARATOR u'\x80' # 0x20 -> CONTROL u'\x81' # 0x21 -> CONTROL u'\x82' # 0x22 -> CONTROL u'\x83' # 0x23 -> CONTROL u'\x84' # 0x24 -> CONTROL u'\n' # 0x25 -> LINE FEED u'\x17' # 0x26 -> END OF TRANSMISSION BLOCK u'\x1b' # 0x27 -> ESCAPE u'\x88' # 0x28 -> CONTROL u'\x89' # 0x29 -> CONTROL u'\x8a' # 0x2A -> CONTROL u'\x8b' # 0x2B -> CONTROL u'\x8c' # 0x2C -> CONTROL u'\x05' # 0x2D -> ENQUIRY u'\x06' # 0x2E -> ACKNOWLEDGE u'\x07' # 0x2F -> BELL u'\x90' # 0x30 -> CONTROL u'\x91' # 0x31 -> CONTROL u'\x16' # 0x32 -> SYNCHRONOUS IDLE u'\x93' # 0x33 -> CONTROL u'\x94' # 0x34 -> CONTROL u'\x95' # 0x35 -> CONTROL u'\x96' # 0x36 -> CONTROL u'\x04' # 0x37 -> END OF TRANSMISSION u'\x98' # 0x38 -> CONTROL u'\x99' # 0x39 -> CONTROL u'\x9a' # 0x3A -> CONTROL u'\x9b' # 0x3B -> CONTROL u'\x14' # 0x3C -> DEVICE CONTROL FOUR u'\x15' # 0x3D -> NEGATIVE ACKNOWLEDGE u'\x9e' # 0x3E -> CONTROL u'\x1a' # 0x3F -> SUBSTITUTE u' ' # 0x40 -> SPACE u'\xa0' # 0x41 -> NO-BREAK SPACE u'\xe2' # 0x42 -> LATIN SMALL LETTER A WITH CIRCUMFLEX u'\xe4' # 0x43 -> LATIN SMALL LETTER A WITH DIAERESIS u'\xe0' # 0x44 -> LATIN SMALL LETTER A WITH GRAVE u'\xe1' # 0x45 -> LATIN SMALL LETTER A WITH ACUTE u'\xe3' # 0x46 -> LATIN SMALL LETTER A WITH TILDE u'\xe5' # 0x47 -> LATIN SMALL LETTER A WITH RING ABOVE u'\xe7' # 0x48 -> LATIN SMALL LETTER C WITH CEDILLA u'\xf1' # 0x49 -> LATIN SMALL LETTER N WITH TILDE u'\xa2' # 0x4A -> CENT SIGN u'.' # 0x4B -> FULL STOP u'<' # 0x4C -> LESS-THAN SIGN u'(' # 0x4D -> LEFT PARENTHESIS u'+' # 0x4E -> PLUS SIGN u'|' # 0x4F -> VERTICAL LINE u'&' # 0x50 -> AMPERSAND u'\xe9' # 0x51 -> LATIN SMALL LETTER E WITH ACUTE u'\xea' # 0x52 -> LATIN SMALL LETTER E WITH CIRCUMFLEX u'\xeb' # 0x53 -> LATIN SMALL LETTER E WITH DIAERESIS u'\xe8' # 0x54 -> LATIN SMALL LETTER E WITH GRAVE u'\xed' # 0x55 -> LATIN SMALL LETTER I WITH ACUTE u'\xee' # 0x56 -> LATIN SMALL LETTER I WITH CIRCUMFLEX u'\xef' # 0x57 -> LATIN SMALL LETTER I WITH DIAERESIS u'\xec' # 0x58 -> LATIN SMALL LETTER I WITH GRAVE u'\xdf' # 0x59 -> LATIN SMALL LETTER SHARP S (GERMAN) u'!' # 0x5A -> EXCLAMATION MARK u'$' # 0x5B -> DOLLAR SIGN u'*' # 0x5C -> ASTERISK u')' # 0x5D -> RIGHT PARENTHESIS u';' # 0x5E -> SEMICOLON u'\xac' # 0x5F -> NOT SIGN u'-' # 0x60 -> HYPHEN-MINUS u'/' # 0x61 -> SOLIDUS u'\xc2' # 0x62 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX u'\xc4' # 0x63 -> LATIN CAPITAL LETTER A WITH DIAERESIS u'\xc0' # 0x64 -> LATIN CAPITAL LETTER A WITH GRAVE u'\xc1' # 0x65 -> LATIN CAPITAL LETTER A WITH ACUTE u'\xc3' # 0x66 -> LATIN CAPITAL LETTER A WITH TILDE u'\xc5' # 0x67 -> LATIN CAPITAL LETTER A WITH RING ABOVE u'\xc7' # 0x68 -> LATIN CAPITAL LETTER C WITH CEDILLA u'\xd1' # 0x69 -> LATIN CAPITAL LETTER N WITH TILDE u'\xa6' # 0x6A -> BROKEN BAR u',' # 0x6B -> COMMA u'%' # 0x6C -> PERCENT SIGN u'_' # 0x6D -> LOW LINE u'>' # 0x6E -> GREATER-THAN SIGN u'?' # 0x6F -> QUESTION MARK u'\xf8' # 0x70 -> LATIN SMALL LETTER O WITH STROKE u'\xc9' # 0x71 -> LATIN CAPITAL LETTER E WITH ACUTE u'\xca' # 0x72 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX u'\xcb' # 0x73 -> LATIN CAPITAL LETTER E WITH DIAERESIS u'\xc8' # 0x74 -> LATIN CAPITAL LETTER E WITH GRAVE u'\xcd' # 0x75 -> LATIN CAPITAL LETTER I WITH ACUTE u'\xce' # 0x76 -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX u'\xcf' # 0x77 -> LATIN CAPITAL LETTER I WITH DIAERESIS u'\xcc' # 0x78 -> LATIN CAPITAL LETTER I WITH GRAVE u'`' # 0x79 -> GRAVE ACCENT u':' # 0x7A -> COLON u'#' # 0x7B -> NUMBER SIGN u'@' # 0x7C -> COMMERCIAL AT u"'" # 0x7D -> APOSTROPHE u'=' # 0x7E -> EQUALS SIGN u'"' # 0x7F -> QUOTATION MARK u'\xd8' # 0x80 -> LATIN CAPITAL LETTER O WITH STROKE u'a' # 0x81 -> LATIN SMALL LETTER A u'b' # 0x82 -> LATIN SMALL LETTER B u'c' # 0x83 -> LATIN SMALL LETTER C u'd' # 0x84 -> LATIN SMALL LETTER D u'e' # 0x85 -> LATIN SMALL LETTER E u'f' # 0x86 -> LATIN SMALL LETTER F u'g' # 0x87 -> LATIN SMALL LETTER G u'h' # 0x88 -> LATIN SMALL LETTER H u'i' # 0x89 -> LATIN SMALL LETTER I u'\xab' # 0x8A -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK u'\xbb' # 0x8B -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK u'\xf0' # 0x8C -> LATIN SMALL LETTER ETH (ICELANDIC) u'\xfd' # 0x8D -> LATIN SMALL LETTER Y WITH ACUTE u'\xfe' # 0x8E -> LATIN SMALL LETTER THORN (ICELANDIC) u'\xb1' # 0x8F -> PLUS-MINUS SIGN u'\xb0' # 0x90 -> DEGREE SIGN u'j' # 0x91 -> LATIN SMALL LETTER J u'k' # 0x92 -> LATIN SMALL LETTER K u'l' # 0x93 -> LATIN SMALL LETTER L u'm' # 0x94 -> LATIN SMALL LETTER M u'n' # 0x95 -> LATIN SMALL LETTER N u'o' # 0x96 -> LATIN SMALL LETTER O u'p' # 0x97 -> LATIN SMALL LETTER P u'q' # 0x98 -> LATIN SMALL LETTER Q u'r' # 0x99 -> LATIN SMALL LETTER R u'\xaa' # 0x9A -> FEMININE ORDINAL INDICATOR u'\xba' # 0x9B -> MASCULINE ORDINAL INDICATOR u'\xe6' # 0x9C -> LATIN SMALL LIGATURE AE u'\xb8' # 0x9D -> CEDILLA u'\xc6' # 0x9E -> LATIN CAPITAL LIGATURE AE u'\xa4' # 0x9F -> CURRENCY SIGN u'\xb5' # 0xA0 -> MICRO SIGN u'~' # 0xA1 -> TILDE u's' # 0xA2 -> LATIN SMALL LETTER S u't' # 0xA3 -> LATIN SMALL LETTER T u'u' # 0xA4 -> LATIN SMALL LETTER U u'v' # 0xA5 -> LATIN SMALL LETTER V u'w' # 0xA6 -> LATIN SMALL LETTER W u'x' # 0xA7 -> LATIN SMALL LETTER X u'y' # 0xA8 -> LATIN SMALL LETTER Y u'z' # 0xA9 -> LATIN SMALL LETTER Z u'\xa1' # 0xAA -> INVERTED EXCLAMATION MARK u'\xbf' # 0xAB -> INVERTED QUESTION MARK u'\xd0' # 0xAC -> LATIN CAPITAL LETTER ETH (ICELANDIC) u'\xdd' # 0xAD -> LATIN CAPITAL LETTER Y WITH ACUTE u'\xde' # 0xAE -> LATIN CAPITAL LETTER THORN (ICELANDIC) u'\xae' # 0xAF -> REGISTERED SIGN u'^' # 0xB0 -> CIRCUMFLEX ACCENT u'\xa3' # 0xB1 -> POUND SIGN u'\xa5' # 0xB2 -> YEN SIGN u'\xb7' # 0xB3 -> MIDDLE DOT u'\xa9' # 0xB4 -> COPYRIGHT SIGN u'\xa7' # 0xB5 -> SECTION SIGN u'\xb6' # 0xB6 -> PILCROW SIGN u'\xbc' # 0xB7 -> VULGAR FRACTION ONE QUARTER u'\xbd' # 0xB8 -> VULGAR FRACTION ONE HALF u'\xbe' # 0xB9 -> VULGAR FRACTION THREE QUARTERS u'[' # 0xBA -> LEFT SQUARE BRACKET u']' # 0xBB -> RIGHT SQUARE BRACKET u'\xaf' # 0xBC -> MACRON u'\xa8' # 0xBD -> DIAERESIS u'\xb4' # 0xBE -> ACUTE ACCENT u'\xd7' # 0xBF -> MULTIPLICATION SIGN u'{' # 0xC0 -> LEFT CURLY BRACKET u'A' # 0xC1 -> LATIN CAPITAL LETTER A u'B' # 0xC2 -> LATIN CAPITAL LETTER B u'C' # 0xC3 -> LATIN CAPITAL LETTER C u'D' # 0xC4 -> LATIN CAPITAL LETTER D u'E' # 0xC5 -> LATIN CAPITAL LETTER E u'F' # 0xC6 -> LATIN CAPITAL LETTER F u'G' # 0xC7 -> LATIN CAPITAL LETTER G u'H' # 0xC8 -> LATIN CAPITAL LETTER H u'I' # 0xC9 -> LATIN CAPITAL LETTER I u'\xad' # 0xCA -> SOFT HYPHEN u'\xf4' # 0xCB -> LATIN SMALL LETTER O WITH CIRCUMFLEX u'\xf6' # 0xCC -> LATIN SMALL LETTER O WITH DIAERESIS u'\xf2' # 0xCD -> LATIN SMALL LETTER O WITH GRAVE u'\xf3' # 0xCE -> LATIN SMALL LETTER O WITH ACUTE u'\xf5' # 0xCF -> LATIN SMALL LETTER O WITH TILDE u'}' # 0xD0 -> RIGHT CURLY BRACKET u'J' # 0xD1 -> LATIN CAPITAL LETTER J u'K' # 0xD2 -> LATIN CAPITAL LETTER K u'L' # 0xD3 -> LATIN CAPITAL LETTER L u'M' # 0xD4 -> LATIN CAPITAL LETTER M u'N' # 0xD5 -> LATIN CAPITAL LETTER N u'O' # 0xD6 -> LATIN CAPITAL LETTER O u'P' # 0xD7 -> LATIN CAPITAL LETTER P u'Q' # 0xD8 -> LATIN CAPITAL LETTER Q u'R' # 0xD9 -> LATIN CAPITAL LETTER R u'\xb9' # 0xDA -> SUPERSCRIPT ONE u'\xfb' # 0xDB -> LATIN SMALL LETTER U WITH CIRCUMFLEX u'\xfc' # 0xDC -> LATIN SMALL LETTER U WITH DIAERESIS u'\xf9' # 0xDD -> LATIN SMALL LETTER U WITH GRAVE u'\xfa' # 0xDE -> LATIN SMALL LETTER U WITH ACUTE u'\xff' # 0xDF -> LATIN SMALL LETTER Y WITH DIAERESIS u'\\' # 0xE0 -> REVERSE SOLIDUS u'\xf7' # 0xE1 -> DIVISION SIGN u'S' # 0xE2 -> LATIN CAPITAL LETTER S u'T' # 0xE3 -> LATIN CAPITAL LETTER T u'U' # 0xE4 -> LATIN CAPITAL LETTER U u'V' # 0xE5 -> LATIN CAPITAL LETTER V u'W' # 0xE6 -> LATIN CAPITAL LETTER W u'X' # 0xE7 -> LATIN CAPITAL LETTER X u'Y' # 0xE8 -> LATIN CAPITAL LETTER Y u'Z' # 0xE9 -> LATIN CAPITAL LETTER Z u'\xb2' # 0xEA -> SUPERSCRIPT TWO u'\xd4' # 0xEB -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX u'\xd6' # 0xEC -> LATIN CAPITAL LETTER O WITH DIAERESIS u'\xd2' # 0xED -> LATIN CAPITAL LETTER O WITH GRAVE u'\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE u'\xd5' # 0xEF -> LATIN CAPITAL LETTER O WITH TILDE u'0' # 0xF0 -> DIGIT ZERO u'1' # 0xF1 -> DIGIT ONE u'2' # 0xF2 -> DIGIT TWO u'3' # 0xF3 -> DIGIT THREE u'4' # 0xF4 -> DIGIT FOUR u'5' # 0xF5 -> DIGIT FIVE u'6' # 0xF6 -> DIGIT SIX u'7' # 0xF7 -> DIGIT SEVEN u'8' # 0xF8 -> DIGIT EIGHT u'9' # 0xF9 -> DIGIT NINE u'\xb3' # 0xFA -> SUPERSCRIPT THREE u'\xdb' # 0xFB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX u'\xdc' # 0xFC -> LATIN CAPITAL LETTER U WITH DIAERESIS u'\xd9' # 0xFD -> LATIN CAPITAL LETTER U WITH GRAVE u'\xda' # 0xFE -> LATIN CAPITAL LETTER U WITH ACUTE u'\x9f' # 0xFF -> CONTROL ) ### Encoding table encoding_table=codecs.charmap_build(decoding_table)
gpl-3.0
jmckaskill/subversion
tools/buildbot/master/Feeder.py
5
16341
# # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # # # This file is part of the Buildbot configuration for the Subversion project. # The original file was created by Lieven Govaerts # # Minor changes made by API ([email protected]) in order to fit with our # configuration and last buildbot changes # # Minor whitespace clean up, clean up imports, adapted to buildbot 0.7.7, # and finally attempt to create valid atom and RSS feeds. # Changes by Chandan-Dutta Chowdhury <chandan-dutta chowdhury @ hp com> and # Gareth Armstrong <gareth armstrong @ hp com> # Also integrate changes from # http://code.google.com/p/pybots/source/browse/trunk/master/Feeder.py # which adds ability to filter RSS feeds to specific builders. # e.g. http://localhost:8012/rss?builder=builder-log4c-rhel-4-i386 import time import os import re import sys from twisted.web.resource import Resource from buildbot.status.web import baseweb from buildbot.status.builder import FAILURE, SUCCESS, WARNINGS class XmlResource(Resource): contentType = "text/xml; charset=UTF-8" def render(self, request): data = self.content(request) request.setHeader("content-type", self.contentType) if request.method == "HEAD": request.setHeader("content-length", len(data)) return '' return data docType = '' def header (self, request): data = ('<?xml version="1.0"?>\n') return data def footer(self, request): data = '' return data def content(self, request): data = self.docType data += self.header(request) data += self.body(request) data += self.footer(request) return data def body(self, request): return '' class FeedResource(XmlResource): title = 'Dummy' link = 'http://dummylink' language = 'en-us' description = 'Dummy rss' status = None def __init__(self, status, categories=None): self.status = status self.categories = categories self.link = self.status.getBuildbotURL() self.title = 'Build status of ' + status.getProjectName() self.description = 'List of FAILED builds' self.pubdate = time.gmtime(int(time.time())) def getBuilds(self, request): builds = [] # THIS is lifted straight from the WaterfallStatusResource Class in # status/web/waterfall.py # # we start with all Builders available to this Waterfall: this is # limited by the config-file -time categories= argument, and defaults # to all defined Builders. allBuilderNames = self.status.getBuilderNames(categories=self.categories) builders = [self.status.getBuilder(name) for name in allBuilderNames] # but if the URL has one or more builder= arguments (or the old show= # argument, which is still accepted for backwards compatibility), we # use that set of builders instead. We still don't show anything # outside the config-file time set limited by categories=. showBuilders = request.args.get("show", []) showBuilders.extend(request.args.get("builder", [])) if showBuilders: builders = [b for b in builders if b.name in showBuilders] # now, if the URL has one or category= arguments, use them as a # filter: only show those builders which belong to one of the given # categories. showCategories = request.args.get("category", []) if showCategories: builders = [b for b in builders if b.category in showCategories] maxFeeds = 25 # Copy all failed builds in a new list. # This could clearly be implemented much better if we had # access to a global list of builds. for b in builders: lastbuild = b.getLastFinishedBuild() if lastbuild is None: continue lastnr = lastbuild.getNumber() totalbuilds = 0 i = lastnr while i >= 0: build = b.getBuild(i) i -= 1 if not build: continue results = build.getResults() # only add entries for failed builds! if results == FAILURE: totalbuilds += 1 builds.append(build) # stop for this builder when our total nr. of feeds is reached if totalbuilds >= maxFeeds: break # Sort build list by date, youngest first. if sys.version_info[:3] >= (2,4,0): builds.sort(key=lambda build: build.getTimes(), reverse=True) else: # If you need compatibility with python < 2.4, use this for # sorting instead: # We apply Decorate-Sort-Undecorate deco = [(build.getTimes(), build) for build in builds] deco.sort() deco.reverse() builds = [build for (b1, build) in deco] if builds: builds = builds[:min(len(builds), maxFeeds)] return builds def body (self, request): data = '' builds = self.getBuilds(request) for build in builds: start, finished = build.getTimes() finishedTime = time.gmtime(int(finished)) projectName = self.status.getProjectName() link = re.sub(r'index.html', "", self.status.getURLForThing(build)) # title: trunk r862265 (plus patch) failed on 'i686-debian-sarge1 shared gcc-3.3.5' ss = build.getSourceStamp() source = "" if ss.branch: source += "Branch %s " % ss.branch if ss.revision: source += "Revision %s " % str(ss.revision) if ss.patch: source += " (plus patch)" if ss.changes: pass if (ss.branch is None and ss.revision is None and ss.patch is None and not ss.changes): source += "Latest revision " got_revision = None try: got_revision = build.getProperty("got_revision") except KeyError: pass if got_revision: got_revision = str(got_revision) if len(got_revision) > 40: got_revision = "[revision string too long]" source += "(Got Revision: %s)" % got_revision title = ('%s failed on "%s"' % (source, build.getBuilder().getName())) # get name of the failed step and the last 30 lines of its log. if build.getLogs(): log = build.getLogs()[-1] laststep = log.getStep().getName() try: lastlog = log.getText() except IOError: # Probably the log file has been removed lastlog='<b>log file not available</b>' lines = re.split('\n', lastlog) lastlog = '' for logline in lines[max(0, len(lines)-30):]: lastlog = lastlog + logline + '<br/>' lastlog = lastlog.replace('\n', '<br/>') description = '' description += ('Date: %s<br/><br/>' % time.strftime("%a, %d %b %Y %H:%M:%S GMT", finishedTime)) description += ('Full details available here: <a href="%s">%s</a><br/>' % (self.link, projectName)) builder_summary_link = ('%s/builders/%s' % (re.sub(r'/index.html', '', self.link), build.getBuilder().getName())) description += ('Build summary: <a href="%s">%s</a><br/><br/>' % (builder_summary_link, build.getBuilder().getName())) description += ('Build details: <a href="%s">%s</a><br/><br/>' % (link, self.link + link[1:])) description += ('Author list: <b>%s</b><br/><br/>' % ",".join(build.getResponsibleUsers())) description += ('Failed step: <b>%s</b><br/><br/>' % laststep) description += 'Last lines of the build log:<br/>' data += self.item(title, description=description, lastlog=lastlog, link=link, pubDate=finishedTime) return data def item(self, title='', link='', description='', pubDate=''): """Generates xml for one item in the feed.""" class Rss20StatusResource(FeedResource): def __init__(self, status, categories=None): FeedResource.__init__(self, status, categories) contentType = 'application/rss+xml' def header(self, request): data = FeedResource.header(self, request) data += ('<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">\n') data += (' <channel>\n') if self.title is not None: data += (' <title>%s</title>\n' % self.title) if self.link is not None: data += (' <link>%s</link>\n' % self.link) link = re.sub(r'/index.html', '', self.link) data += (' <atom:link href="%s/rss" rel="self" type="application/rss+xml"/>\n' % link) if self.language is not None: data += (' <language>%s</language>\n' % self.language) if self.description is not None: data += (' <description>%s</description>\n' % self.description) if self.pubdate is not None: rfc822_pubdate = time.strftime("%a, %d %b %Y %H:%M:%S GMT", self.pubdate) data += (' <pubDate>%s</pubDate>\n' % rfc822_pubdate) return data def item(self, title='', link='', description='', lastlog='', pubDate=''): data = (' <item>\n') data += (' <title>%s</title>\n' % title) if link is not None: data += (' <link>%s</link>\n' % link) if (description is not None and lastlog is not None): lastlog = re.sub(r'<br/>', "\n", lastlog) lastlog = re.sub(r'&', "&amp;", lastlog) lastlog = re.sub(r"'", "&apos;", lastlog) lastlog = re.sub(r'"', "&quot;", lastlog) lastlog = re.sub(r'<', '&lt;', lastlog) lastlog = re.sub(r'>', '&gt;', lastlog) lastlog = lastlog.replace('\n', '<br/>') content = '<![CDATA[' content += description content += lastlog content += ']]>' data += (' <description>%s</description>\n' % content) if pubDate is not None: rfc822pubDate = time.strftime("%a, %d %b %Y %H:%M:%S GMT", pubDate) data += (' <pubDate>%s</pubDate>\n' % rfc822pubDate) # Every RSS item must have a globally unique ID guid = ('tag:%s@%s,%s:%s' % (os.environ['USER'], os.environ['HOSTNAME'], time.strftime("%Y-%m-%d", pubDate), time.strftime("%Y%m%d%H%M%S", pubDate))) data += (' <guid isPermaLink="false">%s</guid>\n' % guid) data += (' </item>\n') return data def footer(self, request): data = (' </channel>\n' '</rss>') return data class Atom10StatusResource(FeedResource): def __init__(self, status, categories=None): FeedResource.__init__(self, status, categories) contentType = 'application/atom+xml' def header(self, request): data = FeedResource.header(self, request) data += '<feed xmlns="http://www.w3.org/2005/Atom">\n' data += (' <id>%s</id>\n' % self.status.getBuildbotURL()) if self.title is not None: data += (' <title>%s</title>\n' % self.title) if self.link is not None: link = re.sub(r'/index.html', '', self.link) data += (' <link rel="self" href="%s/atom"/>\n' % link) data += (' <link rel="alternate" href="%s/"/>\n' % link) if self.description is not None: data += (' <subtitle>%s</subtitle>\n' % self.description) if self.pubdate is not None: rfc3339_pubdate = time.strftime("%Y-%m-%dT%H:%M:%SZ", self.pubdate) data += (' <updated>%s</updated>\n' % rfc3339_pubdate) data += (' <author>\n') data += (' <name>Build Bot</name>\n') data += (' </author>\n') return data def item(self, title='', link='', description='', lastlog='', pubDate=''): data = (' <entry>\n') data += (' <title>%s</title>\n' % title) if link is not None: data += (' <link href="%s"/>\n' % link) if (description is not None and lastlog is not None): lastlog = re.sub(r'<br/>', "\n", lastlog) lastlog = re.sub(r'&', "&amp;", lastlog) lastlog = re.sub(r"'", "&apos;", lastlog) lastlog = re.sub(r'"', "&quot;", lastlog) lastlog = re.sub(r'<', '&lt;', lastlog) lastlog = re.sub(r'>', '&gt;', lastlog) data += (' <content type="xhtml">\n') data += (' <div xmlns="http://www.w3.org/1999/xhtml">\n') data += (' %s\n' % description) data += (' <pre xml:space="preserve">%s</pre>\n' % lastlog) data += (' </div>\n') data += (' </content>\n') if pubDate is not None: rfc3339pubDate = time.strftime("%Y-%m-%dT%H:%M:%SZ", pubDate) data += (' <updated>%s</updated>\n' % rfc3339pubDate) # Every Atom entry must have a globally unique ID # http://diveintomark.org/archives/2004/05/28/howto-atom-id guid = ('tag:%s@%s,%s:%s' % (os.environ['USER'], os.environ['HOSTNAME'], time.strftime("%Y-%m-%d", pubDate), time.strftime("%Y%m%d%H%M%S", pubDate))) data += (' <id>%s</id>\n' % guid) data += (' <author>\n') data += (' <name>Build Bot</name>\n') data += (' </author>\n') data += (' </entry>\n') return data def footer(self, request): data = ('</feed>') return data class WebStatusWithFeeds(baseweb.WebStatus): """Override the standard WebStatus class to add RSS and Atom feeds. This adds the following web resources in addition to /waterfall: /rss /atom The same "branch" and "category" query arguments can be passed as with /waterfall e.g. http://mybot.buildbot.com:8012/rss?branch=&builder=builder-log4c-rhel-4-i386 or http://mybot.buildbot.com:8012/rss?branch=&category=log4c """ def setupSite(self): baseweb.WebStatus.setupSite(self) status = self.parent.getStatus() sr = self.site.resource rss = Rss20StatusResource(status, categories=None) sr.putChild("rss", rss) atom = Atom10StatusResource(status, categories=None) sr.putChild("atom", atom)
apache-2.0
nathania/networkx
examples/algorithms/blockmodel.py
32
3009
#!/usr/bin/env python # encoding: utf-8 """ Example of creating a block model using the blockmodel function in NX. Data used is the Hartford, CT drug users network: @article{, title = {Social Networks of Drug Users in {High-Risk} Sites: Finding the Connections}, volume = {6}, shorttitle = {Social Networks of Drug Users in {High-Risk} Sites}, url = {http://dx.doi.org/10.1023/A:1015457400897}, doi = {10.1023/A:1015457400897}, number = {2}, journal = {{AIDS} and Behavior}, author = {Margaret R. Weeks and Scott Clair and Stephen P. Borgatti and Kim Radda and Jean J. Schensul}, month = jun, year = {2002}, pages = {193--206} } """ __author__ = """\n""".join(['Drew Conway <[email protected]>', 'Aric Hagberg <[email protected]>']) from collections import defaultdict import networkx as nx import numpy from scipy.cluster import hierarchy from scipy.spatial import distance import matplotlib.pyplot as plt def create_hc(G): """Creates hierarchical cluster of graph G from distance matrix""" path_length=nx.all_pairs_shortest_path_length(G) distances=numpy.zeros((len(G),len(G))) for u,p in path_length.items(): for v,d in p.items(): distances[u][v]=d # Create hierarchical cluster Y=distance.squareform(distances) Z=hierarchy.complete(Y) # Creates HC using farthest point linkage # This partition selection is arbitrary, for illustrive purposes membership=list(hierarchy.fcluster(Z,t=1.15)) # Create collection of lists for blockmodel partition=defaultdict(list) for n,p in zip(list(range(len(G))),membership): partition[p].append(n) return list(partition.values()) if __name__ == '__main__': G=nx.read_edgelist("hartford_drug.edgelist") # Extract largest connected component into graph H H=nx.connected_component_subgraphs(G)[0] # Makes life easier to have consecutively labeled integer nodes H=nx.convert_node_labels_to_integers(H) # Create parititions with hierarchical clustering partitions=create_hc(H) # Build blockmodel graph BM=nx.blockmodel(H,partitions) # Draw original graph pos=nx.spring_layout(H,iterations=100) fig=plt.figure(1,figsize=(6,10)) ax=fig.add_subplot(211) nx.draw(H,pos,with_labels=False,node_size=10) plt.xlim(0,1) plt.ylim(0,1) # Draw block model with weighted edges and nodes sized by number of internal nodes node_size=[BM.node[x]['nnodes']*10 for x in BM.nodes()] edge_width=[(2*d['weight']) for (u,v,d) in BM.edges(data=True)] # Set positions to mean of positions of internal nodes from original graph posBM={} for n in BM: xy=numpy.array([pos[u] for u in BM.node[n]['graph']]) posBM[n]=xy.mean(axis=0) ax=fig.add_subplot(212) nx.draw(BM,posBM,node_size=node_size,width=edge_width,with_labels=False) plt.xlim(0,1) plt.ylim(0,1) plt.axis('off') plt.savefig('hartford_drug_block_model.png')
bsd-3-clause