repo_name
stringlengths
5
100
path
stringlengths
4
375
copies
stringclasses
991 values
size
stringlengths
4
7
content
stringlengths
666
1M
license
stringclasses
15 values
miumok98/weblate
examples/mt_service.py
11
1388
# -*- coding: utf-8 -*- # # Copyright © 2012 - 2015 Michal Čihař <[email protected]> # # This file is part of Weblate <http://weblate.org/> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ''' Machine translation example. ''' from weblate.trans.machine.base import MachineTranslation import dictionary class SampleTranslation(MachineTranslation): ''' Sample machine translation interface. ''' name = 'Sample' def download_languages(self): ''' Returns list of languages your machine translation supports. ''' return set(('cs',)) def download_translations(self, language, text, unit, request): ''' Returns tuple with translations. ''' return [(t, 100, self.name, text) for t in dictionary.translate(text)]
gpl-3.0
yedidiaklein/local_video_directory
classes/task/googleSpeech/google/protobuf/python/google/protobuf/json_format.py
19
29835
# Protocol Buffers - Google's data interchange format # Copyright 2008 Google Inc. All rights reserved. # https://developers.google.com/protocol-buffers/ # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Contains routines for printing protocol messages in JSON format. Simple usage example: # Create a proto object and serialize it to a json format string. message = my_proto_pb2.MyMessage(foo='bar') json_string = json_format.MessageToJson(message) # Parse a json format string to proto object. 
message = json_format.Parse(json_string, my_proto_pb2.MyMessage()) """ __author__ = '[email protected] (Jie Luo)' # pylint: disable=g-statement-before-imports,g-import-not-at-top try: from collections import OrderedDict except ImportError: from ordereddict import OrderedDict # PY26 # pylint: enable=g-statement-before-imports,g-import-not-at-top import base64 import json import math from operator import methodcaller import re import sys import six from google.protobuf import descriptor from google.protobuf import symbol_database _TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S' _INT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT32, descriptor.FieldDescriptor.CPPTYPE_UINT32, descriptor.FieldDescriptor.CPPTYPE_INT64, descriptor.FieldDescriptor.CPPTYPE_UINT64]) _INT64_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT64, descriptor.FieldDescriptor.CPPTYPE_UINT64]) _FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT, descriptor.FieldDescriptor.CPPTYPE_DOUBLE]) _INFINITY = 'Infinity' _NEG_INFINITY = '-Infinity' _NAN = 'NaN' _UNPAIRED_SURROGATE_PATTERN = re.compile(six.u( r'[\ud800-\udbff](?![\udc00-\udfff])|(?<![\ud800-\udbff])[\udc00-\udfff]' )) _VALID_EXTENSION_NAME = re.compile(r'\[[a-zA-Z0-9\._]*\]$') class Error(Exception): """Top-level module error for json_format.""" class SerializeToJsonError(Error): """Thrown if serialization to JSON fails.""" class ParseError(Error): """Thrown in case of parsing error.""" def MessageToJson(message, including_default_value_fields=False, preserving_proto_field_name=False, indent=2, sort_keys=False, use_integers_for_enums=False): """Converts protobuf message to JSON format. Args: message: The protocol buffers message instance to serialize. including_default_value_fields: If True, singular primitive fields, repeated fields, and map fields will always be serialized. If False, only serialize non-empty fields. Singular message fields and oneof fields are not affected by this option. 
preserving_proto_field_name: If True, use the original proto field names as defined in the .proto file. If False, convert the field names to lowerCamelCase. indent: The JSON object will be pretty-printed with this indent level. An indent level of 0 or negative will only insert newlines. sort_keys: If True, then the output will be sorted by field names. use_integers_for_enums: If true, print integers instead of enum names. Returns: A string containing the JSON formatted protocol buffer message. """ printer = _Printer(including_default_value_fields, preserving_proto_field_name, use_integers_for_enums) return printer.ToJsonString(message, indent, sort_keys) def MessageToDict(message, including_default_value_fields=False, preserving_proto_field_name=False, use_integers_for_enums=False): """Converts protobuf message to a dictionary. When the dictionary is encoded to JSON, it conforms to proto3 JSON spec. Args: message: The protocol buffers message instance to serialize. including_default_value_fields: If True, singular primitive fields, repeated fields, and map fields will always be serialized. If False, only serialize non-empty fields. Singular message fields and oneof fields are not affected by this option. preserving_proto_field_name: If True, use the original proto field names as defined in the .proto file. If False, convert the field names to lowerCamelCase. use_integers_for_enums: If true, print integers instead of enum names. Returns: A dict representation of the protocol buffer message. 
""" printer = _Printer(including_default_value_fields, preserving_proto_field_name, use_integers_for_enums) # pylint: disable=protected-access return printer._MessageToJsonObject(message) def _IsMapEntry(field): return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and field.message_type.has_options and field.message_type.GetOptions().map_entry) class _Printer(object): """JSON format printer for protocol message.""" def __init__(self, including_default_value_fields=False, preserving_proto_field_name=False, use_integers_for_enums=False): self.including_default_value_fields = including_default_value_fields self.preserving_proto_field_name = preserving_proto_field_name self.use_integers_for_enums = use_integers_for_enums def ToJsonString(self, message, indent, sort_keys): js = self._MessageToJsonObject(message) return json.dumps(js, indent=indent, sort_keys=sort_keys) def _MessageToJsonObject(self, message): """Converts message to an object according to Proto3 JSON Specification.""" message_descriptor = message.DESCRIPTOR full_name = message_descriptor.full_name if _IsWrapperMessage(message_descriptor): return self._WrapperMessageToJsonObject(message) if full_name in _WKTJSONMETHODS: return methodcaller(_WKTJSONMETHODS[full_name][0], message)(self) js = {} return self._RegularMessageToJsonObject(message, js) def _RegularMessageToJsonObject(self, message, js): """Converts normal message according to Proto3 JSON Specification.""" fields = message.ListFields() try: for field, value in fields: if self.preserving_proto_field_name: name = field.name else: name = field.json_name if _IsMapEntry(field): # Convert a map field. 
v_field = field.message_type.fields_by_name['value'] js_map = {} for key in value: if isinstance(key, bool): if key: recorded_key = 'true' else: recorded_key = 'false' else: recorded_key = key js_map[recorded_key] = self._FieldToJsonObject( v_field, value[key]) js[name] = js_map elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: # Convert a repeated field. js[name] = [self._FieldToJsonObject(field, k) for k in value] elif field.is_extension: f = field if (f.containing_type.GetOptions().message_set_wire_format and f.type == descriptor.FieldDescriptor.TYPE_MESSAGE and f.label == descriptor.FieldDescriptor.LABEL_OPTIONAL): f = f.message_type name = '[%s.%s]' % (f.full_name, name) js[name] = self._FieldToJsonObject(field, value) else: js[name] = self._FieldToJsonObject(field, value) # Serialize default value if including_default_value_fields is True. if self.including_default_value_fields: message_descriptor = message.DESCRIPTOR for field in message_descriptor.fields: # Singular message fields and oneof fields will not be affected. if ((field.label != descriptor.FieldDescriptor.LABEL_REPEATED and field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE) or field.containing_oneof): continue if self.preserving_proto_field_name: name = field.name else: name = field.json_name if name in js: # Skip the field which has been serailized already. 
continue if _IsMapEntry(field): js[name] = {} elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: js[name] = [] else: js[name] = self._FieldToJsonObject(field, field.default_value) except ValueError as e: raise SerializeToJsonError( 'Failed to serialize {0} field: {1}.'.format(field.name, e)) return js def _FieldToJsonObject(self, field, value): """Converts field value according to Proto3 JSON Specification.""" if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: return self._MessageToJsonObject(value) elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: if self.use_integers_for_enums: return value enum_value = field.enum_type.values_by_number.get(value, None) if enum_value is not None: return enum_value.name else: if field.file.syntax == 'proto3': return value raise SerializeToJsonError('Enum field contains an integer value ' 'which can not mapped to an enum value.') elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: if field.type == descriptor.FieldDescriptor.TYPE_BYTES: # Use base64 Data encoding for bytes return base64.b64encode(value).decode('utf-8') else: return value elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: return bool(value) elif field.cpp_type in _INT64_TYPES: return str(value) elif field.cpp_type in _FLOAT_TYPES: if math.isinf(value): if value < 0.0: return _NEG_INFINITY else: return _INFINITY if math.isnan(value): return _NAN return value def _AnyMessageToJsonObject(self, message): """Converts Any message according to Proto3 JSON Specification.""" if not message.ListFields(): return {} # Must print @type first, use OrderedDict instead of {} js = OrderedDict() type_url = message.type_url js['@type'] = type_url sub_message = _CreateMessageFromTypeUrl(type_url) sub_message.ParseFromString(message.value) message_descriptor = sub_message.DESCRIPTOR full_name = message_descriptor.full_name if _IsWrapperMessage(message_descriptor): js['value'] = self._WrapperMessageToJsonObject(sub_message) 
return js if full_name in _WKTJSONMETHODS: js['value'] = methodcaller(_WKTJSONMETHODS[full_name][0], sub_message)(self) return js return self._RegularMessageToJsonObject(sub_message, js) def _GenericMessageToJsonObject(self, message): """Converts message according to Proto3 JSON Specification.""" # Duration, Timestamp and FieldMask have ToJsonString method to do the # convert. Users can also call the method directly. return message.ToJsonString() def _ValueMessageToJsonObject(self, message): """Converts Value message according to Proto3 JSON Specification.""" which = message.WhichOneof('kind') # If the Value message is not set treat as null_value when serialize # to JSON. The parse back result will be different from original message. if which is None or which == 'null_value': return None if which == 'list_value': return self._ListValueMessageToJsonObject(message.list_value) if which == 'struct_value': value = message.struct_value else: value = getattr(message, which) oneof_descriptor = message.DESCRIPTOR.fields_by_name[which] return self._FieldToJsonObject(oneof_descriptor, value) def _ListValueMessageToJsonObject(self, message): """Converts ListValue message according to Proto3 JSON Specification.""" return [self._ValueMessageToJsonObject(value) for value in message.values] def _StructMessageToJsonObject(self, message): """Converts Struct message according to Proto3 JSON Specification.""" fields = message.fields ret = {} for key in fields: ret[key] = self._ValueMessageToJsonObject(fields[key]) return ret def _WrapperMessageToJsonObject(self, message): return self._FieldToJsonObject( message.DESCRIPTOR.fields_by_name['value'], message.value) def _IsWrapperMessage(message_descriptor): return message_descriptor.file.name == 'google/protobuf/wrappers.proto' def _DuplicateChecker(js): result = {} for name, value in js: if name in result: raise ParseError('Failed to load JSON: duplicate key {0}.'.format(name)) result[name] = value return result def 
_CreateMessageFromTypeUrl(type_url): # TODO(jieluo): Should add a way that users can register the type resolver # instead of the default one. db = symbol_database.Default() type_name = type_url.split('/')[-1] try: message_descriptor = db.pool.FindMessageTypeByName(type_name) except KeyError: raise TypeError( 'Can not find message descriptor by type_url: {0}.'.format(type_url)) message_class = db.GetPrototype(message_descriptor) return message_class() def Parse(text, message, ignore_unknown_fields=False): """Parses a JSON representation of a protocol message into a message. Args: text: Message JSON representation. message: A protocol buffer message to merge into. ignore_unknown_fields: If True, do not raise errors for unknown fields. Returns: The same message passed as argument. Raises:: ParseError: On JSON parsing problems. """ if not isinstance(text, six.text_type): text = text.decode('utf-8') try: if sys.version_info < (2, 7): # object_pair_hook is not supported before python2.7 js = json.loads(text) else: js = json.loads(text, object_pairs_hook=_DuplicateChecker) except ValueError as e: raise ParseError('Failed to load JSON: {0}.'.format(str(e))) return ParseDict(js, message, ignore_unknown_fields) def ParseDict(js_dict, message, ignore_unknown_fields=False): """Parses a JSON dictionary representation into a message. Args: js_dict: Dict representation of a JSON message. message: A protocol buffer message to merge into. ignore_unknown_fields: If True, do not raise errors for unknown fields. Returns: The same message passed as argument. """ parser = _Parser(ignore_unknown_fields) parser.ConvertMessage(js_dict, message) return message _INT_OR_FLOAT = six.integer_types + (float,) class _Parser(object): """JSON format parser for protocol message.""" def __init__(self, ignore_unknown_fields): self.ignore_unknown_fields = ignore_unknown_fields def ConvertMessage(self, value, message): """Convert a JSON object into a message. Args: value: A JSON object. 
message: A WKT or regular protocol message to record the data. Raises: ParseError: In case of convert problems. """ message_descriptor = message.DESCRIPTOR full_name = message_descriptor.full_name if _IsWrapperMessage(message_descriptor): self._ConvertWrapperMessage(value, message) elif full_name in _WKTJSONMETHODS: methodcaller(_WKTJSONMETHODS[full_name][1], value, message)(self) else: self._ConvertFieldValuePair(value, message) def _ConvertFieldValuePair(self, js, message): """Convert field value pairs into regular message. Args: js: A JSON object to convert the field value pairs. message: A regular protocol message to record the data. Raises: ParseError: In case of problems converting. """ names = [] message_descriptor = message.DESCRIPTOR fields_by_json_name = dict((f.json_name, f) for f in message_descriptor.fields) for name in js: try: field = fields_by_json_name.get(name, None) if not field: field = message_descriptor.fields_by_name.get(name, None) if not field and _VALID_EXTENSION_NAME.match(name): if not message_descriptor.is_extendable: raise ParseError('Message type {0} does not have extensions'.format( message_descriptor.full_name)) identifier = name[1:-1] # strip [] brackets identifier = '.'.join(identifier.split('.')[:-1]) # pylint: disable=protected-access field = message.Extensions._FindExtensionByName(identifier) # pylint: enable=protected-access if not field: if self.ignore_unknown_fields: continue raise ParseError( ('Message type "{0}" has no field named "{1}".\n' ' Available Fields(except extensions): {2}').format( message_descriptor.full_name, name, message_descriptor.fields)) if name in names: raise ParseError('Message type "{0}" should not have multiple ' '"{1}" fields.'.format( message.DESCRIPTOR.full_name, name)) names.append(name) # Check no other oneof field is parsed. 
if field.containing_oneof is not None: oneof_name = field.containing_oneof.name if oneof_name in names: raise ParseError('Message type "{0}" should not have multiple ' '"{1}" oneof fields.'.format( message.DESCRIPTOR.full_name, oneof_name)) names.append(oneof_name) value = js[name] if value is None: if (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE and field.message_type.full_name == 'google.protobuf.Value'): sub_message = getattr(message, field.name) sub_message.null_value = 0 else: message.ClearField(field.name) continue # Parse field value. if _IsMapEntry(field): message.ClearField(field.name) self._ConvertMapFieldValue(value, message, field) elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: message.ClearField(field.name) if not isinstance(value, list): raise ParseError('repeated field {0} must be in [] which is ' '{1}.'.format(name, value)) if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: # Repeated message field. for item in value: sub_message = getattr(message, field.name).add() # None is a null_value in Value. if (item is None and sub_message.DESCRIPTOR.full_name != 'google.protobuf.Value'): raise ParseError('null is not allowed to be used as an element' ' in a repeated field.') self.ConvertMessage(item, sub_message) else: # Repeated scalar field. 
for item in value: if item is None: raise ParseError('null is not allowed to be used as an element' ' in a repeated field.') getattr(message, field.name).append( _ConvertScalarFieldValue(item, field)) elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: if field.is_extension: sub_message = message.Extensions[field] else: sub_message = getattr(message, field.name) sub_message.SetInParent() self.ConvertMessage(value, sub_message) else: setattr(message, field.name, _ConvertScalarFieldValue(value, field)) except ParseError as e: if field and field.containing_oneof is None: raise ParseError('Failed to parse {0} field: {1}'.format(name, e)) else: raise ParseError(str(e)) except ValueError as e: raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) except TypeError as e: raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) def _ConvertAnyMessage(self, value, message): """Convert a JSON representation into Any message.""" if isinstance(value, dict) and not value: return try: type_url = value['@type'] except KeyError: raise ParseError('@type is missing when parsing any message.') sub_message = _CreateMessageFromTypeUrl(type_url) message_descriptor = sub_message.DESCRIPTOR full_name = message_descriptor.full_name if _IsWrapperMessage(message_descriptor): self._ConvertWrapperMessage(value['value'], sub_message) elif full_name in _WKTJSONMETHODS: methodcaller( _WKTJSONMETHODS[full_name][1], value['value'], sub_message)(self) else: del value['@type'] self._ConvertFieldValuePair(value, sub_message) # Sets Any message message.value = sub_message.SerializeToString() message.type_url = type_url def _ConvertGenericMessage(self, value, message): """Convert a JSON representation into message with FromJsonString.""" # Duration, Timestamp, FieldMask have a FromJsonString method to do the # conversion. Users can also call the method directly. 
message.FromJsonString(value) def _ConvertValueMessage(self, value, message): """Convert a JSON representation into Value message.""" if isinstance(value, dict): self._ConvertStructMessage(value, message.struct_value) elif isinstance(value, list): self. _ConvertListValueMessage(value, message.list_value) elif value is None: message.null_value = 0 elif isinstance(value, bool): message.bool_value = value elif isinstance(value, six.string_types): message.string_value = value elif isinstance(value, _INT_OR_FLOAT): message.number_value = value else: raise ParseError('Unexpected type for Value message.') def _ConvertListValueMessage(self, value, message): """Convert a JSON representation into ListValue message.""" if not isinstance(value, list): raise ParseError( 'ListValue must be in [] which is {0}.'.format(value)) message.ClearField('values') for item in value: self._ConvertValueMessage(item, message.values.add()) def _ConvertStructMessage(self, value, message): """Convert a JSON representation into Struct message.""" if not isinstance(value, dict): raise ParseError( 'Struct must be in a dict which is {0}.'.format(value)) for key in value: self._ConvertValueMessage(value[key], message.fields[key]) return def _ConvertWrapperMessage(self, value, message): """Convert a JSON representation into Wrapper message.""" field = message.DESCRIPTOR.fields_by_name['value'] setattr(message, 'value', _ConvertScalarFieldValue(value, field)) def _ConvertMapFieldValue(self, value, message, field): """Convert map field value for a message map field. Args: value: A JSON object to convert the map field value. message: A protocol message to record the converted data. field: The descriptor of the map field to be converted. Raises: ParseError: In case of convert problems. 
""" if not isinstance(value, dict): raise ParseError( 'Map field {0} must be in a dict which is {1}.'.format( field.name, value)) key_field = field.message_type.fields_by_name['key'] value_field = field.message_type.fields_by_name['value'] for key in value: key_value = _ConvertScalarFieldValue(key, key_field, True) if value_field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: self.ConvertMessage(value[key], getattr( message, field.name)[key_value]) else: getattr(message, field.name)[key_value] = _ConvertScalarFieldValue( value[key], value_field) def _ConvertScalarFieldValue(value, field, require_str=False): """Convert a single scalar field value. Args: value: A scalar value to convert the scalar field value. field: The descriptor of the field to convert. require_str: If True, the field value must be a str. Returns: The converted scalar field value Raises: ParseError: In case of convert problems. """ if field.cpp_type in _INT_TYPES: return _ConvertInteger(value) elif field.cpp_type in _FLOAT_TYPES: return _ConvertFloat(value) elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: return _ConvertBool(value, require_str) elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: if field.type == descriptor.FieldDescriptor.TYPE_BYTES: return base64.b64decode(value) else: # Checking for unpaired surrogates appears to be unreliable, # depending on the specific Python version, so we check manually. if _UNPAIRED_SURROGATE_PATTERN.search(value): raise ParseError('Unpaired surrogate') return value elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: # Convert an enum value. 
enum_value = field.enum_type.values_by_name.get(value, None) if enum_value is None: try: number = int(value) enum_value = field.enum_type.values_by_number.get(number, None) except ValueError: raise ParseError('Invalid enum value {0} for enum type {1}.'.format( value, field.enum_type.full_name)) if enum_value is None: if field.file.syntax == 'proto3': # Proto3 accepts unknown enums. return number raise ParseError('Invalid enum value {0} for enum type {1}.'.format( value, field.enum_type.full_name)) return enum_value.number def _ConvertInteger(value): """Convert an integer. Args: value: A scalar value to convert. Returns: The integer value. Raises: ParseError: If an integer couldn't be consumed. """ if isinstance(value, float) and not value.is_integer(): raise ParseError('Couldn\'t parse integer: {0}.'.format(value)) if isinstance(value, six.text_type) and value.find(' ') != -1: raise ParseError('Couldn\'t parse integer: "{0}".'.format(value)) return int(value) def _ConvertFloat(value): """Convert an floating point number.""" if value == 'nan': raise ParseError('Couldn\'t parse float "nan", use "NaN" instead.') try: # Assume Python compatible syntax. return float(value) except ValueError: # Check alternative spellings. if value == _NEG_INFINITY: return float('-inf') elif value == _INFINITY: return float('inf') elif value == _NAN: return float('nan') else: raise ParseError('Couldn\'t parse float: {0}.'.format(value)) def _ConvertBool(value, require_str): """Convert a boolean value. Args: value: A scalar value to convert. require_str: If True, value must be a str. Returns: The bool parsed. Raises: ParseError: If a boolean value couldn't be consumed. 
""" if require_str: if value == 'true': return True elif value == 'false': return False else: raise ParseError('Expected "true" or "false", not {0}.'.format(value)) if not isinstance(value, bool): raise ParseError('Expected true or false without quotes.') return value _WKTJSONMETHODS = { 'google.protobuf.Any': ['_AnyMessageToJsonObject', '_ConvertAnyMessage'], 'google.protobuf.Duration': ['_GenericMessageToJsonObject', '_ConvertGenericMessage'], 'google.protobuf.FieldMask': ['_GenericMessageToJsonObject', '_ConvertGenericMessage'], 'google.protobuf.ListValue': ['_ListValueMessageToJsonObject', '_ConvertListValueMessage'], 'google.protobuf.Struct': ['_StructMessageToJsonObject', '_ConvertStructMessage'], 'google.protobuf.Timestamp': ['_GenericMessageToJsonObject', '_ConvertGenericMessage'], 'google.protobuf.Value': ['_ValueMessageToJsonObject', '_ConvertValueMessage'] }
gpl-3.0
mvaled/sentry
tests/acceptance/test_organization_sentry_app.py
2
3591
from __future__ import absolute_import from exam import mock from sentry.testutils import AcceptanceTestCase from tests.acceptance.page_objects.organization_integration_settings import ( OrganizationIntegrationSettingsPage, ) class OrganizationSentryAppAcceptanceTestCase(AcceptanceTestCase): """ As a user, I can install and uninstall a Sentry App """ def setUp(self): super(OrganizationSentryAppAcceptanceTestCase, self).setUp() self.user = self.create_user("[email protected]") self.org = self.create_organization(name="Tesla", owner=None) self.team = self.create_team(organization=self.org, name="Tesla Motors") self.project = self.create_project(organization=self.org, teams=[self.team], name="Model S") self.create_member(user=self.user, organization=self.org, role="owner", teams=[self.team]) self.sentry_app = self.create_sentry_app( name="Tesla App", published=True, organization=self.org ) self.login_as(self.user) self.org_integration_settings_path = u"/settings/{}/integrations/".format(self.org.slug) self.provider = mock.Mock() self.provider.key = "tesla-app" self.provider.name = "Tesla App" def load_page(self, url): self.browser.get(url) self.browser.wait_until_not(".loading-indicator") def test_install_sentry_app(self): self.load_page(self.org_integration_settings_path) org_settings_page = OrganizationIntegrationSettingsPage(browser=self.browser) provider_element = org_settings_page.get_provider(self.provider) # assert installation rather than upgrade button assert provider_element.install_button.label == "Install" assert provider_element.install_button.icon_href == "#icon-circle-add" provider_element.install_button.click() install_selecter = '.modal-dialog [aria-label="Accept & Install"]' self.browser.wait_until(install_selecter) self.browser.click(install_selecter) self.browser.wait_until(".ref-success") # provider_element might be rerendered provider_element = org_settings_page.get_provider(self.provider) # assert we see the uninstall option assert 
provider_element.install_button.label == "Uninstall" assert provider_element.install_button.icon_href == "#icon-trash" def test_uninstall_sentry_app(self): # add the installation self.installation = self.create_sentry_app_installation( slug=self.sentry_app.slug, organization=self.org, user=self.user ) self.load_page(self.org_integration_settings_path) org_settings_page = OrganizationIntegrationSettingsPage(browser=self.browser) provider_element = org_settings_page.get_provider(self.provider) # assert we see the uninstall option assert provider_element.install_button.label == "Uninstall" assert provider_element.install_button.icon_href == "#icon-trash" provider_element.install_button.click() self.browser.wait_until(org_settings_page.modal_selector) self.browser.click('.modal-dialog [aria-label="Confirm"]') self.browser.wait_until(".ref-success") # provider_element might be rerendered provider_element = org_settings_page.get_provider(self.provider) # assert we see the install option now assert provider_element.install_button.label == "Install" assert provider_element.install_button.icon_href == "#icon-circle-add"
bsd-3-clause
nmrao/robotframework
utest/reporting/test_logreportwriters.py
29
1276
import unittest from robot.reporting.logreportwriters import LogWriter from robot.utils.asserts import assert_true, assert_equals class LogWriterWithMockedWriting(LogWriter): def __init__(self, model): LogWriter.__init__(self, model) self.split_write_calls = [] self.write_called = False def _write_split_log(self, index, keywords, strings, path): self.split_write_calls.append((index, keywords, strings, path)) def _write_file(self, output, config, template): self.write_called = True class TestLogWriter(unittest.TestCase): def test_splitting_log(self): class model: split_results = [((0, 1, 2, -1), ('*', '*1', '*2')), ((0, 1, 0, 42), ('*','*x')), (((1, 2), (3, 4, ())), ('*',))] writer = LogWriterWithMockedWriting(model) writer.write('mylog.html', None) assert_true(writer.write_called) assert_equals([(1, (0, 1, 2, -1), ('*', '*1', '*2'), 'mylog-1.js'), (2, (0, 1, 0, 42), ('*', '*x'), 'mylog-2.js'), (3, ((1, 2), (3, 4, ())), ('*',), 'mylog-3.js')], writer.split_write_calls) if __name__ == '__main__': unittest.main()
apache-2.0
sarnold/exaile
xlgui/preferences/appearance.py
4
5192
# Copyright (C) 2008-2010 Adam Olsen # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2, or (at your option) # any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # # # The developers of the Exaile media player hereby grant permission # for non-GPL compatible GStreamer and Exaile plugins to be used and # distributed together with GStreamer and Exaile. This permission is # above and beyond the permissions granted by the GPL license by which # Exaile is covered. If you modify this code, you may extend this # exception to your version of the code, but you are not obligated to # do so. If you do not wish to do so, delete this exception statement # from your version. 
from gi.repository import Gtk

from xl import xdg
from xl.nls import gettext as _
from xlgui.preferences import widgets
from xlgui import tray

# Metadata consumed by the preferences dialog: section title, icon name and
# the GtkBuilder UI file backing this page.
name = _('Appearance')
icon = 'preferences-desktop-theme'
ui = xdg.get_data_path('ui', 'preferences', 'appearance.ui')


class ShowInfoAreaPreference(widgets.CheckPreference):
    # Checkbox preference; `name` is the settings key, `default` its value
    # when unset (same pattern for the classes below).
    default = True
    name = 'gui/show_info_area'


class ShowInfoAreaCoversPreference(widgets.CheckPreference):
    default = True
    name = 'gui/show_info_area_covers'


class SplashPreference(widgets.CheckPreference):
    default = True
    name = 'gui/use_splash'


class ShowTabBarPreference(widgets.CheckPreference):
    default = True
    name = 'gui/show_tabbar'


def _get_system_default_font():
    # Query the default GTK style for its font description string.
    # NOTE(review): Gtk.Widget.get_default_style() is a legacy GTK2-era API —
    # verify it is still available on the targeted GTK version.
    return Gtk.Widget.get_default_style().font_desc.to_string()


class PlaylistFontPreference(widgets.FontButtonPreference):
    default = _get_system_default_font()
    name = 'gui/playlist_font'


class PlaylistFontResetButtonPreference(widgets.FontResetButtonPreference):
    default = _get_system_default_font()
    name = 'gui/playlist_font_reset_button'
    # Only active when the playlist font preference itself is set.
    condition_preference_name = 'gui/playlist_font'


class UseAlphaTransparencyPreference(widgets.CheckPreference):
    default = False
    name = 'gui/use_alpha'
    # Toggling alpha requires recreating the window, hence a restart.
    restart_required = True


class TransparencyPreferfence(widgets.ScalePreference, widgets.CheckConditional):
    # NOTE(review): class name misspells "Preference"; left unchanged because
    # the module-level name may be looked up externally.
    default = 0.3
    name = 'gui/transparency'
    # Scale is only enabled while gui/use_alpha is checked.
    condition_preference_name = 'gui/use_alpha'

    def __init__(self, preferences, widget):
        widgets.ScalePreference.__init__(self, preferences, widget)
        widgets.CheckConditional.__init__(self)


class TrackCountsPreference(widgets.CheckPreference):
    default = True
    name = 'gui/display_track_counts'

    def apply(self, value=None):
        """Apply the setting, then reload the collection panel tree so the
        track counts appear/disappear immediately."""
        return_value = widgets.CheckPreference.apply(self, value)
        # Imported locally to avoid a circular import at module load time
        # — TODO confirm.
        import xlgui
        xlgui.get_controller().get_panel('collection').load_tree()
        return return_value


class UseTrayPreference(widgets.CheckPreference, widgets.Conditional):
    default = False
    name = 'gui/use_tray'

    def __init__(self, preferences, widget):
        widgets.CheckPreference.__init__(self, preferences, widget)
        widgets.Conditional.__init__(self)
        # Explain the disabled checkbox on platforms without tray support.
        if not tray.is_supported():
            self.widget.set_tooltip_text(_("Tray icons are not supported on your platform"))

    def on_check_condition(self):
        # Widget sensitivity follows platform tray support.
        return tray.is_supported()


class MinimizeToTrayPreference(widgets.CheckPreference, widgets.CheckConditional):
    default = False
    name = 'gui/minimize_to_tray'
    # Only meaningful while the tray icon itself is enabled.
    condition_preference_name = 'gui/use_tray'

    def __init__(self, preferences, widget):
        widgets.CheckPreference.__init__(self, preferences, widget)
        widgets.CheckConditional.__init__(self)


class CloseToTrayPreference(widgets.CheckPreference, widgets.CheckConditional):
    default = False
    name = 'gui/close_to_tray'
    condition_preference_name = 'gui/use_tray'

    def __init__(self, preferences, widget):
        widgets.CheckPreference.__init__(self, preferences, widget)
        widgets.CheckConditional.__init__(self)


class EnsureVisiblePreference(widgets.CheckPreference):
    default = True
    name = 'gui/ensure_visible'


class TabPlacementPreference(widgets.ComboPreference):
    default = 'top'
    name = 'gui/tab_placement'

    def __init__(self, preferences, widget):
        widgets.ComboPreference.__init__(self, preferences, widget)


# Disabled preference kept for reference (string statement, not executed UI).
"""
class ProgressBarTextFormatPreference(widgets.ComboEntryPreference):
    name = 'gui/progress_bar_text_format'
    completion_items = {
        '$current_time': _('Current playback position'),
        '$remaining_time': _('Remaining playback time'),
        '$total_time': _('Length of a track')
    }
    preset_items = [
        '$current_time / $remaining_time',
        '$current_time / $total_time'
    ]
    default = '$current_time / $remaining_time'
"""

# vim: et sts=4 sw=4
gpl-2.0
nonnib/eve-metrics
web2py/applications/admin/controllers/shell.py
25
1413
# web2py admin "shell" controller: an interactive Python console for an
# application, run over HTTP. `request`, `session`, `T`, `redirect`, `URL`,
# DEMO_MODE and MULTI_USER_MODE are injected into controller scope by the
# web2py framework (not defined here).
import sys
import cStringIO
import gluon.contrib.shell
import code
import thread
import cgi
from gluon.shell import env

# The shell executes arbitrary code, so it is disabled on shared deployments.
if DEMO_MODE or MULTI_USER_MODE:
    session.flash = T('disabled in demo mode')
    redirect(URL('default', 'site'))

FE = 10 ** 9


def index():
    """Render the shell page for the app named in the URL (default 'admin'),
    starting from a fresh command history."""
    app = request.args(0) or 'admin'
    reset()
    return dict(app=app)


def callback():
    """Ajax endpoint: execute one statement in the target app's environment
    and return the escaped 'In [n] : ...' transcript line."""
    app = request.args[0]
    command = request.vars.statement
    # Commands prefixed with '!' are "unescaped"; the prefix is stripped
    # before execution.
    escape = command[:1] != '!'
    # Per-app history object, created lazily in the session.
    history = session['history:' + app] = session.get(
        'history:' + app, gluon.contrib.shell.History())
    if not escape:
        command = command[1:]
    if command == '%reset':
        reset()
        return '*** reset ***'
    elif command[0] == '%':
        # '%N' replays command number N from this session's history.
        # NOTE(review): command[0] raises IndexError on an empty command,
        # and a non-integer suffix is caught but an out-of-range index is
        # not — confirm inputs are always non-empty.
        try:
            command = session['commands:' + app][int(command[1:])]
        except ValueError:
            return ''
    session['commands:' + app].append(command)
    # Build the target app's execution environment and run the statement.
    environ = env(app, True, extra_request=dict(is_https=request.is_https))
    output = gluon.contrib.shell.run(history, command, environ)
    k = len(session['commands:' + app]) - 1
    #output = PRE(output)
    #return TABLE(TR('In[%i]:'%k,PRE(command)),TR('Out[%i]:'%k,output))
    # HTML-escape so user code output cannot inject markup into the page.
    return cgi.escape('In [%i] : %s%s\n' % (k + 1, command, output))


def reset():
    """Clear the stored command list and history for the target app."""
    app = request.args(0) or 'admin'
    session['commands:' + app] = []
    session['history:' + app] = gluon.contrib.shell.History()
    return 'done'
mit
litex-hub/pythondata-cpu-blackparrot
pythondata_cpu_blackparrot/system_verilog/bp_me/src/asm/test/test.py
1
1385
# Generates an exhaustive CCE microcode assembly test program: every 2-register
# ALU op over all register pairs/destinations, plus representative immediate,
# unary and branch instructions, written to the chosen output file.
from __future__ import print_function
from argparse import ArgumentParser
import os
import sys

parser = ArgumentParser(description='CCE Microcode Test Code Generator')
parser.add_argument('-o', '--outfile', dest='outfile', type=str, default='test.S',
                    help='Output assembly file')
args = parser.parse_args()

# The eight general-purpose registers and every ordered operand pair.
gprs = ['r0','r1','r2','r3','r4','r5','r6','r7']
gpr_pairs = [(x,y) for x in gprs for y in gprs]

# Instruction mnemonics grouped by operand shape.
alu_2reg_ops = ['add', 'sub', 'lsh', 'rsh', 'and', 'or', 'xor']
alu_imm_ops = ['addi', 'subi', 'lshi', 'rshi']
alu_unary_ops = ['neg', 'inc', 'dec', 'not']
br_2reg_ops = ['beq', 'bne', 'ble', 'bge', 'blt', 'bgt']
br_imm_ops = ['beqi', 'bneqi']
br_1reg_ops = ['bz', 'bnz']

# bi
# bs, bss, bsi
# bz, bnz

def nop(f):
    # Emit a no-op instruction line.
    f.write('nop\n')

def label(f, l):
    # Emit a label; no newline, so the next write continues on the same line.
    f.write(l)

with open(os.path.abspath(args.outfile), 'w') as f:
    # 'start:' shares a line with the first nop (label() adds no newline).
    label(f, 'start: ')
    nop(f)
    # All 2-register ALU ops over every (src0, src1, dst) combination.
    for op in alu_2reg_ops:
        for opds in gpr_pairs:
            for dst in gprs:
                f.write('{0} {1} {2} {3}\n'.format(op, opds[0], opds[1], dst))
    # One representative encoding for each remaining instruction class.
    for op in alu_imm_ops:
        f.write('{0} r0 1 r1\n'.format(op))
    for op in alu_unary_ops:
        f.write('{0} r2\n'.format(op))
    # Branches all target the 'start' label.
    for op in br_2reg_ops:
        f.write('{0} r0 r1 start\n'.format(op))
    for op in br_imm_ops:
        f.write('{0} r0 2 start\n'.format(op))
    for op in br_1reg_ops:
        f.write('{0} r1 start\n'.format(op))
    # Unconditional branch back to the start.
    f.write('bi start\n')
bsd-3-clause
percy-g2/Novathor_xperia_u8500
6.1.1.B.0.253/external/webkit/Tools/Scripts/webkitpy/tool/bot/botinfo.py
20
2016
# Copyright (c) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


# FIXME: We should consider hanging one of these off the tool object.
class BotInfo(object):
    """Describes the bot a tool is running on, for status messages."""

    def __init__(self, tool):
        self._tool = tool

    def summary_text(self):
        """Return a one-line summary: optional bot id, port name, platform."""
        # bot_id is also stored on the options dictionary on the tool.
        bot_id = self._tool.status_server.bot_id
        if bot_id:
            prefix = "Bot: %s " % (bot_id)
        else:
            prefix = ""
        port_name = self._tool.port().name()
        platform_name = self._tool.platform.display_name()
        return "%sPort: %s Platform: %s" % (prefix, port_name, platform_name)
gpl-2.0
newswangerd/ansible
test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/vars/custom_vars.py
58
1487
# Copyright 2019 RedHat, inc # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. ############################################# from __future__ import (absolute_import, division, print_function) __metaclass__ = type DOCUMENTATION = ''' vars: custom_vars version_added: "2.10" short_description: load host and group vars description: test loading host and group vars from a collection options: stage: choices: ['all', 'inventory', 'task'] type: str ini: - key: stage section: custom_vars env: - name: ANSIBLE_VARS_PLUGIN_STAGE ''' from ansible.plugins.vars import BaseVarsPlugin class VarsModule(BaseVarsPlugin): def get_vars(self, loader, path, entities, cache=True): super(VarsModule, self).get_vars(loader, path, entities) return {'collection': 'collection_root_user'}
gpl-3.0
kvandermast/hic_sunt
nodejs/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
526
54812
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# GYP Xcode project-file generator: module-level constants consumed by the
# generator machinery below.  (Python 2 codebase: later code uses
# `except OSError, e` and `print` statements.)

import filecmp
import gyp.common
import gyp.xcodeproj_file
import errno
import os
import sys
import posixpath
import re
import shutil
import subprocess
import tempfile


# Project files generated by this module will use _intermediate_var as a
# custom Xcode setting whose value is a DerivedSources-like directory that's
# project-specific and configuration-specific.  The normal choice,
# DERIVED_FILE_DIR, is target-specific, which is thought to be too restrictive
# as it is likely that multiple targets within a single project file will want
# to access the same set of generated files.  The other option,
# PROJECT_DERIVED_FILE_DIR, is unsuitable because while it is project-specific,
# it is not configuration-specific.  INTERMEDIATE_DIR is defined as
# $(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION).
_intermediate_var = 'INTERMEDIATE_DIR'

# SHARED_INTERMEDIATE_DIR is the same, except that it is shared among all
# targets that share the same BUILT_PRODUCTS_DIR.
_shared_intermediate_var = 'SHARED_INTERMEDIATE_DIR'

_library_search_paths_var = 'LIBRARY_SEARCH_PATHS'

# Maps GYP variable names to the Xcode build-setting syntax that stands in
# for them in generated projects.
generator_default_variables = {
  'EXECUTABLE_PREFIX': '',
  'EXECUTABLE_SUFFIX': '',
  'STATIC_LIB_PREFIX': 'lib',
  'SHARED_LIB_PREFIX': 'lib',
  'STATIC_LIB_SUFFIX': '.a',
  'SHARED_LIB_SUFFIX': '.dylib',
  # INTERMEDIATE_DIR is a place for targets to build up intermediate products.
  # It is specific to each build environment.  It is only guaranteed to exist
  # and be constant within the context of a project, corresponding to a single
  # input file.  Some build environments may allow their intermediate directory
  # to be shared on a wider scale, but this is not guaranteed.
  'INTERMEDIATE_DIR': '$(%s)' % _intermediate_var,
  'OS': 'mac',
  'PRODUCT_DIR': '$(BUILT_PRODUCTS_DIR)',
  'LIB_DIR': '$(BUILT_PRODUCTS_DIR)',
  'RULE_INPUT_ROOT': '$(INPUT_FILE_BASE)',
  'RULE_INPUT_EXT': '$(INPUT_FILE_SUFFIX)',
  'RULE_INPUT_NAME': '$(INPUT_FILE_NAME)',
  'RULE_INPUT_PATH': '$(INPUT_FILE_PATH)',
  'RULE_INPUT_DIRNAME': '$(INPUT_FILE_DIRNAME)',
  'SHARED_INTERMEDIATE_DIR': '$(%s)' % _shared_intermediate_var,
  'CONFIGURATION_NAME': '$(CONFIGURATION)',
}

# The Xcode-specific sections that hold paths.
generator_additional_path_sections = [
  'mac_bundle_resources',
  'mac_framework_headers',
  'mac_framework_private_headers',
  # 'mac_framework_dirs', input already handles _dirs endings.
]

# The Xcode-specific keys that exist on targets and aren't moved down to
# configurations.
generator_additional_non_configuration_keys = [
  'mac_bundle',
  'mac_bundle_resources',
  'mac_framework_headers',
  'mac_framework_private_headers',
  'mac_xctest_bundle',
  'xcode_create_dependents_test_runner',
]

# We want to let any rules apply to files that are resources also.
generator_extra_sources_for_rules = [
  'mac_bundle_resources',
  'mac_framework_headers',
  'mac_framework_private_headers',
]

# Xcode's standard set of library directories, which don't need to be duplicated
# in LIBRARY_SEARCH_PATHS.  This list is not exhaustive, but that's okay.
xcode_standard_library_dirs = frozenset([ '$(SDKROOT)/usr/lib', '$(SDKROOT)/usr/local/lib', ]) def CreateXCConfigurationList(configuration_names): xccl = gyp.xcodeproj_file.XCConfigurationList({'buildConfigurations': []}) if len(configuration_names) == 0: configuration_names = ['Default'] for configuration_name in configuration_names: xcbc = gyp.xcodeproj_file.XCBuildConfiguration({ 'name': configuration_name}) xccl.AppendProperty('buildConfigurations', xcbc) xccl.SetProperty('defaultConfigurationName', configuration_names[0]) return xccl class XcodeProject(object): def __init__(self, gyp_path, path, build_file_dict): self.gyp_path = gyp_path self.path = path self.project = gyp.xcodeproj_file.PBXProject(path=path) projectDirPath = gyp.common.RelativePath( os.path.dirname(os.path.abspath(self.gyp_path)), os.path.dirname(path) or '.') self.project.SetProperty('projectDirPath', projectDirPath) self.project_file = \ gyp.xcodeproj_file.XCProjectFile({'rootObject': self.project}) self.build_file_dict = build_file_dict # TODO(mark): add destructor that cleans up self.path if created_dir is # True and things didn't complete successfully. Or do something even # better with "try"? self.created_dir = False try: os.makedirs(self.path) self.created_dir = True except OSError, e: if e.errno != errno.EEXIST: raise def Finalize1(self, xcode_targets, serialize_all_tests): # Collect a list of all of the build configuration names used by the # various targets in the file. It is very heavily advised to keep each # target in an entire project (even across multiple project files) using # the same set of configuration names. 
configurations = [] for xct in self.project.GetProperty('targets'): xccl = xct.GetProperty('buildConfigurationList') xcbcs = xccl.GetProperty('buildConfigurations') for xcbc in xcbcs: name = xcbc.GetProperty('name') if name not in configurations: configurations.append(name) # Replace the XCConfigurationList attached to the PBXProject object with # a new one specifying all of the configuration names used by the various # targets. try: xccl = CreateXCConfigurationList(configurations) self.project.SetProperty('buildConfigurationList', xccl) except: sys.stderr.write("Problem with gyp file %s\n" % self.gyp_path) raise # The need for this setting is explained above where _intermediate_var is # defined. The comments below about wanting to avoid project-wide build # settings apply here too, but this needs to be set on a project-wide basis # so that files relative to the _intermediate_var setting can be displayed # properly in the Xcode UI. # # Note that for configuration-relative files such as anything relative to # _intermediate_var, for the purposes of UI tree view display, Xcode will # only resolve the configuration name once, when the project file is # opened. If the active build configuration is changed, the project file # must be closed and reopened if it is desired for the tree view to update. # This is filed as Apple radar 6588391. xccl.SetBuildSetting(_intermediate_var, '$(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION)') xccl.SetBuildSetting(_shared_intermediate_var, '$(SYMROOT)/DerivedSources/$(CONFIGURATION)') # Set user-specified project-wide build settings and config files. This # is intended to be used very sparingly. Really, almost everything should # go into target-specific build settings sections. 
The project-wide # settings are only intended to be used in cases where Xcode attempts to # resolve variable references in a project context as opposed to a target # context, such as when resolving sourceTree references while building up # the tree tree view for UI display. # Any values set globally are applied to all configurations, then any # per-configuration values are applied. for xck, xcv in self.build_file_dict.get('xcode_settings', {}).iteritems(): xccl.SetBuildSetting(xck, xcv) if 'xcode_config_file' in self.build_file_dict: config_ref = self.project.AddOrGetFileInRootGroup( self.build_file_dict['xcode_config_file']) xccl.SetBaseConfiguration(config_ref) build_file_configurations = self.build_file_dict.get('configurations', {}) if build_file_configurations: for config_name in configurations: build_file_configuration_named = \ build_file_configurations.get(config_name, {}) if build_file_configuration_named: xcc = xccl.ConfigurationNamed(config_name) for xck, xcv in build_file_configuration_named.get('xcode_settings', {}).iteritems(): xcc.SetBuildSetting(xck, xcv) if 'xcode_config_file' in build_file_configuration_named: config_ref = self.project.AddOrGetFileInRootGroup( build_file_configurations[config_name]['xcode_config_file']) xcc.SetBaseConfiguration(config_ref) # Sort the targets based on how they appeared in the input. # TODO(mark): Like a lot of other things here, this assumes internal # knowledge of PBXProject - in this case, of its "targets" property. # ordinary_targets are ordinary targets that are already in the project # file. run_test_targets are the targets that run unittests and should be # used for the Run All Tests target. support_targets are the action/rule # targets used by GYP file targets, just kept for the assert check. ordinary_targets = [] run_test_targets = [] support_targets = [] # targets is full list of targets in the project. targets = [] # does the it define it's own "all"? 
has_custom_all = False # targets_for_all is the list of ordinary_targets that should be listed # in this project's "All" target. It includes each non_runtest_target # that does not have suppress_wildcard set. targets_for_all = [] for target in self.build_file_dict['targets']: target_name = target['target_name'] toolset = target['toolset'] qualified_target = gyp.common.QualifiedTarget(self.gyp_path, target_name, toolset) xcode_target = xcode_targets[qualified_target] # Make sure that the target being added to the sorted list is already in # the unsorted list. assert xcode_target in self.project._properties['targets'] targets.append(xcode_target) ordinary_targets.append(xcode_target) if xcode_target.support_target: support_targets.append(xcode_target.support_target) targets.append(xcode_target.support_target) if not int(target.get('suppress_wildcard', False)): targets_for_all.append(xcode_target) if target_name.lower() == 'all': has_custom_all = True; # If this target has a 'run_as' attribute, add its target to the # targets, and add it to the test targets. if target.get('run_as'): # Make a target to run something. It should have one # dependency, the parent xcode target. xccl = CreateXCConfigurationList(configurations) run_target = gyp.xcodeproj_file.PBXAggregateTarget({ 'name': 'Run ' + target_name, 'productName': xcode_target.GetProperty('productName'), 'buildConfigurationList': xccl, }, parent=self.project) run_target.AddDependency(xcode_target) command = target['run_as'] script = '' if command.get('working_directory'): script = script + 'cd "%s"\n' % \ gyp.xcodeproj_file.ConvertVariablesToShellSyntax( command.get('working_directory')) if command.get('environment'): script = script + "\n".join( ['export %s="%s"' % (key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val)) for (key, val) in command.get('environment').iteritems()]) + "\n" # Some test end up using sockets, files on disk, etc. and can get # confused if more then one test runs at a time. 
The generator # flag 'xcode_serialize_all_test_runs' controls the forcing of all # tests serially. It defaults to True. To get serial runs this # little bit of python does the same as the linux flock utility to # make sure only one runs at a time. command_prefix = '' if serialize_all_tests: command_prefix = \ """python -c "import fcntl, subprocess, sys file = open('$TMPDIR/GYP_serialize_test_runs', 'a') fcntl.flock(file.fileno(), fcntl.LOCK_EX) sys.exit(subprocess.call(sys.argv[1:]))" """ # If we were unable to exec for some reason, we want to exit # with an error, and fixup variable references to be shell # syntax instead of xcode syntax. script = script + 'exec ' + command_prefix + '%s\nexit 1\n' % \ gyp.xcodeproj_file.ConvertVariablesToShellSyntax( gyp.common.EncodePOSIXShellList(command.get('action'))) ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ 'shellScript': script, 'showEnvVarsInLog': 0, }) run_target.AppendProperty('buildPhases', ssbp) # Add the run target to the project file. targets.append(run_target) run_test_targets.append(run_target) xcode_target.test_runner = run_target # Make sure that the list of targets being replaced is the same length as # the one replacing it, but allow for the added test runner targets. assert len(self.project._properties['targets']) == \ len(ordinary_targets) + len(support_targets) self.project._properties['targets'] = targets # Get rid of unnecessary levels of depth in groups like the Source group. self.project.RootGroupsTakeOverOnlyChildren(True) # Sort the groups nicely. Do this after sorting the targets, because the # Products group is sorted based on the order of the targets. self.project.SortGroups() # Create an "All" target if there's more than one target in this project # file and the project didn't define its own "All" target. Put a generated # "All" target first so that people opening up the project for the first # time will build everything by default. 
if len(targets_for_all) > 1 and not has_custom_all: xccl = CreateXCConfigurationList(configurations) all_target = gyp.xcodeproj_file.PBXAggregateTarget( { 'buildConfigurationList': xccl, 'name': 'All', }, parent=self.project) for target in targets_for_all: all_target.AddDependency(target) # TODO(mark): This is evil because it relies on internal knowledge of # PBXProject._properties. It's important to get the "All" target first, # though. self.project._properties['targets'].insert(0, all_target) # The same, but for run_test_targets. if len(run_test_targets) > 1: xccl = CreateXCConfigurationList(configurations) run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget( { 'buildConfigurationList': xccl, 'name': 'Run All Tests', }, parent=self.project) for run_test_target in run_test_targets: run_all_tests_target.AddDependency(run_test_target) # Insert after the "All" target, which must exist if there is more than # one run_test_target. self.project._properties['targets'].insert(1, run_all_tests_target) def Finalize2(self, xcode_targets, xcode_target_to_target_dict): # Finalize2 needs to happen in a separate step because the process of # updating references to other projects depends on the ordering of targets # within remote project files. Finalize1 is responsible for sorting duty, # and once all project files are sorted, Finalize2 can come in and update # these references. # To support making a "test runner" target that will run all the tests # that are direct dependents of any given target, we look for # xcode_create_dependents_test_runner being set on an Aggregate target, # and generate a second target that will run the tests runners found under # the marked target. 
for bf_tgt in self.build_file_dict['targets']: if int(bf_tgt.get('xcode_create_dependents_test_runner', 0)): tgt_name = bf_tgt['target_name'] toolset = bf_tgt['toolset'] qualified_target = gyp.common.QualifiedTarget(self.gyp_path, tgt_name, toolset) xcode_target = xcode_targets[qualified_target] if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget): # Collect all the run test targets. all_run_tests = [] pbxtds = xcode_target.GetProperty('dependencies') for pbxtd in pbxtds: pbxcip = pbxtd.GetProperty('targetProxy') dependency_xct = pbxcip.GetProperty('remoteGlobalIDString') if hasattr(dependency_xct, 'test_runner'): all_run_tests.append(dependency_xct.test_runner) # Directly depend on all the runners as they depend on the target # that builds them. if len(all_run_tests) > 0: run_all_target = gyp.xcodeproj_file.PBXAggregateTarget({ 'name': 'Run %s Tests' % tgt_name, 'productName': tgt_name, }, parent=self.project) for run_test_target in all_run_tests: run_all_target.AddDependency(run_test_target) # Insert the test runner after the related target. idx = self.project._properties['targets'].index(xcode_target) self.project._properties['targets'].insert(idx + 1, run_all_target) # Update all references to other projects, to make sure that the lists of # remote products are complete. Otherwise, Xcode will fill them in when # it opens the project file, which will result in unnecessary diffs. # TODO(mark): This is evil because it relies on internal knowledge of # PBXProject._other_pbxprojects. for other_pbxproject in self.project._other_pbxprojects.keys(): self.project.AddOrGetProjectReference(other_pbxproject) self.project.SortRemoteProductReferences() # Give everything an ID. self.project_file.ComputeIDs() # Make sure that no two objects in the project file have the same ID. If # multiple objects wind up with the same ID, upon loading the file, Xcode # will only recognize one object (the last one in the file?) and the # results are unpredictable. 
self.project_file.EnsureNoIDCollisions() def Write(self): # Write the project file to a temporary location first. Xcode watches for # changes to the project file and presents a UI sheet offering to reload # the project when it does change. However, in some cases, especially when # multiple projects are open or when Xcode is busy, things don't work so # seamlessly. Sometimes, Xcode is able to detect that a project file has # changed but can't unload it because something else is referencing it. # To mitigate this problem, and to avoid even having Xcode present the UI # sheet when an open project is rewritten for inconsequential changes, the # project file is written to a temporary file in the xcodeproj directory # first. The new temporary file is then compared to the existing project # file, if any. If they differ, the new file replaces the old; otherwise, # the new project file is simply deleted. Xcode properly detects a file # being renamed over an open project file as a change and so it remains # able to present the "project file changed" sheet under this system. # Writing to a temporary file first also avoids the possible problem of # Xcode rereading an incomplete project file. (output_fd, new_pbxproj_path) = \ tempfile.mkstemp(suffix='.tmp', prefix='project.pbxproj.gyp.', dir=self.path) try: output_file = os.fdopen(output_fd, 'wb') self.project_file.Print(output_file) output_file.close() pbxproj_path = os.path.join(self.path, 'project.pbxproj') same = False try: same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False) except OSError, e: if e.errno != errno.ENOENT: raise if same: # The new file is identical to the old one, just get rid of the new # one. os.unlink(new_pbxproj_path) else: # The new file is different from the old one, or there is no old one. # Rename the new file to the permanent name. # # tempfile.mkstemp uses an overly restrictive mode, resulting in a # file that can only be read by the owner, regardless of the umask. 
# There's no reason to not respect the umask here, which means that # an extra hoop is required to fetch it and reset the new file's mode. # # No way to get the umask without setting a new one? Set a safe one # and then set it back to the old value. umask = os.umask(077) os.umask(umask) os.chmod(new_pbxproj_path, 0666 & ~umask) os.rename(new_pbxproj_path, pbxproj_path) except Exception: # Don't leave turds behind. In fact, if this code was responsible for # creating the xcodeproj directory, get rid of that too. os.unlink(new_pbxproj_path) if self.created_dir: shutil.rmtree(self.path, True) raise def AddSourceToTarget(source, type, pbxp, xct): # TODO(mark): Perhaps source_extensions and library_extensions can be made a # little bit fancier. source_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's'] # .o is conceptually more of a "source" than a "library," but Xcode thinks # of "sources" as things to compile and "libraries" (or "frameworks") as # things to link with. Adding an object file to an Xcode target's frameworks # phase works properly. library_extensions = ['a', 'dylib', 'framework', 'o'] basename = posixpath.basename(source) (root, ext) = posixpath.splitext(basename) if ext: ext = ext[1:].lower() if ext in source_extensions and type != 'none': xct.SourcesPhase().AddFile(source) elif ext in library_extensions and type != 'none': xct.FrameworksPhase().AddFile(source) else: # Files that aren't added to a sources or frameworks build phase can still # go into the project file, just not as part of a build phase. pbxp.AddOrGetFileInRootGroup(source) def AddResourceToTarget(resource, pbxp, xct): # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call # where it's used. xct.ResourcesPhase().AddFile(resource) def AddHeaderToTarget(header, pbxp, xct, is_public): # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call # where it's used. 
settings = '{ATTRIBUTES = (%s, ); }' % ('Private', 'Public')[is_public] xct.HeadersPhase().AddFile(header, settings) _xcode_variable_re = re.compile('(\$\((.*?)\))') def ExpandXcodeVariables(string, expansions): """Expands Xcode-style $(VARIABLES) in string per the expansions dict. In some rare cases, it is appropriate to expand Xcode variables when a project file is generated. For any substring $(VAR) in string, if VAR is a key in the expansions dict, $(VAR) will be replaced with expansions[VAR]. Any $(VAR) substring in string for which VAR is not a key in the expansions dict will remain in the returned string. """ matches = _xcode_variable_re.findall(string) if matches == None: return string matches.reverse() for match in matches: (to_replace, variable) = match if not variable in expansions: continue replacement = expansions[variable] string = re.sub(re.escape(to_replace), replacement, string) return string _xcode_define_re = re.compile(r'([\\\"\' ])') def EscapeXcodeDefine(s): """We must escape the defines that we give to XCode so that it knows not to split on spaces and to respect backslash and quote literals. 
However, we must not quote the define, or Xcode will incorrectly intepret variables especially $(inherited).""" return re.sub(_xcode_define_re, r'\\\1', s) def PerformBuild(data, configurations, params): options = params['options'] for build_file, build_file_dict in data.iteritems(): (build_file_root, build_file_ext) = os.path.splitext(build_file) if build_file_ext != '.gyp': continue xcodeproj_path = build_file_root + options.suffix + '.xcodeproj' if options.generator_output: xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path) for config in configurations: arguments = ['xcodebuild', '-project', xcodeproj_path] arguments += ['-configuration', config] print "Building [%s]: %s" % (config, arguments) subprocess.check_call(arguments) def GenerateOutput(target_list, target_dicts, data, params): options = params['options'] generator_flags = params.get('generator_flags', {}) parallel_builds = generator_flags.get('xcode_parallel_builds', True) serialize_all_tests = \ generator_flags.get('xcode_serialize_all_test_runs', True) project_version = generator_flags.get('xcode_project_version', None) skip_excluded_files = \ not generator_flags.get('xcode_list_excluded_files', True) xcode_projects = {} for build_file, build_file_dict in data.iteritems(): (build_file_root, build_file_ext) = os.path.splitext(build_file) if build_file_ext != '.gyp': continue xcodeproj_path = build_file_root + options.suffix + '.xcodeproj' if options.generator_output: xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path) xcp = XcodeProject(build_file, xcodeproj_path, build_file_dict) xcode_projects[build_file] = xcp pbxp = xcp.project if parallel_builds: pbxp.SetProperty('attributes', {'BuildIndependentTargetsInParallel': 'YES'}) if project_version: xcp.project_file.SetXcodeVersion(project_version) # Add gyp/gypi files to project if not generator_flags.get('standalone'): main_group = pbxp.GetProperty('mainGroup') build_group = gyp.xcodeproj_file.PBXGroup({'name': 
'Build'}) main_group.AppendChild(build_group) for included_file in build_file_dict['included_files']: build_group.AddOrGetFileByPath(included_file, False) xcode_targets = {} xcode_target_to_target_dict = {} for qualified_target in target_list: [build_file, target_name, toolset] = \ gyp.common.ParseQualifiedTarget(qualified_target) spec = target_dicts[qualified_target] if spec['toolset'] != 'target': raise Exception( 'Multiple toolsets not supported in xcode build (target %s)' % qualified_target) configuration_names = [spec['default_configuration']] for configuration_name in sorted(spec['configurations'].keys()): if configuration_name not in configuration_names: configuration_names.append(configuration_name) xcp = xcode_projects[build_file] pbxp = xcp.project # Set up the configurations for the target according to the list of names # supplied. xccl = CreateXCConfigurationList(configuration_names) # Create an XCTarget subclass object for the target. The type with # "+bundle" appended will be used if the target has "mac_bundle" set. # loadable_modules not in a mac_bundle are mapped to # com.googlecode.gyp.xcode.bundle, a pseudo-type that xcode.py interprets # to create a single-file mh_bundle. 
_types = { 'executable': 'com.apple.product-type.tool', 'loadable_module': 'com.googlecode.gyp.xcode.bundle', 'shared_library': 'com.apple.product-type.library.dynamic', 'static_library': 'com.apple.product-type.library.static', 'executable+bundle': 'com.apple.product-type.application', 'loadable_module+bundle': 'com.apple.product-type.bundle', 'loadable_module+xctest': 'com.apple.product-type.bundle.unit-test', 'shared_library+bundle': 'com.apple.product-type.framework', } target_properties = { 'buildConfigurationList': xccl, 'name': target_name, } type = spec['type'] is_xctest = int(spec.get('mac_xctest_bundle', 0)) is_bundle = int(spec.get('mac_bundle', 0)) or is_xctest if type != 'none': type_bundle_key = type if is_xctest: type_bundle_key += '+xctest' assert type == 'loadable_module', ( 'mac_xctest_bundle targets must have type loadable_module ' '(target %s)' % target_name) elif is_bundle: type_bundle_key += '+bundle' xctarget_type = gyp.xcodeproj_file.PBXNativeTarget try: target_properties['productType'] = _types[type_bundle_key] except KeyError, e: gyp.common.ExceptionAppend(e, "-- unknown product type while " "writing target %s" % target_name) raise else: xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget assert not is_bundle, ( 'mac_bundle targets cannot have type none (target "%s")' % target_name) assert not is_xctest, ( 'mac_xctest_bundle targets cannot have type none (target "%s")' % target_name) target_product_name = spec.get('product_name') if target_product_name is not None: target_properties['productName'] = target_product_name xct = xctarget_type(target_properties, parent=pbxp, force_outdir=spec.get('product_dir'), force_prefix=spec.get('product_prefix'), force_extension=spec.get('product_extension')) pbxp.AppendProperty('targets', xct) xcode_targets[qualified_target] = xct xcode_target_to_target_dict[xct] = spec spec_actions = spec.get('actions', []) spec_rules = spec.get('rules', []) # Xcode has some "issues" with checking dependencies for the 
"Compile # sources" step with any source files/headers generated by actions/rules. # To work around this, if a target is building anything directly (not # type "none"), then a second target is used to run the GYP actions/rules # and is made a dependency of this target. This way the work is done # before the dependency checks for what should be recompiled. support_xct = None if type != 'none' and (spec_actions or spec_rules): support_xccl = CreateXCConfigurationList(configuration_names); support_target_properties = { 'buildConfigurationList': support_xccl, 'name': target_name + ' Support', } if target_product_name: support_target_properties['productName'] = \ target_product_name + ' Support' support_xct = \ gyp.xcodeproj_file.PBXAggregateTarget(support_target_properties, parent=pbxp) pbxp.AppendProperty('targets', support_xct) xct.AddDependency(support_xct) # Hang the support target off the main target so it can be tested/found # by the generator during Finalize. xct.support_target = support_xct prebuild_index = 0 # Add custom shell script phases for "actions" sections. for action in spec_actions: # There's no need to write anything into the script to ensure that the # output directories already exist, because Xcode will look at the # declared outputs and automatically ensure that they exist for us. # Do we have a message to print when this action runs? message = action.get('message') if message: message = 'echo note: ' + gyp.common.EncodePOSIXShellArgument(message) else: message = '' # Turn the list into a string that can be passed to a shell. action_string = gyp.common.EncodePOSIXShellList(action['action']) # Convert Xcode-type variable references to sh-compatible environment # variable references. 
message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message) action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax( action_string) script = '' # Include the optional message if message_sh: script += message_sh + '\n' # Be sure the script runs in exec, and that if exec fails, the script # exits signalling an error. script += 'exec ' + action_string_sh + '\nexit 1\n' ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ 'inputPaths': action['inputs'], 'name': 'Action "' + action['action_name'] + '"', 'outputPaths': action['outputs'], 'shellScript': script, 'showEnvVarsInLog': 0, }) if support_xct: support_xct.AppendProperty('buildPhases', ssbp) else: # TODO(mark): this assumes too much knowledge of the internals of # xcodeproj_file; some of these smarts should move into xcodeproj_file # itself. xct._properties['buildPhases'].insert(prebuild_index, ssbp) prebuild_index = prebuild_index + 1 # TODO(mark): Should verify that at most one of these is specified. if int(action.get('process_outputs_as_sources', False)): for output in action['outputs']: AddSourceToTarget(output, type, pbxp, xct) if int(action.get('process_outputs_as_mac_bundle_resources', False)): for output in action['outputs']: AddResourceToTarget(output, pbxp, xct) # tgt_mac_bundle_resources holds the list of bundle resources so # the rule processing can check against it. if is_bundle: tgt_mac_bundle_resources = spec.get('mac_bundle_resources', []) else: tgt_mac_bundle_resources = [] # Add custom shell script phases driving "make" for "rules" sections. # # Xcode's built-in rule support is almost powerful enough to use directly, # but there are a few significant deficiencies that render them unusable. # There are workarounds for some of its inadequacies, but in aggregate, # the workarounds added complexity to the generator, and some workarounds # actually require input files to be crafted more carefully than I'd like. 
# Consequently, until Xcode rules are made more capable, "rules" input # sections will be handled in Xcode output by shell script build phases # performed prior to the compilation phase. # # The following problems with Xcode rules were found. The numbers are # Apple radar IDs. I hope that these shortcomings are addressed, I really # liked having the rules handled directly in Xcode during the period that # I was prototyping this. # # 6588600 Xcode compiles custom script rule outputs too soon, compilation # fails. This occurs when rule outputs from distinct inputs are # interdependent. The only workaround is to put rules and their # inputs in a separate target from the one that compiles the rule # outputs. This requires input file cooperation and it means that # process_outputs_as_sources is unusable. # 6584932 Need to declare that custom rule outputs should be excluded from # compilation. A possible workaround is to lie to Xcode about a # rule's output, giving it a dummy file it doesn't know how to # compile. The rule action script would need to touch the dummy. # 6584839 I need a way to declare additional inputs to a custom rule. # A possible workaround is a shell script phase prior to # compilation that touches a rule's primary input files if any # would-be additional inputs are newer than the output. Modifying # the source tree - even just modification times - feels dirty. # 6564240 Xcode "custom script" build rules always dump all environment # variables. This is a low-prioroty problem and is not a # show-stopper. rules_by_ext = {} for rule in spec_rules: rules_by_ext[rule['extension']] = rule # First, some definitions: # # A "rule source" is a file that was listed in a target's "sources" # list and will have a rule applied to it on the basis of matching the # rule's "extensions" attribute. Rule sources are direct inputs to # rules. # # Rule definitions may specify additional inputs in their "inputs" # attribute. 
These additional inputs are used for dependency tracking # purposes. # # A "concrete output" is a rule output with input-dependent variables # resolved. For example, given a rule with: # 'extension': 'ext', 'outputs': ['$(INPUT_FILE_BASE).cc'], # if the target's "sources" list contained "one.ext" and "two.ext", # the "concrete output" for rule input "two.ext" would be "two.cc". If # a rule specifies multiple outputs, each input file that the rule is # applied to will have the same number of concrete outputs. # # If any concrete outputs are outdated or missing relative to their # corresponding rule_source or to any specified additional input, the # rule action must be performed to generate the concrete outputs. # concrete_outputs_by_rule_source will have an item at the same index # as the rule['rule_sources'] that it corresponds to. Each item is a # list of all of the concrete outputs for the rule_source. concrete_outputs_by_rule_source = [] # concrete_outputs_all is a flat list of all concrete outputs that this # rule is able to produce, given the known set of input files # (rule_sources) that apply to it. concrete_outputs_all = [] # messages & actions are keyed by the same indices as rule['rule_sources'] # and concrete_outputs_by_rule_source. They contain the message and # action to perform after resolving input-dependent variables. The # message is optional, in which case None is stored for each rule source. messages = [] actions = [] for rule_source in rule.get('rule_sources', []): rule_source_dirname, rule_source_basename = \ posixpath.split(rule_source) (rule_source_root, rule_source_ext) = \ posixpath.splitext(rule_source_basename) # These are the same variable names that Xcode uses for its own native # rule support. Because Xcode's rule engine is not being used, they # need to be expanded as they are written to the makefile. 
rule_input_dict = { 'INPUT_FILE_BASE': rule_source_root, 'INPUT_FILE_SUFFIX': rule_source_ext, 'INPUT_FILE_NAME': rule_source_basename, 'INPUT_FILE_PATH': rule_source, 'INPUT_FILE_DIRNAME': rule_source_dirname, } concrete_outputs_for_this_rule_source = [] for output in rule.get('outputs', []): # Fortunately, Xcode and make both use $(VAR) format for their # variables, so the expansion is the only transformation necessary. # Any remaning $(VAR)-type variables in the string can be given # directly to make, which will pick up the correct settings from # what Xcode puts into the environment. concrete_output = ExpandXcodeVariables(output, rule_input_dict) concrete_outputs_for_this_rule_source.append(concrete_output) # Add all concrete outputs to the project. pbxp.AddOrGetFileInRootGroup(concrete_output) concrete_outputs_by_rule_source.append( \ concrete_outputs_for_this_rule_source) concrete_outputs_all.extend(concrete_outputs_for_this_rule_source) # TODO(mark): Should verify that at most one of these is specified. if int(rule.get('process_outputs_as_sources', False)): for output in concrete_outputs_for_this_rule_source: AddSourceToTarget(output, type, pbxp, xct) # If the file came from the mac_bundle_resources list or if the rule # is marked to process outputs as bundle resource, do so. was_mac_bundle_resource = rule_source in tgt_mac_bundle_resources if was_mac_bundle_resource or \ int(rule.get('process_outputs_as_mac_bundle_resources', False)): for output in concrete_outputs_for_this_rule_source: AddResourceToTarget(output, pbxp, xct) # Do we have a message to print when this rule runs? message = rule.get('message') if message: message = gyp.common.EncodePOSIXShellArgument(message) message = ExpandXcodeVariables(message, rule_input_dict) messages.append(message) # Turn the list into a string that can be passed to a shell. 
action_string = gyp.common.EncodePOSIXShellList(rule['action']) action = ExpandXcodeVariables(action_string, rule_input_dict) actions.append(action) if len(concrete_outputs_all) > 0: # TODO(mark): There's a possibilty for collision here. Consider # target "t" rule "A_r" and target "t_A" rule "r". makefile_name = '%s.make' % re.sub( '[^a-zA-Z0-9_]', '_' , '%s_%s' % (target_name, rule['rule_name'])) makefile_path = os.path.join(xcode_projects[build_file].path, makefile_name) # TODO(mark): try/close? Write to a temporary file and swap it only # if it's got changes? makefile = open(makefile_path, 'wb') # make will build the first target in the makefile by default. By # convention, it's called "all". List all (or at least one) # concrete output for each rule source as a prerequisite of the "all" # target. makefile.write('all: \\\n') for concrete_output_index in \ xrange(0, len(concrete_outputs_by_rule_source)): # Only list the first (index [0]) concrete output of each input # in the "all" target. Otherwise, a parallel make (-j > 1) would # attempt to process each input multiple times simultaneously. # Otherwise, "all" could just contain the entire list of # concrete_outputs_all. concrete_output = \ concrete_outputs_by_rule_source[concrete_output_index][0] if concrete_output_index == len(concrete_outputs_by_rule_source) - 1: eol = '' else: eol = ' \\' makefile.write(' %s%s\n' % (concrete_output, eol)) for (rule_source, concrete_outputs, message, action) in \ zip(rule['rule_sources'], concrete_outputs_by_rule_source, messages, actions): makefile.write('\n') # Add a rule that declares it can build each concrete output of a # rule source. Collect the names of the directories that are # required. 
concrete_output_dirs = [] for concrete_output_index in xrange(0, len(concrete_outputs)): concrete_output = concrete_outputs[concrete_output_index] if concrete_output_index == 0: bol = '' else: bol = ' ' makefile.write('%s%s \\\n' % (bol, concrete_output)) concrete_output_dir = posixpath.dirname(concrete_output) if (concrete_output_dir and concrete_output_dir not in concrete_output_dirs): concrete_output_dirs.append(concrete_output_dir) makefile.write(' : \\\n') # The prerequisites for this rule are the rule source itself and # the set of additional rule inputs, if any. prerequisites = [rule_source] prerequisites.extend(rule.get('inputs', [])) for prerequisite_index in xrange(0, len(prerequisites)): prerequisite = prerequisites[prerequisite_index] if prerequisite_index == len(prerequisites) - 1: eol = '' else: eol = ' \\' makefile.write(' %s%s\n' % (prerequisite, eol)) # Make sure that output directories exist before executing the rule # action. if len(concrete_output_dirs) > 0: makefile.write('\t@mkdir -p "%s"\n' % '" "'.join(concrete_output_dirs)) # The rule message and action have already had the necessary variable # substitutions performed. if message: # Mark it with note: so Xcode picks it up in build output. makefile.write('\t@echo note: %s\n' % message) makefile.write('\t%s\n' % action) makefile.close() # It might be nice to ensure that needed output directories exist # here rather than in each target in the Makefile, but that wouldn't # work if there ever was a concrete output that had an input-dependent # variable anywhere other than in the leaf position. # Don't declare any inputPaths or outputPaths. If they're present, # Xcode will provide a slight optimization by only running the script # phase if any output is missing or outdated relative to any input. # Unfortunately, it will also assume that all outputs are touched by # the script, and if the outputs serve as files in a compilation # phase, they will be unconditionally rebuilt. 
Since make might not # rebuild everything that could be declared here as an output, this # extra compilation activity is unnecessary. With inputPaths and # outputPaths not supplied, make will always be called, but it knows # enough to not do anything when everything is up-to-date. # To help speed things up, pass -j COUNT to make so it does some work # in parallel. Don't use ncpus because Xcode will build ncpus targets # in parallel and if each target happens to have a rules step, there # would be ncpus^2 things going. With a machine that has 2 quad-core # Xeons, a build can quickly run out of processes based on # scheduling/other tasks, and randomly failing builds are no good. script = \ """JOB_COUNT="$(/usr/sbin/sysctl -n hw.ncpu)" if [ "${JOB_COUNT}" -gt 4 ]; then JOB_COUNT=4 fi exec xcrun make -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}" exit 1 """ % makefile_name ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ 'name': 'Rule "' + rule['rule_name'] + '"', 'shellScript': script, 'showEnvVarsInLog': 0, }) if support_xct: support_xct.AppendProperty('buildPhases', ssbp) else: # TODO(mark): this assumes too much knowledge of the internals of # xcodeproj_file; some of these smarts should move into xcodeproj_file # itself. xct._properties['buildPhases'].insert(prebuild_index, ssbp) prebuild_index = prebuild_index + 1 # Extra rule inputs also go into the project file. Concrete outputs were # already added when they were computed. groups = ['inputs', 'inputs_excluded'] if skip_excluded_files: groups = [x for x in groups if not x.endswith('_excluded')] for group in groups: for item in rule.get(group, []): pbxp.AddOrGetFileInRootGroup(item) # Add "sources". for source in spec.get('sources', []): (source_root, source_extension) = posixpath.splitext(source) if source_extension[1:] not in rules_by_ext: # AddSourceToTarget will add the file to a root group if it's not # already there. 
AddSourceToTarget(source, type, pbxp, xct) else: pbxp.AddOrGetFileInRootGroup(source) # Add "mac_bundle_resources" and "mac_framework_private_headers" if # it's a bundle of any type. if is_bundle: for resource in tgt_mac_bundle_resources: (resource_root, resource_extension) = posixpath.splitext(resource) if resource_extension[1:] not in rules_by_ext: AddResourceToTarget(resource, pbxp, xct) else: pbxp.AddOrGetFileInRootGroup(resource) for header in spec.get('mac_framework_private_headers', []): AddHeaderToTarget(header, pbxp, xct, False) # Add "mac_framework_headers". These can be valid for both frameworks # and static libraries. if is_bundle or type == 'static_library': for header in spec.get('mac_framework_headers', []): AddHeaderToTarget(header, pbxp, xct, True) # Add "copies". pbxcp_dict = {} for copy_group in spec.get('copies', []): dest = copy_group['destination'] if dest[0] not in ('/', '$'): # Relative paths are relative to $(SRCROOT). dest = '$(SRCROOT)/' + dest # Coalesce multiple "copies" sections in the same target with the same # "destination" property into the same PBXCopyFilesBuildPhase, otherwise # they'll wind up with ID collisions. pbxcp = pbxcp_dict.get(dest, None) if pbxcp is None: pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({ 'name': 'Copy to ' + copy_group['destination'] }, parent=xct) pbxcp.SetDestination(dest) # TODO(mark): The usual comment about this knowing too much about # gyp.xcodeproj_file internals applies. xct._properties['buildPhases'].insert(prebuild_index, pbxcp) pbxcp_dict[dest] = pbxcp for file in copy_group['files']: pbxcp.AddFile(file) # Excluded files can also go into the project file. if not skip_excluded_files: for key in ['sources', 'mac_bundle_resources', 'mac_framework_headers', 'mac_framework_private_headers']: excluded_key = key + '_excluded' for item in spec.get(excluded_key, []): pbxp.AddOrGetFileInRootGroup(item) # So can "inputs" and "outputs" sections of "actions" groups. 
groups = ['inputs', 'inputs_excluded', 'outputs', 'outputs_excluded'] if skip_excluded_files: groups = [x for x in groups if not x.endswith('_excluded')] for action in spec.get('actions', []): for group in groups: for item in action.get(group, []): # Exclude anything in BUILT_PRODUCTS_DIR. They're products, not # sources. if not item.startswith('$(BUILT_PRODUCTS_DIR)/'): pbxp.AddOrGetFileInRootGroup(item) for postbuild in spec.get('postbuilds', []): action_string_sh = gyp.common.EncodePOSIXShellList(postbuild['action']) script = 'exec ' + action_string_sh + '\nexit 1\n' # Make the postbuild step depend on the output of ld or ar from this # target. Apparently putting the script step after the link step isn't # sufficient to ensure proper ordering in all cases. With an input # declared but no outputs, the script step should run every time, as # desired. ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ 'inputPaths': ['$(BUILT_PRODUCTS_DIR)/$(EXECUTABLE_PATH)'], 'name': 'Postbuild "' + postbuild['postbuild_name'] + '"', 'shellScript': script, 'showEnvVarsInLog': 0, }) xct.AppendProperty('buildPhases', ssbp) # Add dependencies before libraries, because adding a dependency may imply # adding a library. It's preferable to keep dependencies listed first # during a link phase so that they can override symbols that would # otherwise be provided by libraries, which will usually include system # libraries. On some systems, ld is finicky and even requires the # libraries to be ordered in such a way that unresolved symbols in # earlier-listed libraries may only be resolved by later-listed libraries. # The Mac linker doesn't work that way, but other platforms do, and so # their linker invocations need to be constructed in this way. There's # no compelling reason for Xcode's linker invocations to differ. 
if 'dependencies' in spec: for dependency in spec['dependencies']: xct.AddDependency(xcode_targets[dependency]) # The support project also gets the dependencies (in case they are # needed for the actions/rules to work). if support_xct: support_xct.AddDependency(xcode_targets[dependency]) if 'libraries' in spec: for library in spec['libraries']: xct.FrameworksPhase().AddFile(library) # Add the library's directory to LIBRARY_SEARCH_PATHS if necessary. # I wish Xcode handled this automatically. library_dir = posixpath.dirname(library) if library_dir not in xcode_standard_library_dirs and ( not xct.HasBuildSetting(_library_search_paths_var) or library_dir not in xct.GetBuildSetting(_library_search_paths_var)): xct.AppendBuildSetting(_library_search_paths_var, library_dir) for configuration_name in configuration_names: configuration = spec['configurations'][configuration_name] xcbc = xct.ConfigurationNamed(configuration_name) for include_dir in configuration.get('mac_framework_dirs', []): xcbc.AppendBuildSetting('FRAMEWORK_SEARCH_PATHS', include_dir) for include_dir in configuration.get('include_dirs', []): xcbc.AppendBuildSetting('HEADER_SEARCH_PATHS', include_dir) for library_dir in configuration.get('library_dirs', []): if library_dir not in xcode_standard_library_dirs and ( not xcbc.HasBuildSetting(_library_search_paths_var) or library_dir not in xcbc.GetBuildSetting(_library_search_paths_var)): xcbc.AppendBuildSetting(_library_search_paths_var, library_dir) if 'defines' in configuration: for define in configuration['defines']: set_define = EscapeXcodeDefine(define) xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define) if 'xcode_settings' in configuration: for xck, xcv in configuration['xcode_settings'].iteritems(): xcbc.SetBuildSetting(xck, xcv) if 'xcode_config_file' in configuration: config_ref = pbxp.AddOrGetFileInRootGroup( configuration['xcode_config_file']) xcbc.SetBaseConfiguration(config_ref) build_files = [] for build_file, build_file_dict in 
data.iteritems(): if build_file.endswith('.gyp'): build_files.append(build_file) for build_file in build_files: xcode_projects[build_file].Finalize1(xcode_targets, serialize_all_tests) for build_file in build_files: xcode_projects[build_file].Finalize2(xcode_targets, xcode_target_to_target_dict) for build_file in build_files: xcode_projects[build_file].Write()
gpl-3.0
solin319/incubator-mxnet
tools/accnn/acc_conv.py
52
3352
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import numpy as np from scipy import linalg as LA import mxnet as mx import argparse import utils def conv_vh_decomposition(model, args): W = model.arg_params[args.layer+'_weight'].asnumpy() N, C, y, x = W.shape b = model.arg_params[args.layer+'_bias'].asnumpy() W = W.transpose((1,2,0,3)).reshape((C*y, -1)) U, D, Q = np.linalg.svd(W, full_matrices=False) sqrt_D = LA.sqrtm(np.diag(D)) K = args.K V = U[:,:K].dot(sqrt_D[:K, :K]) H = Q.T[:,:K].dot(sqrt_D[:K, :K]) V = V.T.reshape(K, C, y, 1) b_1 = np.zeros((K, )) H = H.reshape(N, x, 1, K).transpose((0,3,2,1)) b_2 = b W1, b1, W2, b2 = V, b_1, H, b_2 def sym_handle(data, node): kernel = eval(node['param']['kernel']) pad = eval(node['param']['pad']) name = node['name'] name1 = name + '_v' kernel1 = tuple((kernel[0], 1)) pad1 = tuple((pad[0], 0)) num_filter = W1.shape[0] sym1 = mx.symbol.Convolution(data=data, kernel=kernel1, pad=pad1, num_filter=num_filter, name=name1) name2 = name + '_h' kernel2 = tuple((1, kernel[1])) pad2 = tuple((0, pad[1])) num_filter = W2.shape[0] sym2 = mx.symbol.Convolution(data=sym1, kernel=kernel2, pad=pad2, num_filter=num_filter, name=name2) return sym2 def arg_handle(arg_shape_dic, arg_params): name1 = args.layer + '_v' name2 = 
args.layer + '_h' weight1 = mx.ndarray.array(W1) bias1 = mx.ndarray.array(b1) weight2 = mx.ndarray.array(W2) bias2 = mx.ndarray.array(b2) assert weight1.shape == arg_shape_dic[name1+'_weight'], 'weight1' assert weight2.shape == arg_shape_dic[name2+'_weight'], 'weight2' assert bias1.shape == arg_shape_dic[name1+'_bias'], 'bias1' assert bias2.shape == arg_shape_dic[name2+'_bias'], 'bias2' arg_params[name1 + '_weight'] = weight1 arg_params[name1 + '_bias'] = bias1 arg_params[name2 + '_weight'] = weight2 arg_params[name2 + '_bias'] = bias2 new_model = utils.replace_conv_layer(args.layer, model, sym_handle, arg_handle) return new_model def main(): model = utils.load_model(args) new_model = conv_vh_decomposition(model, args) new_model.save(args.save_model) if __name__ == '__main__': parser=argparse.ArgumentParser() parser.add_argument('-m', '--model', help='the model to speed up') parser.add_argument('-g', '--gpus', default='0', help='the gpus to be used in ctx') parser.add_argument('--load-epoch',type=int,default=1) parser.add_argument('--layer') parser.add_argument('--K', type=int) parser.add_argument('--save-model') args = parser.parse_args() main()
apache-2.0
Turbo87/skylines
skylines/commands/import_/airspace.py
1
15825
from __future__ import print_function

from flask_script import Command, Option

import re
import shutil
import os.path
import subprocess

from flask import current_app

from geoalchemy2.shape import from_shape
from shapely.geometry import polygon
from shapely.wkt import loads
from sqlalchemy.sql.expression import case
from sqlalchemy import func

from skylines.database import db
from skylines.model import Airspace
from skylines.lib.geo import FEET_PER_METER

# One airspace-list line: "<country> <openair|sua> <http(s)://...|file://...>".
# Lines starting with '#' are comments and never match.
airspace_re = re.compile(r'^([^#]{1}.*?)\s+(openair|sua)\s+(https?://.*|file://.*)')
# One blacklist line: "<country> <airspace name>".
airspace_blacklist_re = re.compile(r'^([^#]{1}.*?)\s+(.*)')
# Height strings, matched after lower()/strip() in normalise_height():
msl_re = re.compile(r'^(\d+)\s*(f|ft|m)?\s*([a]?msl|asl|alt)')   # e.g. "1500 ft msl"
msld_re = re.compile(r'^(\d+)\s*(f|ft|m)?$')                      # bare number, assumed MSL
flightlevel_re = re.compile(r'^fl\s?(\d+)$')                      # e.g. "fl 95"
agl_re = re.compile(r'^(\d+)\s*(f|ft|m)?\s*(agl|gnd|asfc|sfc)')   # e.g. "300m agl"
unl_re = re.compile(r'^unl')                                      # "unlimited"
notam_re = re.compile(r'^notam')                                  # defined by NOTAM

# (pattern, canonical class) tables; first full-string match wins.
airspace_tnp_class = [
    ("A", "CLASSA"),
    ("B", "CLASSB"),
    ("C", "CLASSC"),
    ("D", "CLASSD"),
    ("E", "CLASSE"),
    ("F", "CLASSF"),
    ("G", "CLASSG"),
]

airspace_tnp_types = [
    ("C", "CTR"),
    ("CTA", "CTR"),
    ("CTR", "CTR"),
    ("CTA/CTR", "CTR"),
    ("CTR/CTA", "CTR"),
    ("R", "RESTRICT"),
    ("RESTRICTED", "RESTRICT"),
    ("P", "PROHIBITED"),
    ("PROHIBITED", "PROHIBITED"),
    ("D", "DANGER"),
    ("DANGER", "DANGER"),
    ("G", "WAVE"),
    ("GSEC", "WAVE"),
    ("T", "TMZ"),
    ("TMZ", "TMZ"),
    ("CYR", "RESTRICT"),
    ("CYD", "DANGER"),
    ("CYA", "CLASSF"),
    ("MATZ", "MATZ"),
    ("RMZ", "RMZ"),
]

airspace_openair_class = [
    ("R", "RESTRICT"),
    ("Q", "DANGER"),
    ("P", "PROHIBITED"),
    ("CTR", "CTR"),
    ("A", "CLASSA"),
    ("B", "CLASSB"),
    ("C", "CLASSC"),
    ("D", "CLASSD"),
    ("GP", "NOGLIDER"),
    ("W", "WAVE"),
    ("E", "CLASSE"),
    ("F", "CLASSF"),
    ("TMZ", "TMZ"),
    ("G", "CLASSG"),
    ("RMZ", "RMZ"),
]


class AirspaceCommand(Command):
    """ Download and import airspace files for the mapserver """

    option_list = (
        Option('airspace_list', nargs='?',
               help='airspace list file'),
        Option('airspace_blacklist', nargs='?',
               help='airspace blacklist file'),
        Option('--country', action='append',
               help='Update only the airspace of this country.'),
        Option('--url',
               help='Import single airspace file from file/url. '
                    'You need to specify a country and the filetype '
                    'when using this option.'),
        Option('--filetype',
               help='Choose \'sua\' or \'openair\'.'),
        Option('--debug', action='store_true',
               help='Be more verbose'),
    )

    def run(self, airspace_list, airspace_blacklist, country, url, filetype, debug):
        """Entry point: import either a single file (--url) or every entry
        of the airspace list file, optionally filtered by --country.

        Each airspace_list line looks like:
          de openair http://www.daec.de/download/ASDF.txt     (OpenAir file)
          at sua http://www.austrocontrol.at/download/ASDF.sua (SUA file)
          us sua file://assets/airspace/adsf.sua              (local file)
        """
        self.blacklist = {}
        if airspace_blacklist:
            self.import_blacklist(airspace_blacklist)

        if url and len(country) == 1 and filetype:
            # Single-file mode: requires exactly one --country and --filetype.
            self.import_airspace(url, country[0], filetype, debug)
        else:
            with open(airspace_list, "r") as f:
                for line in f:
                    match = airspace_re.match(line)
                    if not match:
                        continue

                    country_code = match.group(1).strip()
                    file_type = match.group(2).strip()
                    url = match.group(3).strip()

                    if debug:
                        print("Found {} with filetype {} and URL {}"
                              .format(country_code, file_type, url))

                    # --country values are compared lowercase.
                    if country and country_code.lower() not in country:
                        continue

                    self.import_airspace(url, country_code, file_type, debug)

    def import_blacklist(self, blacklist_file):
        """Load the airspace blacklist used to drop unwanted airspaces
        (e.g. borderlines).

        Each line contains a country code and the airspace name to remove;
        the result is stored as {country_code: [name, ...]} on self.blacklist.
        """
        with open(blacklist_file, "r") as f:
            for line in f:
                match = airspace_blacklist_re.match(line)
                if not match:
                    continue

                country_code = match.group(1).strip()
                name = match.group(2).strip()

                if country_code == '' or name == '':
                    continue

                if country_code not in self.blacklist:
                    self.blacklist[country_code] = list()

                self.blacklist[country_code].append(name)

    def import_airspace(self, url, country_code, file_type, debug):
        """Fetch one airspace file (http(s) download or local file://),
        wipe the country's existing rows and re-import the file.

        Temporary downloads are removed again afterwards.
        """
        country_code = country_code.lower()

        filename = os.path.join(
            current_app.config['SKYLINES_TEMPORARY_DIR'],
            country_code,
            country_code + '.' + file_type)

        if url.startswith('http://') or url.startswith('https://'):
            print("\nDownloading " + url)
            filename = self.download_file(filename, url)
        elif url.startswith('file://'):
            # strip the "file://" scheme prefix
            filename = url[7:]

        # remove all airspace definitions for the current country
        self.remove_country(country_code)

        if file_type == 'sua':
            self.import_sua(filename, country_code, debug)
        elif file_type == 'openair':
            self.import_openair(filename, country_code, debug)

        # Only delete what we downloaded ourselves, never local files.
        if filename.startswith(current_app.config['SKYLINES_TEMPORARY_DIR']):
            shutil.rmtree(os.path.dirname(filename))

    def import_sua(self, filename, country_code, debug):
        """Parse a Tim Newport-Peace (SUA) file via OGR and insert its
        features for ``country_code``.
        """
        from osgeo import ogr

        print("reading " + filename)
        country_blacklist = self.blacklist.get(country_code, [])

        temporary_file = os.path.join(
            current_app.config['SKYLINES_TEMPORARY_DIR'],
            os.path.basename(filename) + '.tmp'
        )

        # try to uncomment a CLASS definition, as many SUA files from
        # soaringweb.org have a CLASS comment
        # NOTE(review): xreadlines() and the .decode('latin1') below are
        # Python-2-only -- this module predates a py3 port.
        with open(filename, 'r') as in_file:
            with open(temporary_file, 'w') as out_file:
                for line in in_file.xreadlines():
                    out_file.write(line.replace('# CLASS', 'CLASS'))

        if debug:
            print("Trying to open " + temporary_file)

        airspace_file = ogr.Open(temporary_file)
        if not airspace_file:
            if debug:
                print("OGR doesn't think that's a airspace file...")
            return

        layer = airspace_file.GetLayerByIndex(0)

        feature = layer.GetFeature(0)
        i = 0
        j = 0
        # Iterate by FID until GetFeature() returns nothing.
        while(feature):
            feature = layer.GetFeature(i)
            i += 1

            if not feature:
                continue

            name = feature.GetFieldAsString('title').decode('latin1').strip()

            if name in country_blacklist:
                print(name + " is in blacklist")
                continue

            if debug:
                print("Adding " + name)

            airspace_class = feature.GetFieldAsString('class').strip()
            airspace_type = feature.GetFieldAsString('type').strip()

            # TYPE wins over CLASS when both are present.
            if airspace_type:
                airspace_class = self.parse_airspace_type_tnp(airspace_type)
            elif airspace_class:
                airspace_class = self.parse_airspace_class_tnp(airspace_class)
            else:
                print(name + " has neither class nor type")
                continue

            added = self.add_airspace(
                country_code,
                airspace_class,
                name,
                feature.GetFieldAsString('base'),
                feature.GetFieldAsString('tops'),
                # OGR prints "POLYGON ((...))"; rebuild a plain WKT string.
                "POLYGON" + str(feature.geometry())[8:]
            )

            if not added:
                continue

            if i % 100 == 0:
                print("inserting geometry " + str(i))

            j += 1

        airspace_file.Destroy()
        db.session.commit()

        os.remove(temporary_file)

        print("added " + str(j) + " features for country " + country_code)

    def import_openair(self, filename, country_code, debug):
        """Parse an OpenAir file via OGR and insert its features for
        ``country_code``.  Mirrors import_sua() but needs no temp rewrite.
        """
        from osgeo import ogr

        print("reading " + filename)
        country_blacklist = self.blacklist.get(country_code, [])

        airspace_file = ogr.Open(filename)
        if not airspace_file:
            if debug:
                print("OGR doesn't think that's a airspace file...")
            return

        layer = airspace_file.GetLayerByIndex(0)

        feature = layer.GetFeature(0)
        i = 0
        j = 0
        while(feature):
            feature = layer.GetFeature(i)
            i += 1

            if not feature:
                continue

            name = feature.GetFieldAsString('name').decode('latin1').strip()

            if name in country_blacklist:
                print(name + " is in blacklist")
                continue

            if debug:
                print("Adding " + name)

            airspace_class = feature.GetFieldAsString('class').strip()

            if airspace_class:
                airspace_class = self.parse_airspace_class_openair(airspace_class)
            else:
                print(name + " has no class")
                continue

            added = self.add_airspace(
                country_code,
                airspace_class,
                name,
                feature.GetFieldAsString('floor'),
                feature.GetFieldAsString('ceiling'),
                "POLYGON" + str(feature.geometry())[8:]
            )

            if not added:
                continue

            if i % 100 == 0:
                print("inserting geometry " + str(i))

            j += 1

        airspace_file.Destroy()
        db.session.commit()

        print("added " + str(j) + " features for country " + country_code)

    def remove_country(self, country_code):
        """Delete every Airspace row of the given country and commit."""
        print("removing all entries for country_code " + country_code)
        query = db.session.query(Airspace) \
            .filter(Airspace.country_code == country_code)
        query.delete(synchronize_session=False)
        db.session.commit()

    def download_file(self, path, url):
        """Download ``url`` to ``path`` with wget and return the path.

        Raises RuntimeError if the file does not exist afterwards.
        """
        # Create data folder if necessary
        if not os.path.exists(os.path.dirname(path)):
            os.makedirs(os.path.dirname(path))

        # Download the current file
        # (only if server file is newer than local file)
        subprocess.check_call(['wget',
                               '-q',
                               '-N',
                               '--no-check-certificate',
                               '-U', 'Mozilla/5.0 (Windows NT 5.1; rv:30.0) '
                                     'Gecko/20100101 Firefox/30.0',
                               '-P', os.path.dirname(path),
                               '-O', path,
                               url])

        # Check if download succeeded
        if not os.path.exists(path):
            raise RuntimeError('File not found at {}'.format(path))

        # Return path to the file
        return path

    def parse_airspace_type_tnp(self, airspace_type):
        """Map a TNP TYPE= string to a canonical class name ("OTHER" if
        nothing matches).  "CLASS X" types defer to the class table first.
        """
        if airspace_type.startswith('CLASS '):
            as_class = self.parse_airspace_class_tnp(airspace_type[6:])

            if as_class != "OTHER":
                return as_class

        for airspace in airspace_tnp_types:
            if re.search("^" + airspace[0] + "$", airspace_type):
                return airspace[1]

        return "OTHER"

    def parse_airspace_class_tnp(self, airspace_class):
        """Map a TNP CLASS= string to a canonical class name.

        NOTE(review): only the first character is compared
        (``airspace_class[0]``) -- presumably intentional since TNP classes
        are single letters, but verify against multi-char input.
        """
        for airspace in airspace_tnp_class:
            if re.search("^" + airspace[0] + "$", airspace_class[0]):
                return airspace[1]

        return "OTHER"

    def parse_airspace_class_openair(self, airspace_class):
        """Map an OpenAir AC string to a canonical class name ("OTHER" if
        nothing matches).
        """
        for airspace in airspace_openair_class:
            if re.search("^" + airspace[0] + "$", airspace_class):
                return airspace[1]

        return "OTHER"

    def add_airspace(self, country_code, airspace_class, name, base, top, geom_str):
        """Validate one airspace polygon and stage it on the session.

        Returns True when the row was added, False when the feature was
        rejected (bad geometry, missing class, base above FL 200, ...).
        The caller is responsible for committing.
        """
        try:
            geom = loads(geom_str)
        # NOTE(review): bare except also swallows KeyboardInterrupt etc.;
        # should be "except Exception:" at least.
        except:
            print(name + "(" + airspace_class + ") is not a polygon "
                  "(maybe not enough points?)")
            return False

        # orient polygon clockwise
        geom = polygon.orient(geom, sign=-1)

        if not airspace_class:
            print(name + " has no airspace class")
            return False

        base = self.normalise_height(base, name)
        top = self.normalise_height(top, name)

        # Normalised flight levels are upper-case, hence a local pattern
        # distinct from the module-level lowercase flightlevel_re.
        flightlevel_re = re.compile(r'^FL (\d+)$')
        match = flightlevel_re.match(base)
        if match and int(match.group(1)) >= 200:
            print(name + " has it's base above FL 200 and is therefore disregarded")
            return False

        airspace = Airspace()
        airspace.country_code = country_code
        airspace.airspace_class = airspace_class
        airspace.name = name
        airspace.base = base
        airspace.top = top

        # Check geometry type, disregard everything except POLYGON
        if geom.geom_type != 'Polygon':
            print(name + " is not a polygon (it's a " + geom.geom_type + ")")
            return False

        wkb = from_shape(geom, srid=4326)

        # Try to fix invalid (self-intersecting) geometries; keep the
        # largest resulting part.
        valid_dump = (func.ST_Dump(func.ST_MakeValid(wkb))).geom
        valid_query = db.session.query(func.ST_SetSRID(valid_dump, 4326)) \
            .order_by(func.ST_Area(valid_dump).desc()).first()

        if not valid_query:
            print('Error importing ' + name)
            print('Could not validate geometry')
            return False
        else:
            wkb = valid_query[0]

        geom_type = db.session.query(func.ST_GeometryType(wkb)).first()[0]

        if geom_type != 'ST_Polygon':
            print(name + " got some errors makeing it valid...")
            return False

        tolerance = 0.0000001

        def simplify(x):
            # Reduce vertex count without breaking topology.
            return func.ST_SimplifyPreserveTopology(x, tolerance)

        # Prefer the raw geometry, fall back to the simplified one, and
        # store NULL when neither is valid.
        airspace.the_geom = case(
            [
                (func.ST_IsValid(wkb), wkb),
                (func.ST_IsValid(simplify(wkb)), simplify(wkb)),
            ],
            else_=None)

        db.session.add(airspace)

        return True

    def normalise_height(self, height, name):
        """Normalise a raw height string to one of the canonical forms
        'GND', 'FL <n>', '<feet> AGL', '<feet> MSL' or 'NOTAM'.

        Metric values are converted to feet; unparseable strings are
        returned unchanged (with a warning).
        """
        height = height.lower().strip()

        # is it GND or SFC?
        if re.search('^(ground|gnd|sfc|msl)$', height):
            return 'GND'

        # is it a flightlevel?
        match = flightlevel_re.match(height)
        if match:
            return 'FL {0}'.format(int(match.group(1)))

        # is it AGL?
        match = agl_re.match(height)
        if match and match.group(2) == 'm':
            return '{0} AGL'.format((int(match.group(1)) * FEET_PER_METER))
        elif match:
            return '{0} AGL'.format(int(match.group(1)))

        # is it MSL?
        match = msl_re.match(height)
        if match and match.group(2) == 'm':
            return '{0} MSL'.format(int(match.group(1)) * FEET_PER_METER)
        elif match:
            return '{0} MSL'.format(int(match.group(1)))

        # is it MSL without the msl moniker?
        match = msld_re.match(height)
        if match and match.group(2) == 'm':
            return '{0} MSL'.format(int(match.group(1)) * FEET_PER_METER)
        elif match:
            return '{0} MSL'.format(int(match.group(1)))

        # is it unlimited?
        match = unl_re.match(height)
        if match:
            return 'FL 999'

        # is it notam limited?
        match = notam_re.match(height)
        if match:
            return 'NOTAM'

        print(name + " has unknown height format: '" + height + "'")

        return height
agpl-3.0
verma-varsha/zulip
zerver/webhooks/slack/tests.py
13
2694
# -*- coding: utf-8 -*-
from zerver.lib.test_classes import WebhookTestCase
from six import text_type


class SlackWebhookTests(WebhookTestCase):
    """Tests for the incoming Slack webhook integration."""

    STREAM_NAME = 'slack'
    URL_TEMPLATE = "/api/v1/external/slack?stream={stream}&api_key={api_key}"
    FIXTURE_DIR_NAME = 'slack'

    def test_slack_channel_to_topic(self):
        # type: () -> None
        """By default a Slack channel becomes a topic in the target stream."""
        self.send_and_test_stream_message(
            'message_info',
            u"channel: general",
            u"**slack_user**: `test\n`",
            content_type="application/x-www-form-urlencoded")

    def test_slack_channel_to_stream(self):
        # type: () -> None
        """With channels_map_to_topics=0 each channel maps to its own stream."""
        self.STREAM_NAME = 'general'
        self.url = "{}{}".format(self.url, "&channels_map_to_topics=0")
        self.send_and_test_stream_message(
            'message_info',
            u"Message from Slack",
            u"**slack_user**: `test\n`",
            content_type="application/x-www-form-urlencoded")

    def test_missing_data_user_name(self):
        # type: () -> None
        """A payload without user_name is rejected with a JSON error."""
        response = self.client_post(
            self.build_webhook_url(),
            self.get_body('message_info_missing_user_name'),
            content_type="application/x-www-form-urlencoded")
        self.assert_json_error(response, "Missing 'user_name' argument")

    def test_missing_data_channel_name(self):
        # type: () -> None
        """A payload without channel_name is rejected with a JSON error."""
        response = self.client_post(
            self.build_webhook_url(),
            self.get_body('message_info_missing_channel_name'),
            content_type="application/x-www-form-urlencoded")
        self.assert_json_error(response, "Missing 'channel_name' argument")

    def test_missing_data_text(self):
        # type: () -> None
        """A payload without text is rejected with a JSON error."""
        response = self.client_post(
            self.build_webhook_url(),
            self.get_body('message_info_missing_text'),
            content_type="application/x-www-form-urlencoded")
        self.assert_json_error(response, "Missing 'text' argument")

    def test_invalid_channels_map_to_topics(self):
        # type: () -> None
        """channels_map_to_topics only accepts 0 or 1."""
        bad_url = "{}{}".format(self.url, "&channels_map_to_topics=abc")
        response = self.client_post(
            bad_url,
            self.get_body('message_info'),
            content_type="application/x-www-form-urlencoded")
        self.assert_json_error(
            response,
            'Error: channels_map_to_topics parameter other than 0 or 1')

    def get_body(self, fixture_name):
        # type: (text_type) -> text_type
        """Load the named fixture from the Slack fixture directory."""
        return self.fixture_data("slack", fixture_name, file_type="txt")
apache-2.0
cloudbase/cinder
cinder/tests/unit/volume/drivers/fusionstorage/test_dsware.py
6
35841
# Copyright (c) 2013 - 2016 Huawei Technologies Co., Ltd. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Unit Tests for Huawei FusionStorage drivers. """ import mock from oslo_config import cfg from oslo_service import loopingcall from cinder import exception from cinder.image import image_utils from cinder import test from cinder.volume import configuration as conf from cinder.volume.drivers.fusionstorage import dsware from cinder.volume.drivers.fusionstorage import fspythonapi test_volume = {'name': 'test_vol1', 'size': 4, 'volume_metadata': '', 'host': 'host01@dsware', 'instance_uuid': None, 'provider_id': '127.0.0.1'} test_src_volume = {'name': 'test_vol2', 'size': 4, 'status': 'available'} test_snapshot = { 'name': 'test_snapshot1', 'volume_id': 'vol1', 'volume_size': '2'} class FakeDSWAREDriver(dsware.DSWAREDriver): def __init__(self): configuration = conf.Configuration( [ cfg.StrOpt('fake'), ], None ) super(FakeDSWAREDriver, self).__init__(configuration=configuration) self.dsware_client = fspythonapi.FSPythonApi() self.manage_ip = '127.0.0.1' self.pool_type = '1' class DSwareDriverTestCase(test.TestCase): def setUp(self): super(DSwareDriverTestCase, self).setUp() self.driver = FakeDSWAREDriver() def test_private_get_dsware_manage_ip(self): retval = self.driver._get_dsware_manage_ip(test_volume) self.assertEqual('127.0.0.1', retval) test_volume_fail = {'name': 'test_vol', 'size': 4, 'volume_metadata': '', 'host': 'host01@dsware', 'provider_id': 
None} self.assertRaises(exception.CinderException, self.driver._get_dsware_manage_ip, test_volume_fail) def test_private_get_poolid_from_host(self): retval = self.driver._get_poolid_from_host( 'abc@fusionstorage_sas2copy#0') self.assertEqual('0', retval) retval = self.driver._get_poolid_from_host( 'abc@fusionstorage_sas2copy@0') self.assertEqual(self.driver.pool_type, retval) retval = self.driver._get_poolid_from_host(None) self.assertEqual(self.driver.pool_type, retval) @mock.patch.object(fspythonapi.FSPythonApi, 'create_volume') @mock.patch.object(fspythonapi.FSPythonApi, 'query_dsware_version') @mock.patch.object(dsware.DSWAREDriver, '_get_poolid_from_host') def test_private_create_volume_old_version(self, mock_get_poolid, mock_query_dsware, mock_create_volume): # query_dsware_version return 1, old version mock_query_dsware.return_value = 1 mock_create_volume.return_value = 0 self.driver._create_volume(test_volume['name'], test_volume['size'], True, 'abc@fusionstorage_sas2copy') mock_create_volume.assert_called_with(test_volume['name'], 0, test_volume['size'], 1) self.driver._create_volume(test_volume['name'], test_volume['size'], False, 'abc@fusionstorage_sas2copy') mock_create_volume.assert_called_with(test_volume['name'], 0, test_volume['size'], 0) @mock.patch.object(fspythonapi.FSPythonApi, 'create_volume') @mock.patch.object(fspythonapi.FSPythonApi, 'query_dsware_version') @mock.patch.object(dsware.DSWAREDriver, '_get_poolid_from_host') def test_private_create_volume_new_version(self, mock_get_poolid, mock_query_dsware, mock_create_volume): # query_dsware_version return 0, new version mock_query_dsware.return_value = 0 mock_get_poolid.return_value = 0 mock_create_volume.return_value = 0 self.driver._create_volume(test_volume['name'], test_volume['size'], True, 'abcE@fusionstorage_sas2copy#0') mock_create_volume.assert_called_with(test_volume['name'], 0, test_volume['size'], 1) self.driver._create_volume(test_volume['name'], test_volume['size'], False, 
'abc@fusionstorage_sas2copy#0') mock_create_volume.assert_called_with(test_volume['name'], 0, test_volume['size'], 0) mock_query_dsware.return_value = 0 mock_get_poolid.return_value = 1 mock_create_volume.return_value = 0 self.driver._create_volume(test_volume['name'], test_volume['size'], True, 'abc@fusionstorage_sas2copy#1') mock_create_volume.assert_called_with(test_volume['name'], 1, test_volume['size'], 1) self.driver._create_volume(test_volume['name'], test_volume['size'], False, 'abc@fusionstorage_sas2copy#1') mock_create_volume.assert_called_with(test_volume['name'], 1, test_volume['size'], 0) @mock.patch.object(fspythonapi.FSPythonApi, 'create_volume') @mock.patch.object(fspythonapi.FSPythonApi, 'query_dsware_version') @mock.patch.object(dsware.DSWAREDriver, '_get_poolid_from_host') def test_private_create_volume_query_version_fail(self, mock_get_poolid, mock_query_dsware, mock_create_volume): # query_dsware_version return 500015, query dsware version failed! mock_query_dsware.return_value = 500015 self.assertRaises(exception.CinderException, self.driver._create_volume, test_volume['name'], test_volume['size'], True, 'abc@fusionstorage_sas2copy#0') self.assertRaises(exception.CinderException, self.driver._create_volume, test_volume['name'], test_volume['size'], False, 'abc@fusionstorage_sas2copy#0') @mock.patch.object(fspythonapi.FSPythonApi, 'create_volume') @mock.patch.object(fspythonapi.FSPythonApi, 'query_dsware_version') @mock.patch.object(dsware.DSWAREDriver, '_get_poolid_from_host') def test_private_create_volume_fail(self, mock_get_poolid, mock_query_dsware, mock_create_volume): mock_query_dsware.return_value = 1 # create_volume return 1, create volume failed mock_create_volume.return_value = 1 self.assertRaises(exception.CinderException, self.driver._create_volume, test_volume['name'], test_volume['size'], True, 'abc@fusionstorage_sas2copy#0') self.assertRaises(exception.CinderException, self.driver._create_volume, test_volume['name'], 
test_volume['size'], False, 'abc@fusionstorage_sas2copy#0') @mock.patch.object(dsware.DSWAREDriver, '_create_volume') @mock.patch.object(fspythonapi.FSPythonApi, 'get_manage_ip') def test_create_volume(self, mock_get_manage_ip, mock_create_volume): # success mock_get_manage_ip.return_value = self.driver.manage_ip retval = self.driver.create_volume(test_volume) self.assertEqual({"provider_id": self.driver.manage_ip}, retval) # failure mock_create_volume.side_effect = exception.CinderException( 'DSWARE Create Volume failed!') self.assertRaises(exception.CinderException, self.driver.create_volume, test_volume) @mock.patch.object(fspythonapi.FSPythonApi, 'create_volume_from_snap') def test_private_create_volume_from_snap(self, mock_create_volume): mock_create_volume.side_effect = [0, 1] self.driver._create_volume_from_snap(test_volume['name'], test_volume['size'], test_snapshot['name']) # failure self.assertRaises(exception.CinderException, self.driver._create_volume_from_snap, test_volume['name'], test_volume['size'], test_snapshot['name']) @mock.patch.object(fspythonapi.FSPythonApi, 'extend_volume') def test_extend_volume(self, mock_extend_volume): mock_extend_volume.return_value = 0 self.driver.extend_volume(test_volume, 5) mock_extend_volume.return_value = 0 self.assertRaises(exception.CinderException, self.driver.extend_volume, test_volume, 3) mock_extend_volume.return_value = 1 self.assertRaises(exception.CinderException, self.driver.extend_volume, test_volume, 5) @mock.patch.object(dsware.DSWAREDriver, '_create_volume_from_snap') @mock.patch.object(fspythonapi.FSPythonApi, 'get_manage_ip') def test_create_volume_from_snap(self, mock_manage_ip, mock_create_vol): # success mock_manage_ip.return_value = self.driver.manage_ip retval = self.driver.create_volume_from_snapshot(test_volume, test_snapshot) self.assertEqual({"provider_id": self.driver.manage_ip}, retval) # failure mock_create_vol.side_effect = exception.CinderException( 'DSWARE:create volume from snap 
failed') self.assertRaises(exception.CinderException, self.driver.create_volume_from_snapshot, test_volume, test_snapshot) @mock.patch.object(fspythonapi.FSPythonApi, 'create_volume_from_volume') @mock.patch.object(fspythonapi.FSPythonApi, 'get_manage_ip') @mock.patch.object(dsware.DSWAREDriver, '_wait_for_create_cloned_volume_finish_timer') def test_create_cloned_volume(self, mock_wait_finish, mock_get_manage_ip, mock_create_volume): # success mock_create_volume.return_value = None mock_get_manage_ip.return_value = self.driver.manage_ip mock_wait_finish.return_value = True retval = self.driver.create_cloned_volume(test_volume, test_src_volume) self.assertEqual({"provider_id": "127.0.0.1"}, retval) # failure:create exception mock_create_volume.return_value = 500015 self.assertRaises(exception.CinderException, self.driver.create_cloned_volume, test_volume, test_src_volume) # failure:wait exception mock_create_volume.return_value = None mock_wait_finish.return_value = False self.assertRaises(exception.CinderException, self.driver.create_cloned_volume, test_volume, test_src_volume) @mock.patch.object(fspythonapi.FSPythonApi, 'query_volume') def test_private_check_create_cloned_volume_finish(self, mock_query_volume): query_result_done = {'result': 0, 'vol_name': 'vol1', 'father_name': 'vol1_father', 'status': '0', 'vol_size': '1024', 'real_size': '1024', 'pool_id': 'pool1', 'create_time': '01/01/2015'} query_result_doing = {'result': 0, 'vol_name': 'vol1', 'father_name': 'vol1_father', 'status': '6', 'vol_size': '1024', 'real_size': '1024', 'pool_id': 'pool1', 'create_time': '01/01/2015'} mock_query_volume.side_effect = [ query_result_done, query_result_doing, query_result_doing] # success self.assertRaises(loopingcall.LoopingCallDone, self.driver._check_create_cloned_volume_finish, test_volume['name']) # in the process of creating volume self.driver.count = self.driver.configuration.clone_volume_timeout - 1 
self.driver._check_create_cloned_volume_finish(test_volume['name']) self.assertEqual(self.driver.configuration.clone_volume_timeout, self.driver.count) # timeout self.driver.count = self.driver.configuration.clone_volume_timeout self.assertRaises(loopingcall.LoopingCallDone, self.driver._check_create_cloned_volume_finish, test_volume['name']) @mock.patch.object(dsware.DSWAREDriver, '_check_create_cloned_volume_finish') def test_private_wait_for_create_cloned_volume_finish_timer(self, mock_check): mock_check.side_effect = [loopingcall.LoopingCallDone(retvalue=True), loopingcall.LoopingCallDone(retvalue=False)] retval = self.driver._wait_for_create_cloned_volume_finish_timer( test_volume['name']) self.assertTrue(retval) retval = self.driver._wait_for_create_cloned_volume_finish_timer( test_volume['name']) self.assertFalse(retval) def test_private_analyse_output(self): out = 'ret_code=10\nret_desc=test\ndev_addr=/sda\n' retval = self.driver._analyse_output(out) self.assertEqual({'dev_addr': '/sda', 'ret_desc': 'test', 'ret_code': '10'}, retval) out = 'abcdefg' retval = self.driver._analyse_output(out) self.assertEqual({}, retval) def test_private_attach_volume(self): success = ['ret_code=0\nret_desc=success\ndev_addr=/dev/sdb\n', ''] failure = ['ret_code=50510011\nret_desc=failed\ndev_addr=/dev/sdb\n', ''] mock_execute = self.mock_object(self.driver, '_execute') mock_execute.side_effect = [success, failure] # attached successful retval = self.driver._attach_volume(test_volume['name'], self.driver.manage_ip) self.assertEqual({'dev_addr': '/dev/sdb', 'ret_desc': 'success', 'ret_code': '0'}, retval) # attached failure retval = self.driver._attach_volume(test_volume['name'], self.driver.manage_ip) self.assertEqual({'dev_addr': '/dev/sdb', 'ret_desc': 'failed', 'ret_code': '50510011'}, retval) def test_private_detach_volume(self): success = ['ret_code=0\nret_desc=success\ndev_addr=/dev/sdb\n', ''] failure = ['ret_code=50510011\nret_desc=failed\ndev_addr=/dev/sdb\n', ''] 
mock_execute = self.mock_object(self.driver, '_execute') mock_execute.side_effect = [success, failure] # detached successful retval = self.driver._detach_volume(test_volume['name'], self.driver.manage_ip) self.assertEqual({'dev_addr': '/dev/sdb', 'ret_desc': 'success', 'ret_code': '0'}, retval) # detached failure retval = self.driver._detach_volume(test_volume['name'], self.driver.manage_ip) self.assertEqual({'dev_addr': '/dev/sdb', 'ret_desc': 'failed', 'ret_code': '50510011'}, retval) def test_private_query_volume_attach(self): success = ['ret_code=0\nret_desc=success\ndev_addr=/dev/sdb\n', ''] failure = ['ret_code=50510011\nret_desc=failed\ndev_addr=/dev/sdb\n', ''] mock_execute = self.mock_object(self.driver, '_execute') mock_execute.side_effect = [success, failure] # query successful retval = self.driver._query_volume_attach(test_volume['name'], self.driver.manage_ip) self.assertEqual({'dev_addr': '/dev/sdb', 'ret_desc': 'success', 'ret_code': '0'}, retval) # query failure retval = self.driver._query_volume_attach(test_volume['name'], self.driver.manage_ip) self.assertEqual({'dev_addr': '/dev/sdb', 'ret_desc': 'failed', 'ret_code': '50510011'}, retval) @mock.patch.object(dsware.DSWAREDriver, '_get_dsware_manage_ip') @mock.patch.object(dsware.DSWAREDriver, '_attach_volume') @mock.patch.object(image_utils, 'fetch_to_raw') @mock.patch.object(dsware.DSWAREDriver, '_detach_volume') def test_copy_image_to_volume(self, mock_detach, mock_fetch, mock_attach, mock_get_manage_ip): success = {'ret_code': '0', 'ret_desc': 'success', 'dev_addr': '/dev/sdb'} failure = {'ret_code': '50510011', 'ret_desc': 'failed', 'dev_addr': '/dev/sdb'} context = '' image_service = '' image_id = '' mock_get_manage_ip.return_value = '127.0.0.1' mock_attach.side_effect = [success, failure, success] mock_detach.side_effect = [success, failure, failure] # success self.driver.copy_image_to_volume(context, test_volume, image_service, image_id) # failure - attach failure 
self.assertRaises(exception.CinderException, self.driver.copy_image_to_volume, context, test_volume, image_service, image_id) # failure - detach failure self.assertRaises(exception.CinderException, self.driver.copy_image_to_volume, context, test_volume, image_service, image_id) @mock.patch.object(dsware.DSWAREDriver, '_get_dsware_manage_ip') @mock.patch.object(dsware.DSWAREDriver, '_attach_volume') @mock.patch.object(dsware.DSWAREDriver, '_query_volume_attach') @mock.patch.object(image_utils, 'upload_volume') @mock.patch.object(dsware.DSWAREDriver, '_detach_volume') def test_copy_volume_to_image_success(self, mock_detach, mock_upload, mock_query, mock_attach, mock_get_manage_ip): success = {'ret_code': '0', 'ret_desc': 'success', 'dev_addr': '/dev/sdb'} already_attached = {'ret_code': '50151401', 'ret_desc': 'already_attached', 'dev_addr': '/dev/sdb'} context = '' image_service = '' image_meta = '' mock_get_manage_ip.return_value = '127.0.0.1' mock_attach.return_value = success mock_detach.return_value = success self.driver.copy_volume_to_image(context, test_volume, image_service, image_meta) mock_upload.assert_called_with('', '', '', '/dev/sdb') mock_attach.return_value = already_attached mock_query.return_value = success mock_detach.return_value = success self.driver.copy_volume_to_image(context, test_volume, image_service, image_meta) mock_upload.assert_called_with('', '', '', '/dev/sdb') @mock.patch.object(dsware.DSWAREDriver, '_get_dsware_manage_ip') @mock.patch.object(dsware.DSWAREDriver, '_attach_volume') @mock.patch.object(dsware.DSWAREDriver, '_query_volume_attach') @mock.patch.object(image_utils, 'upload_volume') @mock.patch.object(dsware.DSWAREDriver, '_detach_volume') def test_copy_volume_to_image_attach_fail(self, mock_detach, mock_upload, mock_query, mock_attach, mock_get_manage_ip): failure = {'ret_code': '50510011', 'ret_desc': 'failed', 'dev_addr': '/dev/sdb'} context = '' image_service = '' image_meta = '' mock_get_manage_ip.return_value = 
'127.0.0.1' mock_attach.return_value = failure self.assertRaises(exception.CinderException, self.driver.copy_volume_to_image, context, test_volume, image_service, image_meta) mock_attach.return_value = None self.assertRaises(exception.CinderException, self.driver.copy_volume_to_image, context, test_volume, image_service, image_meta) @mock.patch.object(dsware.DSWAREDriver, '_get_dsware_manage_ip') @mock.patch.object(dsware.DSWAREDriver, '_attach_volume') @mock.patch.object(dsware.DSWAREDriver, '_query_volume_attach') @mock.patch.object(image_utils, 'upload_volume') @mock.patch.object(dsware.DSWAREDriver, '_detach_volume') def test_copy_volume_to_image_query_attach_fail(self, mock_detach, mock_upload, mock_query, mock_attach, mock_get_manage_ip): already_attached = {'ret_code': '50151401', 'ret_desc': 'already_attached', 'dev_addr': '/dev/sdb'} failure = {'ret_code': '50510011', 'ret_desc': 'failed', 'dev_addr': '/dev/sdb'} context = '' image_service = '' image_meta = '' mock_get_manage_ip.return_value = '127.0.0.1' mock_attach.return_value = already_attached mock_query.return_value = failure self.assertRaises(exception.CinderException, self.driver.copy_volume_to_image, context, test_volume, image_service, image_meta) mock_query.return_value = None self.assertRaises(exception.CinderException, self.driver.copy_volume_to_image, context, test_volume, image_service, image_meta) @mock.patch.object(dsware.DSWAREDriver, '_get_dsware_manage_ip') @mock.patch.object(dsware.DSWAREDriver, '_attach_volume') @mock.patch.object(dsware.DSWAREDriver, '_query_volume_attach') @mock.patch.object(image_utils, 'upload_volume') @mock.patch.object(dsware.DSWAREDriver, '_detach_volume') def test_copy_volume_to_image_upload_fail(self, mock_detach, mock_upload, mock_query, mock_attach, mock_get_manage_ip): success = {'ret_code': '0', 'ret_desc': 'success', 'dev_addr': '/dev/sdb'} already_attached = {'ret_code': '50151401', 'ret_desc': 'already_attached', 'dev_addr': '/dev/sdb'} context = '' 
image_service = '' image_meta = '' mock_get_manage_ip.return_value = '127.0.0.1' mock_attach.return_value = already_attached mock_query.return_value = success mock_upload.side_effect = exception.CinderException( 'upload_volume error') self.assertRaises(exception.CinderException, self.driver.copy_volume_to_image, context, test_volume, image_service, image_meta) @mock.patch.object(fspythonapi.FSPythonApi, 'query_volume') def test_private_get_volume(self, mock_query): result_success = {'result': 0} result_not_exist = {'result': "50150005\n"} result_exception = {'result': "50510006\n"} mock_query.side_effect = [ result_success, result_not_exist, result_exception] retval = self.driver._get_volume(test_volume['name']) self.assertTrue(retval) retval = self.driver._get_volume(test_volume['name']) self.assertFalse(retval) self.assertRaises(exception.CinderException, self.driver._get_volume, test_volume['name']) @mock.patch.object(fspythonapi.FSPythonApi, 'delete_volume') def test_private_delete_volume(self, mock_delete): result_success = 0 result_not_exist = '50150005\n' result_being_deleted = '50151002\n' result_exception = '51050006\n' mock_delete.side_effect = [result_success, result_not_exist, result_being_deleted, result_exception] retval = self.driver._delete_volume(test_volume['name']) self.assertTrue(retval) retval = self.driver._delete_volume(test_volume['name']) self.assertTrue(retval) retval = self.driver._delete_volume(test_volume['name']) self.assertTrue(retval) self.assertRaises(exception.CinderException, self.driver._delete_volume, test_volume['name']) @mock.patch.object(dsware.DSWAREDriver, '_get_volume') @mock.patch.object(dsware.DSWAREDriver, '_delete_volume') def test_delete_volume(self, mock_delete, mock_get): mock_get.return_value = False retval = self.driver.delete_volume(test_volume) self.assertTrue(retval) mock_get.return_value = True mock_delete.return_value = True retval = self.driver.delete_volume(test_volume) self.assertTrue(retval) 
mock_get.return_value = True mock_delete.side_effect = exception.CinderException( 'delete volume exception') self.assertRaises(exception.CinderException, self.driver.delete_volume, test_volume) mock_get.side_effect = exception.CinderException( 'get volume exception') self.assertRaises(exception.CinderException, self.driver.delete_volume, test_volume) @mock.patch.object(fspythonapi.FSPythonApi, 'query_snap') def test_private_get_snapshot(self, mock_query): result_success = {'result': 0} result_not_found = {'result': "50150006\n"} result_exception = {'result': "51050007\n"} mock_query.side_effect = [result_success, result_not_found, result_exception] retval = self.driver._get_snapshot(test_snapshot['name']) self.assertTrue(retval) retval = self.driver._get_snapshot(test_snapshot['name']) self.assertFalse(retval) self.assertRaises(exception.CinderException, self.driver._get_snapshot, test_snapshot['name']) @mock.patch.object(fspythonapi.FSPythonApi, 'create_snapshot') def test_private_create_snapshot(self, mock_create): mock_create.side_effect = [0, 1] self.driver._create_snapshot(test_snapshot['name'], test_volume['name']) self.assertRaises(exception.CinderException, self.driver._create_snapshot, test_snapshot['name'], test_volume['name']) @mock.patch.object(fspythonapi.FSPythonApi, 'delete_snapshot') def test_private_delete_snapshot(self, mock_delete): mock_delete.side_effect = [0, 1] self.driver._delete_snapshot(test_snapshot['name']) self.assertRaises(exception.CinderException, self.driver._delete_snapshot, test_snapshot['name']) @mock.patch.object(dsware.DSWAREDriver, '_get_volume') @mock.patch.object(dsware.DSWAREDriver, '_create_snapshot') def test_create_snapshot(self, mock_create, mock_get): mock_get.return_value = True self.driver.create_snapshot(test_snapshot) mock_create.side_effect = exception.CinderException( 'create snapshot failed') self.assertRaises(exception.CinderException, self.driver.create_snapshot, test_snapshot) mock_get.side_effect = [ False, 
exception.CinderException('get volume failed')] self.assertRaises(exception.CinderException, self.driver.create_snapshot, test_snapshot) self.assertRaises(exception.CinderException, self.driver.create_snapshot, test_snapshot) @mock.patch.object(dsware.DSWAREDriver, '_get_snapshot') @mock.patch.object(dsware.DSWAREDriver, '_delete_snapshot') def test_delete_snapshot(self, mock_delete, mock_get): mock_get.side_effect = [True, False, exception.CinderException, True] self.driver.delete_snapshot(test_snapshot) self.driver.delete_snapshot(test_snapshot) self.assertRaises(exception.CinderException, self.driver.delete_snapshot, test_snapshot) mock_delete.side_effect = exception.CinderException( 'delete snapshot exception') self.assertRaises(exception.CinderException, self.driver.delete_snapshot, test_snapshot) @mock.patch.object(fspythonapi.FSPythonApi, 'query_pool_info') def test_private_update_single_pool_info_status(self, mock_query): pool_info = {'result': 0, 'pool_id': 10, 'total_capacity': 10240, 'used_capacity': 5120, 'alloc_capacity': 7168} pool_info_none = {'result': 1} mock_query.side_effect = [pool_info, pool_info_none] self.driver._update_single_pool_info_status() self.assertEqual({'total_capacity_gb': 10.0, 'free_capacity_gb': 5.0, 'volume_backend_name': None, 'vendor_name': 'Open Source', 'driver_version': '1.0', 'storage_protocol': 'dsware', 'reserved_percentage': 0, 'QoS_support': False}, self.driver._stats) self.driver._update_single_pool_info_status() self.assertIsNone(self.driver._stats) @mock.patch.object(fspythonapi.FSPythonApi, 'query_pool_type') def test_private_update_multi_pool_of_same_type_status(self, mock_query): query_result = (0, [{'result': 0, 'pool_id': '0', 'total_capacity': '10240', 'used_capacity': '5120', 'alloc_capacity': '7168'}]) query_result_none = (0, []) mock_query.side_effect = [query_result, query_result_none] self.driver._update_multi_pool_of_same_type_status() self.assertEqual({'volume_backend_name': None, 'vendor_name': 'Open 
Source', 'driver_version': '1.0', 'storage_protocol': 'dsware', 'pools': [{'pool_name': '0', 'total_capacity_gb': 10.0, 'allocated_capacity_gb': 5.0, 'free_capacity_gb': 5.0, 'QoS_support': False, 'reserved_percentage': 0}]}, self.driver._stats) self.driver._update_multi_pool_of_same_type_status() self.assertIsNone(self.driver._stats) def test_private_calculate_pool_info(self): pool_sets = [{'pool_id': 0, 'total_capacity': 10240, 'used_capacity': 5120, 'QoS_support': False, 'reserved_percentage': 0}] retval = self.driver._calculate_pool_info(pool_sets) self.assertEqual([{'pool_name': 0, 'total_capacity_gb': 10.0, 'allocated_capacity_gb': 5.0, 'free_capacity_gb': 5.0, 'QoS_support': False, 'reserved_percentage': 0}], retval) @mock.patch.object(dsware.DSWAREDriver, '_update_single_pool_info_status') @mock.patch.object(dsware.DSWAREDriver, '_update_multi_pool_of_same_type_status') @mock.patch.object(fspythonapi.FSPythonApi, 'query_dsware_version') def test_get_volume_stats(self, mock_query, mock_type, mock_info): mock_query.return_value = 1 self.driver.get_volume_stats(False) mock_query.assert_not_called() self.driver.get_volume_stats(True) mock_query.assert_called_once_with()
apache-2.0
hajuuk/asuswrt
release/src/router/samba-3.5.8/examples/scripts/shares/python/generate_parm_table.py
90
6683
#!/usr/bin/env python ###################################################################### ## ## Generate parameter dictionary from param/loadparm.c ## ## Copyright (C) Gerald Carter 2004. ## ## This program is free software; you can redistribute it and/or modify ## it under the terms of the GNU General Public License as published by ## the Free Software Foundation; either version 3 of the License, or ## (at your option) any later version. ## ## This program is distributed in the hope that it will be useful, ## but WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ## GNU General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with this program; if not, see <http://www.gnu.org/licenses/>. ## ###################################################################### import re, string, sys, commands HEADER = """###################################################################### ## ## autogenerated file of smb.conf parameters ## generate_parm_table <..../param/loadparm.c> ## ## Copyright (C) Gerald Carter 2004. ## ## This program is free software; you can redistribute it and/or modify ## it under the terms of the GNU General Public License as published by ## the Free Software Foundation; either version 3 of the License, or ## (at your option) any later version. ## ## This program is distributed in the hope that it will be useful, ## but WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ## GNU General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with this program; if not, see <http://www.gnu.org/licenses/>. 
## ###################################################################### from SambaParm import SambaParmString, SambaParmBool, SambaParmBoolRev ## boolean defines for parm_table P_LOCAL = 0 P_GLOBAL = 1 """ FOOTER = """##### end of smbparm.y ########################################## #################################################################""" TESTPARM = "/usr/bin/testparm" ## fields in Samba's parameter table displayName = 0 type = 1 scope = 2 variable = 3 flags = 6 parm_table = {} var_table = {} def_values = {} obj_table = { 'P_BOOL' : 'SambaParmBool', 'P_BOOLREV' : 'SambaParmBoolRev', 'P_STRING' : 'SambaParmString', 'P_USTRING' : 'SambaParmString', 'P_GSTRING' : 'SambaParmString', 'P_LIST' : 'SambaParmString', 'P_ENUM' : 'SambaParmString', 'P_CHAR' : 'SambaParmString', 'P_OCTAL' : 'SambaParmString', 'P_INTEGER' : 'SambaParmString', } ###################################################################### ## BEGIN MAIN CODE ## ###################################################################### ## First thing is to build the dictionary of parmeter names ## ## based on the output from testparm ## cmd = "/usr/bin/testparm -s -v /dev/null" ( status, testparm_output ) = commands.getstatusoutput( cmd ) if status: sys.stderr.write( "Failed to execute testparm!\n%s\n" % testparm_output ) ## break the output into a list ## lines = string.split( testparm_output, "\n" ) ## loop through list -- parameters in testparm output have ## ## whitespace at the beginning of the line ## pattern = re.compile( "^\s+" ) for input_str in lines: if not pattern.search( input_str ): continue input_str = string.strip( input_str ) parts = string.split( input_str, "=" ) parts[0] = string.strip( parts[0] ) parts[1] = string.strip( parts[1] ) key = string.upper( string.join(string.split(parts[0]), "") ) new = parts[1].replace('\\', '\\\\') def_values[key] = new ## open loadparm.c and get the entire list of parameters ## ## including synonums ## if len(sys.argv) != 2: print "Usage: %s 
<.../param/loadparm.c>" % ( sys.argv[0] ) sys.exit( 1 ) try: fconfig = open( sys.argv[1], "r" ) except IOError: print "%s does not exist!" % sys.argv[1] sys.exit (1) ## Loop through loadparm.c -- all parameters are either ## ## P_LOCAL or P_GLOBAL ## synonyms = [] pattern = re.compile( '{".*P_[GL]' ) while True: input_str= fconfig.readline() if len(input_str) == 0 : break input_str= string.strip(input_str) ## see if we have a patch for a parameter definition ## parm = [] if pattern.search( input_str) : ## strip the surrounding '{.*},' ## input_str= input_str[1:-2] parm = string.split(input_str, ",") ## strip the ""'s and upper case ## name = (string.strip(parm[displayName])[1:-1]) key = string.upper( string.join(string.split(name), "") ) var_name = string.strip( parm[variable] ) ## try to catch synonyms -- if the parameter was not reported ## ## by testparm, then save it and come back after we will out ## ## the variable list ## if not def_values.has_key( key ): synonyms.append( input_str) continue var_table[var_name] = key parmType = string.strip(parm[type]) parm_table[key] = [ name , string.strip(parm[type]), string.strip(parm[scope]), def_values[key] ] ## Deal with any synonyms ## for input_str in synonyms: parm = string.split(input_str, ",") name = (string.strip(parm[displayName])[1:-1]) key = string.upper( string.join(string.split(name), "") ) var_name = string.strip( parm[variable] ) ## if there's no pre-existing key, then testparm doesn't know about it if not var_table.has_key( var_name ): continue ## just make a copy parm_table[key] = parm_table[var_table[var_name]][:] # parm_table[key][1] = parm[1] parm_table[key][1] = string.strip(parm[1]) ## ## ## print out smbparm.py ## ## ## try: smbparm = open ( "smbparm.py", "w" ) except IOError: print "Cannot write to smbparm.py" sys.exit( 1 ) smbparm.write( HEADER ) smbparm.write( "parm_table = {\n" ) for x in parm_table.keys(): key = "\"%s\"" % x smbparm.write("\t%-25s: (\"%s\", %s, %s, \"%s\"),\n" % ( key, 
parm_table[x][0], obj_table[parm_table[x][1]], parm_table[x][2], parm_table[x][3] )) smbparm.write( "}\n" ) smbparm.write( FOOTER ) smbparm.write( "\n" ) sys.exit(0) ## ## ## cut-n-paste area ## ## ## for x in parm_table.keys(): if def_values.has_key( x ): parm_table[x].append( def_values[x] ) else: parm_table[x].append( "" )
gpl-2.0
Johnzero/erp
openerp/addons/account/report/common_report_header.py
6
6502
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import pooler from tools.translate import _ class common_report_header(object): def _sum_debit(self, period_id=False, journal_id=False): if journal_id and isinstance(journal_id, int): journal_id = [journal_id] if period_id and isinstance(period_id, int): period_id = [period_id] if not journal_id: journal_id = self.journal_ids if not period_id: period_id = self.period_ids if not (period_id and journal_id): return 0.0 self.cr.execute('SELECT SUM(debit) FROM account_move_line l ' 'WHERE period_id IN %s AND journal_id IN %s ' + self.query_get_clause + ' ', (tuple(period_id), tuple(journal_id))) return self.cr.fetchone()[0] or 0.0 def _sum_credit(self, period_id=False, journal_id=False): if journal_id and isinstance(journal_id, int): journal_id = [journal_id] if period_id and isinstance(period_id, int): period_id = [period_id] if not journal_id: journal_id = self.journal_ids if not period_id: period_id = self.period_ids if not (period_id and journal_id): return 0.0 self.cr.execute('SELECT SUM(credit) FROM account_move_line l ' 'WHERE period_id IN 
%s AND journal_id IN %s '+ self.query_get_clause+'', (tuple(period_id), tuple(journal_id))) return self.cr.fetchone()[0] or 0.0 def _get_start_date(self, data): if data.get('form', False) and data['form'].get('date_from', False): return data['form']['date_from'] return '' def _get_target_move(self, data): if data.get('form', False) and data['form'].get('target_move', False): if data['form']['target_move'] == 'all': return _('All Entries') return _('All Posted Entries') return '' def _get_end_date(self, data): if data.get('form', False) and data['form'].get('date_to', False): return data['form']['date_to'] return '' def get_start_period(self, data): if data.get('form', False) and data['form'].get('period_from', False): return pooler.get_pool(self.cr.dbname).get('account.period').browse(self.cr,self.uid,data['form']['period_from']).name return '' def get_end_period(self, data): if data.get('form', False) and data['form'].get('period_to', False): return pooler.get_pool(self.cr.dbname).get('account.period').browse(self.cr, self.uid, data['form']['period_to']).name return '' def _get_account(self, data): if data.get('form', False) and data['form'].get('chart_account_id', False): return pooler.get_pool(self.cr.dbname).get('account.account').browse(self.cr, self.uid, data['form']['chart_account_id']).name return '' def _get_sortby(self, data): raise (_('Error'), _('Not implemented')) def _get_filter(self, data): if data.get('form', False) and data['form'].get('filter', False): if data['form']['filter'] == 'filter_date': return 'Date' elif data['form']['filter'] == 'filter_period': return 'Periods' return 'No Filter' def _sum_debit_period(self, period_id, journal_id=None): journals = journal_id or self.journal_ids if not journals: return 0.0 self.cr.execute('SELECT SUM(debit) FROM account_move_line l ' 'WHERE period_id=%s AND journal_id IN %s '+ self.query_get_clause +'', (period_id, tuple(journals))) return self.cr.fetchone()[0] or 0.0 def _sum_credit_period(self, 
period_id, journal_id=None): journals = journal_id or self.journal_ids if not journals: return 0.0 self.cr.execute('SELECT SUM(credit) FROM account_move_line l ' 'WHERE period_id=%s AND journal_id IN %s ' + self.query_get_clause +' ', (period_id, tuple(journals))) return self.cr.fetchone()[0] or 0.0 def _get_fiscalyear(self, data): if data.get('form', False) and data['form'].get('fiscalyear_id', False): return pooler.get_pool(self.cr.dbname).get('account.fiscalyear').browse(self.cr, self.uid, data['form']['fiscalyear_id']).name return '' def _get_company(self, data): if data.get('form', False) and data['form'].get('chart_account_id', False): return pooler.get_pool(self.cr.dbname).get('account.account').browse(self.cr, self.uid, data['form']['chart_account_id']).company_id.name return '' def _get_journal(self, data): codes = [] if data.get('form', False) and data['form'].get('journal_ids', False): self.cr.execute('select code from account_journal where id IN %s',(tuple(data['form']['journal_ids']),)) codes = [x for x, in self.cr.fetchall()] return codes def _get_currency(self, data): if data.get('form', False) and data['form'].get('chart_account_id', False): return pooler.get_pool(self.cr.dbname).get('account.account').browse(self.cr, self.uid, data['form']['chart_account_id']).company_id.currency_id.symbol return '' #vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
h4wkmoon/shinken
shinken/brok.py
1
1976
#!/usr/bin/env python # # -*- coding: utf-8 -*- # # Copyright (C) 2009-2014: # Gabes Jean, [email protected] # Gerhard Lausser, [email protected] # Gregory Starck, [email protected] # Hartmut Goebel, [email protected] # # This file is part of Shinken. # # Shinken is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Shinken is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with Shinken. If not, see <http://www.gnu.org/licenses/>. import cPickle class Brok: """A Brok is a piece of information exported by Shinken to the Broker. Broker can do whatever he wants with it. """ __slots__ = ('__dict__', 'id', 'type', 'data', 'prepared', 'instance_id') id = 0 my_type = 'brok' def __init__(self, type, data): self.type = type self.id = self.__class__.id self.__class__.id += 1 self.data = cPickle.dumps(data, cPickle.HIGHEST_PROTOCOL) self.prepared = False def __str__(self): return str(self.__dict__) + '\n' # We unserialize the data, and if some prop were # add after the serialize pass, we integer them in the data def prepare(self): # Maybe the brok is a old daemon one or was already prepared # if so, the data is already ok if hasattr(self, 'prepared') and not self.prepared: self.data = cPickle.loads(self.data) if hasattr(self, 'instance_id'): self.data['instance_id'] = self.instance_id self.prepared = True
agpl-3.0
potatolondon/potato-mapreduce
mapreduce/test_support.py
36
7134
#!/usr/bin/env python # # Copyright 2010 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Utilities to aid in testing mapreduces.""" import base64 import collections import logging import os import re from mapreduce import main from mapreduce import model from google.appengine.ext.webapp import mock_webapp # TODO(user): Add tests for this file. # Change level to logging.DEBUG to see stacktrack on failed task executions. _LOGGING_LEVEL = logging.ERROR logging.getLogger().setLevel(_LOGGING_LEVEL) def decode_task_payload(task): """Decodes POST task payload. This can only decode POST payload for a normal task. For huge task, use model.HugeTask.decode_payload. Args: task: a dict representing a taskqueue task as documented in taskqueue_stub. Returns: parameter_name -> parameter_value dict. If multiple parameter values are present, then parameter_value will be a list. """ if not task: return {} # taskqueue_stub base64 encodes body when it returns the task to us. body = base64.b64decode(task["body"]) # pylint: disable=protected-access return model.HugeTask._decode_payload(body) def execute_task(task, retries=0, handlers_map=None): """Execute mapper's executor task. This will try to determine the correct mapper handler for the task, will set up all mock environment necessary for task execution, and execute the task itself. This function can be used for functional-style testing of functionality depending on mapper framework. Args: task: a taskqueue task. 
retries: the current retry of this task. handlers_map: a dict from url regex to handler. Returns: the handler instance used for this task. Raises: Exception: whatever the task raises. """ # Find the handler class if not handlers_map: handlers_map = main.create_handlers_map() url = task["url"] handler = None params = [] for (re_str, handler_class) in handlers_map: re_str = "^" + re_str + "($|\\?)" m = re.match(re_str, url) if m: params = m.groups()[:-1] # last groups was added by ($|\\?) above break else: raise Exception("Can't determine handler for %s" % task) request = mock_webapp.MockRequest() request.set_url(url) # Set dependent env vars if test hasn't set them. version = "mr-test-support-version.1" module = "mr-test-support-module" default_version_hostname = "mr-test-support.appspot.com" host = "%s.%s.%s" % (version.split(".")[0], module, default_version_hostname) if "CURRENT_VERSION_ID" not in os.environ: request.environ["CURRENT_VERSION_ID"] = version if "DEFAULT_VERSION_HOSTNAME" not in os.environ: request.environ["DEFAULT_VERSION_HOSTNAME"] = ( default_version_hostname) if "CURRENT_MODULE_ID" not in os.environ: request.environ["CURRENT_MODULE_ID"] = module if "HTTP_HOST" not in os.environ: request.environ["HTTP_HOST"] = host # Set taskqueue specific headers and env vars. for k, v in task.get("headers", []): request.headers[k] = v environ_key = "HTTP_" + k.replace("-", "_").upper() request.environ[environ_key] = v request.headers["X-AppEngine-TaskExecutionCount"] = retries request.environ["HTTP_X_APPENGINE_TASKNAME"] = ( task.get("name", "default_task_name")) request.environ["HTTP_X_APPENGINE_QUEUENAME"] = ( task.get("queue_name", "default")) request.environ["PATH_INFO"] = request.path if task["method"] == "POST": # taskqueue_stub base64 encodes body when it returns the task to us. 
request.body = base64.b64decode(task["body"]) for k, v in decode_task_payload(task).iteritems(): request.set(k, v) response = mock_webapp.MockResponse() saved_os_environ = os.environ copy_os_environ = dict(os.environ) copy_os_environ.update(request.environ) try: os.environ = copy_os_environ # Webapp2 expects request/response in the handler instantiation, and calls # initialize automatically. handler = handler_class(request, response) except TypeError: # For webapp, setup request before calling initialize. handler = handler_class() handler.initialize(request, response) finally: os.environ = saved_os_environ try: os.environ = copy_os_environ if task["method"] == "POST": handler.post(*params) elif task["method"] == "GET": handler.get(*params) else: raise Exception("Unsupported method: %s" % task.method) finally: os.environ = saved_os_environ if handler.response.status != 200: raise Exception("Handler failure: %s (%s). \nTask: %s\nHandler: %s" % (handler.response.status, handler.response.status_message, task, handler)) return handler def execute_all_tasks(taskqueue, queue="default", handlers_map=None): """Run and remove all tasks in the taskqueue. Args: taskqueue: An instance of taskqueue stub. queue: Queue name to run all tasks from. hanlders_map: see main.create_handlers_map. Returns: task_run_counts: a dict from handler class to the number of tasks it handled. """ tasks = taskqueue.GetTasks(queue) taskqueue.FlushQueue(queue) task_run_counts = collections.defaultdict(lambda: 0) for task in tasks: retries = 0 while True: try: handler = execute_task(task, retries, handlers_map=handlers_map) task_run_counts[handler.__class__] += 1 break # pylint: disable=broad-except except Exception, e: retries += 1 # Arbitrary large number. if retries > 100: logging.debug("Task %s failed for too many times. 
Giving up.", task["name"]) raise logging.debug( "Task %s is being retried for the %s time", task["name"], retries) logging.debug(e) return task_run_counts def execute_until_empty(taskqueue, queue="default", handlers_map=None): """Execute taskqueue tasks until it becomes empty. Args: taskqueue: An instance of taskqueue stub. queue: Queue name to run all tasks from. hanlders_map: see main.create_handlers_map. Returns: task_run_counts: a dict from handler class to the number of tasks it handled. """ task_run_counts = collections.defaultdict(lambda: 0) while taskqueue.GetTasks(queue): new_counts = execute_all_tasks(taskqueue, queue, handlers_map) for handler_cls in new_counts: task_run_counts[handler_cls] += new_counts[handler_cls] return task_run_counts
apache-2.0
jasonwee/asus-rt-n14uhp-mrtg
tmp/ve_asus-rt-n14uhp-mrtg/lib/python3.4/site-packages/requests/packages/chardet/euckrfreq.py
3121
45978
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # Sampling from about 20M text materials include literature and computer technology # 128 --> 0.79 # 256 --> 0.92 # 512 --> 0.986 # 1024 --> 0.99944 # 2048 --> 0.99999 # # Idea Distribution Ratio = 0.98653 / (1-0.98653) = 73.24 # Random Distribution Ration = 512 / (2350-512) = 0.279. 
# # Typical Distribution Ratio EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0 EUCKR_TABLE_SIZE = 2352 # Char to FreqOrder table , EUCKRCharToFreqOrder = ( \ 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87, 1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398, 1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734, 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739, 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622, 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750, 1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856, 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205, 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779, 1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19, 1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567, 1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797, 1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802, 1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899, 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818, 1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409, 1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697, 1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770, 1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723, 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416, 1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300, 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083, 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857, 
1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871, 282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420, 1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885, 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889, 0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893, 1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317, 1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841, 1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910, 1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610, 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375, 1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939, 887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870, 217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934, 1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888, 1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950, 1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065, 1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002, 1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965, 1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467, 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285, 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7, 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979, 1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985, 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994, 1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250, 423, 
277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824, 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003, 2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745, 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61, 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023, 2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032, 2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912, 2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224, 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012, 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050, 2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681, 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414, 1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068, 2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075, 1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850, 2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606, 2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449, 1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452, 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112, 2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121, 2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130, 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274, 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139, 2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721, 1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298, 
2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463, 2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747, 2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285, 2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187, 2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10, 2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350, 1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201, 2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972, 2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219, 2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233, 2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242, 2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247, 1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178, 1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255, 2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259, 1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262, 2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702, 1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273, 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541, 2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117, 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187, 2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800, 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312, 2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229, 2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315, 501, 
380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484, 2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170, 1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335, 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601, 1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395, 2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354, 1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476, 2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035, 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498, 2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310, 1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389, 2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504, 1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505, 2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145, 1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624, 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700, 2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221, 2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377, 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448, 915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485, 1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705, 1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465, 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471, 2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997, 2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 
684,2485,2486, 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494, 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771, 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323, 2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491, 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510, 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519, 2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532, 2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199, 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544, 2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247, 1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441, 249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562, 2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362, 2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583, 2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465, 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431, 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151, 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596, 2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406, 2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611, 2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619, 1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628, 2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042, 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256 #Everything below is of no interest for detection 
purpose 2643,2644,2645,2646,2647,2648,2649,2650,2651,2652,2653,2654,2655,2656,2657,2658, 2659,2660,2661,2662,2663,2664,2665,2666,2667,2668,2669,2670,2671,2672,2673,2674, 2675,2676,2677,2678,2679,2680,2681,2682,2683,2684,2685,2686,2687,2688,2689,2690, 2691,2692,2693,2694,2695,2696,2697,2698,2699,1542, 880,2700,2701,2702,2703,2704, 2705,2706,2707,2708,2709,2710,2711,2712,2713,2714,2715,2716,2717,2718,2719,2720, 2721,2722,2723,2724,2725,1543,2726,2727,2728,2729,2730,2731,2732,1544,2733,2734, 2735,2736,2737,2738,2739,2740,2741,2742,2743,2744,2745,2746,2747,2748,2749,2750, 2751,2752,2753,2754,1545,2755,2756,2757,2758,2759,2760,2761,2762,2763,2764,2765, 2766,1546,2767,1547,2768,2769,2770,2771,2772,2773,2774,2775,2776,2777,2778,2779, 2780,2781,2782,2783,2784,2785,2786,1548,2787,2788,2789,1109,2790,2791,2792,2793, 2794,2795,2796,2797,2798,2799,2800,2801,2802,2803,2804,2805,2806,2807,2808,2809, 2810,2811,2812,1329,2813,2814,2815,2816,2817,2818,2819,2820,2821,2822,2823,2824, 2825,2826,2827,2828,2829,2830,2831,2832,2833,2834,2835,2836,2837,2838,2839,2840, 2841,2842,2843,2844,2845,2846,2847,2848,2849,2850,2851,2852,2853,2854,2855,2856, 1549,2857,2858,2859,2860,1550,2861,2862,1551,2863,2864,2865,2866,2867,2868,2869, 2870,2871,2872,2873,2874,1110,1330,2875,2876,2877,2878,2879,2880,2881,2882,2883, 2884,2885,2886,2887,2888,2889,2890,2891,2892,2893,2894,2895,2896,2897,2898,2899, 2900,2901,2902,2903,2904,2905,2906,2907,2908,2909,2910,2911,2912,2913,2914,2915, 2916,2917,2918,2919,2920,2921,2922,2923,2924,2925,2926,2927,2928,2929,2930,1331, 2931,2932,2933,2934,2935,2936,2937,2938,2939,2940,2941,2942,2943,1552,2944,2945, 2946,2947,2948,2949,2950,2951,2952,2953,2954,2955,2956,2957,2958,2959,2960,2961, 2962,2963,2964,1252,2965,2966,2967,2968,2969,2970,2971,2972,2973,2974,2975,2976, 2977,2978,2979,2980,2981,2982,2983,2984,2985,2986,2987,2988,2989,2990,2991,2992, 2993,2994,2995,2996,2997,2998,2999,3000,3001,3002,3003,3004,3005,3006,3007,3008, 
3009,3010,3011,3012,1553,3013,3014,3015,3016,3017,1554,3018,1332,3019,3020,3021, 3022,3023,3024,3025,3026,3027,3028,3029,3030,3031,3032,3033,3034,3035,3036,3037, 3038,3039,3040,3041,3042,3043,3044,3045,3046,3047,3048,3049,3050,1555,3051,3052, 3053,1556,1557,3054,3055,3056,3057,3058,3059,3060,3061,3062,3063,3064,3065,3066, 3067,1558,3068,3069,3070,3071,3072,3073,3074,3075,3076,1559,3077,3078,3079,3080, 3081,3082,3083,1253,3084,3085,3086,3087,3088,3089,3090,3091,3092,3093,3094,3095, 3096,3097,3098,3099,3100,3101,3102,3103,3104,3105,3106,3107,3108,1152,3109,3110, 3111,3112,3113,1560,3114,3115,3116,3117,1111,3118,3119,3120,3121,3122,3123,3124, 3125,3126,3127,3128,3129,3130,3131,3132,3133,3134,3135,3136,3137,3138,3139,3140, 3141,3142,3143,3144,3145,3146,3147,3148,3149,3150,3151,3152,3153,3154,3155,3156, 3157,3158,3159,3160,3161,3162,3163,3164,3165,3166,3167,3168,3169,3170,3171,3172, 3173,3174,3175,3176,1333,3177,3178,3179,3180,3181,3182,3183,3184,3185,3186,3187, 3188,3189,1561,3190,3191,1334,3192,3193,3194,3195,3196,3197,3198,3199,3200,3201, 3202,3203,3204,3205,3206,3207,3208,3209,3210,3211,3212,3213,3214,3215,3216,3217, 3218,3219,3220,3221,3222,3223,3224,3225,3226,3227,3228,3229,3230,3231,3232,3233, 3234,1562,3235,3236,3237,3238,3239,3240,3241,3242,3243,3244,3245,3246,3247,3248, 3249,3250,3251,3252,3253,3254,3255,3256,3257,3258,3259,3260,3261,3262,3263,3264, 3265,3266,3267,3268,3269,3270,3271,3272,3273,3274,3275,3276,3277,1563,3278,3279, 3280,3281,3282,3283,3284,3285,3286,3287,3288,3289,3290,3291,3292,3293,3294,3295, 3296,3297,3298,3299,3300,3301,3302,3303,3304,3305,3306,3307,3308,3309,3310,3311, 3312,3313,3314,3315,3316,3317,3318,3319,3320,3321,3322,3323,3324,3325,3326,3327, 3328,3329,3330,3331,3332,3333,3334,3335,3336,3337,3338,3339,3340,3341,3342,3343, 3344,3345,3346,3347,3348,3349,3350,3351,3352,3353,3354,3355,3356,3357,3358,3359, 3360,3361,3362,3363,3364,1335,3365,3366,3367,3368,3369,3370,3371,3372,3373,3374, 
3375,3376,3377,3378,3379,3380,3381,3382,3383,3384,3385,3386,3387,1336,3388,3389, 3390,3391,3392,3393,3394,3395,3396,3397,3398,3399,3400,3401,3402,3403,3404,3405, 3406,3407,3408,3409,3410,3411,3412,3413,3414,1337,3415,3416,3417,3418,3419,1338, 3420,3421,3422,1564,1565,3423,3424,3425,3426,3427,3428,3429,3430,3431,1254,3432, 3433,3434,1339,3435,3436,3437,3438,3439,1566,3440,3441,3442,3443,3444,3445,3446, 3447,3448,3449,3450,3451,3452,3453,3454,1255,3455,3456,3457,3458,3459,1567,1191, 3460,1568,1569,3461,3462,3463,1570,3464,3465,3466,3467,3468,1571,3469,3470,3471, 3472,3473,1572,3474,3475,3476,3477,3478,3479,3480,3481,3482,3483,3484,3485,3486, 1340,3487,3488,3489,3490,3491,3492,1021,3493,3494,3495,3496,3497,3498,1573,3499, 1341,3500,3501,3502,3503,3504,3505,3506,3507,3508,3509,3510,3511,1342,3512,3513, 3514,3515,3516,1574,1343,3517,3518,3519,1575,3520,1576,3521,3522,3523,3524,3525, 3526,3527,3528,3529,3530,3531,3532,3533,3534,3535,3536,3537,3538,3539,3540,3541, 3542,3543,3544,3545,3546,3547,3548,3549,3550,3551,3552,3553,3554,3555,3556,3557, 3558,3559,3560,3561,3562,3563,3564,3565,3566,3567,3568,3569,3570,3571,3572,3573, 3574,3575,3576,3577,3578,3579,3580,1577,3581,3582,1578,3583,3584,3585,3586,3587, 3588,3589,3590,3591,3592,3593,3594,3595,3596,3597,3598,3599,3600,3601,3602,3603, 3604,1579,3605,3606,3607,3608,3609,3610,3611,3612,3613,3614,3615,3616,3617,3618, 3619,3620,3621,3622,3623,3624,3625,3626,3627,3628,3629,1580,3630,3631,1581,3632, 3633,3634,3635,3636,3637,3638,3639,3640,3641,3642,3643,3644,3645,3646,3647,3648, 3649,3650,3651,3652,3653,3654,3655,3656,1582,3657,3658,3659,3660,3661,3662,3663, 3664,3665,3666,3667,3668,3669,3670,3671,3672,3673,3674,3675,3676,3677,3678,3679, 3680,3681,3682,3683,3684,3685,3686,3687,3688,3689,3690,3691,3692,3693,3694,3695, 3696,3697,3698,3699,3700,1192,3701,3702,3703,3704,1256,3705,3706,3707,3708,1583, 1257,3709,3710,3711,3712,3713,3714,3715,3716,1584,3717,3718,3719,3720,3721,3722, 
3723,3724,3725,3726,3727,3728,3729,3730,3731,3732,3733,3734,3735,3736,3737,3738, 3739,3740,3741,3742,3743,3744,3745,1344,3746,3747,3748,3749,3750,3751,3752,3753, 3754,3755,3756,1585,3757,3758,3759,3760,3761,3762,3763,3764,3765,3766,1586,3767, 3768,3769,3770,3771,3772,3773,3774,3775,3776,3777,3778,1345,3779,3780,3781,3782, 3783,3784,3785,3786,3787,3788,3789,3790,3791,3792,3793,3794,3795,1346,1587,3796, 3797,1588,3798,3799,3800,3801,3802,3803,3804,3805,3806,1347,3807,3808,3809,3810, 3811,1589,3812,3813,3814,3815,3816,3817,3818,3819,3820,3821,1590,3822,3823,1591, 1348,3824,3825,3826,3827,3828,3829,3830,1592,3831,3832,1593,3833,3834,3835,3836, 3837,3838,3839,3840,3841,3842,3843,3844,1349,3845,3846,3847,3848,3849,3850,3851, 3852,3853,3854,3855,3856,3857,3858,1594,3859,3860,3861,3862,3863,3864,3865,3866, 3867,3868,3869,1595,3870,3871,3872,3873,1596,3874,3875,3876,3877,3878,3879,3880, 3881,3882,3883,3884,3885,3886,1597,3887,3888,3889,3890,3891,3892,3893,3894,3895, 1598,3896,3897,3898,1599,1600,3899,1350,3900,1351,3901,3902,1352,3903,3904,3905, 3906,3907,3908,3909,3910,3911,3912,3913,3914,3915,3916,3917,3918,3919,3920,3921, 3922,3923,3924,1258,3925,3926,3927,3928,3929,3930,3931,1193,3932,1601,3933,3934, 3935,3936,3937,3938,3939,3940,3941,3942,3943,1602,3944,3945,3946,3947,3948,1603, 3949,3950,3951,3952,3953,3954,3955,3956,3957,3958,3959,3960,3961,3962,3963,3964, 3965,1604,3966,3967,3968,3969,3970,3971,3972,3973,3974,3975,3976,3977,1353,3978, 3979,3980,3981,3982,3983,3984,3985,3986,3987,3988,3989,3990,3991,1354,3992,3993, 3994,3995,3996,3997,3998,3999,4000,4001,4002,4003,4004,4005,4006,4007,4008,4009, 4010,4011,4012,4013,4014,4015,4016,4017,4018,4019,4020,4021,4022,4023,1355,4024, 4025,4026,4027,4028,4029,4030,4031,4032,4033,4034,4035,4036,4037,4038,4039,4040, 1605,4041,4042,4043,4044,4045,4046,4047,4048,4049,4050,4051,4052,4053,4054,4055, 4056,4057,4058,4059,4060,1606,4061,4062,4063,4064,1607,4065,4066,4067,4068,4069, 
4070,4071,4072,4073,4074,4075,4076,1194,4077,4078,1608,4079,4080,4081,4082,4083, 4084,4085,4086,4087,1609,4088,4089,4090,4091,4092,4093,4094,4095,4096,4097,4098, 4099,4100,4101,4102,4103,4104,4105,4106,4107,4108,1259,4109,4110,4111,4112,4113, 4114,4115,4116,4117,4118,4119,4120,4121,4122,4123,4124,1195,4125,4126,4127,1610, 4128,4129,4130,4131,4132,4133,4134,4135,4136,4137,1356,4138,4139,4140,4141,4142, 4143,4144,1611,4145,4146,4147,4148,4149,4150,4151,4152,4153,4154,4155,4156,4157, 4158,4159,4160,4161,4162,4163,4164,4165,4166,4167,4168,4169,4170,4171,4172,4173, 4174,4175,4176,4177,4178,4179,4180,4181,4182,4183,4184,4185,4186,4187,4188,4189, 4190,4191,4192,4193,4194,4195,4196,4197,4198,4199,4200,4201,4202,4203,4204,4205, 4206,4207,4208,4209,4210,4211,4212,4213,4214,4215,4216,4217,4218,4219,1612,4220, 4221,4222,4223,4224,4225,4226,4227,1357,4228,1613,4229,4230,4231,4232,4233,4234, 4235,4236,4237,4238,4239,4240,4241,4242,4243,1614,4244,4245,4246,4247,4248,4249, 4250,4251,4252,4253,4254,4255,4256,4257,4258,4259,4260,4261,4262,4263,4264,4265, 4266,4267,4268,4269,4270,1196,1358,4271,4272,4273,4274,4275,4276,4277,4278,4279, 4280,4281,4282,4283,4284,4285,4286,4287,1615,4288,4289,4290,4291,4292,4293,4294, 4295,4296,4297,4298,4299,4300,4301,4302,4303,4304,4305,4306,4307,4308,4309,4310, 4311,4312,4313,4314,4315,4316,4317,4318,4319,4320,4321,4322,4323,4324,4325,4326, 4327,4328,4329,4330,4331,4332,4333,4334,1616,4335,4336,4337,4338,4339,4340,4341, 4342,4343,4344,4345,4346,4347,4348,4349,4350,4351,4352,4353,4354,4355,4356,4357, 4358,4359,4360,1617,4361,4362,4363,4364,4365,1618,4366,4367,4368,4369,4370,4371, 4372,4373,4374,4375,4376,4377,4378,4379,4380,4381,4382,4383,4384,4385,4386,4387, 4388,4389,4390,4391,4392,4393,4394,4395,4396,4397,4398,4399,4400,4401,4402,4403, 4404,4405,4406,4407,4408,4409,4410,4411,4412,4413,4414,4415,4416,1619,4417,4418, 4419,4420,4421,4422,4423,4424,4425,1112,4426,4427,4428,4429,4430,1620,4431,4432, 
4433,4434,4435,4436,4437,4438,4439,4440,4441,4442,1260,1261,4443,4444,4445,4446, 4447,4448,4449,4450,4451,4452,4453,4454,4455,1359,4456,4457,4458,4459,4460,4461, 4462,4463,4464,4465,1621,4466,4467,4468,4469,4470,4471,4472,4473,4474,4475,4476, 4477,4478,4479,4480,4481,4482,4483,4484,4485,4486,4487,4488,4489,1055,4490,4491, 4492,4493,4494,4495,4496,4497,4498,4499,4500,4501,4502,4503,4504,4505,4506,4507, 4508,4509,4510,4511,4512,4513,4514,4515,4516,4517,4518,1622,4519,4520,4521,1623, 4522,4523,4524,4525,4526,4527,4528,4529,4530,4531,4532,4533,4534,4535,1360,4536, 4537,4538,4539,4540,4541,4542,4543, 975,4544,4545,4546,4547,4548,4549,4550,4551, 4552,4553,4554,4555,4556,4557,4558,4559,4560,4561,4562,4563,4564,4565,4566,4567, 4568,4569,4570,4571,1624,4572,4573,4574,4575,4576,1625,4577,4578,4579,4580,4581, 4582,4583,4584,1626,4585,4586,4587,4588,4589,4590,4591,4592,4593,4594,4595,1627, 4596,4597,4598,4599,4600,4601,4602,4603,4604,4605,4606,4607,4608,4609,4610,4611, 4612,4613,4614,4615,1628,4616,4617,4618,4619,4620,4621,4622,4623,4624,4625,4626, 4627,4628,4629,4630,4631,4632,4633,4634,4635,4636,4637,4638,4639,4640,4641,4642, 4643,4644,4645,4646,4647,4648,4649,1361,4650,4651,4652,4653,4654,4655,4656,4657, 4658,4659,4660,4661,1362,4662,4663,4664,4665,4666,4667,4668,4669,4670,4671,4672, 4673,4674,4675,4676,4677,4678,4679,4680,4681,4682,1629,4683,4684,4685,4686,4687, 1630,4688,4689,4690,4691,1153,4692,4693,4694,1113,4695,4696,4697,4698,4699,4700, 4701,4702,4703,4704,4705,4706,4707,4708,4709,4710,4711,1197,4712,4713,4714,4715, 4716,4717,4718,4719,4720,4721,4722,4723,4724,4725,4726,4727,4728,4729,4730,4731, 4732,4733,4734,4735,1631,4736,1632,4737,4738,4739,4740,4741,4742,4743,4744,1633, 4745,4746,4747,4748,4749,1262,4750,4751,4752,4753,4754,1363,4755,4756,4757,4758, 4759,4760,4761,4762,4763,4764,4765,4766,4767,4768,1634,4769,4770,4771,4772,4773, 4774,4775,4776,4777,4778,1635,4779,4780,4781,4782,4783,4784,4785,4786,4787,4788, 
4789,1636,4790,4791,4792,4793,4794,4795,4796,4797,4798,4799,4800,4801,4802,4803, 4804,4805,4806,1637,4807,4808,4809,1638,4810,4811,4812,4813,4814,4815,4816,4817, 4818,1639,4819,4820,4821,4822,4823,4824,4825,4826,4827,4828,4829,4830,4831,4832, 4833,1077,4834,4835,4836,4837,4838,4839,4840,4841,4842,4843,4844,4845,4846,4847, 4848,4849,4850,4851,4852,4853,4854,4855,4856,4857,4858,4859,4860,4861,4862,4863, 4864,4865,4866,4867,4868,4869,4870,4871,4872,4873,4874,4875,4876,4877,4878,4879, 4880,4881,4882,4883,1640,4884,4885,1641,4886,4887,4888,4889,4890,4891,4892,4893, 4894,4895,4896,4897,4898,4899,4900,4901,4902,4903,4904,4905,4906,4907,4908,4909, 4910,4911,1642,4912,4913,4914,1364,4915,4916,4917,4918,4919,4920,4921,4922,4923, 4924,4925,4926,4927,4928,4929,4930,4931,1643,4932,4933,4934,4935,4936,4937,4938, 4939,4940,4941,4942,4943,4944,4945,4946,4947,4948,4949,4950,4951,4952,4953,4954, 4955,4956,4957,4958,4959,4960,4961,4962,4963,4964,4965,4966,4967,4968,4969,4970, 4971,4972,4973,4974,4975,4976,4977,4978,4979,4980,1644,4981,4982,4983,4984,1645, 4985,4986,1646,4987,4988,4989,4990,4991,4992,4993,4994,4995,4996,4997,4998,4999, 5000,5001,5002,5003,5004,5005,1647,5006,1648,5007,5008,5009,5010,5011,5012,1078, 5013,5014,5015,5016,5017,5018,5019,5020,5021,5022,5023,5024,5025,5026,5027,5028, 1365,5029,5030,5031,5032,5033,5034,5035,5036,5037,5038,5039,1649,5040,5041,5042, 5043,5044,5045,1366,5046,5047,5048,5049,5050,5051,5052,5053,5054,5055,1650,5056, 5057,5058,5059,5060,5061,5062,5063,5064,5065,5066,5067,5068,5069,5070,5071,5072, 5073,5074,5075,5076,5077,1651,5078,5079,5080,5081,5082,5083,5084,5085,5086,5087, 5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102,5103, 5104,5105,5106,5107,5108,5109,5110,1652,5111,5112,5113,5114,5115,5116,5117,5118, 1367,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,1653,5130,5131,5132, 5133,5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148, 
5149,1368,5150,1654,5151,1369,5152,5153,5154,5155,5156,5157,5158,5159,5160,5161, 5162,5163,5164,5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,5176,5177, 5178,1370,5179,5180,5181,5182,5183,5184,5185,5186,5187,5188,5189,5190,5191,5192, 5193,5194,5195,5196,5197,5198,1655,5199,5200,5201,5202,1656,5203,5204,5205,5206, 1371,5207,1372,5208,5209,5210,5211,1373,5212,5213,1374,5214,5215,5216,5217,5218, 5219,5220,5221,5222,5223,5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234, 5235,5236,5237,5238,5239,5240,5241,5242,5243,5244,5245,5246,5247,1657,5248,5249, 5250,5251,1658,1263,5252,5253,5254,5255,5256,1375,5257,5258,5259,5260,5261,5262, 5263,5264,5265,5266,5267,5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278, 5279,5280,5281,5282,5283,1659,5284,5285,5286,5287,5288,5289,5290,5291,5292,5293, 5294,5295,5296,5297,5298,5299,5300,1660,5301,5302,5303,5304,5305,5306,5307,5308, 5309,5310,5311,5312,5313,5314,5315,5316,5317,5318,5319,5320,5321,1376,5322,5323, 5324,5325,5326,5327,5328,5329,5330,5331,5332,5333,1198,5334,5335,5336,5337,5338, 5339,5340,5341,5342,5343,1661,5344,5345,5346,5347,5348,5349,5350,5351,5352,5353, 5354,5355,5356,5357,5358,5359,5360,5361,5362,5363,5364,5365,5366,5367,5368,5369, 5370,5371,5372,5373,5374,5375,5376,5377,5378,5379,5380,5381,5382,5383,5384,5385, 5386,5387,5388,5389,5390,5391,5392,5393,5394,5395,5396,5397,5398,1264,5399,5400, 5401,5402,5403,5404,5405,5406,5407,5408,5409,5410,5411,5412,1662,5413,5414,5415, 5416,1663,5417,5418,5419,5420,5421,5422,5423,5424,5425,5426,5427,5428,5429,5430, 5431,5432,5433,5434,5435,5436,5437,5438,1664,5439,5440,5441,5442,5443,5444,5445, 5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456,5457,5458,5459,5460,5461, 5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472,5473,5474,5475,5476,5477, 5478,1154,5479,5480,5481,5482,5483,5484,5485,1665,5486,5487,5488,5489,5490,5491, 5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504,5505,5506,5507, 
5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520,5521,5522,5523, 5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536,5537,5538,5539, 5540,5541,5542,5543,5544,5545,5546,5547,5548,1377,5549,5550,5551,5552,5553,5554, 5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568,5569,5570, 1114,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584,5585, 5586,5587,5588,5589,5590,5591,5592,1378,5593,5594,5595,5596,5597,5598,5599,5600, 5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,1379,5615, 5616,5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631, 5632,5633,5634,1380,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646, 5647,5648,5649,1381,1056,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660, 1666,5661,5662,5663,5664,5665,5666,5667,5668,1667,5669,1668,5670,5671,5672,5673, 5674,5675,5676,5677,5678,1155,5679,5680,5681,5682,5683,5684,5685,5686,5687,5688, 5689,5690,5691,5692,5693,5694,5695,5696,5697,5698,1669,5699,5700,5701,5702,5703, 5704,5705,1670,5706,5707,5708,5709,5710,1671,5711,5712,5713,5714,1382,5715,5716, 5717,5718,5719,5720,5721,5722,5723,5724,5725,1672,5726,5727,1673,1674,5728,5729, 5730,5731,5732,5733,5734,5735,5736,1675,5737,5738,5739,5740,5741,5742,5743,5744, 1676,5745,5746,5747,5748,5749,5750,5751,1383,5752,5753,5754,5755,5756,5757,5758, 5759,5760,5761,5762,5763,5764,5765,5766,5767,5768,1677,5769,5770,5771,5772,5773, 1678,5774,5775,5776, 998,5777,5778,5779,5780,5781,5782,5783,5784,5785,1384,5786, 5787,5788,5789,5790,5791,5792,5793,5794,5795,5796,5797,5798,5799,5800,1679,5801, 5802,5803,1115,1116,5804,5805,5806,5807,5808,5809,5810,5811,5812,5813,5814,5815, 5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828,5829,5830,5831, 5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844,5845,5846,5847, 5848,5849,5850,5851,5852,5853,5854,5855,1680,5856,5857,5858,5859,5860,5861,5862, 
5863,5864,1681,5865,5866,5867,1682,5868,5869,5870,5871,5872,5873,5874,5875,5876, 5877,5878,5879,1683,5880,1684,5881,5882,5883,5884,1685,5885,5886,5887,5888,5889, 5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905, 5906,5907,1686,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, 5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,1687, 5936,5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951, 5952,1688,1689,5953,1199,5954,5955,5956,5957,5958,5959,5960,5961,1690,5962,5963, 5964,5965,5966,5967,5968,5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979, 5980,5981,1385,5982,1386,5983,5984,5985,5986,5987,5988,5989,5990,5991,5992,5993, 5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004,6005,6006,6007,6008,6009, 6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020,6021,6022,6023,6024,6025, 6026,6027,1265,6028,6029,1691,6030,6031,6032,6033,6034,6035,6036,6037,6038,6039, 6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052,6053,6054,6055, 6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068,6069,6070,6071, 6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084,1692,6085,6086, 6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100,6101,6102, 6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116,6117,6118, 6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,1693,6132,6133, 6134,6135,6136,1694,6137,6138,6139,6140,6141,1695,6142,6143,6144,6145,6146,6147, 6148,6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163, 6164,6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179, 6180,6181,6182,6183,6184,6185,1696,6186,6187,6188,6189,6190,6191,6192,6193,6194, 6195,6196,6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210, 6211,6212,6213,6214,6215,6216,6217,6218,6219,1697,6220,6221,6222,6223,6224,6225, 
6226,6227,6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241, 6242,6243,6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,1698,6254,6255,6256, 6257,6258,6259,6260,6261,6262,6263,1200,6264,6265,6266,6267,6268,6269,6270,6271, #1024 6272,6273,6274,6275,6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,6286,6287, 6288,6289,6290,6291,6292,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,1699, 6303,6304,1700,6305,6306,6307,6308,6309,6310,6311,6312,6313,6314,6315,6316,6317, 6318,6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333, 6334,6335,6336,6337,6338,6339,1701,6340,6341,6342,6343,6344,1387,6345,6346,6347, 6348,6349,6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363, 6364,6365,6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379, 6380,6381,6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395, 6396,6397,6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,6411, 6412,6413,1702,6414,6415,6416,6417,6418,6419,6420,6421,6422,1703,6423,6424,6425, 6426,6427,6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,1704,6439,6440, 6441,6442,6443,6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,6455,6456, 6457,6458,6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472, 6473,6474,6475,6476,6477,6478,6479,6480,6481,6482,6483,6484,6485,6486,6487,6488, 6489,6490,6491,6492,6493,6494,6495,6496,6497,6498,6499,6500,6501,6502,6503,1266, 6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516,6517,6518,6519, 6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532,6533,6534,6535, 6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548,6549,6550,6551, 1705,1706,6552,6553,6554,6555,6556,6557,6558,6559,6560,6561,6562,6563,6564,6565, 6566,6567,6568,6569,6570,6571,6572,6573,6574,6575,6576,6577,6578,6579,6580,6581, 6582,6583,6584,6585,6586,6587,6588,6589,6590,6591,6592,6593,6594,6595,6596,6597, 
6598,6599,6600,6601,6602,6603,6604,6605,6606,6607,6608,6609,6610,6611,6612,6613, 6614,6615,6616,6617,6618,6619,6620,6621,6622,6623,6624,6625,6626,6627,6628,6629, 6630,6631,6632,6633,6634,6635,6636,6637,1388,6638,6639,6640,6641,6642,6643,6644, 1707,6645,6646,6647,6648,6649,6650,6651,6652,6653,6654,6655,6656,6657,6658,6659, 6660,6661,6662,6663,1708,6664,6665,6666,6667,6668,6669,6670,6671,6672,6673,6674, 1201,6675,6676,6677,6678,6679,6680,6681,6682,6683,6684,6685,6686,6687,6688,6689, 6690,6691,6692,6693,6694,6695,6696,6697,6698,6699,6700,6701,6702,6703,6704,6705, 6706,6707,6708,6709,6710,6711,6712,6713,6714,6715,6716,6717,6718,6719,6720,6721, 6722,6723,6724,6725,1389,6726,6727,6728,6729,6730,6731,6732,6733,6734,6735,6736, 1390,1709,6737,6738,6739,6740,6741,6742,1710,6743,6744,6745,6746,1391,6747,6748, 6749,6750,6751,6752,6753,6754,6755,6756,6757,1392,6758,6759,6760,6761,6762,6763, 6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777,6778,6779, 6780,1202,6781,6782,6783,6784,6785,6786,6787,6788,6789,6790,6791,6792,6793,6794, 6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806,6807,6808,6809,1711, 6810,6811,6812,6813,6814,6815,6816,6817,6818,6819,6820,6821,6822,6823,6824,6825, 6826,6827,6828,6829,6830,6831,6832,6833,6834,6835,6836,1393,6837,6838,6839,6840, 6841,6842,6843,6844,6845,6846,6847,6848,6849,6850,6851,6852,6853,6854,6855,6856, 6857,6858,6859,6860,6861,6862,6863,6864,6865,6866,6867,6868,6869,6870,6871,6872, 6873,6874,6875,6876,6877,6878,6879,6880,6881,6882,6883,6884,6885,6886,6887,6888, 6889,6890,6891,6892,6893,6894,6895,6896,6897,6898,6899,6900,6901,6902,1712,6903, 6904,6905,6906,6907,6908,6909,6910,1713,6911,6912,6913,6914,6915,6916,6917,6918, 6919,6920,6921,6922,6923,6924,6925,6926,6927,6928,6929,6930,6931,6932,6933,6934, 6935,6936,6937,6938,6939,6940,6941,6942,6943,6944,6945,6946,6947,6948,6949,6950, 6951,6952,6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966, 
6967,6968,6969,6970,6971,6972,6973,6974,1714,6975,6976,6977,6978,6979,6980,6981, 6982,6983,6984,6985,6986,6987,6988,1394,6989,6990,6991,6992,6993,6994,6995,6996, 6997,6998,6999,7000,1715,7001,7002,7003,7004,7005,7006,7007,7008,7009,7010,7011, 7012,7013,7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027, 7028,1716,7029,7030,7031,7032,7033,7034,7035,7036,7037,7038,7039,7040,7041,7042, 7043,7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058, 7059,7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,7071,7072,7073,7074, 7075,7076,7077,7078,7079,7080,7081,7082,7083,7084,7085,7086,7087,7088,7089,7090, 7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105,7106, 7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,7119,7120,7121,7122, 7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136,7137,7138, 7139,7140,7141,7142,7143,7144,7145,7146,7147,7148,7149,7150,7151,7152,7153,7154, 7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167,7168,7169,7170, 7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183,7184,7185,7186, 7187,7188,7189,7190,7191,7192,7193,7194,7195,7196,7197,7198,7199,7200,7201,7202, 7203,7204,7205,7206,7207,1395,7208,7209,7210,7211,7212,7213,1717,7214,7215,7216, 7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229,7230,7231,7232, 7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245,7246,7247,7248, 7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261,7262,7263,7264, 7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277,7278,7279,7280, 7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293,7294,7295,7296, 7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308,7309,7310,7311,7312, 7313,1718,7314,7315,7316,7317,7318,7319,7320,7321,7322,7323,7324,7325,7326,7327, 7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339,7340,7341,7342,7343, 
7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,7354,7355,7356,7357,7358,7359, 7360,7361,7362,7363,7364,7365,7366,7367,7368,7369,7370,7371,7372,7373,7374,7375, 7376,7377,7378,7379,7380,7381,7382,7383,7384,7385,7386,7387,7388,7389,7390,7391, 7392,7393,7394,7395,7396,7397,7398,7399,7400,7401,7402,7403,7404,7405,7406,7407, 7408,7409,7410,7411,7412,7413,7414,7415,7416,7417,7418,7419,7420,7421,7422,7423, 7424,7425,7426,7427,7428,7429,7430,7431,7432,7433,7434,7435,7436,7437,7438,7439, 7440,7441,7442,7443,7444,7445,7446,7447,7448,7449,7450,7451,7452,7453,7454,7455, 7456,7457,7458,7459,7460,7461,7462,7463,7464,7465,7466,7467,7468,7469,7470,7471, 7472,7473,7474,7475,7476,7477,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487, 7488,7489,7490,7491,7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,7503, 7504,7505,7506,7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519, 7520,7521,7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535, 7536,7537,7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,7550,7551, 7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567, 7568,7569,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582,7583, 7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598,7599, 7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614,7615, 7616,7617,7618,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628,7629,7630,7631, 7632,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643,7644,7645,7646,7647, 7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659,7660,7661,7662,7663, 7664,7665,7666,7667,7668,7669,7670,7671,7672,7673,7674,7675,7676,7677,7678,7679, 7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690,7691,7692,7693,7694,7695, 7696,7697,7698,7699,7700,7701,7702,7703,7704,7705,7706,7707,7708,7709,7710,7711, 7712,7713,7714,7715,7716,7717,7718,7719,7720,7721,7722,7723,7724,7725,7726,7727, 
7728,7729,7730,7731,7732,7733,7734,7735,7736,7737,7738,7739,7740,7741,7742,7743, 7744,7745,7746,7747,7748,7749,7750,7751,7752,7753,7754,7755,7756,7757,7758,7759, 7760,7761,7762,7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775, 7776,7777,7778,7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791, 7792,7793,7794,7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,7806,7807, 7808,7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823, 7824,7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839, 7840,7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855, 7856,7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871, 7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887, 7888,7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903, 7904,7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919, 7920,7921,7922,7923,7924,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935, 7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951, 7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967, 7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983, 7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999, 8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015, 8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031, 8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047, 8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063, 8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079, 8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095, 8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111, 
8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127, 8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143, 8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159, 8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175, 8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191, 8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207, 8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223, 8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239, 8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255, 8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271, 8272,8273,8274,8275,8276,8277,8278,8279,8280,8281,8282,8283,8284,8285,8286,8287, 8288,8289,8290,8291,8292,8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303, 8304,8305,8306,8307,8308,8309,8310,8311,8312,8313,8314,8315,8316,8317,8318,8319, 8320,8321,8322,8323,8324,8325,8326,8327,8328,8329,8330,8331,8332,8333,8334,8335, 8336,8337,8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351, 8352,8353,8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,8364,8365,8366,8367, 8368,8369,8370,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382,8383, 8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398,8399, 8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,8411,8412,8413,8414,8415, 8416,8417,8418,8419,8420,8421,8422,8423,8424,8425,8426,8427,8428,8429,8430,8431, 8432,8433,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443,8444,8445,8446,8447, 8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459,8460,8461,8462,8463, 8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475,8476,8477,8478,8479, 8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490,8491,8492,8493,8494,8495, 
8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506,8507,8508,8509,8510,8511, 8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522,8523,8524,8525,8526,8527, 8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538,8539,8540,8541,8542,8543, 8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554,8555,8556,8557,8558,8559, 8560,8561,8562,8563,8564,8565,8566,8567,8568,8569,8570,8571,8572,8573,8574,8575, 8576,8577,8578,8579,8580,8581,8582,8583,8584,8585,8586,8587,8588,8589,8590,8591, 8592,8593,8594,8595,8596,8597,8598,8599,8600,8601,8602,8603,8604,8605,8606,8607, 8608,8609,8610,8611,8612,8613,8614,8615,8616,8617,8618,8619,8620,8621,8622,8623, 8624,8625,8626,8627,8628,8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,8639, 8640,8641,8642,8643,8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655, 8656,8657,8658,8659,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671, 8672,8673,8674,8675,8676,8677,8678,8679,8680,8681,8682,8683,8684,8685,8686,8687, 8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703, 8704,8705,8706,8707,8708,8709,8710,8711,8712,8713,8714,8715,8716,8717,8718,8719, 8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,8734,8735, 8736,8737,8738,8739,8740,8741) # flake8: noqa
apache-2.0
consulo/consulo-python
plugin/src/main/dist/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_except.py
326
3352
"""Fixer for except statements with named exceptions. The following cases will be converted: - "except E, T:" where T is a name: except E as T: - "except E, T:" where T is not a name, tuple or list: except E as t: T = t This is done because the target of an "except" clause must be a name. - "except E, T:" where T is a tuple or list literal: except E as t: T = t.args """ # Author: Collin Winter # Local imports from .. import pytree from ..pgen2 import token from .. import fixer_base from ..fixer_util import Assign, Attr, Name, is_tuple, is_list, syms def find_excepts(nodes): for i, n in enumerate(nodes): if n.type == syms.except_clause: if n.children[0].value == u'except': yield (n, nodes[i+2]) class FixExcept(fixer_base.BaseFix): BM_compatible = True PATTERN = """ try_stmt< 'try' ':' (simple_stmt | suite) cleanup=(except_clause ':' (simple_stmt | suite))+ tail=(['except' ':' (simple_stmt | suite)] ['else' ':' (simple_stmt | suite)] ['finally' ':' (simple_stmt | suite)]) > """ def transform(self, node, results): syms = self.syms tail = [n.clone() for n in results["tail"]] try_cleanup = [ch.clone() for ch in results["cleanup"]] for except_clause, e_suite in find_excepts(try_cleanup): if len(except_clause.children) == 4: (E, comma, N) = except_clause.children[1:4] comma.replace(Name(u"as", prefix=u" ")) if N.type != token.NAME: # Generate a new N for the except clause new_N = Name(self.new_name(), prefix=u" ") target = N.clone() target.prefix = u"" N.replace(new_N) new_N = new_N.clone() # Insert "old_N = new_N" as the first statement in # the except body. 
This loop skips leading whitespace # and indents #TODO(cwinter) suite-cleanup suite_stmts = e_suite.children for i, stmt in enumerate(suite_stmts): if isinstance(stmt, pytree.Node): break # The assignment is different if old_N is a tuple or list # In that case, the assignment is old_N = new_N.args if is_tuple(N) or is_list(N): assign = Assign(target, Attr(new_N, Name(u'args'))) else: assign = Assign(target, new_N) #TODO(cwinter) stopgap until children becomes a smart list for child in reversed(suite_stmts[:i]): e_suite.insert_child(0, child) e_suite.insert_child(i, assign) elif N.prefix == u"": # No space after a comma is legal; no space after "as", # not so much. N.prefix = u" " #TODO(cwinter) fix this when children becomes a smart list children = [c.clone() for c in node.children[:3]] + try_cleanup + tail return pytree.Node(node.type, children)
apache-2.0
alibarkatali/module_web
venv/lib/python2.7/site-packages/pip/utils/packaging.py
343
2080
from __future__ import absolute_import from email.parser import FeedParser import logging import sys from pip._vendor.packaging import specifiers from pip._vendor.packaging import version from pip._vendor import pkg_resources from pip import exceptions logger = logging.getLogger(__name__) def check_requires_python(requires_python): """ Check if the python version in use match the `requires_python` specifier. Returns `True` if the version of python in use matches the requirement. Returns `False` if the version of python in use does not matches the requirement. Raises an InvalidSpecifier if `requires_python` have an invalid format. """ if requires_python is None: # The package provides no information return True requires_python_specifier = specifiers.SpecifierSet(requires_python) # We only use major.minor.micro python_version = version.parse('.'.join(map(str, sys.version_info[:3]))) return python_version in requires_python_specifier def get_metadata(dist): if (isinstance(dist, pkg_resources.DistInfoDistribution) and dist.has_metadata('METADATA')): return dist.get_metadata('METADATA') elif dist.has_metadata('PKG-INFO'): return dist.get_metadata('PKG-INFO') def check_dist_requires_python(dist): metadata = get_metadata(dist) feed_parser = FeedParser() feed_parser.feed(metadata) pkg_info_dict = feed_parser.close() requires_python = pkg_info_dict.get('Requires-Python') try: if not check_requires_python(requires_python): raise exceptions.UnsupportedPythonVersion( "%s requires Python '%s' but the running Python is %s" % ( dist.project_name, requires_python, '.'.join(map(str, sys.version_info[:3])),) ) except specifiers.InvalidSpecifier as e: logger.warning( "Package %s has an invalid Requires-Python entry %s - %s" % ( dist.project_name, requires_python, e)) return
mit
TheShed/OpenOCD-CMSIS-DAP
tools/xsvf_tools/svf2xsvf.py
101
26710
#!/usr/bin/python3.0 # Copyright 2008, SoftPLC Corporation http://softplc.com # Dick Hollenbeck [email protected] # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, you may find one here: # http://www.gnu.org/licenses/old-licenses/gpl-2.0.html # or you may search the http://www.gnu.org website for the version 2 license, # or you may write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA # A python program to convert an SVF file to an XSVF file. There is an # option to include comments containing the source file line number from the origin # SVF file before each outputted XSVF statement. # # We deviate from the XSVF spec in that we introduce a new command called # XWAITSTATE which directly flows from the SVF RUNTEST command. Unfortunately # XRUNSTATE was ill conceived and is not used here. We also add support for the # three Lattice extensions to SVF: LCOUNT, LDELAY, and LSDR. The xsvf file # generated from this program is suitable for use with the xsvf player in # OpenOCD with my modifications to xsvf.c. # # This program is written for python 3.0, and it is not easy to change this # back to 2.x. You may find it easier to use python 3.x even if that means # building it. 
import re import sys import struct # There are both ---<Lexer>--- and ---<Parser>--- sections to this program if len( sys.argv ) < 3: print("usage %s <svf_filename> <xsvf_filename>" % sys.argv[0]) exit(1) inputFilename = sys.argv[1] outputFilename = sys.argv[2] doCOMMENTs = True # Save XCOMMENTs in the output xsvf file #doCOMMENTs = False # Save XCOMMENTs in the output xsvf file # pick your file encoding file_encoding = 'ISO-8859-1' #file_encoding = 'utf-8' xrepeat = 0 # argument to XREPEAT, gives retry count for masked compares #-----< Lexer >--------------------------------------------------------------- StateBin = (RESET,IDLE, DRSELECT,DRCAPTURE,DRSHIFT,DREXIT1,DRPAUSE,DREXIT2,DRUPDATE, IRSELECT,IRCAPTURE,IRSHIFT,IREXIT1,IRPAUSE,IREXIT2,IRUPDATE) = range(16) # Any integer index into this tuple will be equal to its corresponding StateBin value StateTxt = ("RESET","IDLE", "DRSELECT","DRCAPTURE","DRSHIFT","DREXIT1","DRPAUSE","DREXIT2","DRUPDATE", "IRSELECT","IRCAPTURE","IRSHIFT","IREXIT1","IRPAUSE","IREXIT2","IRUPDATE") (XCOMPLETE,XTDOMASK,XSIR,XSDR,XRUNTEST,hole0,hole1,XREPEAT,XSDRSIZE,XSDRTDO, XSETSDRMASKS,XSDRINC,XSDRB,XSDRC,XSDRE,XSDRTDOB,XSDRTDOC, XSDRTDOE,XSTATE,XENDIR,XENDDR,XSIR2,XCOMMENT,XWAIT,XWAITSTATE, LCOUNT,LDELAY,LSDR,XTRST) = range(29) #Note: LCOUNT, LDELAY, and LSDR are Lattice extensions to SVF and provide a way to loop back # and check a completion status, essentially waiting on a part until it signals that it is done. # For example below: loop 25 times, each time through the loop do a LDELAY (same as a true RUNTEST) # and exit loop when LSDR compares match. """ LCOUNT 25; ! Step to DRPAUSE give 5 clocks and wait for 1.00e+000 SEC. LDELAY DRPAUSE 5 TCK 1.00E-003 SEC; ! Test for the completed status. Match means pass. ! Loop back to LDELAY line if not match and loop count less than 25. LSDR 1 TDI (0) TDO (1); """ #XTRST is an opcode Xilinx seemed to have missed and it comes from the SVF TRST statement. 
LineNumber = 1 def s_ident(scanner, token): return ("ident", token.upper(), LineNumber) def s_hex(scanner, token): global LineNumber LineNumber = LineNumber + token.count('\n') token = ''.join(token.split()) return ("hex", token[1:-1], LineNumber) def s_int(scanner, token): return ("int", int(token), LineNumber) def s_float(scanner, token): return ("float", float(token), LineNumber) #def s_comment(scanner, token): return ("comment", token, LineNumber) def s_semicolon(scanner, token): return ("semi", token, LineNumber) def s_nl(scanner,token): global LineNumber LineNumber = LineNumber + 1 #print( 'LineNumber=', LineNumber, file=sys.stderr ) return None #2.00E-002 scanner = re.Scanner([ (r"[a-zA-Z]\w*", s_ident), # (r"[-+]?[0-9]+[.]?[0-9]*([eE][-+]?[0-9]+)?", s_float), (r"[-+]?[0-9]+(([.][0-9eE+-]*)|([eE]+[-+]?[0-9]+))", s_float), (r"\d+", s_int), (r"\(([0-9a-fA-F]|\s)*\)", s_hex), (r"(!|//).*$", None), (r";", s_semicolon), (r"\n",s_nl), (r"\s*", None), ], re.MULTILINE ) # open the file using the given encoding file = open( sys.argv[1], encoding=file_encoding ) # read all svf file input into string "input" input = file.read() file.close() # Lexer: # create a list of tuples containing (tokenType, tokenValue, LineNumber) tokens = scanner.scan( input )[0] input = None # allow gc to reclaim memory holding file #for tokenType, tokenValue, ln in tokens: print( "line %d: %s" % (ln, tokenType), tokenValue ) #-----<parser>----------------------------------------------------------------- tokVal = tokType = tokLn = None tup = iter( tokens ) def nextTok(): """ Function to read the next token from tup into tokType, tokVal, tokLn (linenumber) which are globals. 
""" global tokType, tokVal, tokLn, tup tokType, tokVal, tokLn = tup.__next__() class ParseError(Exception): """A class to hold a parsing error message""" def __init__(self, linenumber, token, message): self.linenumber = linenumber self.token = token self.message = message def __str__(self): global inputFilename return "Error in file \'%s\' at line %d near token %s\n %s" % ( inputFilename, self.linenumber, repr(self.token), self.message) class MASKSET(object): """ Class MASKSET holds a set of bit vectors, all of which are related, will all have the same length, and are associated with one of the seven shiftOps: HIR, HDR, TIR, TDR, SIR, SDR, LSDR. One of these holds a mask, smask, tdi, tdo, and a size. """ def __init__(self, name): self.empty() self.name = name def empty(self): self.mask = bytearray() self.smask = bytearray() self.tdi = bytearray() self.tdo = bytearray() self.size = 0 def syncLengths( self, sawTDI, sawTDO, sawMASK, sawSMASK, newSize ): """ Set all the lengths equal in the event some of the masks were not seen as part of the last change set. """ if self.size == newSize: return if newSize == 0: self.empty() return # If an SIR was given without a MASK(), then use a mask of all zeros. # this is not consistent with the SVF spec, but it makes sense because # it would be odd to be testing an instruction register read out of a # tap without giving a mask for it. Also, lattice seems to agree and is # generating SVF files that comply with this philosophy. 
if self.name == 'SIR' and not sawMASK: self.mask = bytearray( newSize ) if newSize != len(self.mask): self.mask = bytearray( newSize ) if self.name == 'SDR': # leave mask for HIR,HDR,TIR,TDR,SIR zeros for i in range( newSize ): self.mask[i] = 1 if newSize != len(self.tdo): self.tdo = bytearray( newSize ) if newSize != len(self.tdi): self.tdi = bytearray( newSize ) if newSize != len(self.smask): self.smask = bytearray( newSize ) self.size = newSize #-----</MASKSET>----- def makeBitArray( hexString, bitCount ): """ Converts a packed sequence of hex ascii characters into a bytearray where each element in the array holds exactly one bit. Only "bitCount" bits are scanned and these must be the least significant bits in the hex number. That is, it is legal to have some unused bits in the must significant hex nibble of the input "hexString". The string is scanned starting from the backend, then just before returning we reverse the array. This way the append() method can be used, which I assume is faster than an insert. """ global tokLn a = bytearray() length = bitCount hexString = list(hexString) hexString.reverse() #print(hexString) for c in hexString: if length <= 0: break; c = int(c, 16) for mask in [1,2,4,8]: if length <= 0: break; length = length - 1 a.append( (c & mask) != 0 ) if length > 0: raise ParseError( tokLn, hexString, "Insufficient hex characters for given length of %d" % bitCount ) a.reverse() #print(a) return a def makeXSVFbytes( bitarray ): """ Make a bytearray which is contains the XSVF bits which will be written directly to disk. The number of bytes needed is calculated from the size of the argument bitarray. 
""" bitCount = len(bitarray) byteCount = (bitCount+7)//8 ba = bytearray( byteCount ) firstBit = (bitCount % 8) - 1 if firstBit == -1: firstBit = 7 bitNdx = 0 for byteNdx in range(byteCount): mask = 1<<firstBit byte = 0 while mask: if bitarray[bitNdx]: byte |= mask; mask = mask >> 1 bitNdx = bitNdx + 1 ba[byteNdx] = byte firstBit = 7 return ba def writeComment( outputFile, shiftOp_linenum, shiftOp ): """ Write an XCOMMENT record to outputFile """ comment = "%s @%d\0" % (shiftOp, shiftOp_linenum) # \0 is terminating nul ba = bytearray(1) ba[0] = XCOMMENT ba += comment.encode() outputFile.write( ba ) def combineBitVectors( trailer, meat, header ): """ Combine the 3 bit vectors comprizing a transmission. Since the least significant bits are sent first, the header is put onto the list last so they are sent first from that least significant position. """ ret = bytearray() ret.extend( trailer ) ret.extend( meat ) ret.extend( header ) return ret def writeRUNTEST( outputFile, run_state, end_state, run_count, min_time, tokenTxt ): """ Write the output for the SVF RUNTEST command. run_count - the number of clocks min_time - the number of seconds tokenTxt - either RUNTEST or LDELAY """ # convert from secs to usecs min_time = int( min_time * 1000000) # the SVF RUNTEST command does NOT map to the XSVF XRUNTEST command. Check the SVF spec, then # read the XSVF command. They are not the same. Use an XSVF XWAITSTATE to # implement the required behavior of the SVF RUNTEST command. 
if doCOMMENTs: writeComment( output, tokLn, tokenTxt ) if tokenTxt == 'RUNTEST': obuf = bytearray(11) obuf[0] = XWAITSTATE obuf[1] = run_state obuf[2] = end_state struct.pack_into(">i", obuf, 3, run_count ) # big endian 4 byte int to obuf struct.pack_into(">i", obuf, 7, min_time ) # big endian 4 byte int to obuf outputFile.write( obuf ) else: # == 'LDELAY' obuf = bytearray(10) obuf[0] = LDELAY obuf[1] = run_state # LDELAY has no end_state struct.pack_into(">i", obuf, 2, run_count ) # big endian 4 byte int to obuf struct.pack_into(">i", obuf, 6, min_time ) # big endian 4 byte int to obuf outputFile.write( obuf ) output = open( outputFilename, mode='wb' ) hir = MASKSET('HIR') hdr = MASKSET('HDR') tir = MASKSET('TIR') tdr = MASKSET('TDR') sir = MASKSET('SIR') sdr = MASKSET('SDR') expecting_eof = True # one of the commands that take the shiftParts after the length, the parse # template for all of these commands is identical shiftOps = ('SDR', 'SIR', 'LSDR', 'HDR', 'HIR', 'TDR', 'TIR') # the order must correspond to shiftOps, this holds the MASKSETS. 'LSDR' shares sdr with 'SDR' shiftSets = (sdr, sir, sdr, hdr, hir, tdr, tir ) # what to expect as parameters to a shiftOp, i.e. after a SDR length or SIR length shiftParts = ('TDI', 'TDO', 'MASK', 'SMASK') # the set of legal states which can trail the RUNTEST command run_state_allowed = ('IRPAUSE', 'DRPAUSE', 'RESET', 'IDLE') enddr_state_allowed = ('DRPAUSE', 'IDLE') endir_state_allowed = ('IRPAUSE', 'IDLE') trst_mode_allowed = ('ON', 'OFF', 'Z', 'ABSENT') enddr_state = IDLE endir_state = IDLE frequency = 1.00e+006 # HZ; # change detection for xsdrsize and xtdomask xsdrsize = -1 # the last one sent, send only on change xtdomask = bytearray() # the last one sent, send only on change # we use a number of single byte writes for the XSVF command below cmdbuf = bytearray(1) # Save the XREPEAT setting into the file as first thing. 
obuf = bytearray(2) obuf[0] = XREPEAT obuf[1] = xrepeat output.write( obuf ) try: while 1: expecting_eof = True nextTok() expecting_eof = False # print( tokType, tokVal, tokLn ) if tokVal in shiftOps: shiftOp_linenum = tokLn shiftOp = tokVal set = shiftSets[shiftOps.index(shiftOp)] # set flags false, if we see one later, set that one true later sawTDI = sawTDO = sawMASK = sawSMASK = False nextTok() if tokType != 'int': raise ParseError( tokLn, tokVal, "Expecting 'int' giving %s length, got '%s'" % (shiftOp, tokType) ) length = tokVal nextTok() while tokVal != ';': if tokVal not in shiftParts: raise ParseError( tokLn, tokVal, "Expecting TDI, TDO, MASK, SMASK, or ';'") shiftPart = tokVal nextTok() if tokType != 'hex': raise ParseError( tokLn, tokVal, "Expecting hex bits" ) bits = makeBitArray( tokVal, length ) if shiftPart == 'TDI': sawTDI = True set.tdi = bits elif shiftPart == 'TDO': sawTDO = True set.tdo = bits elif shiftPart == 'MASK': sawMASK = True set.mask = bits elif shiftPart == 'SMASK': sawSMASK = True set.smask = bits nextTok() set.syncLengths( sawTDI, sawTDO, sawMASK, sawSMASK, length ) # process all the gathered parameters and generate outputs here if shiftOp == 'SIR': if doCOMMENTs: writeComment( output, shiftOp_linenum, 'SIR' ) tdi = combineBitVectors( tir.tdi, sir.tdi, hir.tdi ) if len(tdi) > 255: obuf = bytearray(3) obuf[0] = XSIR2 struct.pack_into( ">h", obuf, 1, len(tdi) ) else: obuf = bytearray(2) obuf[0] = XSIR obuf[1] = len(tdi) output.write( obuf ) obuf = makeXSVFbytes( tdi ) output.write( obuf ) elif shiftOp == 'SDR': if doCOMMENTs: writeComment( output, shiftOp_linenum, shiftOp ) if not sawTDO: # pass a zero filled bit vector for the sdr.mask mask = combineBitVectors( tdr.mask, bytearray(sdr.size), hdr.mask ) tdi = combineBitVectors( tdr.tdi, sdr.tdi, hdr.tdi ) if xsdrsize != len(tdi): xsdrsize = len(tdi) cmdbuf[0] = XSDRSIZE output.write( cmdbuf ) obuf = bytearray(4) struct.pack_into( ">i", obuf, 0, xsdrsize ) # big endian 4 byte int to obuf 
output.write( obuf ) if xtdomask != mask: xtdomask = mask cmdbuf[0] = XTDOMASK output.write( cmdbuf ) obuf = makeXSVFbytes( mask ) output.write( obuf ) cmdbuf[0] = XSDR output.write( cmdbuf ) obuf = makeXSVFbytes( tdi ) output.write( obuf ) else: mask = combineBitVectors( tdr.mask, sdr.mask, hdr.mask ) tdi = combineBitVectors( tdr.tdi, sdr.tdi, hdr.tdi ) tdo = combineBitVectors( tdr.tdo, sdr.tdo, hdr.tdo ) if xsdrsize != len(tdi): xsdrsize = len(tdi) cmdbuf[0] = XSDRSIZE output.write( cmdbuf ) obuf = bytearray(4) struct.pack_into(">i", obuf, 0, xsdrsize ) # big endian 4 byte int to obuf output.write( obuf ) if xtdomask != mask: xtdomask = mask cmdbuf[0] = XTDOMASK output.write( cmdbuf ) obuf = makeXSVFbytes( mask ) output.write( obuf ) cmdbuf[0] = XSDRTDO output.write( cmdbuf ) obuf = makeXSVFbytes( tdi ) output.write( obuf ) obuf = makeXSVFbytes( tdo ) output.write( obuf ) #print( "len(tdo)=", len(tdo), "len(tdr.tdo)=", len(tdr.tdo), "len(sdr.tdo)=", len(sdr.tdo), "len(hdr.tdo)=", len(hdr.tdo) ) elif shiftOp == 'LSDR': if doCOMMENTs: writeComment( output, shiftOp_linenum, shiftOp ) mask = combineBitVectors( tdr.mask, sdr.mask, hdr.mask ) tdi = combineBitVectors( tdr.tdi, sdr.tdi, hdr.tdi ) tdo = combineBitVectors( tdr.tdo, sdr.tdo, hdr.tdo ) if xsdrsize != len(tdi): xsdrsize = len(tdi) cmdbuf[0] = XSDRSIZE output.write( cmdbuf ) obuf = bytearray(4) struct.pack_into(">i", obuf, 0, xsdrsize ) # big endian 4 byte int to obuf output.write( obuf ) if xtdomask != mask: xtdomask = mask cmdbuf[0] = XTDOMASK output.write( cmdbuf ) obuf = makeXSVFbytes( mask ) output.write( obuf ) cmdbuf[0] = LSDR output.write( cmdbuf ) obuf = makeXSVFbytes( tdi ) output.write( obuf ) obuf = makeXSVFbytes( tdo ) output.write( obuf ) #print( "len(tdo)=", len(tdo), "len(tdr.tdo)=", len(tdr.tdo), "len(sdr.tdo)=", len(sdr.tdo), "len(hdr.tdo)=", len(hdr.tdo) ) elif tokVal == 'RUNTEST' or tokVal == 'LDELAY': # e.g. 
from lattice tools: # "RUNTEST IDLE 5 TCK 1.00E-003 SEC;" saveTok = tokVal nextTok() min_time = 0 run_count = 0 max_time = 600 # ten minutes if tokVal in run_state_allowed: run_state = StateTxt.index(tokVal) end_state = run_state # bottom of page 17 of SVF spec nextTok() if tokType != 'int' and tokType != 'float': raise ParseError( tokLn, tokVal, "Expecting 'int' or 'float' after RUNTEST [run_state]") timeval = tokVal; nextTok() if tokVal != 'TCK' and tokVal != 'SEC' and tokVal != 'SCK': raise ParseError( tokLn, tokVal, "Expecting 'TCK' or 'SEC' or 'SCK' after RUNTEST [run_state] (run_count|min_time)") if tokVal == 'TCK' or tokVal == 'SCK': run_count = int( timeval ) else: min_time = timeval nextTok() if tokType == 'int' or tokType == 'float': min_time = tokVal nextTok() if tokVal != 'SEC': raise ParseError( tokLn, tokVal, "Expecting 'SEC' after RUNTEST [run_state] run_count min_time") nextTok() if tokVal == 'MAXIMUM': nextTok() if tokType != 'int' and tokType != 'float': raise ParseError( tokLn, tokVal, "Expecting 'max_time' after RUNTEST [run_state] min_time SEC MAXIMUM") max_time = tokVal nextTok() if tokVal != 'SEC': raise ParseError( tokLn, tokVal, "Expecting 'max_time' after RUNTEST [run_state] min_time SEC MAXIMUM max_time") nextTok() if tokVal == 'ENDSTATE': nextTok() if tokVal not in run_state_allowed: raise ParseError( tokLn, tokVal, "Expecting 'run_state' after RUNTEST .... 
ENDSTATE") end_state = StateTxt.index(tokVal) nextTok() if tokVal != ';': raise ParseError( tokLn, tokVal, "Expecting ';' after RUNTEST ....") # print( "run_count=", run_count, "min_time=", min_time, # "max_time=", max_time, "run_state=", State[run_state], "end_state=", State[end_state] ) writeRUNTEST( output, run_state, end_state, run_count, min_time, saveTok ) elif tokVal == 'LCOUNT': nextTok() if tokType != 'int': raise ParseError( tokLn, tokVal, "Expecting integer 'count' after LCOUNT") loopCount = tokVal nextTok() if tokVal != ';': raise ParseError( tokLn, tokVal, "Expecting ';' after LCOUNT count") if doCOMMENTs: writeComment( output, tokLn, 'LCOUNT' ) obuf = bytearray(5) obuf[0] = LCOUNT struct.pack_into(">i", obuf, 1, loopCount ) # big endian 4 byte int to obuf output.write( obuf ) elif tokVal == 'ENDDR': nextTok() if tokVal not in enddr_state_allowed: raise ParseError( tokLn, tokVal, "Expecting 'stable_state' after ENDDR. (one of: DRPAUSE, IDLE)") enddr_state = StateTxt.index(tokVal) nextTok() if tokVal != ';': raise ParseError( tokLn, tokVal, "Expecting ';' after ENDDR stable_state") if doCOMMENTs: writeComment( output, tokLn, 'ENDDR' ) obuf = bytearray(2) obuf[0] = XENDDR # Page 10 of the March 1999 SVF spec shows that RESET is also allowed here. # Yet the XSVF spec has no provision for that, and uses a non-standard, i.e. # boolean argument to XENDDR which only handles two of the 3 intended states. obuf[1] = 1 if enddr_state == DRPAUSE else 0 output.write( obuf ) elif tokVal == 'ENDIR': nextTok() if tokVal not in endir_state_allowed: raise ParseError( tokLn, tokVal, "Expecting 'stable_state' after ENDIR. (one of: IRPAUSE, IDLE)") endir_state = StateTxt.index(tokVal) nextTok() if tokVal != ';': raise ParseError( tokLn, tokVal, "Expecting ';' after ENDIR stable_state") if doCOMMENTs: writeComment( output, tokLn, 'ENDIR' ) obuf = bytearray(2) obuf[0] = XENDIR # Page 10 of the March 1999 SVF spec shows that RESET is also allowed here. 
# Yet the XSVF spec has no provision for that, and uses a non-standard, i.e. # boolean argument to XENDDR which only handles two of the 3 intended states. obuf[1] = 1 if endir_state == IRPAUSE else 0 output.write( obuf ) elif tokVal == 'STATE': nextTok() ln = tokLn while tokVal != ';': if tokVal not in StateTxt: raise ParseError( tokLn, tokVal, "Expecting 'stable_state' after STATE") stable_state = StateTxt.index( tokVal ) if doCOMMENTs and ln != -1: writeComment( output, ln, 'STATE' ) ln = -1 # save comment only once obuf = bytearray(2) obuf[0] = XSTATE obuf[1] = stable_state output.write( obuf ) nextTok() elif tokVal == 'FREQUENCY': nextTok() if tokVal != ';': if tokType != 'int' and tokType != 'float': raise ParseError( tokLn, tokVal, "Expecting 'cycles HZ' after FREQUENCY") frequency = tokVal nextTok() if tokVal != 'HZ': raise ParseError( tokLn, tokVal, "Expecting 'HZ' after FREQUENCY cycles") nextTok() if tokVal != ';': raise ParseError( tokLn, tokVal, "Expecting ';' after FREQUENCY cycles HZ") elif tokVal == 'TRST': nextTok() if tokVal not in trst_mode_allowed: raise ParseError( tokLn, tokVal, "Expecting 'ON|OFF|Z|ABSENT' after TRST") trst_mode = tokVal nextTok() if tokVal != ';': raise ParseError( tokLn, tokVal, "Expecting ';' after TRST trst_mode") if doCOMMENTs: writeComment( output, tokLn, 'TRST %s' % trst_mode ) obuf = bytearray( 2 ) obuf[0] = XTRST obuf[1] = trst_mode_allowed.index( trst_mode ) # use the index as the binary argument to XTRST opcode output.write( obuf ) else: raise ParseError( tokLn, tokVal, "Unknown token '%s'" % tokVal) except StopIteration: if not expecting_eof: print( "Unexpected End of File at line ", tokLn ) except ParseError as pe: print( "\n", pe ) finally: # print( "closing file" ) cmdbuf[0] = XCOMPLETE output.write( cmdbuf ) output.close()
gpl-2.0
jonludlam/xen
tools/xm-test/tests/cpupool/01_cpupool_basic_pos.py
36
2132
#!/usr/bin/python import sys import re import time from XmTestLib import * # # Check output of xm info. It must include field 'free_cpus' # The value must be between 0 - nr_cpus # free_cpus = getInfo("free_cpus") if free_cpus == "": FAIL("Missing 'free_cpus' entry in xm info output") if int(free_cpus) not in range(int(getInfo("nr_cpus")) + 1): FAIL("Wrong value of 'free_cpus' (%s)" % int(free_cpus)) # # Check output of xm list -l. It must contain the key 'pool_name' # If XM_USES_API is set, output must also contain 'cpu_pool'. # status, output = traceCommand("xm list -l Domain-0") if status != 0 or "Traceback" in output: raise XmError("xm failed", trace=output, status=status) if not re.search("pool_name Pool-0", output): FAIL("Missing or wrong attribute 'pool_name' in output of 'xm list -l'") if os.getenv("XM_USES_API"): if not re.search("cpu_pool (.+)", output): FAIL("Missing or wrong attribute 'cpu_pool' in output of 'xm list -l'") # # Test pool selection option of xm list. # status, output = traceCommand("xm list --pool=Pool-0") if status != 0 or "Traceback" in output: raise XmError("xm failed", trace=output, status=status) if not re.search("Domain-0 +0 +", output): FAIL("Missing 'Domain-0' in Pool-0") status, output = traceCommand("xm list --pool=Dummy-Pool") if status != 0 or "Traceback" in output: raise XmError("xm failed", trace=output, status=status) if len(output.splitlines()) != 1: FAIL("Wrong pool selection; output must be empty") # # Create a Domain without pool specification. 
# Default pool is Pool-0 # name = "TestDomPool-1" domain = XmTestDomain(name=name) try: domain.start(noConsole=True) except DomainError, ex: FAIL(str(e)) if not isDomainRunning(name): FAIL("Couldn't start domain without pool specification") status, output = traceCommand("xm list -l %s" % name) if status != 0 or "Traceback" in output: raise XmError("xm failed", trace=output, status=status) if not re.search("pool_name Pool-0", output): FAIL("Missing or wrong attribute 'pool_name' in output of 'xm list -l %s'" % name) destroyAllDomUs()
gpl-2.0
hdd/bigcouch
couchjs/scons/scons-local-2.0.1/SCons/Tool/RCS.py
61
2343
"""SCons.Tool.RCS.py Tool-specific initialization for RCS. There normally shouldn't be any need to import this module directly. It will usually be imported through the generic SCons.Tool.Tool() selection method. """ # Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
__revision__ = "src/engine/SCons/Tool/RCS.py 5134 2010/08/16 23:02:40 bdeegan" import SCons.Action import SCons.Builder import SCons.Util def generate(env): """Add a Builder factory function and construction variables for RCS to an Environment.""" def RCSFactory(env=env): """ """ import SCons.Warnings as W W.warn(W.DeprecatedSourceCodeWarning, """The RCS() factory is deprecated and there is no replacement.""") act = SCons.Action.Action('$RCS_COCOM', '$RCS_COCOMSTR') return SCons.Builder.Builder(action = act, env = env) #setattr(env, 'RCS', RCSFactory) env.RCS = RCSFactory env['RCS'] = 'rcs' env['RCS_CO'] = 'co' env['RCS_COFLAGS'] = SCons.Util.CLVar('') env['RCS_COCOM'] = '$RCS_CO $RCS_COFLAGS $TARGET' def exists(env): return env.Detect('rcs') # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:
apache-2.0
hetajen/vnpy161
vn.tutorial/tick2trade/vn.trader_t2t/uiBasicWidget.py
9
44280
# encoding: UTF-8 import json import csv import os from collections import OrderedDict from PyQt4 import QtGui, QtCore from eventEngine import * from vtFunction import * from vtGateway import * #---------------------------------------------------------------------- def loadFont(): """载入字体设置""" fileName = 'VT_setting.json' path = os.path.abspath(os.path.dirname(__file__)) fileName = os.path.join(path, fileName) try: f = file(fileName) setting = json.load(f) family = setting['fontFamily'] size = setting['fontSize'] font = QtGui.QFont(family, size) except: font = QtGui.QFont(u'微软雅黑', 12) return font BASIC_FONT = loadFont() ######################################################################## class BasicCell(QtGui.QTableWidgetItem): """基础的单元格""" #---------------------------------------------------------------------- def __init__(self, text=None, mainEngine=None): """Constructor""" super(BasicCell, self).__init__() self.data = None if text: self.setContent(text) #---------------------------------------------------------------------- def setContent(self, text): """设置内容""" if text == '0' or text == '0.0': self.setText('') else: self.setText(text) ######################################################################## class NumCell(QtGui.QTableWidgetItem): """用来显示数字的单元格""" #---------------------------------------------------------------------- def __init__(self, text=None, mainEngine=None): """Constructor""" super(NumCell, self).__init__() self.data = None if text: self.setContent(text) #---------------------------------------------------------------------- def setContent(self, text): """设置内容""" # 考虑到NumCell主要用来显示OrderID和TradeID之类的整数字段, # 这里的数据转化方式使用int类型。但是由于部分交易接口的委托 # 号和成交号可能不是纯数字的形式,因此补充了一个try...except try: num = int(text) self.setData(QtCore.Qt.DisplayRole, num) except ValueError: self.setText(text) ######################################################################## class DirectionCell(QtGui.QTableWidgetItem): """用来显示买卖方向的单元格""" 
#---------------------------------------------------------------------- def __init__(self, text=None, mainEngine=None): """Constructor""" super(DirectionCell, self).__init__() self.data = None if text: self.setContent(text) #---------------------------------------------------------------------- def setContent(self, text): """设置内容""" if text == DIRECTION_LONG or text == DIRECTION_NET: self.setForeground(QtGui.QColor('red')) elif text == DIRECTION_SHORT: self.setForeground(QtGui.QColor('green')) self.setText(text) ######################################################################## class NameCell(QtGui.QTableWidgetItem): """用来显示合约中文的单元格""" #---------------------------------------------------------------------- def __init__(self, text=None, mainEngine=None): """Constructor""" super(NameCell, self).__init__() self.mainEngine = mainEngine self.data = None if text: self.setContent(text) #---------------------------------------------------------------------- def setContent(self, text): """设置内容""" if self.mainEngine: # 首先尝试正常获取合约对象 contract = self.mainEngine.getContract(text) # 如果能读取合约信息 if contract: self.setText(contract.name) ######################################################################## class BidCell(QtGui.QTableWidgetItem): """买价单元格""" #---------------------------------------------------------------------- def __init__(self, text=None, mainEngine=None): """Constructor""" super(BidCell, self).__init__() self.data = None self.setForeground(QtGui.QColor('black')) self.setBackground(QtGui.QColor(255,174,201)) if text: self.setContent(text) #---------------------------------------------------------------------- def setContent(self, text): """设置内容""" self.setText(text) ######################################################################## class AskCell(QtGui.QTableWidgetItem): """买价单元格""" #---------------------------------------------------------------------- def __init__(self, text=None, mainEngine=None): """Constructor""" super(AskCell, self).__init__() 
self.data = None self.setForeground(QtGui.QColor('black')) self.setBackground(QtGui.QColor(160,255,160)) if text: self.setContent(text) #---------------------------------------------------------------------- def setContent(self, text): """设置内容""" self.setText(text) ######################################################################## class BasicMonitor(QtGui.QTableWidget): """ 基础监控 headerDict中的值对应的字典格式如下 {'chinese': u'中文名', 'cellType': BasicCell} """ signal = QtCore.pyqtSignal(type(Event())) #---------------------------------------------------------------------- def __init__(self, mainEngine=None, eventEngine=None, parent=None): """Constructor""" super(BasicMonitor, self).__init__(parent) self.mainEngine = mainEngine self.eventEngine = eventEngine # 保存表头标签用 self.headerDict = OrderedDict() # 有序字典,key是英文名,value是对应的配置字典 self.headerList = [] # 对应self.headerDict.keys() # 保存相关数据用 self.dataDict = {} # 字典,key是字段对应的数据,value是保存相关单元格的字典 self.dataKey = '' # 字典键对应的数据字段 # 监控的事件类型 self.eventType = '' # 字体 self.font = None # 保存数据对象到单元格 self.saveData = False # 默认不允许根据表头进行排序,需要的组件可以开启 self.sorting = False # 初始化右键菜单 self.initMenu() #---------------------------------------------------------------------- def setHeaderDict(self, headerDict): """设置表头有序字典""" self.headerDict = headerDict self.headerList = headerDict.keys() #---------------------------------------------------------------------- def setDataKey(self, dataKey): """设置数据字典的键""" self.dataKey = dataKey #---------------------------------------------------------------------- def setEventType(self, eventType): """设置监控的事件类型""" self.eventType = eventType #---------------------------------------------------------------------- def setFont(self, font): """设置字体""" self.font = font #---------------------------------------------------------------------- def setSaveData(self, saveData): """设置是否要保存数据到单元格""" self.saveData = saveData #---------------------------------------------------------------------- def initTable(self): """初始化表格""" # 
设置表格的列数 col = len(self.headerDict) self.setColumnCount(col) # 设置列表头 labels = [d['chinese'] for d in self.headerDict.values()] self.setHorizontalHeaderLabels(labels) # 关闭左边的垂直表头 self.verticalHeader().setVisible(False) # 设为不可编辑 self.setEditTriggers(self.NoEditTriggers) # 设为行交替颜色 self.setAlternatingRowColors(True) # 设置允许排序 self.setSortingEnabled(self.sorting) #---------------------------------------------------------------------- def registerEvent(self): """注册GUI更新相关的事件监听""" self.signal.connect(self.updateEvent) self.eventEngine.register(self.eventType, self.signal.emit) #---------------------------------------------------------------------- def updateEvent(self, event): """收到事件更新""" data = event.dict_['data'] self.updateData(data) #---------------------------------------------------------------------- def updateData(self, data): """将数据更新到表格中""" # 如果允许了排序功能,则插入数据前必须关闭,否则插入新的数据会变乱 if self.sorting: self.setSortingEnabled(False) # 如果设置了dataKey,则采用存量更新模式 if self.dataKey: key = data.__getattribute__(self.dataKey) # 如果键在数据字典中不存在,则先插入新的一行,并创建对应单元格 if key not in self.dataDict: self.insertRow(0) d = {} for n, header in enumerate(self.headerList): content = safeUnicode(data.__getattribute__(header)) cellType = self.headerDict[header]['cellType'] cell = cellType(content, self.mainEngine) if self.font: cell.setFont(self.font) # 如果设置了特殊字体,则进行单元格设置 if self.saveData: # 如果设置了保存数据对象,则进行对象保存 cell.data = data self.setItem(0, n, cell) d[header] = cell self.dataDict[key] = d # 否则如果已经存在,则直接更新相关单元格 else: d = self.dataDict[key] for header in self.headerList: content = safeUnicode(data.__getattribute__(header)) cell = d[header] cell.setContent(content) if self.saveData: # 如果设置了保存数据对象,则进行对象保存 cell.data = data # 否则采用增量更新模式 else: self.insertRow(0) for n, header in enumerate(self.headerList): content = safeUnicode(data.__getattribute__(header)) cellType = self.headerDict[header]['cellType'] cell = cellType(content, self.mainEngine) if self.font: cell.setFont(self.font) if self.saveData: cell.data 
= data self.setItem(0, n, cell) # 调整列宽 self.resizeColumns() # 重新打开排序 if self.sorting: self.setSortingEnabled(True) #---------------------------------------------------------------------- def resizeColumns(self): """调整各列的大小""" self.horizontalHeader().resizeSections(QtGui.QHeaderView.ResizeToContents) #---------------------------------------------------------------------- def setSorting(self, sorting): """设置是否允许根据表头排序""" self.sorting = sorting #---------------------------------------------------------------------- def saveToCsv(self): """保存表格内容到CSV文件""" # 先隐藏右键菜单 self.menu.close() # 获取想要保存的文件名 path = QtGui.QFileDialog.getSaveFileName(self, '保存数据', '', 'CSV(*.csv)') try: if not path.isEmpty(): with open(unicode(path), 'wb') as f: writer = csv.writer(f) # 保存标签 headers = [header.encode('gbk') for header in self.headerList] writer.writerow(headers) # 保存每行内容 for row in range(self.rowCount()): rowdata = [] for column in range(self.columnCount()): item = self.item(row, column) if item is not None: rowdata.append( unicode(item.text()).encode('gbk')) else: rowdata.append('') writer.writerow(rowdata) except IOError: pass #---------------------------------------------------------------------- def initMenu(self): """初始化右键菜单""" self.menu = QtGui.QMenu(self) saveAction = QtGui.QAction(u'保存内容', self) saveAction.triggered.connect(self.saveToCsv) self.menu.addAction(saveAction) #---------------------------------------------------------------------- def contextMenuEvent(self, event): """右键点击事件""" self.menu.popup(QtGui.QCursor.pos()) ######################################################################## class MarketMonitor(BasicMonitor): """市场监控组件""" #---------------------------------------------------------------------- def __init__(self, mainEngine, eventEngine, parent=None): """Constructor""" super(MarketMonitor, self).__init__(mainEngine, eventEngine, parent) # 设置表头有序字典 d = OrderedDict() d['symbol'] = {'chinese':u'合约代码', 'cellType':BasicCell} d['vtSymbol'] = {'chinese':u'名称', 
'cellType':NameCell} d['lastPrice'] = {'chinese':u'最新价', 'cellType':BasicCell} d['preClosePrice'] = {'chinese':u'昨收盘价', 'cellType':BasicCell} d['volume'] = {'chinese':u'成交量', 'cellType':BasicCell} d['openInterest'] = {'chinese':u'持仓量', 'cellType':BasicCell} d['openPrice'] = {'chinese':u'开盘价', 'cellType':BasicCell} d['highPrice'] = {'chinese':u'最高价', 'cellType':BasicCell} d['lowPrice'] = {'chinese':u'最低价', 'cellType':BasicCell} d['bidPrice1'] = {'chinese':u'买一价', 'cellType':BidCell} d['bidVolume1'] = {'chinese':u'买一量', 'cellType':BidCell} d['askPrice1'] = {'chinese':u'卖一价', 'cellType':AskCell} d['askVolume1'] = {'chinese':u'卖一量', 'cellType':AskCell} d['time'] = {'chinese':u'时间', 'cellType':BasicCell} d['gatewayName'] = {'chinese':u'接口', 'cellType':BasicCell} self.setHeaderDict(d) # 设置数据键 self.setDataKey('vtSymbol') # 设置监控事件类型 self.setEventType(EVENT_TICK) # 设置字体 self.setFont(BASIC_FONT) # 设置允许排序 self.setSorting(True) # 初始化表格 self.initTable() # 注册事件监听 self.registerEvent() ######################################################################## class LogMonitor(BasicMonitor): """日志监控""" #---------------------------------------------------------------------- def __init__(self, mainEngine, eventEngine, parent=None): """Constructor""" super(LogMonitor, self).__init__(mainEngine, eventEngine, parent) d = OrderedDict() d['logTime'] = {'chinese':u'时间', 'cellType':BasicCell} d['logContent'] = {'chinese':u'内容', 'cellType':BasicCell} d['gatewayName'] = {'chinese':u'接口', 'cellType':BasicCell} self.setHeaderDict(d) self.setEventType(EVENT_LOG) self.setFont(BASIC_FONT) self.initTable() self.registerEvent() ######################################################################## class ErrorMonitor(BasicMonitor): """错误监控""" #---------------------------------------------------------------------- def __init__(self, mainEngine, eventEngine, parent=None): """Constructor""" super(ErrorMonitor, self).__init__(mainEngine, eventEngine, parent) d = OrderedDict() d['errorTime'] = 
{'chinese':u'错误时间', 'cellType':BasicCell} d['errorID'] = {'chinese':u'错误代码', 'cellType':BasicCell} d['errorMsg'] = {'chinese':u'错误信息', 'cellType':BasicCell} d['additionalInfo'] = {'chinese':u'补充信息', 'cellType':BasicCell} d['gatewayName'] = {'chinese':u'接口', 'cellType':BasicCell} self.setHeaderDict(d) self.setEventType(EVENT_ERROR) self.setFont(BASIC_FONT) self.initTable() self.registerEvent() ######################################################################## class TradeMonitor(BasicMonitor): """成交监控""" #---------------------------------------------------------------------- def __init__(self, mainEngine, eventEngine, parent=None): """Constructor""" super(TradeMonitor, self).__init__(mainEngine, eventEngine, parent) d = OrderedDict() d['tradeID'] = {'chinese':u'成交编号', 'cellType':NumCell} d['orderID'] = {'chinese':u'委托编号', 'cellType':NumCell} d['symbol'] = {'chinese':u'合约代码', 'cellType':BasicCell} d['vtSymbol'] = {'chinese':u'名称', 'cellType':NameCell} d['direction'] = {'chinese':u'方向', 'cellType':DirectionCell} d['offset'] = {'chinese':u'开平', 'cellType':BasicCell} d['price'] = {'chinese':u'价格', 'cellType':BasicCell} d['volume'] = {'chinese':u'数量', 'cellType':BasicCell} d['tradeTime'] = {'chinese':u'成交时间', 'cellType':BasicCell} d['gatewayName'] = {'chinese':u'接口', 'cellType':BasicCell} self.setHeaderDict(d) self.setEventType(EVENT_TRADE) self.setFont(BASIC_FONT) self.setSorting(True) self.initTable() self.registerEvent() ######################################################################## class OrderMonitor(BasicMonitor): """委托监控""" #---------------------------------------------------------------------- def __init__(self, mainEngine, eventEngine, parent=None): """Constructor""" super(OrderMonitor, self).__init__(mainEngine, eventEngine, parent) self.mainEngine = mainEngine d = OrderedDict() d['orderID'] = {'chinese':u'委托编号', 'cellType':NumCell} d['symbol'] = {'chinese':u'合约代码', 'cellType':BasicCell} d['vtSymbol'] = {'chinese':u'名称', 'cellType':NameCell} 
d['direction'] = {'chinese':u'方向', 'cellType':DirectionCell} d['offset'] = {'chinese':u'开平', 'cellType':BasicCell} d['price'] = {'chinese':u'价格', 'cellType':BasicCell} d['totalVolume'] = {'chinese':u'委托数量', 'cellType':BasicCell} d['tradedVolume'] = {'chinese':u'成交数量', 'cellType':BasicCell} d['status'] = {'chinese':u'状态', 'cellType':BasicCell} d['orderTime'] = {'chinese':u'委托时间', 'cellType':BasicCell} d['cancelTime'] = {'chinese':u'撤销时间', 'cellType':BasicCell} d['frontID'] = {'chinese':u'前置编号', 'cellType':BasicCell} d['sessionID'] = {'chinese':u'会话编号', 'cellType':BasicCell} d['gatewayName'] = {'chinese':u'接口', 'cellType':BasicCell} self.setHeaderDict(d) self.setDataKey('vtOrderID') self.setEventType(EVENT_ORDER) self.setFont(BASIC_FONT) self.setSaveData(True) self.setSorting(True) self.initTable() self.registerEvent() self.connectSignal() #---------------------------------------------------------------------- def connectSignal(self): """连接信号""" # 双击单元格撤单 self.itemDoubleClicked.connect(self.cancelOrder) #---------------------------------------------------------------------- def cancelOrder(self, cell): """根据单元格的数据撤单""" order = cell.data req = VtCancelOrderReq() req.symbol = order.symbol req.exchange = order.exchange req.frontID = order.frontID req.sessionID = order.sessionID req.orderID = order.orderID self.mainEngine.cancelOrder(req, order.gatewayName) ######################################################################## class PositionMonitor(BasicMonitor): """持仓监控""" #---------------------------------------------------------------------- def __init__(self, mainEngine, eventEngine, parent=None): """Constructor""" super(PositionMonitor, self).__init__(mainEngine, eventEngine, parent) d = OrderedDict() d['symbol'] = {'chinese':u'合约代码', 'cellType':BasicCell} d['vtSymbol'] = {'chinese':u'名称', 'cellType':NameCell} d['direction'] = {'chinese':u'方向', 'cellType':DirectionCell} d['position'] = {'chinese':u'持仓量', 'cellType':BasicCell} d['ydPosition'] = {'chinese':u'昨持仓', 
'cellType':BasicCell} d['frozen'] = {'chinese':u'冻结量', 'cellType':BasicCell} d['price'] = {'chinese':u'价格', 'cellType':BasicCell} d['gatewayName'] = {'chinese':u'接口', 'cellType':BasicCell} self.setHeaderDict(d) self.setDataKey('vtPositionName') self.setEventType(EVENT_POSITION) self.setFont(BASIC_FONT) self.setSaveData(True) self.initTable() self.registerEvent() ######################################################################## class AccountMonitor(BasicMonitor): """账户监控""" #---------------------------------------------------------------------- def __init__(self, mainEngine, eventEngine, parent=None): """Constructor""" super(AccountMonitor, self).__init__(mainEngine, eventEngine, parent) d = OrderedDict() d['accountID'] = {'chinese':u'账户', 'cellType':BasicCell} d['preBalance'] = {'chinese':u'昨结', 'cellType':BasicCell} d['balance'] = {'chinese':u'净值', 'cellType':BasicCell} d['available'] = {'chinese':u'可用', 'cellType':BasicCell} d['commission'] = {'chinese':u'手续费', 'cellType':BasicCell} d['margin'] = {'chinese':u'保证金', 'cellType':BasicCell} d['closeProfit'] = {'chinese':u'平仓盈亏', 'cellType':BasicCell} d['positionProfit'] = {'chinese':u'持仓盈亏', 'cellType':BasicCell} d['gatewayName'] = {'chinese':u'接口', 'cellType':BasicCell} self.setHeaderDict(d) self.setDataKey('vtAccountID') self.setEventType(EVENT_ACCOUNT) self.setFont(BASIC_FONT) self.initTable() self.registerEvent() ######################################################################## class TradingWidget(QtGui.QFrame): """简单交易组件""" signal = QtCore.pyqtSignal(type(Event())) directionList = [DIRECTION_LONG, DIRECTION_SHORT] offsetList = [OFFSET_OPEN, OFFSET_CLOSE, OFFSET_CLOSEYESTERDAY, OFFSET_CLOSETODAY] priceTypeList = [PRICETYPE_LIMITPRICE, PRICETYPE_MARKETPRICE, PRICETYPE_FAK, PRICETYPE_FOK] exchangeList = [EXCHANGE_NONE, EXCHANGE_CFFEX, EXCHANGE_SHFE, EXCHANGE_DCE, EXCHANGE_CZCE, EXCHANGE_SSE, EXCHANGE_SZSE, EXCHANGE_SGE, EXCHANGE_HKEX, EXCHANGE_SMART, EXCHANGE_ICE, EXCHANGE_CME, EXCHANGE_NYMEX, 
EXCHANGE_GLOBEX, EXCHANGE_IDEALPRO] currencyList = [CURRENCY_NONE, CURRENCY_CNY, CURRENCY_USD] productClassList = [PRODUCT_NONE, PRODUCT_EQUITY, PRODUCT_FUTURES, PRODUCT_OPTION, PRODUCT_FOREX] gatewayList = [''] #---------------------------------------------------------------------- def __init__(self, mainEngine, eventEngine, parent=None): """Constructor""" super(TradingWidget, self).__init__(parent) self.mainEngine = mainEngine self.eventEngine = eventEngine self.symbol = '' # 添加交易接口 self.gatewayList.extend(mainEngine.gatewayDict.keys()) self.initUi() self.connectSignal() #---------------------------------------------------------------------- def initUi(self): """初始化界面""" self.setWindowTitle(u'交易') self.setMaximumWidth(400) self.setFrameShape(self.Box) # 设置边框 self.setLineWidth(1) # 左边部分 labelSymbol = QtGui.QLabel(u'代码') labelName = QtGui.QLabel(u'名称') labelDirection = QtGui.QLabel(u'方向类型') labelOffset = QtGui.QLabel(u'开平') labelPrice = QtGui.QLabel(u'价格') labelVolume = QtGui.QLabel(u'数量') labelPriceType = QtGui.QLabel(u'价格类型') labelExchange = QtGui.QLabel(u'交易所') labelCurrency = QtGui.QLabel(u'货币') labelProductClass = QtGui.QLabel(u'产品类型') labelGateway = QtGui.QLabel(u'交易接口') self.lineSymbol = QtGui.QLineEdit() self.lineName = QtGui.QLineEdit() self.comboDirection = QtGui.QComboBox() self.comboDirection.addItems(self.directionList) self.comboOffset = QtGui.QComboBox() self.comboOffset.addItems(self.offsetList) self.spinPrice = QtGui.QDoubleSpinBox() self.spinPrice.setDecimals(4) self.spinPrice.setMinimum(0) self.spinPrice.setMaximum(100000) self.spinVolume = QtGui.QSpinBox() self.spinVolume.setMinimum(0) self.spinVolume.setMaximum(1000000) self.comboPriceType = QtGui.QComboBox() self.comboPriceType.addItems(self.priceTypeList) self.comboExchange = QtGui.QComboBox() self.comboExchange.addItems(self.exchangeList) self.comboCurrency = QtGui.QComboBox() self.comboCurrency.addItems(self.currencyList) self.comboProductClass = QtGui.QComboBox() 
self.comboProductClass.addItems(self.productClassList) self.comboGateway = QtGui.QComboBox() self.comboGateway.addItems(self.gatewayList) gridleft = QtGui.QGridLayout() gridleft.addWidget(labelSymbol, 0, 0) gridleft.addWidget(labelName, 1, 0) gridleft.addWidget(labelDirection, 2, 0) gridleft.addWidget(labelOffset, 3, 0) gridleft.addWidget(labelPrice, 4, 0) gridleft.addWidget(labelVolume, 5, 0) gridleft.addWidget(labelPriceType, 6, 0) gridleft.addWidget(labelExchange, 7, 0) gridleft.addWidget(labelCurrency, 8, 0) gridleft.addWidget(labelProductClass, 9, 0) gridleft.addWidget(labelGateway, 10, 0) gridleft.addWidget(self.lineSymbol, 0, 1) gridleft.addWidget(self.lineName, 1, 1) gridleft.addWidget(self.comboDirection, 2, 1) gridleft.addWidget(self.comboOffset, 3, 1) gridleft.addWidget(self.spinPrice, 4, 1) gridleft.addWidget(self.spinVolume, 5, 1) gridleft.addWidget(self.comboPriceType, 6, 1) gridleft.addWidget(self.comboExchange, 7, 1) gridleft.addWidget(self.comboCurrency, 8, 1) gridleft.addWidget(self.comboProductClass, 9, 1) gridleft.addWidget(self.comboGateway, 10, 1) # 右边部分 labelBid1 = QtGui.QLabel(u'买一') labelBid2 = QtGui.QLabel(u'买二') labelBid3 = QtGui.QLabel(u'买三') labelBid4 = QtGui.QLabel(u'买四') labelBid5 = QtGui.QLabel(u'买五') labelAsk1 = QtGui.QLabel(u'卖一') labelAsk2 = QtGui.QLabel(u'卖二') labelAsk3 = QtGui.QLabel(u'卖三') labelAsk4 = QtGui.QLabel(u'卖四') labelAsk5 = QtGui.QLabel(u'卖五') self.labelBidPrice1 = QtGui.QLabel() self.labelBidPrice2 = QtGui.QLabel() self.labelBidPrice3 = QtGui.QLabel() self.labelBidPrice4 = QtGui.QLabel() self.labelBidPrice5 = QtGui.QLabel() self.labelBidVolume1 = QtGui.QLabel() self.labelBidVolume2 = QtGui.QLabel() self.labelBidVolume3 = QtGui.QLabel() self.labelBidVolume4 = QtGui.QLabel() self.labelBidVolume5 = QtGui.QLabel() self.labelAskPrice1 = QtGui.QLabel() self.labelAskPrice2 = QtGui.QLabel() self.labelAskPrice3 = QtGui.QLabel() self.labelAskPrice4 = QtGui.QLabel() self.labelAskPrice5 = QtGui.QLabel() self.labelAskVolume1 = 
QtGui.QLabel() self.labelAskVolume2 = QtGui.QLabel() self.labelAskVolume3 = QtGui.QLabel() self.labelAskVolume4 = QtGui.QLabel() self.labelAskVolume5 = QtGui.QLabel() labelLast = QtGui.QLabel(u'最新') self.labelLastPrice = QtGui.QLabel() self.labelReturn = QtGui.QLabel() self.labelLastPrice.setMinimumWidth(60) self.labelReturn.setMinimumWidth(60) gridRight = QtGui.QGridLayout() gridRight.addWidget(labelAsk5, 0, 0) gridRight.addWidget(labelAsk4, 1, 0) gridRight.addWidget(labelAsk3, 2, 0) gridRight.addWidget(labelAsk2, 3, 0) gridRight.addWidget(labelAsk1, 4, 0) gridRight.addWidget(labelLast, 5, 0) gridRight.addWidget(labelBid1, 6, 0) gridRight.addWidget(labelBid2, 7, 0) gridRight.addWidget(labelBid3, 8, 0) gridRight.addWidget(labelBid4, 9, 0) gridRight.addWidget(labelBid5, 10, 0) gridRight.addWidget(self.labelAskPrice5, 0, 1) gridRight.addWidget(self.labelAskPrice4, 1, 1) gridRight.addWidget(self.labelAskPrice3, 2, 1) gridRight.addWidget(self.labelAskPrice2, 3, 1) gridRight.addWidget(self.labelAskPrice1, 4, 1) gridRight.addWidget(self.labelLastPrice, 5, 1) gridRight.addWidget(self.labelBidPrice1, 6, 1) gridRight.addWidget(self.labelBidPrice2, 7, 1) gridRight.addWidget(self.labelBidPrice3, 8, 1) gridRight.addWidget(self.labelBidPrice4, 9, 1) gridRight.addWidget(self.labelBidPrice5, 10, 1) gridRight.addWidget(self.labelAskVolume5, 0, 2) gridRight.addWidget(self.labelAskVolume4, 1, 2) gridRight.addWidget(self.labelAskVolume3, 2, 2) gridRight.addWidget(self.labelAskVolume2, 3, 2) gridRight.addWidget(self.labelAskVolume1, 4, 2) gridRight.addWidget(self.labelReturn, 5, 2) gridRight.addWidget(self.labelBidVolume1, 6, 2) gridRight.addWidget(self.labelBidVolume2, 7, 2) gridRight.addWidget(self.labelBidVolume3, 8, 2) gridRight.addWidget(self.labelBidVolume4, 9, 2) gridRight.addWidget(self.labelBidVolume5, 10, 2) # 发单按钮 buttonSendOrder = QtGui.QPushButton(u'发单') buttonCancelAll = QtGui.QPushButton(u'全撤') size = buttonSendOrder.sizeHint() 
buttonSendOrder.setMinimumHeight(size.height()*2) # 把按钮高度设为默认两倍 buttonCancelAll.setMinimumHeight(size.height()*2) # 整合布局 hbox = QtGui.QHBoxLayout() hbox.addLayout(gridleft) hbox.addLayout(gridRight) vbox = QtGui.QVBoxLayout() vbox.addLayout(hbox) vbox.addWidget(buttonSendOrder) vbox.addWidget(buttonCancelAll) vbox.addStretch() self.setLayout(vbox) # 关联更新 buttonSendOrder.clicked.connect(self.sendOrder) buttonCancelAll.clicked.connect(self.cancelAll) self.lineSymbol.returnPressed.connect(self.updateSymbol) #---------------------------------------------------------------------- def updateSymbol(self): """合约变化""" # 读取组件数据 symbol = str(self.lineSymbol.text()) exchange = unicode(self.comboExchange.currentText()) currency = unicode(self.comboCurrency.currentText()) productClass = unicode(self.comboProductClass.currentText()) gatewayName = unicode(self.comboGateway.currentText()) # 查询合约 if exchange: vtSymbol = '.'.join([symbol, exchange]) contract = self.mainEngine.getContract(vtSymbol) else: vtSymbol = symbol contract = self.mainEngine.getContract(symbol) if contract: vtSymbol = contract.vtSymbol gatewayName = contract.gatewayName self.lineName.setText(contract.name) exchange = contract.exchange # 保证有交易所代码 # 清空价格数量 self.spinPrice.setValue(0) self.spinVolume.setValue(0) # 清空行情显示 self.labelBidPrice1.setText('') self.labelBidPrice2.setText('') self.labelBidPrice3.setText('') self.labelBidPrice4.setText('') self.labelBidPrice5.setText('') self.labelBidVolume1.setText('') self.labelBidVolume2.setText('') self.labelBidVolume3.setText('') self.labelBidVolume4.setText('') self.labelBidVolume5.setText('') self.labelAskPrice1.setText('') self.labelAskPrice2.setText('') self.labelAskPrice3.setText('') self.labelAskPrice4.setText('') self.labelAskPrice5.setText('') self.labelAskVolume1.setText('') self.labelAskVolume2.setText('') self.labelAskVolume3.setText('') self.labelAskVolume4.setText('') self.labelAskVolume5.setText('') self.labelLastPrice.setText('') 
self.labelReturn.setText('') # 重新注册事件监听 self.eventEngine.unregister(EVENT_TICK + self.symbol, self.signal.emit) self.eventEngine.register(EVENT_TICK + vtSymbol, self.signal.emit) # 订阅合约 req = VtSubscribeReq() req.symbol = symbol req.exchange = exchange req.currency = currency req.productClass = productClass self.mainEngine.subscribe(req, gatewayName) # 更新组件当前交易的合约 self.symbol = vtSymbol #---------------------------------------------------------------------- def updateTick(self, event): """更新行情""" tick = event.dict_['data'] if tick.vtSymbol == self.symbol: self.labelBidPrice1.setText(str(tick.bidPrice1)) self.labelAskPrice1.setText(str(tick.askPrice1)) self.labelBidVolume1.setText(str(tick.bidVolume1)) self.labelAskVolume1.setText(str(tick.askVolume1)) if tick.bidPrice2: self.labelBidPrice2.setText(str(tick.bidPrice2)) self.labelBidPrice3.setText(str(tick.bidPrice3)) self.labelBidPrice4.setText(str(tick.bidPrice4)) self.labelBidPrice5.setText(str(tick.bidPrice5)) self.labelAskPrice2.setText(str(tick.askPrice2)) self.labelAskPrice3.setText(str(tick.askPrice3)) self.labelAskPrice4.setText(str(tick.askPrice4)) self.labelAskPrice5.setText(str(tick.askPrice5)) self.labelBidVolume2.setText(str(tick.bidVolume2)) self.labelBidVolume3.setText(str(tick.bidVolume3)) self.labelBidVolume4.setText(str(tick.bidVolume4)) self.labelBidVolume5.setText(str(tick.bidVolume5)) self.labelAskVolume2.setText(str(tick.askVolume2)) self.labelAskVolume3.setText(str(tick.askVolume3)) self.labelAskVolume4.setText(str(tick.askVolume4)) self.labelAskVolume5.setText(str(tick.askVolume5)) self.labelLastPrice.setText(str(tick.lastPrice)) if tick.preClosePrice: rt = (tick.lastPrice/tick.preClosePrice)-1 self.labelReturn.setText(('%.2f' %(rt*100))+'%') else: self.labelReturn.setText('') #---------------------------------------------------------------------- def connectSignal(self): """连接Signal""" self.signal.connect(self.updateTick) 
#---------------------------------------------------------------------- def sendOrder(self): """发单""" symbol = str(self.lineSymbol.text()) exchange = unicode(self.comboExchange.currentText()) currency = unicode(self.comboCurrency.currentText()) productClass = unicode(self.comboProductClass.currentText()) gatewayName = unicode(self.comboGateway.currentText()) # 查询合约 if exchange: vtSymbol = '.'.join([symbol, exchange]) contract = self.mainEngine.getContract(vtSymbol) else: vtSymbol = symbol contract = self.mainEngine.getContract(symbol) if contract: gatewayName = contract.gatewayName exchange = contract.exchange # 保证有交易所代码 req = VtOrderReq() req.symbol = symbol req.exchange = exchange req.price = self.spinPrice.value() req.volume = self.spinVolume.value() req.direction = unicode(self.comboDirection.currentText()) req.priceType = unicode(self.comboPriceType.currentText()) req.offset = unicode(self.comboOffset.currentText()) req.currency = currency req.productClass = productClass self.mainEngine.sendOrder(req, gatewayName) #---------------------------------------------------------------------- def cancelAll(self): """一键撤销所有委托""" l = self.mainEngine.getAllWorkingOrders() for order in l: req = VtCancelOrderReq() req.symbol = order.symbol req.exchange = order.exchange req.frontID = order.frontID req.sessionID = order.sessionID req.orderID = order.orderID self.mainEngine.cancelOrder(req, order.gatewayName) #---------------------------------------------------------------------- def closePosition(self, cell): """根据持仓信息自动填写交易组件""" # 读取持仓数据,cell是一个表格中的单元格对象 pos = cell.data symbol = pos.symbol # 更新交易组件的显示合约 self.lineSymbol.setText(symbol) self.updateSymbol() # 自动填写信息 self.comboPriceType.setCurrentIndex(self.priceTypeList.index(PRICETYPE_LIMITPRICE)) self.comboOffset.setCurrentIndex(self.offsetList.index(OFFSET_CLOSE)) self.spinVolume.setValue(pos.position) if pos.direction == DIRECTION_LONG or pos.direction == DIRECTION_NET: 
self.comboDirection.setCurrentIndex(self.directionList.index(DIRECTION_SHORT)) else: self.comboDirection.setCurrentIndex(self.directionList.index(DIRECTION_LONG)) # 价格留待更新后由用户输入,防止有误操作 ######################################################################## class ContractMonitor(BasicMonitor): """合约查询""" #---------------------------------------------------------------------- def __init__(self, mainEngine, parent=None): """Constructor""" super(ContractMonitor, self).__init__(parent=parent) self.mainEngine = mainEngine d = OrderedDict() d['symbol'] = {'chinese':u'合约代码', 'cellType':BasicCell} d['exchange'] = {'chinese':u'交易所', 'cellType':BasicCell} d['vtSymbol'] = {'chinese':u'vt系统代码', 'cellType':BasicCell} d['name'] = {'chinese':u'名称', 'cellType':BasicCell} d['productClass'] = {'chinese':u'合约类型', 'cellType':BasicCell} d['size'] = {'chinese':u'大小', 'cellType':BasicCell} d['priceTick'] = {'chinese':u'最小价格变动', 'cellType':BasicCell} #d['strikePrice'] = {'chinese':u'期权行权价', 'cellType':BasicCell} #d['underlyingSymbol'] = {'chinese':u'期权标的物', 'cellType':BasicCell} #d['optionType'] = {'chinese':u'期权类型', 'cellType':BasicCell} self.setHeaderDict(d) self.initUi() #---------------------------------------------------------------------- def initUi(self): """初始化界面""" self.setWindowTitle(u'合约查询') self.setMinimumSize(800, 800) self.setFont(BASIC_FONT) self.initTable() self.addMenuAction() #---------------------------------------------------------------------- def showAllContracts(self): """显示所有合约数据""" l = self.mainEngine.getAllContracts() d = {'.'.join([contract.exchange, contract.symbol]):contract for contract in l} l2 = d.keys() l2.sort(reverse=True) self.setRowCount(len(l2)) row = 0 for key in l2: contract = d[key] for n, header in enumerate(self.headerList): content = safeUnicode(contract.__getattribute__(header)) cellType = self.headerDict[header]['cellType'] cell = cellType(content) if self.font: cell.setFont(self.font) # 如果设置了特殊字体,则进行单元格设置 self.setItem(row, n, cell) row = row 
+ 1 #---------------------------------------------------------------------- def refresh(self): """刷新""" self.menu.close() # 关闭菜单 self.clearContents() self.setRowCount(0) self.showAllContracts() #---------------------------------------------------------------------- def addMenuAction(self): """增加右键菜单内容""" refreshAction = QtGui.QAction(u'刷新', self) refreshAction.triggered.connect(self.refresh) self.menu.addAction(refreshAction) #---------------------------------------------------------------------- def show(self): """显示""" super(ContractMonitor, self).show() self.refresh()
mit
mherrmann/osxtrash
setup.py
1
1317
"""Send files to the Trash on OS X (incl. "Put Back" support). See: https://github.com/mherrmann/osxtrash""" from setuptools import setup, Extension impl = Extension( 'osxtrash', sources=['src/trash.m'], extra_compile_args=['-mmacosx-version-min=10.5'], extra_link_args=[ '-framework', 'AppKit', '-framework', 'ScriptingBridge' ] ) setup( name='osxtrash', version='1.6', description='Send files to the Trash on OS X (incl. "Put Back" support).', long_description= 'Send files to the Trash on OS X (incl. "Put Back" support).' + '\n\nHome page: https://github.com/mherrmann/osxtrash', url='https://github.com/mherrmann/osxtrash', author='Michael Herrmann', author_email='[my first name]@[my last name].io', license='MIT', platforms=['MacOS'], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: MacOS :: MacOS X', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Topic :: Software Development :: Libraries' ], keywords='osx os x trash move recycle put back', ext_modules = [impl] )
mit
mjirayu/sit_academy
lms/djangoapps/certificates/tests/test_create_fake_cert.py
107
2003
"""Tests for the create_fake_certs management command. """ from django.test import TestCase from django.core.management.base import CommandError from nose.plugins.attrib import attr from opaque_keys.edx.locator import CourseLocator from student.tests.factories import UserFactory from certificates.management.commands import create_fake_cert from certificates.models import GeneratedCertificate @attr('shard_1') class CreateFakeCertTest(TestCase): """Tests for the create_fake_certs management command. """ USERNAME = "test" COURSE_KEY = CourseLocator(org='edX', course='DemoX', run='Demo_Course') def setUp(self): super(CreateFakeCertTest, self).setUp() self.user = UserFactory.create(username=self.USERNAME) def test_create_fake_cert(self): # No existing cert, so create it self._run_command( self.USERNAME, unicode(self.COURSE_KEY), cert_mode='verified', grade='0.89' ) cert = GeneratedCertificate.objects.get(user=self.user, course_id=self.COURSE_KEY) self.assertEqual(cert.status, 'downloadable') self.assertEqual(cert.mode, 'verified') self.assertEqual(cert.grade, '0.89') self.assertEqual(cert.download_uuid, 'test') self.assertEqual(cert.download_url, 'http://www.example.com') # Cert already exists; modify it self._run_command( self.USERNAME, unicode(self.COURSE_KEY), cert_mode='honor' ) cert = GeneratedCertificate.objects.get(user=self.user, course_id=self.COURSE_KEY) self.assertEqual(cert.mode, 'honor') def test_too_few_args(self): with self.assertRaisesRegexp(CommandError, 'Usage'): self._run_command(self.USERNAME) def _run_command(self, *args, **kwargs): """Run the management command to generate a fake cert. """ command = create_fake_cert.Command() return command.handle(*args, **kwargs)
agpl-3.0
shahankhatch/scikit-learn
benchmarks/bench_mnist.py
76
6136
""" ======================= MNIST dataset benchmark ======================= Benchmark on the MNIST dataset. The dataset comprises 70,000 samples and 784 features. Here, we consider the task of predicting 10 classes - digits from 0 to 9 from their raw images. By contrast to the covertype dataset, the feature space is homogenous. Example of output : [..] Classification performance: =========================== Classifier train-time test-time error-rat ------------------------------------------------------------ Nystroem-SVM 105.07s 0.91s 0.0227 ExtraTrees 48.20s 1.22s 0.0288 RandomForest 47.17s 1.21s 0.0304 SampledRBF-SVM 140.45s 0.84s 0.0486 CART 22.84s 0.16s 0.1214 dummy 0.01s 0.02s 0.8973 """ from __future__ import division, print_function # Author: Issam H. Laradji # Arnaud Joly <[email protected]> # License: BSD 3 clause import os from time import time import argparse import numpy as np from sklearn.datasets import fetch_mldata from sklearn.datasets import get_data_home from sklearn.ensemble import ExtraTreesClassifier from sklearn.ensemble import RandomForestClassifier from sklearn.dummy import DummyClassifier from sklearn.externals.joblib import Memory from sklearn.kernel_approximation import Nystroem from sklearn.kernel_approximation import RBFSampler from sklearn.metrics import zero_one_loss from sklearn.pipeline import make_pipeline from sklearn.svm import LinearSVC from sklearn.tree import DecisionTreeClassifier from sklearn.utils import check_array from sklearn.linear_model import LogisticRegression # Memoize the data extraction and memory map the resulting # train / test splits in readonly mode memory = Memory(os.path.join(get_data_home(), 'mnist_benchmark_data'), mmap_mode='r') @memory.cache def load_data(dtype=np.float32, order='F'): """Load the data, then cache and memmap the train/test split""" ###################################################################### ## Load dataset print("Loading dataset...") data = fetch_mldata('MNIST original') X = 
check_array(data['data'], dtype=dtype, order=order) y = data["target"] # Normalize features X = X / 255 ## Create train-test split (as [Joachims, 2006]) print("Creating train-test split...") n_train = 60000 X_train = X[:n_train] y_train = y[:n_train] X_test = X[n_train:] y_test = y[n_train:] return X_train, X_test, y_train, y_test ESTIMATORS = { "dummy": DummyClassifier(), 'CART': DecisionTreeClassifier(), 'ExtraTrees': ExtraTreesClassifier(n_estimators=100), 'RandomForest': RandomForestClassifier(n_estimators=100), 'Nystroem-SVM': make_pipeline(Nystroem(gamma=0.015, n_components=1000), LinearSVC(C=100)), 'SampledRBF-SVM': make_pipeline(RBFSampler(gamma=0.015, n_components=1000), LinearSVC(C=100)), 'LinearRegression-SAG': LogisticRegression(solver='sag', tol=1e-1, C=1e4) } if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument('--classifiers', nargs="+", choices=ESTIMATORS, type=str, default=['ExtraTrees', 'Nystroem-SVM'], help="list of classifiers to benchmark.") parser.add_argument('--n-jobs', nargs="?", default=1, type=int, help="Number of concurrently running workers for " "models that support parallelism.") parser.add_argument('--order', nargs="?", default="C", type=str, choices=["F", "C"], help="Allow to choose between fortran and C ordered " "data") parser.add_argument('--random-seed', nargs="?", default=0, type=int, help="Common seed used by random number generator.") args = vars(parser.parse_args()) print(__doc__) X_train, X_test, y_train, y_test = load_data(order=args["order"]) print("") print("Dataset statistics:") print("===================") print("%s %d" % ("number of features:".ljust(25), X_train.shape[1])) print("%s %d" % ("number of classes:".ljust(25), np.unique(y_train).size)) print("%s %s" % ("data type:".ljust(25), X_train.dtype)) print("%s %d (size=%dMB)" % ("number of train samples:".ljust(25), X_train.shape[0], int(X_train.nbytes / 1e6))) print("%s %d (size=%dMB)" % ("number of test samples:".ljust(25), 
X_test.shape[0], int(X_test.nbytes / 1e6))) print() print("Training Classifiers") print("====================") error, train_time, test_time = {}, {}, {} for name in sorted(args["classifiers"]): print("Training %s ... " % name, end="") estimator = ESTIMATORS[name] estimator_params = estimator.get_params() estimator.set_params(**{p: args["random_seed"] for p in estimator_params if p.endswith("random_state")}) if "n_jobs" in estimator_params: estimator.set_params(n_jobs=args["n_jobs"]) time_start = time() estimator.fit(X_train, y_train) train_time[name] = time() - time_start time_start = time() y_pred = estimator.predict(X_test) test_time[name] = time() - time_start error[name] = zero_one_loss(y_test, y_pred) print("done") print() print("Classification performance:") print("===========================") print("{0: <24} {1: >10} {2: >11} {3: >12}" "".format("Classifier ", "train-time", "test-time", "error-rate")) print("-" * 60) for name in sorted(args["classifiers"], key=error.get): print("{0: <23} {1: >10.2f}s {2: >10.2f}s {3: >12.4f}" "".format(name, train_time[name], test_time[name], error[name])) print()
bsd-3-clause
martynovp/edx-platform
lms/djangoapps/shoppingcart/processors/CyberSource.py
142
19828
""" Implementation the CyberSource credit card processor. IMPORTANT: CyberSource will deprecate this version of the API ("Hosted Order Page") in September 2014. We are keeping this implementation in the code-base for now, but we should eventually replace this module with the newer implementation (in `CyberSource2.py`) To enable this implementation, add the following to Django settings: CC_PROCESSOR_NAME = "CyberSource" CC_PROCESSOR = { "CyberSource": { "SHARED_SECRET": "<shared secret>", "MERCHANT_ID": "<merchant ID>", "SERIAL_NUMBER": "<serial number>", "PURCHASE_ENDPOINT": "<purchase endpoint>" } } """ import time import hmac import binascii import re import json from collections import OrderedDict, defaultdict from decimal import Decimal, InvalidOperation from hashlib import sha1 from textwrap import dedent from django.conf import settings from django.utils.translation import ugettext as _ from edxmako.shortcuts import render_to_string from shoppingcart.models import Order from shoppingcart.processors.exceptions import * from shoppingcart.processors.helpers import get_processor_config from microsite_configuration import microsite def process_postpay_callback(params, **kwargs): """ The top level call to this module, basically This function is handed the callback request after the customer has entered the CC info and clicked "buy" on the external Hosted Order Page. It is expected to verify the callback and determine if the payment was successful. It returns {'success':bool, 'order':Order, 'error_html':str} If successful this function must have the side effect of marking the order purchased and calling the purchased_callbacks of the cart items. If unsuccessful this function should not have those side effects but should try to figure out why and return a helpful-enough error message in error_html. 
""" try: verify_signatures(params) result = payment_accepted(params) if result['accepted']: # SUCCESS CASE first, rest are some sort of oddity record_purchase(params, result['order']) return {'success': True, 'order': result['order'], 'error_html': ''} else: return {'success': False, 'order': result['order'], 'error_html': get_processor_decline_html(params)} except CCProcessorException as error: return {'success': False, 'order': None, # due to exception we may not have the order 'error_html': get_processor_exception_html(error)} def processor_hash(value): """ Performs the base64(HMAC_SHA1(key, value)) used by CyberSource Hosted Order Page """ shared_secret = get_processor_config().get('SHARED_SECRET', '') hash_obj = hmac.new(shared_secret.encode('utf-8'), value.encode('utf-8'), sha1) return binascii.b2a_base64(hash_obj.digest())[:-1] # last character is a '\n', which we don't want def sign(params, signed_fields_key='orderPage_signedFields', full_sig_key='orderPage_signaturePublic'): """ params needs to be an ordered dict, b/c cybersource documentation states that order is important. 
Reverse engineered from PHP version provided by cybersource """ merchant_id = get_processor_config().get('MERCHANT_ID', '') order_page_version = get_processor_config().get('ORDERPAGE_VERSION', '7') serial_number = get_processor_config().get('SERIAL_NUMBER', '') params['merchantID'] = merchant_id params['orderPage_timestamp'] = int(time.time() * 1000) params['orderPage_version'] = order_page_version params['orderPage_serialNumber'] = serial_number fields = u",".join(params.keys()) values = u",".join([u"{0}={1}".format(i, params[i]) for i in params.keys()]) fields_sig = processor_hash(fields) values += u",signedFieldsPublicSignature=" + fields_sig params[full_sig_key] = processor_hash(values) params[signed_fields_key] = fields return params def verify_signatures(params, signed_fields_key='signedFields', full_sig_key='signedDataPublicSignature'): """ Verify the signatures accompanying the POST back from Cybersource Hosted Order Page returns silently if verified raises CCProcessorSignatureException if not verified """ signed_fields = params.get(signed_fields_key, '').split(',') data = u",".join([u"{0}={1}".format(k, params.get(k, '')) for k in signed_fields]) signed_fields_sig = processor_hash(params.get(signed_fields_key, '')) data += u",signedFieldsPublicSignature=" + signed_fields_sig returned_sig = params.get(full_sig_key, '') if processor_hash(data) != returned_sig: raise CCProcessorSignatureException() def render_purchase_form_html(cart, **kwargs): """ Renders the HTML of the hidden POST form that must be used to initiate a purchase with CyberSource """ return render_to_string('shoppingcart/cybersource_form.html', { 'action': get_purchase_endpoint(), 'params': get_signed_purchase_params(cart), }) def get_signed_purchase_params(cart, **kwargs): return sign(get_purchase_params(cart)) def get_purchase_params(cart): total_cost = cart.total_cost amount = "{0:0.2f}".format(total_cost) cart_items = cart.orderitem_set.all() params = OrderedDict() params['amount'] = 
amount params['currency'] = cart.currency params['orderPage_transactionType'] = 'sale' params['orderNumber'] = "{0:d}".format(cart.id) return params def get_purchase_endpoint(): return get_processor_config().get('PURCHASE_ENDPOINT', '') def payment_accepted(params): """ Check that cybersource has accepted the payment params: a dictionary of POST parameters returned by CyberSource in their post-payment callback returns: true if the payment was correctly accepted, for the right amount false if the payment was not accepted raises: CCProcessorDataException if the returned message did not provide required parameters CCProcessorWrongAmountException if the amount charged is different than the order amount """ #make sure required keys are present and convert their values to the right type valid_params = {} for key, key_type in [('orderNumber', int), ('orderCurrency', str), ('decision', str)]: if key not in params: raise CCProcessorDataException( _("The payment processor did not return a required parameter: {0}").format(key) ) try: valid_params[key] = key_type(params[key]) except ValueError: raise CCProcessorDataException( _("The payment processor returned a badly-typed value {0} for param {1}.").format(params[key], key) ) try: order = Order.objects.get(id=valid_params['orderNumber']) except Order.DoesNotExist: raise CCProcessorDataException(_("The payment processor accepted an order whose number is not in our system.")) if valid_params['decision'] == 'ACCEPT': try: # Moved reading of charged_amount here from the valid_params loop above because # only 'ACCEPT' messages have a 'ccAuthReply_amount' parameter charged_amt = Decimal(params['ccAuthReply_amount']) except InvalidOperation: raise CCProcessorDataException( _("The payment processor returned a badly-typed value {0} for param {1}.").format( params['ccAuthReply_amount'], 'ccAuthReply_amount' ) ) if charged_amt == order.total_cost and valid_params['orderCurrency'] == order.currency: return {'accepted': True, 
'amt_charged': charged_amt, 'currency': valid_params['orderCurrency'], 'order': order} else: raise CCProcessorWrongAmountException( _("The amount charged by the processor {0} {1} is different than the total cost of the order {2} {3}.") .format( charged_amt, valid_params['orderCurrency'], order.total_cost, order.currency ) ) else: return {'accepted': False, 'amt_charged': 0, 'currency': 'usd', 'order': order} def record_purchase(params, order): """ Record the purchase and run purchased_callbacks """ ccnum_str = params.get('card_accountNumber', '') m = re.search("\d", ccnum_str) if m: ccnum = ccnum_str[m.start():] else: ccnum = "####" order.purchase( first=params.get('billTo_firstName', ''), last=params.get('billTo_lastName', ''), street1=params.get('billTo_street1', ''), street2=params.get('billTo_street2', ''), city=params.get('billTo_city', ''), state=params.get('billTo_state', ''), country=params.get('billTo_country', ''), postalcode=params.get('billTo_postalCode', ''), ccnum=ccnum, cardtype=CARDTYPE_MAP[params.get('card_cardType', 'UNKNOWN')], processor_reply_dump=json.dumps(params) ) def get_processor_decline_html(params): """Have to parse through the error codes to return a helpful message""" # see if we have an override in the microsites payment_support_email = microsite.get_value('payment_support_email', settings.PAYMENT_SUPPORT_EMAIL) msg = _( "Sorry! Our payment processor did not accept your payment. " "The decision they returned was {decision_text}, " "and the reason was {reason_text}. " "You were not charged. " "Please try a different form of payment. " "Contact us with payment-related questions at {email}." 
) formatted = msg.format( decision_text='<span class="decision">{}</span>'.format(params['decision']), reason_text='<span class="reason">{code}:{msg}</span>'.format( code=params['reasonCode'], msg=REASONCODE_MAP[params['reasonCode']], ), email=payment_support_email, ) return '<p class="error_msg">{}</p>'.format(formatted) def get_processor_exception_html(exception): """Return error HTML associated with exception""" # see if we have an override in the microsites payment_support_email = microsite.get_value('payment_support_email', settings.PAYMENT_SUPPORT_EMAIL) if isinstance(exception, CCProcessorDataException): msg = _( "Sorry! Our payment processor sent us back a payment confirmation " "that had inconsistent data!" "We apologize that we cannot verify whether the charge went through " "and take further action on your order." "The specific error message is: {error_message}. " "Your credit card may possibly have been charged. " "Contact us with payment-specific questions at {email}." ) formatted = msg.format( error_message='<span class="exception_msg">{msg}</span>'.format( msg=exception.message, ), email=payment_support_email, ) return '<p class="error_msg">{}</p>'.format(formatted) elif isinstance(exception, CCProcessorWrongAmountException): msg = _( "Sorry! Due to an error your purchase was charged for " "a different amount than the order total! " "The specific error message is: {error_message}. " "Your credit card has probably been charged. " "Contact us with payment-specific questions at {email}." ) formatted = msg.format( error_message='<span class="exception_msg">{msg}</span>'.format( msg=exception.message, ), email=payment_support_email, ) return '<p class="error_msg">{}</p>'.format(formatted) elif isinstance(exception, CCProcessorSignatureException): msg = _( "Sorry! Our payment processor sent us back a corrupted message " "regarding your charge, so we are unable to validate that " "the message actually came from the payment processor. 
" "The specific error message is: {error_message}. " "We apologize that we cannot verify whether the charge went through " "and take further action on your order. " "Your credit card may possibly have been charged. " "Contact us with payment-specific questions at {email}." ) formatted = msg.format( error_message='<span class="exception_msg">{msg}</span>'.format( msg=exception.message, ), email=payment_support_email, ) return '<p class="error_msg">{}</p>'.format(formatted) # fallthrough case, which basically never happens return '<p class="error_msg">EXCEPTION!</p>' CARDTYPE_MAP = defaultdict(lambda: "UNKNOWN") CARDTYPE_MAP.update( { '001': 'Visa', '002': 'MasterCard', '003': 'American Express', '004': 'Discover', '005': 'Diners Club', '006': 'Carte Blanche', '007': 'JCB', '014': 'EnRoute', '021': 'JAL', '024': 'Maestro', '031': 'Delta', '033': 'Visa Electron', '034': 'Dankort', '035': 'Laser', '036': 'Carte Bleue', '037': 'Carta Si', '042': 'Maestro', '043': 'GE Money UK card' } ) REASONCODE_MAP = defaultdict(lambda: "UNKNOWN REASON") REASONCODE_MAP.update( { '100': _('Successful transaction.'), '101': _('The request is missing one or more required fields.'), '102': _('One or more fields in the request contains invalid data.'), '104': dedent(_( """ The merchantReferenceCode sent with this authorization request matches the merchantReferenceCode of another authorization request that you sent in the last 15 minutes. Possible fix: retry the payment after 15 minutes. """)), '150': _('Error: General system failure. Possible fix: retry the payment after a few minutes.'), '151': dedent(_( """ Error: The request was received but there was a server timeout. This error does not include timeouts between the client and the server. Possible fix: retry the payment after some time. """)), '152': dedent(_( """ Error: The request was received, but a service did not finish running in time Possible fix: retry the payment after some time. 
""")), '201': _('The issuing bank has questions about the request. Possible fix: retry with another form of payment'), '202': dedent(_( """ Expired card. You might also receive this if the expiration date you provided does not match the date the issuing bank has on file. Possible fix: retry with another form of payment """)), '203': dedent(_( """ General decline of the card. No other information provided by the issuing bank. Possible fix: retry with another form of payment """)), '204': _('Insufficient funds in the account. Possible fix: retry with another form of payment'), # 205 was Stolen or lost card. Might as well not show this message to the person using such a card. '205': _('Unknown reason'), '207': _('Issuing bank unavailable. Possible fix: retry again after a few minutes'), '208': dedent(_( """ Inactive card or card not authorized for card-not-present transactions. Possible fix: retry with another form of payment """)), '210': _('The card has reached the credit limit. Possible fix: retry with another form of payment'), '211': _('Invalid card verification number. Possible fix: retry with another form of payment'), # 221 was The customer matched an entry on the processor's negative file. # Might as well not show this message to the person using such a card. '221': _('Unknown reason'), '231': _('Invalid account number. Possible fix: retry with another form of payment'), '232': dedent(_( """ The card type is not accepted by the payment processor. Possible fix: retry with another form of payment """)), '233': _('General decline by the processor. Possible fix: retry with another form of payment'), '234': _( "There is a problem with our CyberSource merchant configuration. Please let us know at {0}" ).format(settings.PAYMENT_SUPPORT_EMAIL), # reason code 235 only applies if we are processing a capture through the API. so we should never see it '235': _('The requested amount exceeds the originally authorized amount.'), '236': _('Processor Failure. 
Possible fix: retry the payment'), # reason code 238 only applies if we are processing a capture through the API. so we should never see it '238': _('The authorization has already been captured'), # reason code 239 only applies if we are processing a capture or credit through the API, # so we should never see it '239': _('The requested transaction amount must match the previous transaction amount.'), '240': dedent(_( """ The card type sent is invalid or does not correlate with the credit card number. Possible fix: retry with the same card or another form of payment """)), # reason code 241 only applies when we are processing a capture or credit through the API, # so we should never see it '241': _('The request ID is invalid.'), # reason code 242 occurs if there was not a previously successful authorization request or # if the previously successful authorization has already been used by another capture request. # This reason code only applies when we are processing a capture through the API # so we should never see it '242': dedent(_( """ You requested a capture through the API, but there is no corresponding, unused authorization record. """)), # we should never see 243 '243': _('The transaction has already been settled or reversed.'), # reason code 246 applies only if we are processing a void through the API. so we should never see it '246': dedent(_( """ The capture or credit is not voidable because the capture or credit information has already been submitted to your processor. Or, you requested a void for a type of transaction that cannot be voided. """)), # reason code 247 applies only if we are processing a void through the API. so we should never see it '247': _('You requested a credit for a capture that was previously voided'), '250': dedent(_( """ Error: The request was received, but there was a timeout at the payment processor. Possible fix: retry the payment. 
""")), '520': dedent(_( """ The authorization request was approved by the issuing bank but declined by CyberSource.' Possible fix: retry with a different form of payment. """)), } )
agpl-3.0
hydroffice/hyo_soundspeed
hyo2/soundspeed/listener/seacat/seacat_emulator.py
1
5497
import serial import time import traceback try: from hyo2.soundspeed.listener.seacat.sbe_serialcomms import SeacatComms, UTF8Serial except Exception: print("Seacat serial communications module not found or failed to load") print("Emulator will still work but the capture function will raise an exception") class UTF8Serial(serial.Serial): def write(self, data): serial.Serial.write(self, data.encode('utf-8')) def read(self, size=1): data = serial.Serial.read(self, size) # print("raw read:", data, self.port, self.baudrate, self.stopbits, self.parity) try: data = data.decode("utf-8") # converts from bytes in python 3.x except AttributeError: pass except UnicodeDecodeError: data = "" print("bad message received", data) # print("decoded data", str(data)) return str(data) # converts to ascii for python 2.7, leaves as unicode for 3.x class UTF8Capture(UTF8Serial): def __init__(self, filename, *args, **args2): super().__init__(*args, **args2) self.outfile = open(filename, "wb+") def write(self, data): self.outfile.write(b"<write>") if isinstance(data, str): self.outfile.write(data.encode()) else: self.outfile.write(data) self.outfile.write(b"</write>\r\n") UTF8Serial.write(self, data) def read(self, size=1): data = UTF8Serial.read(self, size) self.outfile.write(b"<read>") if isinstance(data, str): self.outfile.write(data.encode()) else: self.outfile.write(data) self.outfile.write(b"</read>\r\n") def respond(max_time=5.0, sleep_time=.04, port='COM1', baud=9600, byte_size=serial.EIGHTBITS, parity=serial.PARITY_NONE, stop_bits=serial.STOPBITS_ONE, timeout=.1): """listen to a COM port and respond like a SeaCat for a certain amount of time""" com_link = UTF8Serial(port, baud, bytesize=byte_size, parity=parity, stopbits=stop_bits, timeout=timeout) print("start responding to %s for %.1f sec" % (port, max_time)) try: t = time.time() while time.time() - t < max_time: d = com_link.read(1000) if str(d): # print type(d), d try: # d=str(d.decode("utf-8")) print("str:", repr(d)) except: 
print("failed decode") if "DS" in d: print("writing status") com_link.write( "SeacatPlus V 1.6b SERIAL NO. 4677 05 Mar 2010 16:01:56\r\nvbatt = 13.4, vlith = 8.2, " "ioper = 60.7 ma, ipump = 45.4 ma, \r\nstatus = not logging\r\nnumber of scans to average = 1\r\n" "samples = 1374, free = 761226, casts = 2\r\nmode = profile, minimum cond freq = 3258, " "pump delay = 40 sec\r\nautorun = no, ignore magnetic switch = no\r\nbattery type = alkaline, " "battery cutoff = 7.3 volts\r\npressure sensor = strain gauge, range = 508.0\r\nSBE 38 = no, " "Gas Tension Device = no\r\nExt Volt 0 = no, Ext Volt 1 = no, Ext Volt 2 = no, Ext Volt 3 = no\r\n" "echo commands = yes\r\noutput format = raw HEX\r\nS>") elif "SB" in d or "Baud=" in d: if "SB" in d: if d[2] == "1": new_baud = 600 elif d[2] == "2": new_baud = 1200 elif d[3] == "3": new_baud = 9600 else: print("unrecognized baud rate -- ignoring ", d) continue else: new_baud = int(d[5:10]) com_link.close() com_link = UTF8Serial(port, new_baud, bytesize=byte_size, parity=parity, stopbits=stop_bits, timeout=timeout) elif "DH" in d: com_link.write("Headers!!!\r\nS>") elif "\r" in d: print("writing prompt") com_link.write("S>\r\n") else: time.sleep(sleep_time) except Exception: traceback.print_exc() finally: com_link.close() print("end responding to %s" % port) def raw_capture(filename, port='COM1', baud=9600, byte_size=serial.EIGHTBITS, parity=serial.PARITY_NONE): sbe = SeacatComms.open_seacat(port, baud, byte_size, parity) sbe.comlink.close() # replace the serial COMs with one that will push the data to a file sbe.comlink = UTF8Capture(filename, sbe.comlink.port, sbe.comlink.baudrate, bytesize=sbe.comlink.bytesize, parity=sbe.comlink.parity, stopbits=sbe.comlink.stopbits, timeout=sbe.comlink.timeout) try: sbe.wake() except AttributeError: # HSTB version sbe.Wake() sbe.GetStatus() sbe.GetDateTime() sbe.GetVoltages() sbe.GetHeaders() sbe.GetScans() sbe.GetCasts() sbe.GetCalibration() else: # hydroffice version sbe.get_status() 
sbe.get_datetime() sbe.get_voltages() sbe.get_headers() sbe.get_scans() sbe.get_casts() sbe.get_calibration()
lgpl-2.1
myerpengine/odoo
addons/purchase_requisition/purchase_requisition.py
10
23810
# -*- encoding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). All Rights Reserved # $Id$ # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from datetime import datetime from dateutil.relativedelta import relativedelta import time from openerp.osv import fields, osv from openerp.tools.translate import _ import openerp.addons.decimal_precision as dp class purchase_requisition(osv.osv): _name = "purchase.requisition" _description = "Purchase Requisition" _inherit = ['mail.thread', 'ir.needaction_mixin'] def _get_po_line(self, cr, uid, ids, field_names, arg=None, context=None): result = {}.fromkeys(ids, []) for element in self.browse(cr, uid, ids, context=context): for po in element.purchase_ids: result[element.id] += [po_line.id for po_line in po.order_line] return result _columns = { 'name': fields.char('Call for Bids Reference', size=32, required=True), 'origin': fields.char('Source Document', size=32), 'ordering_date': fields.date('Scheduled Ordering Date'), 'date_end': fields.datetime('Bid Submission Deadline'), 'schedule_date': fields.date('Scheduled Date', select=True, help="The expected and scheduled date where all the products are received"), 
'user_id': fields.many2one('res.users', 'Responsible'), 'exclusive': fields.selection([('exclusive', 'Select only one RFQ (exclusive)'), ('multiple', 'Select multiple RFQ')], 'Bid Selection Type', required=True, help="Select only one RFQ (exclusive): On the confirmation of a purchase order, it cancels the remaining purchase order.\nSelect multiple RFQ: It allows to have multiple purchase orders.On confirmation of a purchase order it does not cancel the remaining orders"""), 'description': fields.text('Description'), 'company_id': fields.many2one('res.company', 'Company', required=True), 'purchase_ids': fields.one2many('purchase.order', 'requisition_id', 'Purchase Orders', states={'done': [('readonly', True)]}), 'po_line_ids': fields.function(_get_po_line, method=True, type='one2many', relation='purchase.order.line', string='Products by supplier'), 'line_ids': fields.one2many('purchase.requisition.line', 'requisition_id', 'Products to Purchase', states={'done': [('readonly', True)]}), 'procurement_id': fields.many2one('procurement.order', 'Procurement', ondelete='set null'), 'warehouse_id': fields.many2one('stock.warehouse', 'Warehouse'), 'state': fields.selection([('draft', 'Draft'), ('in_progress', 'Confirmed'), ('open', 'Bid Selection'), ('done', 'PO Created'), ('cancel', 'Cancelled')], 'Status', track_visibility='onchange', required=True), 'multiple_rfq_per_supplier': fields.boolean('Multiple RFQ per supplier'), 'account_analytic_id': fields.many2one('account.analytic.account', 'Analytic Account'), 'picking_type_id': fields.many2one('stock.picking.type', 'Picking Type', required=True), } def _get_picking_in(self, cr, uid, context=None): obj_data = self.pool.get('ir.model.data') return obj_data.get_object_reference(cr, uid, 'stock','picking_type_in')[1] _defaults = { 'state': 'draft', 'exclusive': 'multiple', 'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'purchase.requisition', context=c), 'user_id': lambda 
self, cr, uid, c: self.pool.get('res.users').browse(cr, uid, uid, c).id, 'name': lambda obj, cr, uid, context: obj.pool.get('ir.sequence').get(cr, uid, 'purchase.order.requisition'), 'picking_type_id': _get_picking_in, } def copy(self, cr, uid, id, default=None, context=None): default = default or {} default.update({ 'state': 'draft', 'purchase_ids': [], 'name': self.pool.get('ir.sequence').get(cr, uid, 'purchase.order.requisition'), }) return super(purchase_requisition, self).copy(cr, uid, id, default, context) def tender_cancel(self, cr, uid, ids, context=None): purchase_order_obj = self.pool.get('purchase.order') #try to set all associated quotations to cancel state purchase_ids = [] for tender in self.browse(cr, uid, ids, context=context): for purchase_order in tender.purchase_ids: purchase_order_obj.action_cancel(cr, uid, [purchase_order.id], context=context) purchase_order_obj.message_post(cr, uid, [purchase_order.id], body=_('Cancelled by the tender associated to this quotation.'), context=context) return self.write(cr, uid, ids, {'state': 'cancel'}) def tender_in_progress(self, cr, uid, ids, context=None): return self.write(cr, uid, ids, {'state': 'in_progress'}, context=context) def tender_open(self, cr, uid, ids, context=None): return self.write(cr, uid, ids, {'state': 'open'}, context=context) def tender_reset(self, cr, uid, ids, context=None): self.write(cr, uid, ids, {'state': 'draft'}) for p_id in ids: # Deleting the existing instance of workflow for PO self.delete_workflow(cr, uid, [p_id]) self.create_workflow(cr, uid, [p_id]) return True def tender_done(self, cr, uid, ids, context=None): return self.write(cr, uid, ids, {'state': 'done'}, context=context) def open_product_line(self, cr, uid, ids, context=None): """ This opens product line view to view all lines from the different quotations, groupby default by product and partner to show comparaison between supplier price @return: the product line tree view """ if context is None: context = {} res = 
self.pool.get('ir.actions.act_window').for_xml_id(cr, uid, 'purchase_requisition', 'purchase_line_tree', context=context) res['context'] = context po_lines = self.browse(cr, uid, ids, context=context)[0].po_line_ids res['context'] = { 'search_default_groupby_product': True, 'search_default_hide_cancelled': True, } res['domain'] = [('id', 'in', [line.id for line in po_lines])] return res def open_rfq(self, cr, uid, ids, context=None): """ This opens rfq view to view all quotations associated to the call for bids @return: the RFQ tree view """ if context is None: context = {} res = self.pool.get('ir.actions.act_window').for_xml_id(cr, uid, 'purchase', 'purchase_rfq', context=context) res['context'] = context po_ids = [po.id for po in self.browse(cr, uid, ids, context=context)[0].purchase_ids] res['domain'] = [('id', 'in', po_ids)] return res def _prepare_purchase_order(self, cr, uid, requisition, supplier, context=None): supplier_pricelist = supplier.property_product_pricelist_purchase and supplier.property_product_pricelist_purchase.id or False picking_type_in = self.pool.get("purchase.order")._get_picking_in(cr, uid, context=context) return { 'origin': requisition.name, 'date_order': requisition.date_end or fields.date.context_today(self, cr, uid, context=context), 'partner_id': supplier.id, 'pricelist_id': supplier_pricelist, 'location_id': requisition.picking_type_id.default_location_dest_id.id, 'company_id': requisition.company_id.id, 'fiscal_position': supplier.property_account_position and supplier.property_account_position.id or False, 'requisition_id': requisition.id, 'notes': requisition.description, 'picking_type_id': picking_type_in, } def _prepare_purchase_order_line(self, cr, uid, requisition, requisition_line, purchase_id, supplier, context=None): po_line_obj = self.pool.get('purchase.order.line') product_uom = self.pool.get('product.uom') product = requisition_line.product_id default_uom_po_id = product.uom_po_id.id date_order = 
requisition.ordering_date or fields.date.context_today(self, cr, uid, context=context) qty = product_uom._compute_qty(cr, uid, requisition_line.product_uom_id.id, requisition_line.product_qty, default_uom_po_id) supplier_pricelist = supplier.property_product_pricelist_purchase and supplier.property_product_pricelist_purchase.id or False vals = po_line_obj.onchange_product_id(cr, uid, [], supplier_pricelist, product.id, qty, default_uom_po_id, supplier.id, date_order=date_order, fiscal_position_id=supplier.property_account_position, date_planned=requisition_line.schedule_date, name=False, price_unit=False, state='draft', context=context)['value'] vals.update({ 'order_id': purchase_id, 'product_id': product.id, 'account_analytic_id': requisition_line.account_analytic_id.id, }) return vals def make_purchase_order(self, cr, uid, ids, partner_id, context=None): """ Create New RFQ for Supplier """ if context is None: context = {} assert partner_id, 'Supplier should be specified' purchase_order = self.pool.get('purchase.order') purchase_order_line = self.pool.get('purchase.order.line') res_partner = self.pool.get('res.partner') supplier = res_partner.browse(cr, uid, partner_id, context=context) res = {} for requisition in self.browse(cr, uid, ids, context=context): if not requisition.multiple_rfq_per_supplier and supplier.id in filter(lambda x: x, [rfq.state != 'cancel' and rfq.partner_id.id or None for rfq in requisition.purchase_ids]): raise osv.except_osv(_('Warning!'), _('You have already one %s purchase order for this partner, you must cancel this purchase order to create a new quotation.') % rfq.state) context.update({'mail_create_nolog': True}) purchase_id = purchase_order.create(cr, uid, self._prepare_purchase_order(cr, uid, requisition, supplier, context=context), context=context) purchase_order.message_post(cr, uid, [purchase_id], body=_("RFQ created"), context=context) res[requisition.id] = purchase_id for line in requisition.line_ids: 
purchase_order_line.create(cr, uid, self._prepare_purchase_order_line(cr, uid, requisition, line, purchase_id, supplier, context=context), context=context) return res def check_valid_quotation(self, cr, uid, quotation, context=None): """ Check if a quotation has all his order lines bid in order to confirm it if its the case return True if all order line have been selected during bidding process, else return False args : 'quotation' must be a browse record """ for line in quotation.order_line: if line.state != 'confirmed' or line.product_qty != line.quantity_bid: return False return True def _prepare_po_from_tender(self, cr, uid, tender, context=None): """ Prepare the values to write in the purchase order created from a tender. :param tender: the source tender from which we generate a purchase order """ return {'order_line': [], 'requisition_id': tender.id, 'origin': tender.name} def _prepare_po_line_from_tender(self, cr, uid, tender, line, purchase_id, context=None): """ Prepare the values to write in the purchase order line created from a line of the tender. 
:param tender: the source tender from which we generate a purchase order :param line: the source tender's line from which we generate a line :param purchase_id: the id of the new purchase """ return {'product_qty': line.quantity_bid, 'order_id': purchase_id} def generate_po(self, cr, uid, ids, context=None): """ Generate all purchase order based on selected lines, should only be called on one tender at a time """ if context is None: contex = {} po = self.pool.get('purchase.order') poline = self.pool.get('purchase.order.line') id_per_supplier = {} for tender in self.browse(cr, uid, ids, context=context): if tender.state == 'done': raise osv.except_osv(_('Warning!'), _('You have already generate the purchase order(s).')) confirm = False #check that we have at least confirm one line for po_line in tender.po_line_ids: if po_line.state == 'confirmed': confirm = True break if not confirm: raise osv.except_osv(_('Warning!'), _('You have no line selected for buying.')) #check for complete RFQ for quotation in tender.purchase_ids: if (self.check_valid_quotation(cr, uid, quotation, context=context)): #use workflow to set PO state to confirm po.signal_purchase_confirm(cr, uid, [quotation.id]) #get other confirmed lines per supplier for po_line in tender.po_line_ids: #only take into account confirmed line that does not belong to already confirmed purchase order if po_line.state == 'confirmed' and po_line.order_id.state in ['draft', 'sent', 'bid']: if id_per_supplier.get(po_line.partner_id.id): id_per_supplier[po_line.partner_id.id].append(po_line) else: id_per_supplier[po_line.partner_id.id] = [po_line] #generate po based on supplier and cancel all previous RFQ ctx = context.copy() ctx['force_requisition_id'] = True for supplier, product_line in id_per_supplier.items(): #copy a quotation for this supplier and change order_line then validate it quotation_id = po.search(cr, uid, [('requisition_id', '=', tender.id), ('partner_id', '=', supplier)], limit=1)[0] vals = 
self._prepare_po_from_tender(cr, uid, tender, context=context) new_po = po.copy(cr, uid, quotation_id, default=vals, context=ctx) #duplicate po_line and change product_qty if needed and associate them to newly created PO for line in product_line: vals = self._prepare_po_line_from_tender(cr, uid, tender, line, new_po, context=context) poline.copy(cr, uid, line.id, default=vals, context=context) #use workflow to set new PO state to confirm po.signal_purchase_confirm(cr, uid, [new_po]) #cancel other orders self.cancel_unconfirmed_quotations(cr, uid, tender, context=context) #set tender to state done self.signal_done(cr, uid, [tender.id]) return True def cancel_unconfirmed_quotations(self, cr, uid, tender, context=None): #cancel other orders po = self.pool.get('purchase.order') for quotation in tender.purchase_ids: if quotation.state in ['draft', 'sent', 'bid']: self.pool.get('purchase.order').signal_purchase_cancel(cr, uid, [quotation.id]) po.message_post(cr, uid, [quotation.id], body=_('Cancelled by the call for bids associated to this request for quotation.'), context=context) return True class purchase_requisition_line(osv.osv): _name = "purchase.requisition.line" _description = "Purchase Requisition Line" _rec_name = 'product_id' _columns = { 'product_id': fields.many2one('product.product', 'Product'), 'product_uom_id': fields.many2one('product.uom', 'Product Unit of Measure'), 'product_qty': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure')), 'requisition_id': fields.many2one('purchase.requisition', 'Call for Bids', ondelete='cascade'), 'company_id': fields.related('requisition_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True), 'account_analytic_id': fields.many2one('account.analytic.account', 'Analytic Account',), 'schedule_date': fields.date('Scheduled Date'), } def onchange_product_id(self, cr, uid, ids, product_id, product_uom_id, parent_analytic_account, 
analytic_account, parent_date, date, context=None): """ Changes UoM and name if product_id changes. @param name: Name of the field @param product_id: Changed product_id @return: Dictionary of changed values """ value = {'product_uom_id': ''} if product_id: prod = self.pool.get('product.product').browse(cr, uid, product_id, context=context) value = {'product_uom_id': prod.uom_id.id, 'product_qty': 1.0} if not analytic_account: value.update({'account_analytic_id': parent_analytic_account}) if not date: value.update({'schedule_date': parent_date}) return {'value': value} _defaults = { 'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'purchase.requisition.line', context=c), } class purchase_order(osv.osv): _inherit = "purchase.order" _columns = { 'requisition_id': fields.many2one('purchase.requisition', 'Call for Bids'), } def wkf_confirm_order(self, cr, uid, ids, context=None): res = super(purchase_order, self).wkf_confirm_order(cr, uid, ids, context=context) proc_obj = self.pool.get('procurement.order') for po in self.browse(cr, uid, ids, context=context): if po.requisition_id and (po.requisition_id.exclusive == 'exclusive'): for order in po.requisition_id.purchase_ids: if order.id != po.id: proc_ids = proc_obj.search(cr, uid, [('purchase_id', '=', order.id)]) if proc_ids and po.state == 'confirmed': proc_obj.write(cr, uid, proc_ids, {'purchase_id': po.id}) self.signal_purchase_cancel(cr, uid, [order.id]) po.requisition_id.tender_done(context=context) return res def copy(self, cr, uid, id, default=None, context=None): if context is None: context = {} if not context.get('force_requisition_id'): default = default or {} default.update({'requisition_id': False}) return super(purchase_order, self).copy(cr, uid, id, default=default, context=context) def _prepare_order_line_move(self, cr, uid, order, order_line, picking_id, group_id, context=None): stock_move_lines = super(purchase_order, self)._prepare_order_line_move(cr, 
uid, order, order_line, picking_id, group_id, context=context) if order.requisition_id and order.requisition_id.procurement_id and order.requisition_id.procurement_id.move_dest_id: for i in range(0, len(stock_move_lines)): stock_move_lines[i]['move_dest_id'] = order.requisition_id.procurement_id.move_dest_id.id return stock_move_lines class purchase_order_line(osv.osv): _inherit = 'purchase.order.line' _columns = { 'quantity_bid': fields.float('Quantity Bid', digits_compute=dp.get_precision('Product Unit of Measure'), help="Technical field for not loosing the initial information about the quantity proposed in the bid"), } def action_draft(self, cr, uid, ids, context=None): self.write(cr, uid, ids, {'state': 'draft'}, context=context) def action_confirm(self, cr, uid, ids, context=None): super(purchase_order_line, self).action_confirm(cr, uid, ids, context=context) for element in self.browse(cr, uid, ids, context=context): if not element.quantity_bid: self.write(cr, uid, ids, {'quantity_bid': element.product_qty}, context=context) return True def generate_po(self, cr, uid, tender_id, context=None): #call generate_po from tender with active_id. 
Called from js widget return self.pool.get('purchase.requisition').generate_po(cr, uid, [tender_id], context=context) class product_product(osv.osv): _inherit = 'product.product' _columns = { 'purchase_requisition': fields.boolean('Call for Bids', help="Check this box to generate Call for Bids instead of generating requests for quotation from procurement.") } class procurement_order(osv.osv): _inherit = 'procurement.order' _columns = { 'requisition_id': fields.many2one('purchase.requisition', 'Latest Requisition') } def _run(self, cr, uid, procurement, context=None): requisition_obj = self.pool.get('purchase.requisition') warehouse_obj = self.pool.get('stock.warehouse') if procurement.rule_id and procurement.rule_id.action == 'buy' and procurement.product_id.purchase_requisition: warehouse_id = warehouse_obj.search(cr, uid, [('company_id', '=', procurement.company_id.id)], context=context) requisition_id = requisition_obj.create(cr, uid, { 'origin': procurement.origin, 'date_end': procurement.date_planned, 'warehouse_id': warehouse_id and warehouse_id[0] or False, 'company_id': procurement.company_id.id, 'procurement_id': procurement.id, 'line_ids': [(0, 0, { 'product_id': procurement.product_id.id, 'product_uom_id': procurement.product_uom.id, 'product_qty': procurement.product_qty })], }) self.message_post(cr, uid, [procurement.id], body=_("Purchase Requisition created"), context=context) return self.write(cr, uid, [procurement.id], {'requisition_id': requisition_id}, context=context) return super(procurement_order, self)._run(cr, uid, procurement, context=context) def _check(self, cr, uid, procurement, context=None): requisition_obj = self.pool.get('purchase.requisition') if procurement.rule_id and procurement.rule_id.action == 'buy' and procurement.product_id.purchase_requisition: if procurement.requisition_id.state == 'done': if any([purchase.shipped for purchase in procurement.requisition_id.purchase_ids]): return True return False return 
super(procurement_order, self)._check(cr, uid, procurement, context=context) # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
kevinmel2000/brython
www/src/Lib/unittest/test/test_assertions.py
738
15398
"""Tests for unittest's assertion methods and their failure messages.

Test_Assertions covers behaviour of individual asserts (almost-equal,
assertRaises, assertNotRegex); TestLongMessage pins the exact failure-message
text produced with longMessage on/off, with and without a user-supplied msg.
"""
import datetime
import warnings
import unittest
from itertools import product


class Test_Assertions(unittest.TestCase):
    def test_AlmostEqual(self):
        self.assertAlmostEqual(1.00000001, 1.0)
        self.assertNotAlmostEqual(1.0000001, 1.0)
        self.assertRaises(self.failureException,
                          self.assertAlmostEqual, 1.0000001, 1.0)
        self.assertRaises(self.failureException,
                          self.assertNotAlmostEqual, 1.00000001, 1.0)

        self.assertAlmostEqual(1.1, 1.0, places=0)
        self.assertRaises(self.failureException,
                          self.assertAlmostEqual, 1.1, 1.0, places=1)

        # complex values are compared via abs(a - b), so these exercise that path
        self.assertAlmostEqual(0, .1+.1j, places=0)
        self.assertNotAlmostEqual(0, .1+.1j, places=1)
        self.assertRaises(self.failureException,
                          self.assertAlmostEqual, 0, .1+.1j, places=1)
        self.assertRaises(self.failureException,
                          self.assertNotAlmostEqual, 0, .1+.1j, places=0)

        # inf == inf must short-circuit the rounding logic
        self.assertAlmostEqual(float('inf'), float('inf'))
        self.assertRaises(self.failureException, self.assertNotAlmostEqual,
                          float('inf'), float('inf'))

    def test_AmostEqualWithDelta(self):
        self.assertAlmostEqual(1.1, 1.0, delta=0.5)
        self.assertAlmostEqual(1.0, 1.1, delta=0.5)
        self.assertNotAlmostEqual(1.1, 1.0, delta=0.05)
        self.assertNotAlmostEqual(1.0, 1.1, delta=0.05)

        self.assertRaises(self.failureException, self.assertAlmostEqual,
                          1.1, 1.0, delta=0.05)
        self.assertRaises(self.failureException, self.assertNotAlmostEqual,
                          1.1, 1.0, delta=0.5)

        # places and delta are mutually exclusive
        self.assertRaises(TypeError, self.assertAlmostEqual,
                          1.1, 1.0, places=2, delta=2)
        self.assertRaises(TypeError, self.assertNotAlmostEqual,
                          1.1, 1.0, places=2, delta=2)

        # delta also works for non-numeric types that support subtraction
        first = datetime.datetime.now()
        second = first + datetime.timedelta(seconds=10)
        self.assertAlmostEqual(first, second,
                               delta=datetime.timedelta(seconds=20))
        self.assertNotAlmostEqual(first, second,
                                  delta=datetime.timedelta(seconds=5))

    def test_assertRaises(self):
        def _raise(e):
            raise e
        self.assertRaises(KeyError, _raise, KeyError)
        self.assertRaises(KeyError, _raise, KeyError("key"))
        try:
            self.assertRaises(KeyError, lambda: None)
        except self.failureException as e:
            self.assertIn("KeyError not raised", str(e))
        else:
            self.fail("assertRaises() didn't fail")
        try:
            self.assertRaises(KeyError, _raise, ValueError)
        except ValueError:
            pass
        else:
            self.fail("assertRaises() didn't let exception pass through")
        # context-manager form: cm.exception must be the very object raised
        with self.assertRaises(KeyError) as cm:
            try:
                raise KeyError
            except Exception as e:
                exc = e
                raise
        self.assertIs(cm.exception, exc)

        with self.assertRaises(KeyError):
            raise KeyError("key")
        try:
            with self.assertRaises(KeyError):
                pass
        except self.failureException as e:
            self.assertIn("KeyError not raised", str(e))
        else:
            self.fail("assertRaises() didn't fail")
        try:
            with self.assertRaises(KeyError):
                raise ValueError
        except ValueError:
            pass
        else:
            self.fail("assertRaises() didn't let exception pass through")

    def testAssertNotRegex(self):
        self.assertNotRegex('Ala ma kota', r'r+')
        try:
            self.assertNotRegex('Ala ma kota', r'k.t', 'Message')
        except self.failureException as e:
            # failure text must quote the matched substring and the user msg
            self.assertIn("'kot'", e.args[0])
            self.assertIn('Message', e.args[0])
        else:
            self.fail('assertNotRegex should have failed.')


class TestLongMessage(unittest.TestCase):
    """Test that the individual asserts honour longMessage.
    This actually tests all the message behaviour for
    asserts that use longMessage."""

    def setUp(self):
        # Two throwaway TestCase subclasses differing only in longMessage,
        # so each assert can be checked in both modes.
        class TestableTestFalse(unittest.TestCase):
            longMessage = False
            failureException = self.failureException

            def testTest(self):
                pass

        class TestableTestTrue(unittest.TestCase):
            longMessage = True
            failureException = self.failureException

            def testTest(self):
                pass

        self.testableTrue = TestableTestTrue('testTest')
        self.testableFalse = TestableTestFalse('testTest')

    def testDefault(self):
        # longMessage defaults to True on TestCase itself
        self.assertTrue(unittest.TestCase.longMessage)

    def test_formatMsg(self):
        self.assertEqual(self.testableFalse._formatMessage(None, "foo"), "foo")
        self.assertEqual(self.testableFalse._formatMessage("foo", "bar"), "foo")

        self.assertEqual(self.testableTrue._formatMessage(None, "foo"), "foo")
        self.assertEqual(self.testableTrue._formatMessage("foo", "bar"), "bar : foo")

        # This blows up if _formatMessage uses string concatenation
        self.testableTrue._formatMessage(object(), 'foo')

    def test_formatMessage_unicode_error(self):
        one = ''.join(chr(i) for i in range(255))
        # this used to cause a UnicodeDecodeError constructing msg
        self.testableTrue._formatMessage(one, '\uFFFD')

    def assertMessages(self, methodName, args, errors):
        """
        Check that methodName(*args) raises the correct error messages.
        errors should be a list of 4 regex that match the error when:
          1) longMessage = False and no msg passed;
          2) longMessage = False and msg passed;
          3) longMessage = True and no msg passed;
          4) longMessage = True and msg passed;
        """
        def getMethod(i):
            # first two regexes run against the longMessage=False instance
            useTestableFalse = i < 2
            if useTestableFalse:
                test = self.testableFalse
            else:
                test = self.testableTrue
            return getattr(test, methodName)

        for i, expected_regex in enumerate(errors):
            testMethod = getMethod(i)
            kwargs = {}
            withMsg = i % 2
            if withMsg:
                kwargs = {"msg": "oops"}

            with self.assertRaisesRegex(self.failureException,
                                        expected_regex=expected_regex):
                testMethod(*args, **kwargs)

    def testAssertTrue(self):
        self.assertMessages('assertTrue', (False,),
                            ["^False is not true$", "^oops$", "^False is not true$",
                             "^False is not true : oops$"])

    def testAssertFalse(self):
        self.assertMessages('assertFalse', (True,),
                            ["^True is not false$", "^oops$", "^True is not false$",
                             "^True is not false : oops$"])

    def testNotEqual(self):
        self.assertMessages('assertNotEqual', (1, 1),
                            ["^1 == 1$", "^oops$", "^1 == 1$",
                             "^1 == 1 : oops$"])

    def testAlmostEqual(self):
        self.assertMessages('assertAlmostEqual', (1, 2),
                            ["^1 != 2 within 7 places$", "^oops$",
                             "^1 != 2 within 7 places$", "^1 != 2 within 7 places : oops$"])

    def testNotAlmostEqual(self):
        self.assertMessages('assertNotAlmostEqual', (1, 1),
                            ["^1 == 1 within 7 places$", "^oops$",
                             "^1 == 1 within 7 places$", "^1 == 1 within 7 places : oops$"])

    def test_baseAssertEqual(self):
        self.assertMessages('_baseAssertEqual', (1, 2),
                            ["^1 != 2$", "^oops$", "^1 != 2$", "^1 != 2 : oops$"])

    def testAssertSequenceEqual(self):
        # Error messages are multiline so not testing on full message
        # assertTupleEqual and assertListEqual delegate to this method
        self.assertMessages('assertSequenceEqual', ([], [None]),
                            ["\+ \[None\]$", "^oops$", r"\+ \[None\]$",
                             r"\+ \[None\] : oops$"])

    def testAssertSetEqual(self):
        self.assertMessages('assertSetEqual', (set(), set([None])),
                            ["None$", "^oops$", "None$",
                             "None : oops$"])

    def testAssertIn(self):
        self.assertMessages('assertIn', (None, []),
                            ['^None not found in \[\]$', "^oops$",
                             '^None not found in \[\]$',
                             '^None not found in \[\] : oops$'])

    def testAssertNotIn(self):
        self.assertMessages('assertNotIn', (None, [None]),
                            ['^None unexpectedly found in \[None\]$', "^oops$",
                             '^None unexpectedly found in \[None\]$',
                             '^None unexpectedly found in \[None\] : oops$'])

    def testAssertDictEqual(self):
        self.assertMessages('assertDictEqual', ({}, {'key': 'value'}),
                            [r"\+ \{'key': 'value'\}$", "^oops$",
                             "\+ \{'key': 'value'\}$",
                             "\+ \{'key': 'value'\} : oops$"])

    def testAssertDictContainsSubset(self):
        # assertDictContainsSubset is deprecated; silence the warning here
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", DeprecationWarning)

            self.assertMessages('assertDictContainsSubset', ({'key': 'value'}, {}),
                                ["^Missing: 'key'$", "^oops$",
                                 "^Missing: 'key'$",
                                 "^Missing: 'key' : oops$"])

    def testAssertMultiLineEqual(self):
        self.assertMessages('assertMultiLineEqual', ("", "foo"),
                            [r"\+ foo$", "^oops$",
                             r"\+ foo$",
                             r"\+ foo : oops$"])

    def testAssertLess(self):
        self.assertMessages('assertLess', (2, 1),
                            ["^2 not less than 1$", "^oops$",
                             "^2 not less than 1$", "^2 not less than 1 : oops$"])

    def testAssertLessEqual(self):
        self.assertMessages('assertLessEqual', (2, 1),
                            ["^2 not less than or equal to 1$", "^oops$",
                             "^2 not less than or equal to 1$",
                             "^2 not less than or equal to 1 : oops$"])

    def testAssertGreater(self):
        self.assertMessages('assertGreater', (1, 2),
                            ["^1 not greater than 2$", "^oops$",
                             "^1 not greater than 2$",
                             "^1 not greater than 2 : oops$"])

    def testAssertGreaterEqual(self):
        self.assertMessages('assertGreaterEqual', (1, 2),
                            ["^1 not greater than or equal to 2$", "^oops$",
                             "^1 not greater than or equal to 2$",
                             "^1 not greater than or equal to 2 : oops$"])

    def testAssertIsNone(self):
        self.assertMessages('assertIsNone', ('not None',),
                            ["^'not None' is not None$", "^oops$",
                             "^'not None' is not None$",
                             "^'not None' is not None : oops$"])

    def testAssertIsNotNone(self):
        self.assertMessages('assertIsNotNone', (None,),
                            ["^unexpectedly None$", "^oops$",
                             "^unexpectedly None$",
                             "^unexpectedly None : oops$"])

    def testAssertIs(self):
        self.assertMessages('assertIs', (None, 'foo'),
                            ["^None is not 'foo'$", "^oops$",
                             "^None is not 'foo'$",
                             "^None is not 'foo' : oops$"])

    def testAssertIsNot(self):
        self.assertMessages('assertIsNot', (None, None),
                            ["^unexpectedly identical: None$", "^oops$",
                             "^unexpectedly identical: None$",
                             "^unexpectedly identical: None : oops$"])

    def assertMessagesCM(self, methodName, args, func, errors):
        """
        Check that the correct error messages are raised while executing:
          with method(*args):
              func()
        *errors* should be a list of 4 regex that match the error when:
          1) longMessage = False and no msg passed;
          2) longMessage = False and msg passed;
          3) longMessage = True and no msg passed;
          4) longMessage = True and msg passed;
        """
        # pair each (instance, kwargs) combination with its expected regex
        p = product((self.testableFalse, self.testableTrue),
                    ({}, {"msg": "oops"}))
        for (cls, kwargs), err in zip(p, errors):
            method = getattr(cls, methodName)
            with self.assertRaisesRegex(cls.failureException, err):
                with method(*args, **kwargs) as cm:
                    func()

    def testAssertRaises(self):
        self.assertMessagesCM('assertRaises', (TypeError,), lambda: None,
                              ['^TypeError not raised$', '^oops$',
                               '^TypeError not raised$',
                               '^TypeError not raised : oops$'])

    def testAssertRaisesRegex(self):
        # test error not raised
        self.assertMessagesCM('assertRaisesRegex', (TypeError, 'unused regex'),
                              lambda: None,
                              ['^TypeError not raised$', '^oops$',
                               '^TypeError not raised$',
                               '^TypeError not raised : oops$'])

        # test error raised but with wrong message
        def raise_wrong_message():
            raise TypeError('foo')
        self.assertMessagesCM('assertRaisesRegex', (TypeError, 'regex'),
                              raise_wrong_message,
                              ['^"regex" does not match "foo"$', '^oops$',
                               '^"regex" does not match "foo"$',
                               '^"regex" does not match "foo" : oops$'])

    def testAssertWarns(self):
        self.assertMessagesCM('assertWarns', (UserWarning,), lambda: None,
                              ['^UserWarning not triggered$', '^oops$',
                               '^UserWarning not triggered$',
                               '^UserWarning not triggered : oops$'])

    def testAssertWarnsRegex(self):
        # test error not raised
        self.assertMessagesCM('assertWarnsRegex', (UserWarning, 'unused regex'),
                              lambda: None,
                              ['^UserWarning not triggered$', '^oops$',
                               '^UserWarning not triggered$',
                               '^UserWarning not triggered : oops$'])

        # test warning raised but with wrong message
        def raise_wrong_message():
            warnings.warn('foo')
        self.assertMessagesCM('assertWarnsRegex', (UserWarning, 'regex'),
                              raise_wrong_message,
                              ['^"regex" does not match "foo"$', '^oops$',
                               '^"regex" does not match "foo"$',
                               '^"regex" does not match "foo" : oops$'])
bsd-3-clause
gptech/ansible
lib/ansible/module_utils/database.py
401
5839
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c) 2014, Toshio Kuratomi <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
#    * Redistributions of source code must retain the above copyright
#      notice, this list of conditions and the following disclaimer.
#    * Redistributions in binary form must reproduce the above copyright notice,
#      this list of conditions and the following disclaimer in the documentation
#      and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


class SQLParseError(Exception):
    """Raised when an identifier cannot be parsed or violates dot-level limits."""
    pass


class UnclosedQuoteError(SQLParseError):
    """Raised when a quoted identifier has no closing quote character."""
    pass


# Maximum number of dot-separated levels that may be used to name each kind
# of object.  For example, a database column can be specified by up to 4
# levels: database.schema.table.column
_PG_IDENTIFIER_TO_DOT_LEVEL = dict(database=1, schema=2, table=3, column=4, role=1)
_MYSQL_IDENTIFIER_TO_DOT_LEVEL = dict(database=1, table=2, column=3, role=1, vars=1)


def _find_end_quote(identifier, quote_char):
    """Return the index of the quote that closes *identifier*.

    *identifier* is the text that follows an opening quote character.
    Doubled quote characters (the SQL way of embedding a quote inside a
    quoted name) are skipped.  Raises UnclosedQuoteError when no closing
    quote exists.
    """
    offset = 0
    while True:
        try:
            pos = identifier.index(quote_char)
        except ValueError:
            raise UnclosedQuoteError
        offset += pos
        if pos + 1 >= len(identifier):
            # Quote is the last character: it closes the identifier.
            return offset
        if identifier[pos + 1] != quote_char:
            # Not a doubled quote, so this one terminates the identifier.
            return offset
        # Doubled quote: skip both characters and keep scanning.
        identifier = identifier[pos + 2:]
        offset += 2


def _identifier_parse(identifier, quote_char):
    """Split a dotted SQL identifier into a list of quoted fragments.

    Every fragment in the returned list is wrapped in *quote_char*;
    quote characters inside unquoted fragments are doubled.  Fragments
    the user already quoted are passed through untouched.  Raises
    SQLParseError for empty names and badly escaped quotes.
    """
    if not identifier:
        raise SQLParseError('Identifier name unspecified or unquoted trailing dot')

    def _quote(fragment):
        # Double embedded quote chars, then wrap the fragment.
        doubled = fragment.replace(quote_char, quote_char * 2)
        return quote_char + doubled + quote_char

    already_quoted = False
    if identifier.startswith(quote_char):
        already_quoted = True
        try:
            end_quote = _find_end_quote(identifier[1:], quote_char=quote_char) + 1
        except UnclosedQuoteError:
            # Lone opening quote: fall through and treat as unquoted text.
            already_quoted = False
        else:
            if end_quote >= len(identifier) - 1:
                # The quoted region spans the whole string.
                parts = [identifier]
            elif identifier[end_quote + 1] == '.':
                # Quoted head followed by further dotted fragments.
                dot = end_quote + 1
                parts = _identifier_parse(identifier[dot + 1:], quote_char)
                parts.insert(0, identifier[:dot])
            else:
                raise SQLParseError('User escaped identifiers must escape extra quotes')

    if not already_quoted:
        try:
            dot = identifier.index('.')
        except ValueError:
            # No dot at all: quote the whole name.
            parts = [_quote(identifier)]
        else:
            if dot == 0 or dot >= len(identifier) - 1:
                # Leading or trailing dot: keep the whole thing as one name.
                parts = [_quote(identifier)]
            else:
                parts = _identifier_parse(identifier[dot + 1:], quote_char)
                parts.insert(0, _quote(identifier[:dot]))

    return parts


def pg_quote_identifier(identifier, id_type):
    """Return *identifier* safely quoted for PostgreSQL.

    *id_type* is one of the keys of _PG_IDENTIFIER_TO_DOT_LEVEL and bounds
    how many dotted levels the identifier may contain.
    """
    fragments = _identifier_parse(identifier, quote_char='"')
    max_dots = _PG_IDENTIFIER_TO_DOT_LEVEL[id_type]
    if len(fragments) > max_dots:
        raise SQLParseError('PostgreSQL does not support %s with more than %i dots' % (id_type, max_dots))
    return '.'.join(fragments)


def mysql_quote_identifier(identifier, id_type):
    """Return *identifier* safely quoted for MySQL.

    *id_type* is one of the keys of _MYSQL_IDENTIFIER_TO_DOT_LEVEL and bounds
    how many dotted levels the identifier may contain.
    """
    fragments = _identifier_parse(identifier, quote_char='`')
    max_dots = _MYSQL_IDENTIFIER_TO_DOT_LEVEL[id_type]
    if len(fragments) > max_dots:
        raise SQLParseError('MySQL does not support %s with more than %i dots' % (id_type, max_dots))
    # MySQL allows a bare * (all columns) as the final part of a dotted name.
    return '.'.join('*' if fragment == '`*`' else fragment for fragment in fragments)
gpl-3.0
julienmalard/Tikon
tikon/datos/valid.py
1
2654
"""Validation result containers for tikon.

Builds a tree of validation objects (per-index, per-result, per-branch) whose
criteria values are combined with the weighting functions supplied by a
Procesador.  Identifiers are Spanish (``símismo`` == ``self``).
"""
from tikon.datos.proc import ens, n_existen, prom_vals, suma_pesos, Procesador


def gen_proc_valid(proc):
    # Coerce *proc* into a ProcesadorValid: pass instances through, otherwise
    # treat it as the value-criteria function(s) for a new processor.
    if isinstance(proc, ProcesadorValid):
        return proc
    return ProcesadorValid(f_vals=proc)


class PlantillaValid(object):
    # Base container: a dict of criteria values plus an aggregate weight.
    def __init__(símismo, criterios, peso):
        símismo.criterios = criterios
        símismo.peso = peso

    def __getitem__(símismo, itema):
        # Index directly into the criteria dict.
        return símismo.criterios[itema]

    def a_dic(símismo):
        # Serialize to a plain dict ("a_dic" = "to dict").
        return {'crits': símismo.criterios, 'peso': símismo.peso}

    def __repr__(símismo):
        return repr(símismo.a_dic())


class ValidÍnds(PlantillaValid):
    # Validation for one combination of indices (e.g. one coordinate slice).
    def __init__(símismo, criterios, peso, índs):
        símismo.índs = índs
        super().__init__(criterios, peso)

    def a_dic(símismo):
        # Index values are stringified so the dict is serializable.
        return {'índs': {ll: str(v) for ll, v in símismo.índs.items()}, **super().a_dic()}


class ValidRes(PlantillaValid):
    # Validation of one result: combines a list of per-index validations
    # using the processor's combination functions.
    def __init__(símismo, valids, proc):
        símismo.valids = valids
        # For each criterion, weight-combine the children's values; None when
        # there are no child validations at all.
        criterios = {
            cr: proc.combin(vals=[v[cr] for v in valids], pesos=[v.peso for v in valids]).item() if valids else None
            for cr in proc.criterios
        }
        # .item() suggests the combiners return 0-d array-like values —
        # presumably numpy/xarray scalars; confirm against tikon.datos.proc.
        peso = proc.combin_pesos([v.peso for v in valids]).item() if valids else 0
        super().__init__(criterios, peso)

    def a_dic(símismo):
        return {'valids': [v.a_dic() for v in símismo.valids], **super().a_dic()}


class Valid(PlantillaValid):
    # Top-level validation over named branches (ramas); each branch is itself
    # a PlantillaValid-like object.
    def __init__(símismo, ramas, proc):
        # Zero-weight branches are excluded from the per-criterion combination
        # but still contribute to the (summed) total weight below.
        criterios = {
            cr: proc.combin(
                vals=[v[cr] for v in ramas.values() if v.peso],
                pesos=[v.peso for v in ramas.values() if v.peso]
            ).item() for cr in proc.criterios
        }
        peso = proc.combin_pesos([v.peso for v in ramas.values()]).item()
        símismo.ramas = ramas
        super().__init__(criterios, peso)

    def a_dic(símismo):
        # Only branches with at least one truthy criterion value are included.
        return {
            **{
                str(ll): v.a_dic() for ll, v in símismo.ramas.items()
                if any(crt for crt in v.a_dic()['crits'].values())
            },
            **super().a_dic()
        }

    def __getitem__(símismo, itema):
        return símismo.criterios[itema]


class ProcesadorValid(Procesador):
    # Processor specialized for validation: normalizes f_vals into a
    # name -> function mapping and exposes the names as .criterios.
    def __init__(símismo, f_vals=ens, f_pesos=n_existen, f_combin=prom_vals, f_combin_pesos=suma_pesos):
        if callable(f_vals):
            # Single function: wrap in a list before mapping by name.
            f_vals = [f_vals]
        if isinstance(f_vals, list):
            f_vals = {f.__name__: f for f in f_vals}
        símismo.criterios = list(f_vals)
        # The value function applied per (observed, simulated) pair returns a
        # scalar per criterion; signature of Procesador.__init__ is defined in
        # tikon.datos.proc (not visible here).
        super().__init__(
            lambda o, s: {ll: v(o, s).item() for ll, v in f_vals.items()},
            f_pesos, f_combin, f_combin_pesos
        )
agpl-3.0
mmezzavilla/ns3-mmwave
src/core/bindings/modulegen__gcc_ILP32.py
15
321233
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers import pybindgen.settings import warnings class ErrorHandler(pybindgen.settings.ErrorHandler): def handle_error(self, wrapper, exception, traceback_): warnings.warn("exception %r in wrapper %s" % (exception, wrapper)) return True pybindgen.settings.error_handler = ErrorHandler() import sys def module_init(): root_module = Module('ns.core', cpp_namespace='::ns3') return root_module def register_types(module): root_module = module.get_root() ## log.h (module 'core'): ns3::LogLevel [enumeration] module.add_enum('LogLevel', ['LOG_NONE', 'LOG_ERROR', 'LOG_LEVEL_ERROR', 'LOG_WARN', 'LOG_LEVEL_WARN', 'LOG_DEBUG', 'LOG_LEVEL_DEBUG', 'LOG_INFO', 'LOG_LEVEL_INFO', 'LOG_FUNCTION', 'LOG_LEVEL_FUNCTION', 'LOG_LOGIC', 'LOG_LEVEL_LOGIC', 'LOG_ALL', 'LOG_LEVEL_ALL', 'LOG_PREFIX_FUNC', 'LOG_PREFIX_TIME', 'LOG_PREFIX_NODE', 'LOG_PREFIX_LEVEL', 'LOG_PREFIX_ALL']) ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class] module.add_class('AttributeConstructionList') ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct] module.add_class('Item', outer_class=root_module['ns3::AttributeConstructionList']) ## callback.h (module 'core'): ns3::CallbackBase [class] module.add_class('CallbackBase') ## command-line.h (module 'core'): ns3::CommandLine [class] module.add_class('CommandLine', allow_subclassing=True) ## system-mutex.h (module 'core'): ns3::CriticalSection [class] module.add_class('CriticalSection') ## event-garbage-collector.h (module 'core'): ns3::EventGarbageCollector [class] module.add_class('EventGarbageCollector') ## event-id.h (module 'core'): ns3::EventId [class] module.add_class('EventId') ## global-value.h (module 'core'): ns3::GlobalValue [class] module.add_class('GlobalValue') ## hash.h (module 'core'): ns3::Hasher [class] module.add_class('Hasher') ## int-to-type.h (module 'core'): ns3::IntToType<0> [struct] 
module.add_class('IntToType', template_parameters=['0']) ## int-to-type.h (module 'core'): ns3::IntToType<0>::v_e [enumeration] module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 0 >']) ## int-to-type.h (module 'core'): ns3::IntToType<1> [struct] module.add_class('IntToType', template_parameters=['1']) ## int-to-type.h (module 'core'): ns3::IntToType<1>::v_e [enumeration] module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 1 >']) ## int-to-type.h (module 'core'): ns3::IntToType<2> [struct] module.add_class('IntToType', template_parameters=['2']) ## int-to-type.h (module 'core'): ns3::IntToType<2>::v_e [enumeration] module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 2 >']) ## int-to-type.h (module 'core'): ns3::IntToType<3> [struct] module.add_class('IntToType', template_parameters=['3']) ## int-to-type.h (module 'core'): ns3::IntToType<3>::v_e [enumeration] module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 3 >']) ## int-to-type.h (module 'core'): ns3::IntToType<4> [struct] module.add_class('IntToType', template_parameters=['4']) ## int-to-type.h (module 'core'): ns3::IntToType<4>::v_e [enumeration] module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 4 >']) ## int-to-type.h (module 'core'): ns3::IntToType<5> [struct] module.add_class('IntToType', template_parameters=['5']) ## int-to-type.h (module 'core'): ns3::IntToType<5>::v_e [enumeration] module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 5 >']) ## int-to-type.h (module 'core'): ns3::IntToType<6> [struct] module.add_class('IntToType', template_parameters=['6']) ## int-to-type.h (module 'core'): ns3::IntToType<6>::v_e [enumeration] module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 6 >']) ## log.h (module 'core'): ns3::LogComponent [class] module.add_class('LogComponent') ## names.h (module 'core'): ns3::Names [class] module.add_class('Names') ## 
non-copyable.h (module 'core'): ns3::NonCopyable [class] module.add_class('NonCopyable', destructor_visibility='protected') ## object-base.h (module 'core'): ns3::ObjectBase [class] module.add_class('ObjectBase', allow_subclassing=True) ## object.h (module 'core'): ns3::ObjectDeleter [struct] module.add_class('ObjectDeleter') ## object-factory.h (module 'core'): ns3::ObjectFactory [class] module.add_class('ObjectFactory') ## log.h (module 'core'): ns3::ParameterLogger [class] module.add_class('ParameterLogger') ## random-variable-stream-helper.h (module 'core'): ns3::RandomVariableStreamHelper [class] module.add_class('RandomVariableStreamHelper') ## rng-seed-manager.h (module 'core'): ns3::RngSeedManager [class] module.add_class('RngSeedManager') ## rng-stream.h (module 'core'): ns3::RngStream [class] module.add_class('RngStream') ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simulator.h (module 'core'): ns3::Simulator [class] module.add_class('Simulator', destructor_visibility='private') ## simulator.h (module 'core'): ns3::Simulator [enumeration] module.add_enum('', ['NO_CONTEXT'], outer_class=root_module['ns3::Simulator']) ## singleton.h (module 'core'): ns3::Singleton<ns3::DesMetrics> [class] module.add_class('Singleton', template_parameters=['ns3::DesMetrics'], parent=root_module['ns3::NonCopyable']) ## system-condition.h (module 'core'): ns3::SystemCondition [class] module.add_class('SystemCondition') ## system-mutex.h (module 'core'): ns3::SystemMutex [class] module.add_class('SystemMutex') ## system-wall-clock-ms.h (module 'core'): ns3::SystemWallClockMs [class] 
module.add_class('SystemWallClockMs') ## nstime.h (module 'core'): ns3::TimeWithUnit [class] module.add_class('TimeWithUnit') ## timer.h (module 'core'): ns3::Timer [class] module.add_class('Timer') ## timer.h (module 'core'): ns3::Timer::DestroyPolicy [enumeration] module.add_enum('DestroyPolicy', ['CANCEL_ON_DESTROY', 'REMOVE_ON_DESTROY', 'CHECK_ON_DESTROY'], outer_class=root_module['ns3::Timer']) ## timer.h (module 'core'): ns3::Timer::State [enumeration] module.add_enum('State', ['RUNNING', 'EXPIRED', 'SUSPENDED'], outer_class=root_module['ns3::Timer']) ## timer-impl.h (module 'core'): ns3::TimerImpl [class] module.add_class('TimerImpl', allow_subclassing=True) ## type-id.h (module 'core'): ns3::TypeId [class] module.add_class('TypeId') ## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration] module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId']) ## type-id.h (module 'core'): ns3::TypeId::SupportLevel [enumeration] module.add_enum('SupportLevel', ['SUPPORTED', 'DEPRECATED', 'OBSOLETE'], outer_class=root_module['ns3::TypeId']) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct] module.add_class('AttributeInformation', outer_class=root_module['ns3::TypeId']) ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct] module.add_class('TraceSourceInformation', outer_class=root_module['ns3::TypeId']) ## vector.h (module 'core'): ns3::Vector2D [class] module.add_class('Vector2D') ## vector.h (module 'core'): ns3::Vector3D [class] module.add_class('Vector3D') ## watchdog.h (module 'core'): ns3::Watchdog [class] module.add_class('Watchdog') ## empty.h (module 'core'): ns3::empty [class] module.add_class('empty') ## int64x64-double.h (module 'core'): ns3::int64x64_t [class] module.add_class('int64x64_t') ## int64x64-double.h (module 'core'): ns3::int64x64_t::impl_type [enumeration] module.add_enum('impl_type', ['int128_impl', 'cairo_impl', 
'ld_impl'], outer_class=root_module['ns3::int64x64_t']) ## des-metrics.h (module 'core'): ns3::DesMetrics [class] module.add_class('DesMetrics', parent=root_module['ns3::Singleton< ns3::DesMetrics >']) ## object.h (module 'core'): ns3::Object [class] module.add_class('Object', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >']) ## object.h (module 'core'): ns3::Object::AggregateIterator [class] module.add_class('AggregateIterator', outer_class=root_module['ns3::Object']) ## random-variable-stream.h (module 'core'): ns3::RandomVariableStream [class] module.add_class('RandomVariableStream', parent=root_module['ns3::Object']) ## scheduler.h (module 'core'): ns3::Scheduler [class] module.add_class('Scheduler', parent=root_module['ns3::Object']) ## scheduler.h (module 'core'): ns3::Scheduler::Event [struct] module.add_class('Event', outer_class=root_module['ns3::Scheduler']) ## scheduler.h (module 'core'): ns3::Scheduler::EventKey [struct] module.add_class('EventKey', outer_class=root_module['ns3::Scheduler']) ## random-variable-stream.h (module 'core'): ns3::SequentialRandomVariable [class] module.add_class('SequentialRandomVariable', parent=root_module['ns3::RandomVariableStream']) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::AttributeChecker', 
'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::EventImpl', 'ns3::empty', 'ns3::DefaultDeleter<ns3::EventImpl>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::FdReader, ns3::empty, ns3::DefaultDeleter<ns3::FdReader> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::FdReader', 'ns3::empty', 'ns3::DefaultDeleter<ns3::FdReader>'], parent=root_module['ns3::empty'], 
memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::RefCountBase, ns3::empty, ns3::DefaultDeleter<ns3::RefCountBase> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::RefCountBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::RefCountBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::SystemThread, ns3::empty, ns3::DefaultDeleter<ns3::SystemThread> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::SystemThread', 'ns3::empty', 'ns3::DefaultDeleter<ns3::SystemThread>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], 
memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simulator-impl.h (module 'core'): ns3::SimulatorImpl [class] module.add_class('SimulatorImpl', parent=root_module['ns3::Object']) ## synchronizer.h (module 'core'): ns3::Synchronizer [class] module.add_class('Synchronizer', parent=root_module['ns3::Object']) ## system-thread.h (module 'core'): ns3::SystemThread [class] module.add_class('SystemThread', parent=root_module['ns3::SimpleRefCount< ns3::SystemThread, ns3::empty, ns3::DefaultDeleter<ns3::SystemThread> >']) ## nstime.h (module 'core'): ns3::Time [class] module.add_class('Time') ## nstime.h (module 'core'): ns3::Time::Unit [enumeration] module.add_enum('Unit', ['Y', 'D', 'H', 'MIN', 'S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time']) ## nstime.h (module 'core'): ns3::Time [class] root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t']) ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class] module.add_class('TraceSourceAccessor', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >']) ## random-variable-stream.h (module 'core'): ns3::TriangularRandomVariable [class] module.add_class('TriangularRandomVariable', parent=root_module['ns3::RandomVariableStream']) ## random-variable-stream.h (module 'core'): ns3::UniformRandomVariable [class] module.add_class('UniformRandomVariable', parent=root_module['ns3::RandomVariableStream']) ## wall-clock-synchronizer.h (module 'core'): ns3::WallClockSynchronizer [class] module.add_class('WallClockSynchronizer', parent=root_module['ns3::Synchronizer']) ## random-variable-stream.h (module 'core'): ns3::WeibullRandomVariable [class] module.add_class('WeibullRandomVariable', parent=root_module['ns3::RandomVariableStream']) ## random-variable-stream.h (module 'core'): ns3::ZetaRandomVariable 
[class] module.add_class('ZetaRandomVariable', parent=root_module['ns3::RandomVariableStream']) ## random-variable-stream.h (module 'core'): ns3::ZipfRandomVariable [class] module.add_class('ZipfRandomVariable', parent=root_module['ns3::RandomVariableStream']) ## attribute.h (module 'core'): ns3::AttributeAccessor [class] module.add_class('AttributeAccessor', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >']) ## attribute.h (module 'core'): ns3::AttributeChecker [class] module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >']) ## attribute.h (module 'core'): ns3::AttributeValue [class] module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >']) ## boolean.h (module 'core'): ns3::BooleanChecker [class] module.add_class('BooleanChecker', parent=root_module['ns3::AttributeChecker']) ## boolean.h (module 'core'): ns3::BooleanValue [class] module.add_class('BooleanValue', parent=root_module['ns3::AttributeValue']) ## calendar-scheduler.h (module 'core'): ns3::CalendarScheduler [class] module.add_class('CalendarScheduler', parent=root_module['ns3::Scheduler']) ## callback.h (module 'core'): ns3::CallbackChecker [class] module.add_class('CallbackChecker', parent=root_module['ns3::AttributeChecker']) ## callback.h (module 'core'): ns3::CallbackImplBase [class] module.add_class('CallbackImplBase', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >']) ## callback.h (module 'core'): ns3::CallbackValue [class] module.add_class('CallbackValue', parent=root_module['ns3::AttributeValue']) ## random-variable-stream.h (module 'core'): 
ns3::ConstantRandomVariable [class] module.add_class('ConstantRandomVariable', parent=root_module['ns3::RandomVariableStream']) ## default-simulator-impl.h (module 'core'): ns3::DefaultSimulatorImpl [class] module.add_class('DefaultSimulatorImpl', parent=root_module['ns3::SimulatorImpl']) ## random-variable-stream.h (module 'core'): ns3::DeterministicRandomVariable [class] module.add_class('DeterministicRandomVariable', parent=root_module['ns3::RandomVariableStream']) ## double.h (module 'core'): ns3::DoubleValue [class] module.add_class('DoubleValue', parent=root_module['ns3::AttributeValue']) ## random-variable-stream.h (module 'core'): ns3::EmpiricalRandomVariable [class] module.add_class('EmpiricalRandomVariable', parent=root_module['ns3::RandomVariableStream']) ## attribute.h (module 'core'): ns3::EmptyAttributeAccessor [class] module.add_class('EmptyAttributeAccessor', parent=root_module['ns3::AttributeAccessor']) ## attribute.h (module 'core'): ns3::EmptyAttributeChecker [class] module.add_class('EmptyAttributeChecker', parent=root_module['ns3::AttributeChecker']) ## attribute.h (module 'core'): ns3::EmptyAttributeValue [class] module.add_class('EmptyAttributeValue', parent=root_module['ns3::AttributeValue']) ## enum.h (module 'core'): ns3::EnumChecker [class] module.add_class('EnumChecker', parent=root_module['ns3::AttributeChecker']) ## enum.h (module 'core'): ns3::EnumValue [class] module.add_class('EnumValue', parent=root_module['ns3::AttributeValue']) ## random-variable-stream.h (module 'core'): ns3::ErlangRandomVariable [class] module.add_class('ErlangRandomVariable', parent=root_module['ns3::RandomVariableStream']) ## event-impl.h (module 'core'): ns3::EventImpl [class] module.add_class('EventImpl', parent=root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >']) ## random-variable-stream.h (module 'core'): ns3::ExponentialRandomVariable [class] module.add_class('ExponentialRandomVariable', 
parent=root_module['ns3::RandomVariableStream']) ## unix-fd-reader.h (module 'core'): ns3::FdReader [class] module.add_class('FdReader', parent=root_module['ns3::SimpleRefCount< ns3::FdReader, ns3::empty, ns3::DefaultDeleter<ns3::FdReader> >']) ## random-variable-stream.h (module 'core'): ns3::GammaRandomVariable [class] module.add_class('GammaRandomVariable', parent=root_module['ns3::RandomVariableStream']) ## heap-scheduler.h (module 'core'): ns3::HeapScheduler [class] module.add_class('HeapScheduler', parent=root_module['ns3::Scheduler']) ## integer.h (module 'core'): ns3::IntegerValue [class] module.add_class('IntegerValue', parent=root_module['ns3::AttributeValue']) ## list-scheduler.h (module 'core'): ns3::ListScheduler [class] module.add_class('ListScheduler', parent=root_module['ns3::Scheduler']) ## random-variable-stream.h (module 'core'): ns3::LogNormalRandomVariable [class] module.add_class('LogNormalRandomVariable', parent=root_module['ns3::RandomVariableStream']) ## map-scheduler.h (module 'core'): ns3::MapScheduler [class] module.add_class('MapScheduler', parent=root_module['ns3::Scheduler']) ## random-variable-stream.h (module 'core'): ns3::NormalRandomVariable [class] module.add_class('NormalRandomVariable', parent=root_module['ns3::RandomVariableStream']) ## object-factory.h (module 'core'): ns3::ObjectFactoryChecker [class] module.add_class('ObjectFactoryChecker', parent=root_module['ns3::AttributeChecker']) ## object-factory.h (module 'core'): ns3::ObjectFactoryValue [class] module.add_class('ObjectFactoryValue', parent=root_module['ns3::AttributeValue']) ## object-ptr-container.h (module 'core'): ns3::ObjectPtrContainerAccessor [class] module.add_class('ObjectPtrContainerAccessor', parent=root_module['ns3::AttributeAccessor']) ## object-ptr-container.h (module 'core'): ns3::ObjectPtrContainerChecker [class] module.add_class('ObjectPtrContainerChecker', parent=root_module['ns3::AttributeChecker']) ## object-ptr-container.h (module 'core'): 
ns3::ObjectPtrContainerValue [class] module.add_class('ObjectPtrContainerValue', parent=root_module['ns3::AttributeValue']) ## random-variable-stream.h (module 'core'): ns3::ParetoRandomVariable [class] module.add_class('ParetoRandomVariable', parent=root_module['ns3::RandomVariableStream']) ## pointer.h (module 'core'): ns3::PointerChecker [class] module.add_class('PointerChecker', parent=root_module['ns3::AttributeChecker']) ## pointer.h (module 'core'): ns3::PointerValue [class] module.add_class('PointerValue', parent=root_module['ns3::AttributeValue']) ## realtime-simulator-impl.h (module 'core'): ns3::RealtimeSimulatorImpl [class] module.add_class('RealtimeSimulatorImpl', parent=root_module['ns3::SimulatorImpl']) ## realtime-simulator-impl.h (module 'core'): ns3::RealtimeSimulatorImpl::SynchronizationMode [enumeration] module.add_enum('SynchronizationMode', ['SYNC_BEST_EFFORT', 'SYNC_HARD_LIMIT'], outer_class=root_module['ns3::RealtimeSimulatorImpl']) ## ref-count-base.h (module 'core'): ns3::RefCountBase [class] module.add_class('RefCountBase', parent=root_module['ns3::SimpleRefCount< ns3::RefCountBase, ns3::empty, ns3::DefaultDeleter<ns3::RefCountBase> >']) ## string.h (module 'core'): ns3::StringChecker [class] module.add_class('StringChecker', parent=root_module['ns3::AttributeChecker']) ## string.h (module 'core'): ns3::StringValue [class] module.add_class('StringValue', parent=root_module['ns3::AttributeValue']) ## nstime.h (module 'core'): ns3::TimeValue [class] module.add_class('TimeValue', parent=root_module['ns3::AttributeValue']) ## type-id.h (module 'core'): ns3::TypeIdChecker [class] module.add_class('TypeIdChecker', parent=root_module['ns3::AttributeChecker']) ## type-id.h (module 'core'): ns3::TypeIdValue [class] module.add_class('TypeIdValue', parent=root_module['ns3::AttributeValue']) ## uinteger.h (module 'core'): ns3::UintegerValue [class] module.add_class('UintegerValue', parent=root_module['ns3::AttributeValue']) ## vector.h (module 
'core'): ns3::Vector2DChecker [class] module.add_class('Vector2DChecker', parent=root_module['ns3::AttributeChecker']) ## vector.h (module 'core'): ns3::Vector2DValue [class] module.add_class('Vector2DValue', parent=root_module['ns3::AttributeValue']) ## vector.h (module 'core'): ns3::Vector3DChecker [class] module.add_class('Vector3DChecker', parent=root_module['ns3::AttributeChecker']) ## vector.h (module 'core'): ns3::Vector3DValue [class] module.add_class('Vector3DValue', parent=root_module['ns3::AttributeValue']) module.add_container('std::map< std::string, ns3::LogComponent * >', ('std::string', 'ns3::LogComponent *'), container_type=u'map') typehandlers.add_type_alias(u'ns3::RngSeedManager', u'ns3::SeedManager') typehandlers.add_type_alias(u'ns3::RngSeedManager*', u'ns3::SeedManager*') typehandlers.add_type_alias(u'ns3::RngSeedManager&', u'ns3::SeedManager&') module.add_typedef(root_module['ns3::RngSeedManager'], 'SeedManager') typehandlers.add_type_alias(u'ns3::ObjectPtrContainerValue', u'ns3::ObjectVectorValue') typehandlers.add_type_alias(u'ns3::ObjectPtrContainerValue*', u'ns3::ObjectVectorValue*') typehandlers.add_type_alias(u'ns3::ObjectPtrContainerValue&', u'ns3::ObjectVectorValue&') module.add_typedef(root_module['ns3::ObjectPtrContainerValue'], 'ObjectVectorValue') typehandlers.add_type_alias(u'void ( * ) ( std::ostream & ) *', u'ns3::LogTimePrinter') typehandlers.add_type_alias(u'void ( * ) ( std::ostream & ) **', u'ns3::LogTimePrinter*') typehandlers.add_type_alias(u'void ( * ) ( std::ostream & ) *&', u'ns3::LogTimePrinter&') typehandlers.add_type_alias(u'void ( * ) ( std::ostream & ) *', u'ns3::LogNodePrinter') typehandlers.add_type_alias(u'void ( * ) ( std::ostream & ) **', u'ns3::LogNodePrinter*') typehandlers.add_type_alias(u'void ( * ) ( std::ostream & ) *&', u'ns3::LogNodePrinter&') typehandlers.add_type_alias(u'ns3::Vector3D', u'ns3::Vector') typehandlers.add_type_alias(u'ns3::Vector3D*', u'ns3::Vector*') 
typehandlers.add_type_alias(u'ns3::Vector3D&', u'ns3::Vector&') module.add_typedef(root_module['ns3::Vector3D'], 'Vector') typehandlers.add_type_alias(u'ns3::Vector3DValue', u'ns3::VectorValue') typehandlers.add_type_alias(u'ns3::Vector3DValue*', u'ns3::VectorValue*') typehandlers.add_type_alias(u'ns3::Vector3DValue&', u'ns3::VectorValue&') module.add_typedef(root_module['ns3::Vector3DValue'], 'VectorValue') typehandlers.add_type_alias(u'ns3::ObjectPtrContainerValue', u'ns3::ObjectMapValue') typehandlers.add_type_alias(u'ns3::ObjectPtrContainerValue*', u'ns3::ObjectMapValue*') typehandlers.add_type_alias(u'ns3::ObjectPtrContainerValue&', u'ns3::ObjectMapValue&') module.add_typedef(root_module['ns3::ObjectPtrContainerValue'], 'ObjectMapValue') typehandlers.add_type_alias(u'ns3::Vector3DChecker', u'ns3::VectorChecker') typehandlers.add_type_alias(u'ns3::Vector3DChecker*', u'ns3::VectorChecker*') typehandlers.add_type_alias(u'ns3::Vector3DChecker&', u'ns3::VectorChecker&') module.add_typedef(root_module['ns3::Vector3DChecker'], 'VectorChecker') ## Register a nested module for the namespace CommandLineHelper nested_module = module.add_cpp_namespace('CommandLineHelper') register_types_ns3_CommandLineHelper(nested_module) ## Register a nested module for the namespace Config nested_module = module.add_cpp_namespace('Config') register_types_ns3_Config(nested_module) ## Register a nested module for the namespace FatalImpl nested_module = module.add_cpp_namespace('FatalImpl') register_types_ns3_FatalImpl(nested_module) ## Register a nested module for the namespace Hash nested_module = module.add_cpp_namespace('Hash') register_types_ns3_Hash(nested_module) ## Register a nested module for the namespace SystemPath nested_module = module.add_cpp_namespace('SystemPath') register_types_ns3_SystemPath(nested_module) ## Register a nested module for the namespace TracedValueCallback nested_module = module.add_cpp_namespace('TracedValueCallback') 
register_types_ns3_TracedValueCallback(nested_module) ## Register a nested module for the namespace internal nested_module = module.add_cpp_namespace('internal') register_types_ns3_internal(nested_module) def register_types_ns3_CommandLineHelper(module): root_module = module.get_root() def register_types_ns3_Config(module): root_module = module.get_root() ## config.h (module 'core'): ns3::Config::MatchContainer [class] module.add_class('MatchContainer') module.add_container('std::vector< ns3::Ptr< ns3::Object > >', 'ns3::Ptr< ns3::Object >', container_type=u'vector') module.add_container('std::vector< std::string >', 'std::string', container_type=u'vector') def register_types_ns3_FatalImpl(module): root_module = module.get_root() def register_types_ns3_Hash(module): root_module = module.get_root() ## hash-function.h (module 'core'): ns3::Hash::Implementation [class] module.add_class('Implementation', parent=root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >']) typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) *', u'ns3::Hash::Hash32Function_ptr') typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) **', u'ns3::Hash::Hash32Function_ptr*') typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) *&', u'ns3::Hash::Hash32Function_ptr&') typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) *', u'ns3::Hash::Hash64Function_ptr') typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) **', u'ns3::Hash::Hash64Function_ptr*') typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) *&', u'ns3::Hash::Hash64Function_ptr&') ## Register a nested module for the namespace Function nested_module = module.add_cpp_namespace('Function') register_types_ns3_Hash_Function(nested_module) def register_types_ns3_Hash_Function(module): root_module = module.get_root() ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a 
[class] module.add_class('Fnv1a', parent=root_module['ns3::Hash::Implementation']) ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32 [class] module.add_class('Hash32', parent=root_module['ns3::Hash::Implementation']) ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64 [class] module.add_class('Hash64', parent=root_module['ns3::Hash::Implementation']) ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3 [class] module.add_class('Murmur3', parent=root_module['ns3::Hash::Implementation']) def register_types_ns3_SystemPath(module): root_module = module.get_root() module.add_container('std::list< std::string >', 'std::string', container_type=u'list') def register_types_ns3_TracedValueCallback(module): root_module = module.get_root() typehandlers.add_type_alias(u'void ( * ) ( uint8_t, uint8_t ) *', u'ns3::TracedValueCallback::Uint8') typehandlers.add_type_alias(u'void ( * ) ( uint8_t, uint8_t ) **', u'ns3::TracedValueCallback::Uint8*') typehandlers.add_type_alias(u'void ( * ) ( uint8_t, uint8_t ) *&', u'ns3::TracedValueCallback::Uint8&') typehandlers.add_type_alias(u'void ( * ) ( int8_t, int8_t ) *', u'ns3::TracedValueCallback::Int8') typehandlers.add_type_alias(u'void ( * ) ( int8_t, int8_t ) **', u'ns3::TracedValueCallback::Int8*') typehandlers.add_type_alias(u'void ( * ) ( int8_t, int8_t ) *&', u'ns3::TracedValueCallback::Int8&') typehandlers.add_type_alias(u'void ( * ) ( double, double ) *', u'ns3::TracedValueCallback::Double') typehandlers.add_type_alias(u'void ( * ) ( double, double ) **', u'ns3::TracedValueCallback::Double*') typehandlers.add_type_alias(u'void ( * ) ( double, double ) *&', u'ns3::TracedValueCallback::Double&') typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t ) *', u'ns3::TracedValueCallback::Uint32') typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t ) **', u'ns3::TracedValueCallback::Uint32*') typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t ) *&', 
u'ns3::TracedValueCallback::Uint32&') typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Time ) *', u'ns3::TracedValueCallback::Time') typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Time ) **', u'ns3::TracedValueCallback::Time*') typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Time ) *&', u'ns3::TracedValueCallback::Time&') typehandlers.add_type_alias(u'void ( * ) ( bool, bool ) *', u'ns3::TracedValueCallback::Bool') typehandlers.add_type_alias(u'void ( * ) ( bool, bool ) **', u'ns3::TracedValueCallback::Bool*') typehandlers.add_type_alias(u'void ( * ) ( bool, bool ) *&', u'ns3::TracedValueCallback::Bool&') typehandlers.add_type_alias(u'void ( * ) ( int16_t, int16_t ) *', u'ns3::TracedValueCallback::Int16') typehandlers.add_type_alias(u'void ( * ) ( int16_t, int16_t ) **', u'ns3::TracedValueCallback::Int16*') typehandlers.add_type_alias(u'void ( * ) ( int16_t, int16_t ) *&', u'ns3::TracedValueCallback::Int16&') typehandlers.add_type_alias(u'void ( * ) ( int32_t, int32_t ) *', u'ns3::TracedValueCallback::Int32') typehandlers.add_type_alias(u'void ( * ) ( int32_t, int32_t ) **', u'ns3::TracedValueCallback::Int32*') typehandlers.add_type_alias(u'void ( * ) ( int32_t, int32_t ) *&', u'ns3::TracedValueCallback::Int32&') typehandlers.add_type_alias(u'void ( * ) ( ) *', u'ns3::TracedValueCallback::Void') typehandlers.add_type_alias(u'void ( * ) ( ) **', u'ns3::TracedValueCallback::Void*') typehandlers.add_type_alias(u'void ( * ) ( ) *&', u'ns3::TracedValueCallback::Void&') typehandlers.add_type_alias(u'void ( * ) ( uint16_t, uint16_t ) *', u'ns3::TracedValueCallback::Uint16') typehandlers.add_type_alias(u'void ( * ) ( uint16_t, uint16_t ) **', u'ns3::TracedValueCallback::Uint16*') typehandlers.add_type_alias(u'void ( * ) ( uint16_t, uint16_t ) *&', u'ns3::TracedValueCallback::Uint16&') def register_types_ns3_internal(module): root_module = module.get_root() def register_methods(root_module): 
register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList']) register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item']) register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase']) register_Ns3CommandLine_methods(root_module, root_module['ns3::CommandLine']) register_Ns3CriticalSection_methods(root_module, root_module['ns3::CriticalSection']) register_Ns3EventGarbageCollector_methods(root_module, root_module['ns3::EventGarbageCollector']) register_Ns3EventId_methods(root_module, root_module['ns3::EventId']) register_Ns3GlobalValue_methods(root_module, root_module['ns3::GlobalValue']) register_Ns3Hasher_methods(root_module, root_module['ns3::Hasher']) register_Ns3IntToType__0_methods(root_module, root_module['ns3::IntToType< 0 >']) register_Ns3IntToType__1_methods(root_module, root_module['ns3::IntToType< 1 >']) register_Ns3IntToType__2_methods(root_module, root_module['ns3::IntToType< 2 >']) register_Ns3IntToType__3_methods(root_module, root_module['ns3::IntToType< 3 >']) register_Ns3IntToType__4_methods(root_module, root_module['ns3::IntToType< 4 >']) register_Ns3IntToType__5_methods(root_module, root_module['ns3::IntToType< 5 >']) register_Ns3IntToType__6_methods(root_module, root_module['ns3::IntToType< 6 >']) register_Ns3LogComponent_methods(root_module, root_module['ns3::LogComponent']) register_Ns3Names_methods(root_module, root_module['ns3::Names']) register_Ns3NonCopyable_methods(root_module, root_module['ns3::NonCopyable']) register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase']) register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter']) register_Ns3ObjectFactory_methods(root_module, root_module['ns3::ObjectFactory']) register_Ns3ParameterLogger_methods(root_module, root_module['ns3::ParameterLogger']) register_Ns3RandomVariableStreamHelper_methods(root_module, 
root_module['ns3::RandomVariableStreamHelper']) register_Ns3RngSeedManager_methods(root_module, root_module['ns3::RngSeedManager']) register_Ns3RngStream_methods(root_module, root_module['ns3::RngStream']) register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >']) register_Ns3Simulator_methods(root_module, root_module['ns3::Simulator']) register_Ns3Singleton__Ns3DesMetrics_methods(root_module, root_module['ns3::Singleton< ns3::DesMetrics >']) register_Ns3SystemCondition_methods(root_module, root_module['ns3::SystemCondition']) register_Ns3SystemMutex_methods(root_module, root_module['ns3::SystemMutex']) register_Ns3SystemWallClockMs_methods(root_module, root_module['ns3::SystemWallClockMs']) register_Ns3TimeWithUnit_methods(root_module, root_module['ns3::TimeWithUnit']) register_Ns3Timer_methods(root_module, root_module['ns3::Timer']) register_Ns3TimerImpl_methods(root_module, root_module['ns3::TimerImpl']) register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId']) register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation']) register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation']) register_Ns3Vector2D_methods(root_module, root_module['ns3::Vector2D']) register_Ns3Vector3D_methods(root_module, root_module['ns3::Vector3D']) register_Ns3Watchdog_methods(root_module, root_module['ns3::Watchdog']) register_Ns3Empty_methods(root_module, root_module['ns3::empty']) register_Ns3Int64x64_t_methods(root_module, root_module['ns3::int64x64_t']) register_Ns3DesMetrics_methods(root_module, root_module['ns3::DesMetrics']) register_Ns3Object_methods(root_module, root_module['ns3::Object']) register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator']) register_Ns3RandomVariableStream_methods(root_module, 
root_module['ns3::RandomVariableStream']) register_Ns3Scheduler_methods(root_module, root_module['ns3::Scheduler']) register_Ns3SchedulerEvent_methods(root_module, root_module['ns3::Scheduler::Event']) register_Ns3SchedulerEventKey_methods(root_module, root_module['ns3::Scheduler::EventKey']) register_Ns3SequentialRandomVariable_methods(root_module, root_module['ns3::SequentialRandomVariable']) register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >']) register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >']) register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >']) register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >']) register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >']) register_Ns3SimpleRefCount__Ns3FdReader_Ns3Empty_Ns3DefaultDeleter__lt__ns3FdReader__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::FdReader, ns3::empty, ns3::DefaultDeleter<ns3::FdReader> >']) register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, 
ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >']) register_Ns3SimpleRefCount__Ns3RefCountBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3RefCountBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::RefCountBase, ns3::empty, ns3::DefaultDeleter<ns3::RefCountBase> >']) register_Ns3SimpleRefCount__Ns3SystemThread_Ns3Empty_Ns3DefaultDeleter__lt__ns3SystemThread__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::SystemThread, ns3::empty, ns3::DefaultDeleter<ns3::SystemThread> >']) register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >']) register_Ns3SimulatorImpl_methods(root_module, root_module['ns3::SimulatorImpl']) register_Ns3Synchronizer_methods(root_module, root_module['ns3::Synchronizer']) register_Ns3SystemThread_methods(root_module, root_module['ns3::SystemThread']) register_Ns3Time_methods(root_module, root_module['ns3::Time']) register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor']) register_Ns3TriangularRandomVariable_methods(root_module, root_module['ns3::TriangularRandomVariable']) register_Ns3UniformRandomVariable_methods(root_module, root_module['ns3::UniformRandomVariable']) register_Ns3WallClockSynchronizer_methods(root_module, root_module['ns3::WallClockSynchronizer']) register_Ns3WeibullRandomVariable_methods(root_module, root_module['ns3::WeibullRandomVariable']) register_Ns3ZetaRandomVariable_methods(root_module, root_module['ns3::ZetaRandomVariable']) register_Ns3ZipfRandomVariable_methods(root_module, root_module['ns3::ZipfRandomVariable']) register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor']) register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker']) register_Ns3AttributeValue_methods(root_module, 
root_module['ns3::AttributeValue']) register_Ns3BooleanChecker_methods(root_module, root_module['ns3::BooleanChecker']) register_Ns3BooleanValue_methods(root_module, root_module['ns3::BooleanValue']) register_Ns3CalendarScheduler_methods(root_module, root_module['ns3::CalendarScheduler']) register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker']) register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase']) register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue']) register_Ns3ConstantRandomVariable_methods(root_module, root_module['ns3::ConstantRandomVariable']) register_Ns3DefaultSimulatorImpl_methods(root_module, root_module['ns3::DefaultSimulatorImpl']) register_Ns3DeterministicRandomVariable_methods(root_module, root_module['ns3::DeterministicRandomVariable']) register_Ns3DoubleValue_methods(root_module, root_module['ns3::DoubleValue']) register_Ns3EmpiricalRandomVariable_methods(root_module, root_module['ns3::EmpiricalRandomVariable']) register_Ns3EmptyAttributeAccessor_methods(root_module, root_module['ns3::EmptyAttributeAccessor']) register_Ns3EmptyAttributeChecker_methods(root_module, root_module['ns3::EmptyAttributeChecker']) register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue']) register_Ns3EnumChecker_methods(root_module, root_module['ns3::EnumChecker']) register_Ns3EnumValue_methods(root_module, root_module['ns3::EnumValue']) register_Ns3ErlangRandomVariable_methods(root_module, root_module['ns3::ErlangRandomVariable']) register_Ns3EventImpl_methods(root_module, root_module['ns3::EventImpl']) register_Ns3ExponentialRandomVariable_methods(root_module, root_module['ns3::ExponentialRandomVariable']) register_Ns3FdReader_methods(root_module, root_module['ns3::FdReader']) register_Ns3GammaRandomVariable_methods(root_module, root_module['ns3::GammaRandomVariable']) register_Ns3HeapScheduler_methods(root_module, root_module['ns3::HeapScheduler']) 
# NOTE(review): this region was whitespace-mangled (the original newlines were
# lost, collapsing many statements onto a few physical lines).  It is
# reconstructed below with conventional formatting; every token is unchanged.
# The code is pybindgen-GENERATED glue that registers Python wrappers for
# ns-3 'core' module classes -- do not hand-edit call order or argument
# strings: they must match the C++ API scanned by the bindings generator.
    # Tail of register_methods(root_module): dispatch one registration helper
    # per wrapped C++ class (the def line is above this chunk).
    register_Ns3IntegerValue_methods(root_module, root_module['ns3::IntegerValue'])
    register_Ns3ListScheduler_methods(root_module, root_module['ns3::ListScheduler'])
    register_Ns3LogNormalRandomVariable_methods(root_module, root_module['ns3::LogNormalRandomVariable'])
    register_Ns3MapScheduler_methods(root_module, root_module['ns3::MapScheduler'])
    register_Ns3NormalRandomVariable_methods(root_module, root_module['ns3::NormalRandomVariable'])
    register_Ns3ObjectFactoryChecker_methods(root_module, root_module['ns3::ObjectFactoryChecker'])
    register_Ns3ObjectFactoryValue_methods(root_module, root_module['ns3::ObjectFactoryValue'])
    register_Ns3ObjectPtrContainerAccessor_methods(root_module, root_module['ns3::ObjectPtrContainerAccessor'])
    register_Ns3ObjectPtrContainerChecker_methods(root_module, root_module['ns3::ObjectPtrContainerChecker'])
    register_Ns3ObjectPtrContainerValue_methods(root_module, root_module['ns3::ObjectPtrContainerValue'])
    register_Ns3ParetoRandomVariable_methods(root_module, root_module['ns3::ParetoRandomVariable'])
    register_Ns3PointerChecker_methods(root_module, root_module['ns3::PointerChecker'])
    register_Ns3PointerValue_methods(root_module, root_module['ns3::PointerValue'])
    register_Ns3RealtimeSimulatorImpl_methods(root_module, root_module['ns3::RealtimeSimulatorImpl'])
    register_Ns3RefCountBase_methods(root_module, root_module['ns3::RefCountBase'])
    register_Ns3StringChecker_methods(root_module, root_module['ns3::StringChecker'])
    register_Ns3StringValue_methods(root_module, root_module['ns3::StringValue'])
    register_Ns3TimeValue_methods(root_module, root_module['ns3::TimeValue'])
    register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
    register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
    register_Ns3UintegerValue_methods(root_module, root_module['ns3::UintegerValue'])
    register_Ns3Vector2DChecker_methods(root_module, root_module['ns3::Vector2DChecker'])
    register_Ns3Vector2DValue_methods(root_module, root_module['ns3::Vector2DValue'])
    register_Ns3Vector3DChecker_methods(root_module, root_module['ns3::Vector3DChecker'])
    register_Ns3Vector3DValue_methods(root_module, root_module['ns3::Vector3DValue'])
    register_Ns3ConfigMatchContainer_methods(root_module, root_module['ns3::Config::MatchContainer'])
    register_Ns3HashImplementation_methods(root_module, root_module['ns3::Hash::Implementation'])
    register_Ns3HashFunctionFnv1a_methods(root_module, root_module['ns3::Hash::Function::Fnv1a'])
    register_Ns3HashFunctionHash32_methods(root_module, root_module['ns3::Hash::Function::Hash32'])
    register_Ns3HashFunctionHash64_methods(root_module, root_module['ns3::Hash::Function::Hash64'])
    register_Ns3HashFunctionMurmur3_methods(root_module, root_module['ns3::Hash::Function::Murmur3'])
    return

def register_Ns3AttributeConstructionList_methods(root_module, cls):
    """Register Python wrappers for ns3::AttributeConstructionList."""
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList(ns3::AttributeConstructionList const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList() [constructor]
    cls.add_constructor([])
    ## attribute-construction-list.h (module 'core'): void ns3::AttributeConstructionList::Add(std::string name, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::Ptr<ns3::AttributeValue> value) [member function]
    cls.add_method('Add',
                   'void',
                   [param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')])
    ## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::Begin() const [member function]
    cls.add_method('Begin',
                   'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
                   [],
                   is_const=True)
    ## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::End() const [member function]
    cls.add_method('End',
                   'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
                   [],
                   is_const=True)
    ## attribute-construction-list.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeConstructionList::Find(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('Find',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True)
    return

def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
    """Register Python wrappers for ns3::AttributeConstructionList::Item."""
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item() [constructor]
    cls.add_constructor([])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item(ns3::AttributeConstructionList::Item const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::checker [variable]
    cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::name [variable]
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::value [variable]
    cls.add_instance_attribute('value', 'ns3::Ptr< ns3::AttributeValue >', is_const=False)
    return

def register_Ns3CallbackBase_methods(root_module, cls):
    """Register Python wrappers for ns3::CallbackBase."""
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::CallbackBase const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::Ptr<ns3::CallbackImplBase> ns3::CallbackBase::GetImpl() const [member function]
    cls.add_method('GetImpl',
                   'ns3::Ptr< ns3::CallbackImplBase >',
                   [],
                   is_const=True)
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::Ptr<ns3::CallbackImplBase> impl) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')],
                        visibility='protected')
    return

def register_Ns3CommandLine_methods(root_module, cls):
    """Register Python wrappers for ns3::CommandLine."""
    cls.add_output_stream_operator()
    ## command-line.h (module 'core'): ns3::CommandLine::CommandLine() [constructor]
    cls.add_constructor([])
    ## command-line.h (module 'core'): ns3::CommandLine::CommandLine(ns3::CommandLine const & cmd) [copy constructor]
    cls.add_constructor([param('ns3::CommandLine const &', 'cmd')])
    ## command-line.h (module 'core'): void ns3::CommandLine::AddValue(std::string const & name, std::string const & help, ns3::Callback<bool, std::string, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
    cls.add_method('AddValue',
                   'void',
                   [param('std::string const &', 'name'), param('std::string const &', 'help'), param('ns3::Callback< bool, std::string, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')])
    ## command-line.h (module 'core'): void ns3::CommandLine::AddValue(std::string const & name, std::string const & attributePath) [member function]
    cls.add_method('AddValue',
                   'void',
                   [param('std::string const &', 'name'), param('std::string const &', 'attributePath')])
    ## command-line.h (module 'core'): std::string ns3::CommandLine::GetName() const [member function]
    cls.add_method('GetName',
                   'std::string',
                   [],
                   is_const=True)
    ## command-line.h (module 'core'): void ns3::CommandLine::Parse(int argc, char * * argv) [member function]
    cls.add_method('Parse',
                   'void',
                   [param('int', 'argc'), param('char * *', 'argv')])
    ## command-line.h (module 'core'): void ns3::CommandLine::PrintHelp(std::ostream & os) const [member function]
    cls.add_method('PrintHelp',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    ## command-line.h (module 'core'): void ns3::CommandLine::Usage(std::string const usage) [member function]
    cls.add_method('Usage',
                   'void',
                   [param('std::string const', 'usage')])
    return

def register_Ns3CriticalSection_methods(root_module, cls):
    """Register Python wrappers for ns3::CriticalSection (scoped mutex lock)."""
    ## system-mutex.h (module 'core'): ns3::CriticalSection::CriticalSection(ns3::CriticalSection const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CriticalSection const &', 'arg0')])
    ## system-mutex.h (module 'core'): ns3::CriticalSection::CriticalSection(ns3::SystemMutex & mutex) [constructor]
    cls.add_constructor([param('ns3::SystemMutex &', 'mutex')])
    return

def register_Ns3EventGarbageCollector_methods(root_module, cls):
    """Register Python wrappers for ns3::EventGarbageCollector."""
    ## event-garbage-collector.h (module 'core'): ns3::EventGarbageCollector::EventGarbageCollector(ns3::EventGarbageCollector const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EventGarbageCollector const &', 'arg0')])
    ## event-garbage-collector.h (module 'core'): ns3::EventGarbageCollector::EventGarbageCollector() [constructor]
    cls.add_constructor([])
    ## event-garbage-collector.h (module 'core'): void ns3::EventGarbageCollector::Track(ns3::EventId event) [member function]
    cls.add_method('Track',
                   'void',
                   [param('ns3::EventId', 'event')])
    return

def register_Ns3EventId_methods(root_module, cls):
    """Register Python wrappers for ns3::EventId (simulator event handle)."""
    cls.add_binary_comparison_operator('!=')
    cls.add_binary_comparison_operator('==')
    ## event-id.h (module 'core'): ns3::EventId::EventId(ns3::EventId const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EventId const &', 'arg0')])
    ## event-id.h (module 'core'): ns3::EventId::EventId() [constructor]
    cls.add_constructor([])
    ## event-id.h (module 'core'): ns3::EventId::EventId(ns3::Ptr<ns3::EventImpl> const & impl, uint64_t ts, uint32_t context, uint32_t uid) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::EventImpl > const &', 'impl'), param('uint64_t', 'ts'), param('uint32_t', 'context'), param('uint32_t', 'uid')])
    ## event-id.h (module 'core'): void ns3::EventId::Cancel() [member function]
    cls.add_method('Cancel',
                   'void',
                   [])
    ## event-id.h (module 'core'): uint32_t ns3::EventId::GetContext() const [member function]
    cls.add_method('GetContext',
                   'uint32_t',
                   [],
                   is_const=True)
    ## event-id.h (module 'core'): uint64_t ns3::EventId::GetTs() const [member function]
    cls.add_method('GetTs',
                   'uint64_t',
                   [],
                   is_const=True)
    ## event-id.h (module 'core'): uint32_t ns3::EventId::GetUid() const [member function]
    cls.add_method('GetUid',
                   'uint32_t',
                   [],
                   is_const=True)
    ## event-id.h (module 'core'): bool ns3::EventId::IsExpired() const [member function]
    cls.add_method('IsExpired',
                   'bool',
                   [],
                   is_const=True)
    ## event-id.h (module 'core'): bool ns3::EventId::IsRunning() const [member function]
    cls.add_method('IsRunning',
                   'bool',
                   [],
                   is_const=True)
    ## event-id.h (module 'core'): ns3::EventImpl * ns3::EventId::PeekEventImpl() const [member function]
    cls.add_method('PeekEventImpl',
                   'ns3::EventImpl *',
                   [],
                   is_const=True)
    return

def register_Ns3GlobalValue_methods(root_module, cls):
    """Register Python wrappers for ns3::GlobalValue."""
    ## global-value.h (module 'core'): ns3::GlobalValue::GlobalValue(ns3::GlobalValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::GlobalValue const &', 'arg0')])
    ## global-value.h (module 'core'): ns3::GlobalValue::GlobalValue(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeChecker const> checker) [constructor]
    cls.add_constructor([param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
    ## global-value.h (module 'core'): static __gnu_cxx::__normal_iterator<ns3::GlobalValue* const*,std::vector<ns3::GlobalValue*, std::allocator<ns3::GlobalValue*> > > ns3::GlobalValue::Begin() [member function]
    cls.add_method('Begin',
                   '__gnu_cxx::__normal_iterator< ns3::GlobalValue * const *, std::vector< ns3::GlobalValue * > >',
                   [],
                   is_static=True)
    ## global-value.h (module 'core'): static void ns3::GlobalValue::Bind(std::string name, ns3::AttributeValue const & value) [member function]
    cls.add_method('Bind',
                   'void',
                   [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')],
                   is_static=True)
    ## global-value.h (module 'core'): static bool ns3::GlobalValue::BindFailSafe(std::string name, ns3::AttributeValue const & value) [member function]
    cls.add_method('BindFailSafe',
                   'bool',
                   [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')],
                   is_static=True)
    ## global-value.h (module 'core'): static __gnu_cxx::__normal_iterator<ns3::GlobalValue* const*,std::vector<ns3::GlobalValue*, std::allocator<ns3::GlobalValue*> > > ns3::GlobalValue::End() [member function]
    cls.add_method('End',
                   '__gnu_cxx::__normal_iterator< ns3::GlobalValue * const *, std::vector< ns3::GlobalValue * > >',
                   [],
                   is_static=True)
    ## global-value.h (module 'core'): ns3::Ptr<ns3::AttributeChecker const> ns3::GlobalValue::GetChecker() const [member function]
    cls.add_method('GetChecker',
                   'ns3::Ptr< ns3::AttributeChecker const >',
                   [],
                   is_const=True)
    ## global-value.h (module 'core'): std::string ns3::GlobalValue::GetHelp() const [member function]
    cls.add_method('GetHelp',
                   'std::string',
                   [],
                   is_const=True)
    ## global-value.h (module 'core'): std::string ns3::GlobalValue::GetName() const [member function]
    cls.add_method('GetName',
                   'std::string',
                   [],
                   is_const=True)
    ## global-value.h (module 'core'): void ns3::GlobalValue::GetValue(ns3::AttributeValue & value) const [member function]
    cls.add_method('GetValue',
                   'void',
                   [param('ns3::AttributeValue &', 'value')],
                   is_const=True)
    ## global-value.h (module 'core'): static void ns3::GlobalValue::GetValueByName(std::string name, ns3::AttributeValue & value) [member function]
    cls.add_method('GetValueByName',
                   'void',
                   [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
                   is_static=True)
    ## global-value.h (module 'core'): static bool ns3::GlobalValue::GetValueByNameFailSafe(std::string name, ns3::AttributeValue & value) [member function]
    cls.add_method('GetValueByNameFailSafe',
                   'bool',
                   [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
                   is_static=True)
    ## global-value.h (module 'core'): void ns3::GlobalValue::ResetInitialValue() [member function]
    cls.add_method('ResetInitialValue',
                   'void',
                   [])
    ## global-value.h (module 'core'): bool ns3::GlobalValue::SetValue(ns3::AttributeValue const & value) [member function]
    cls.add_method('SetValue',
                   'bool',
                   [param('ns3::AttributeValue const &', 'value')])
    return

def register_Ns3Hasher_methods(root_module, cls):
    """Register Python wrappers for ns3::Hasher."""
    ## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Hasher const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Hasher const &', 'arg0')])
    ## hash.h (module 'core'): ns3::Hasher::Hasher() [constructor]
    cls.add_constructor([])
    ## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Ptr<ns3::Hash::Implementation> hp) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::Hash::Implementation >', 'hp')])
    ## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')])
    ## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(std::string const s) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('std::string const', 's')])
    ## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')])
    ## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(std::string const s) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('std::string const', 's')])
    ## hash.h (module 'core'): ns3::Hasher & ns3::Hasher::clear() [member function]
    cls.add_method('clear',
                   'ns3::Hasher &',
                   [])
    return

def register_Ns3IntToType__0_methods(root_module, cls):
    """Register Python wrappers for the ns3::IntToType<0> template instance."""
    ## int-to-type.h (module 'core'): ns3::IntToType<0>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<0>::IntToType(ns3::IntToType<0> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 0 > const &', 'arg0')])
    return

def register_Ns3IntToType__1_methods(root_module, cls):
    """Register Python wrappers for the ns3::IntToType<1> template instance."""
    ## int-to-type.h (module 'core'): ns3::IntToType<1>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<1>::IntToType(ns3::IntToType<1> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 1 > const &', 'arg0')])
    return

def register_Ns3IntToType__2_methods(root_module, cls):
    """Register Python wrappers for the ns3::IntToType<2> template instance."""
    ## int-to-type.h (module 'core'): ns3::IntToType<2>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<2>::IntToType(ns3::IntToType<2> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 2 > const &', 'arg0')])
    return

def register_Ns3IntToType__3_methods(root_module, cls):
    """Register Python wrappers for the ns3::IntToType<3> template instance."""
    ## int-to-type.h (module 'core'): ns3::IntToType<3>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<3>::IntToType(ns3::IntToType<3> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 3 > const &', 'arg0')])
    return

def register_Ns3IntToType__4_methods(root_module, cls):
    """Register Python wrappers for the ns3::IntToType<4> template instance."""
    ## int-to-type.h (module 'core'): ns3::IntToType<4>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<4>::IntToType(ns3::IntToType<4> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 4 > const &', 'arg0')])
    return

def register_Ns3IntToType__5_methods(root_module, cls):
    """Register Python wrappers for the ns3::IntToType<5> template instance."""
    ## int-to-type.h (module 'core'): ns3::IntToType<5>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<5>::IntToType(ns3::IntToType<5> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 5 > const &', 'arg0')])
    return

def register_Ns3IntToType__6_methods(root_module, cls):
    """Register Python wrappers for the ns3::IntToType<6> template instance."""
    ## int-to-type.h (module 'core'): ns3::IntToType<6>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<6>::IntToType(ns3::IntToType<6> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 6 > const &', 'arg0')])
    return

def register_Ns3LogComponent_methods(root_module, cls):
    """Register Python wrappers for ns3::LogComponent."""
    ## log.h (module 'core'): ns3::LogComponent::LogComponent(ns3::LogComponent const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::LogComponent const &', 'arg0')])
    ## log.h (module 'core'): ns3::LogComponent::LogComponent(std::string const & name, std::string const & file, ns3::LogLevel const mask=::ns3::LOG_NONE) [constructor]
    cls.add_constructor([param('std::string const &', 'name'), param('std::string const &', 'file'), param('ns3::LogLevel const', 'mask', default_value='::ns3::LOG_NONE')])
    ## log.h (module 'core'): void ns3::LogComponent::Disable(ns3::LogLevel const level) [member function]
    cls.add_method('Disable',
                   'void',
                   [param('ns3::LogLevel const', 'level')])
    ## log.h (module 'core'): void ns3::LogComponent::Enable(ns3::LogLevel const level) [member function]
    cls.add_method('Enable',
                   'void',
                   [param('ns3::LogLevel const', 'level')])
    ## log.h (module 'core'): std::string ns3::LogComponent::File() const [member function]
    cls.add_method('File',
                   'std::string',
                   [],
                   is_const=True)
    ## log.h (module 'core'): static std::map<std::basic_string<char, std::char_traits<char>, std::allocator<char> >,ns3::LogComponent*,std::less<std::basic_string<char, std::char_traits<char>, std::allocator<char> > >,std::allocator<std::pair<const std::basic_string<char, std::char_traits<char>, std::allocator<char> >, ns3::LogComponent*> > > * ns3::LogComponent::GetComponentList() [member function]
    cls.add_method('GetComponentList',
                   'std::map< std::string, ns3::LogComponent * > *',
                   [],
                   is_static=True)
    ## log.h (module 'core'): static std::string ns3::LogComponent::GetLevelLabel(ns3::LogLevel const level) [member function]
    cls.add_method('GetLevelLabel',
                   'std::string',
                   [param('ns3::LogLevel const', 'level')],
                   is_static=True)
    ## log.h (module 'core'): bool ns3::LogComponent::IsEnabled(ns3::LogLevel const level) const [member function]
    cls.add_method('IsEnabled',
                   'bool',
                   [param('ns3::LogLevel const', 'level')],
                   is_const=True)
    ## log.h (module 'core'): bool ns3::LogComponent::IsNoneEnabled() const [member function]
    cls.add_method('IsNoneEnabled',
                   'bool',
                   [],
                   is_const=True)
    ## log.h (module 'core'): char const * ns3::LogComponent::Name() const [member function]
    cls.add_method('Name',
                   'char const *',
                   [],
                   is_const=True)
    ## log.h (module 'core'): void ns3::LogComponent::SetMask(ns3::LogLevel const level) [member function]
    cls.add_method('SetMask',
                   'void',
                   [param('ns3::LogLevel const', 'level')])
    return

def register_Ns3Names_methods(root_module, cls):
    """Register Python wrappers for the static ns3::Names object-naming API."""
    ## names.h (module 'core'): ns3::Names::Names() [constructor]
    cls.add_constructor([])
    ## names.h (module 'core'): ns3::Names::Names(ns3::Names const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Names const &', 'arg0')])
    ## names.h (module 'core'): static void ns3::Names::Add(std::string name, ns3::Ptr<ns3::Object> object) [member function]
    cls.add_method('Add',
                   'void',
                   [param('std::string', 'name'), param('ns3::Ptr< ns3::Object >', 'object')],
                   is_static=True)
    ## names.h (module 'core'): static void ns3::Names::Add(std::string path, std::string name, ns3::Ptr<ns3::Object> object) [member function]
    cls.add_method('Add',
                   'void',
                   [param('std::string', 'path'), param('std::string', 'name'), param('ns3::Ptr< ns3::Object >', 'object')],
                   is_static=True)
    ## names.h (module 'core'): static void ns3::Names::Add(ns3::Ptr<ns3::Object> context, std::string name, ns3::Ptr<ns3::Object> object) [member function]
    cls.add_method('Add',
                   'void',
                   [param('ns3::Ptr< ns3::Object >', 'context'), param('std::string', 'name'), param('ns3::Ptr< ns3::Object >', 'object')],
                   is_static=True)
    ## names.h (module 'core'): static void ns3::Names::Clear() [member function]
    cls.add_method('Clear',
                   'void',
                   [],
                   is_static=True)
    ## names.h (module 'core'): static std::string ns3::Names::FindName(ns3::Ptr<ns3::Object> object) [member function]
    cls.add_method('FindName',
                   'std::string',
                   [param('ns3::Ptr< ns3::Object >', 'object')],
                   is_static=True)
    ## names.h (module 'core'): static std::string ns3::Names::FindPath(ns3::Ptr<ns3::Object> object) [member function]
    cls.add_method('FindPath',
                   'std::string',
                   [param('ns3::Ptr< ns3::Object >', 'object')],
                   is_static=True)
    ## names.h (module 'core'): static void ns3::Names::Rename(std::string oldpath, std::string newname) [member function]
    cls.add_method('Rename',
                   'void',
                   [param('std::string', 'oldpath'), param('std::string', 'newname')],
                   is_static=True)
    ## names.h (module 'core'): static void ns3::Names::Rename(std::string path, std::string oldname, std::string newname) [member function]
    cls.add_method('Rename',
                   'void',
                   [param('std::string', 'path'), param('std::string', 'oldname'), param('std::string', 'newname')],
                   is_static=True)
    ## names.h (module 'core'): static void ns3::Names::Rename(ns3::Ptr<ns3::Object> context, std::string oldname, std::string newname) [member function]
    cls.add_method('Rename',
                   'void',
                   [param('ns3::Ptr< ns3::Object >', 'context'), param('std::string', 'oldname'), param('std::string', 'newname')],
                   is_static=True)
    return

def register_Ns3NonCopyable_methods(root_module, cls):
    """Register Python wrappers for ns3::NonCopyable (protected ctor only)."""
    ## non-copyable.h (module 'core'): ns3::NonCopyable::NonCopyable() [constructor]
    cls.add_constructor([],
                        visibility='protected')
    return

def register_Ns3ObjectBase_methods(root_module, cls):
    """Register Python wrappers for ns3::ObjectBase (attribute/trace API)."""
    ## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase() [constructor]
    cls.add_constructor([])
    ## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase(ns3::ObjectBase const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')])
    ## object-base.h (module 'core'): void ns3::ObjectBase::GetAttribute(std::string name, ns3::AttributeValue & value) const [member function]
    cls.add_method('GetAttribute',
                   'void',
                   [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
                   is_const=True)
    ## object-base.h (module 'core'): bool ns3::ObjectBase::GetAttributeFailSafe(std::string name, ns3::AttributeValue & value) const [member function]
    cls.add_method('GetAttributeFailSafe',
                   'bool',
                   [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
                   is_const=True)
    ## object-base.h (module 'core'): ns3::TypeId ns3::ObjectBase::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId',
                   'ns3::TypeId',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## object-base.h (module 'core'): static ns3::TypeId ns3::ObjectBase::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## object-base.h (module 'core'): void ns3::ObjectBase::SetAttribute(std::string name, ns3::AttributeValue const & value) [member function]
    cls.add_method('SetAttribute',
                   'void',
                   [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::SetAttributeFailSafe(std::string name, ns3::AttributeValue const & value) [member function]
    cls.add_method('SetAttributeFailSafe',
                   'bool',
                   [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
    cls.add_method('TraceConnect',
                   'bool',
                   [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
    cls.add_method('TraceConnectWithoutContext',
                   'bool',
                   [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
    cls.add_method('TraceDisconnect',
                   'bool',
                   [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
    cls.add_method('TraceDisconnectWithoutContext',
                   'bool',
                   [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    ## object-base.h (module 'core'): void ns3::ObjectBase::ConstructSelf(ns3::AttributeConstructionList const & attributes) [member function]
    cls.add_method('ConstructSelf',
                   'void',
                   [param('ns3::AttributeConstructionList const &', 'attributes')],
                   visibility='protected')
    ## object-base.h (module 'core'): void ns3::ObjectBase::NotifyConstructionCompleted() [member function]
    cls.add_method('NotifyConstructionCompleted',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    return

def register_Ns3ObjectDeleter_methods(root_module, cls):
    """Register Python wrappers for ns3::ObjectDeleter."""
    ## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter() [constructor]
    cls.add_constructor([])
    ## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter(ns3::ObjectDeleter const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')])
    ## object.h (module 'core'): static void ns3::ObjectDeleter::Delete(ns3::Object * object) [member function]
    cls.add_method('Delete',
                   'void',
                   [param('ns3::Object *', 'object')],
                   is_static=True)
    return

def register_Ns3ObjectFactory_methods(root_module, cls):
    """Register Python wrappers for ns3::ObjectFactory."""
    cls.add_output_stream_operator()
    ## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory(ns3::ObjectFactory const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectFactory const &', 'arg0')])
    ## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory() [constructor]
    cls.add_constructor([])
    ## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory(std::string typeId) [constructor]
    cls.add_constructor([param('std::string', 'typeId')])
    ## object-factory.h (module 'core'): ns3::Ptr<ns3::Object> ns3::ObjectFactory::Create() const [member function]
    cls.add_method('Create',
                   'ns3::Ptr< ns3::Object >',
                   [],
                   is_const=True)
    ## object-factory.h (module 'core'): ns3::TypeId ns3::ObjectFactory::GetTypeId() const [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_const=True)
    ## object-factory.h (module 'core'): void ns3::ObjectFactory::Set(std::string name, ns3::AttributeValue const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    ## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(ns3::TypeId tid) [member function]
    cls.add_method('SetTypeId',
                   'void',
                   [param('ns3::TypeId', 'tid')])
    ## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(char const * tid) [member function]
    cls.add_method('SetTypeId',
                   'void',
                   [param('char const *', 'tid')])
    ## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(std::string tid) [member function]
    cls.add_method('SetTypeId',
                   'void',
                   [param('std::string', 'tid')])
    return

def register_Ns3ParameterLogger_methods(root_module, cls):
    """Register Python wrappers for ns3::ParameterLogger."""
    ## log.h (module 'core'): ns3::ParameterLogger::ParameterLogger(ns3::ParameterLogger const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ParameterLogger const &', 'arg0')])
    ## log.h (module 'core'): ns3::ParameterLogger::ParameterLogger(std::ostream & os) [constructor]
    cls.add_constructor([param('std::ostream &', 'os')])
    return

def register_Ns3RandomVariableStreamHelper_methods(root_module, cls):
    """Register Python wrappers for ns3::RandomVariableStreamHelper."""
    ## random-variable-stream-helper.h (module 'core'): ns3::RandomVariableStreamHelper::RandomVariableStreamHelper() [constructor]
    cls.add_constructor([])
    ## random-variable-stream-helper.h (module 'core'): ns3::RandomVariableStreamHelper::RandomVariableStreamHelper(ns3::RandomVariableStreamHelper const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::RandomVariableStreamHelper const &', 'arg0')])
    ## random-variable-stream-helper.h (module 'core'): static int64_t ns3::RandomVariableStreamHelper::AssignStreams(std::string path, int64_t stream) [member function]
    cls.add_method('AssignStreams',
                   'int64_t',
                   [param('std::string', 'path'), param('int64_t', 'stream')],
                   is_static=True)
    return

def register_Ns3RngSeedManager_methods(root_module, cls):
    """Register Python wrappers for the static ns3::RngSeedManager API."""
    ## rng-seed-manager.h (module 'core'): ns3::RngSeedManager::RngSeedManager() [constructor]
    cls.add_constructor([])
    ## rng-seed-manager.h (module 'core'): ns3::RngSeedManager::RngSeedManager(ns3::RngSeedManager const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::RngSeedManager const &', 'arg0')])
    ## rng-seed-manager.h (module 'core'): static uint64_t ns3::RngSeedManager::GetNextStreamIndex() [member function]
    cls.add_method('GetNextStreamIndex',
                   'uint64_t',
                   [],
                   is_static=True)
    ## rng-seed-manager.h (module 'core'): static uint64_t ns3::RngSeedManager::GetRun() [member function]
    cls.add_method('GetRun',
                   'uint64_t',
                   [],
                   is_static=True)
    ## rng-seed-manager.h (module 'core'): static uint32_t ns3::RngSeedManager::GetSeed() [member function]
    cls.add_method('GetSeed',
                   'uint32_t',
                   [],
                   is_static=True)
    ## rng-seed-manager.h (module 'core'): static void ns3::RngSeedManager::SetRun(uint64_t run) [member function]
    cls.add_method('SetRun',
                   'void',
                   [param('uint64_t', 'run')],
                   is_static=True)
    ## rng-seed-manager.h (module 'core'): static void ns3::RngSeedManager::SetSeed(uint32_t seed) [member function]
    cls.add_method('SetSeed',
                   'void',
                   [param('uint32_t', 'seed')],
                   is_static=True)
    return

def register_Ns3RngStream_methods(root_module, cls):
    """Register Python wrappers for ns3::RngStream."""
    ## rng-stream.h (module 'core'): ns3::RngStream::RngStream(uint32_t seed, uint64_t stream, uint64_t substream) [constructor]
    cls.add_constructor([param('uint32_t', 'seed'), param('uint64_t', 'stream'), param('uint64_t', 'substream')])
    ## rng-stream.h (module 'core'): ns3::RngStream::RngStream(ns3::RngStream const & r) [copy constructor]
    cls.add_constructor([param('ns3::RngStream const &', 'r')])
    ## rng-stream.h (module 'core'): double ns3::RngStream::RandU01() [member function]
    cls.add_method('RandU01',
                   'double',
                   [])
    return

def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
    """Register Python wrappers for SimpleRefCount<Object, ObjectBase, ObjectDeleter>."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount(ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::Cleanup() [member function]
    cls.add_method('Cleanup',
                   'void',
                   [],
                   is_static=True)
    return

def register_Ns3Simulator_methods(root_module, cls):
    """Register Python wrappers for the static ns3::Simulator API."""
    ## simulator.h (module 'core'): ns3::Simulator::Simulator(ns3::Simulator const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Simulator const &', 'arg0')])
    ## simulator.h (module 'core'): static void ns3::Simulator::Cancel(ns3::EventId const & id) [member function]
    cls.add_method('Cancel',
                   'void',
                   [param('ns3::EventId const &', 'id')],
                   is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::Destroy() [member function]
    cls.add_method('Destroy',
                   'void',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static uint32_t ns3::Simulator::GetContext() [member function]
    cls.add_method('GetContext',
                   'uint32_t',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static ns3::Time ns3::Simulator::GetDelayLeft(ns3::EventId const & id) [member function]
    cls.add_method('GetDelayLeft',
                   'ns3::Time',
                   [param('ns3::EventId const &', 'id')],
                   is_static=True)
    ## simulator.h (module 'core'): static ns3::Ptr<ns3::SimulatorImpl> ns3::Simulator::GetImplementation() [member function]
    cls.add_method('GetImplementation',
                   'ns3::Ptr< ns3::SimulatorImpl >',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static ns3::Time ns3::Simulator::GetMaximumSimulationTime() [member function]
    cls.add_method('GetMaximumSimulationTime',
                   'ns3::Time',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static uint32_t ns3::Simulator::GetSystemId() [member function]
    cls.add_method('GetSystemId',
                   'uint32_t',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static bool ns3::Simulator::IsExpired(ns3::EventId const & id) [member function]
    cls.add_method('IsExpired',
                   'bool',
                   [param('ns3::EventId const &', 'id')],
                   is_static=True)
    ## simulator.h (module 'core'): static bool ns3::Simulator::IsFinished() [member function]
    cls.add_method('IsFinished',
                   'bool',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static ns3::Time ns3::Simulator::Now() [member function]
    cls.add_method('Now',
                   'ns3::Time',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::Remove(ns3::EventId const & id) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::EventId const &', 'id')],
                   is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::SetImplementation(ns3::Ptr<ns3::SimulatorImpl> impl) [member function]
    cls.add_method('SetImplementation',
                   'void',
                   [param('ns3::Ptr< ns3::SimulatorImpl >', 'impl')],
                   is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::SetScheduler(ns3::ObjectFactory schedulerFactory) [member function]
    cls.add_method('SetScheduler',
                   'void',
                   [param('ns3::ObjectFactory', 'schedulerFactory')],
                   is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::Stop() [member function]
    cls.add_method('Stop',
                   'void',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::Stop(ns3::Time const & delay) [member function]
    cls.add_method('Stop',
                   'void',
                   [param('ns3::Time const &', 'delay')],
                   is_static=True)
    return

def register_Ns3Singleton__Ns3DesMetrics_methods(root_module, cls):
    """Register Python wrappers for the ns3::Singleton<DesMetrics> instance."""
    ## singleton.h (module 'core'): ns3::Singleton<ns3::DesMetrics>::Singleton() [constructor]
    cls.add_constructor([])
    ## singleton.h (module 'core'): static ns3::DesMetrics * ns3::Singleton<ns3::DesMetrics>::Get() [member function]
    cls.add_method('Get',
                   'ns3::DesMetrics *',
                   [],
                   is_static=True)
    return

def register_Ns3SystemCondition_methods(root_module, cls):
    """Register Python wrappers for ns3::SystemCondition."""
    ## system-condition.h (module 'core'): ns3::SystemCondition::SystemCondition(ns3::SystemCondition const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::SystemCondition const &', 'arg0')])
    ## system-condition.h (module 'core'): ns3::SystemCondition::SystemCondition() [constructor]
    cls.add_constructor([])
    ## system-condition.h (module 'core'): void ns3::SystemCondition::Broadcast() [member function]
    cls.add_method('Broadcast',
                   'void',
                   [])
    ## system-condition.h (module 'core'): bool ns3::SystemCondition::GetCondition() [member function]
    cls.add_method('GetCondition',
                   'bool',
                   [])
    ## system-condition.h (module 'core'): void ns3::SystemCondition::SetCondition(bool condition) [member function]
    cls.add_method('SetCondition',
                   'void',
                   [param('bool', 'condition')])
    ## system-condition.h (module 'core'): void ns3::SystemCondition::Signal() [member function]
    cls.add_method('Signal',
                   'void',
                   [])
    ## system-condition.h (module 'core'): bool ns3::SystemCondition::TimedWait(uint64_t ns) [member function]
    cls.add_method('TimedWait',
                   'bool',
                   [param('uint64_t', 'ns')])
    ## system-condition.h (module 'core'): void ns3::SystemCondition::Wait() [member function]
    cls.add_method('Wait',
                   'void',
                   [])
    return

def register_Ns3SystemMutex_methods(root_module, cls):
    """Register Python wrappers for ns3::SystemMutex."""
    ## system-mutex.h (module 'core'): ns3::SystemMutex::SystemMutex(ns3::SystemMutex const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::SystemMutex const &', 'arg0')])
    ## system-mutex.h (module 'core'): ns3::SystemMutex::SystemMutex() [constructor]
    cls.add_constructor([])
    ## system-mutex.h (module 'core'): void ns3::SystemMutex::Lock() [member function]
    cls.add_method('Lock',
                   'void',
                   [])
    ## system-mutex.h (module 'core'): void ns3::SystemMutex::Unlock() [member function]
    cls.add_method('Unlock',
                   'void',
                   [])
    return

def register_Ns3SystemWallClockMs_methods(root_module, cls):
    """Register Python wrappers for ns3::SystemWallClockMs."""
    ## system-wall-clock-ms.h (module 'core'): ns3::SystemWallClockMs::SystemWallClockMs(ns3::SystemWallClockMs const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::SystemWallClockMs const &', 'arg0')])
    ## system-wall-clock-ms.h (module 'core'): ns3::SystemWallClockMs::SystemWallClockMs() [constructor]
    cls.add_constructor([])
    ## system-wall-clock-ms.h (module 'core'): int64_t ns3::SystemWallClockMs::End() [member function]
    cls.add_method('End',
                   'int64_t',
                   [])
    ## system-wall-clock-ms.h (module 'core'): int64_t ns3::SystemWallClockMs::GetElapsedReal() const [member function]
    cls.add_method('GetElapsedReal',
                   'int64_t',
                   [],
                   is_const=True)
    ## system-wall-clock-ms.h (module 'core'): int64_t ns3::SystemWallClockMs::GetElapsedSystem() const [member function]
    cls.add_method('GetElapsedSystem',
                   'int64_t',
                   [],
                   is_const=True)
    ## system-wall-clock-ms.h (module 'core'): int64_t ns3::SystemWallClockMs::GetElapsedUser() const [member function]
    cls.add_method('GetElapsedUser',
                   'int64_t',
                   [],
                   is_const=True)
    ## system-wall-clock-ms.h (module 'core'): void ns3::SystemWallClockMs::Start() [member function]
    cls.add_method('Start',
                   'void',
                   [])
    return

def register_Ns3TimeWithUnit_methods(root_module, cls):
    """Register Python wrappers for ns3::TimeWithUnit (continues past this chunk)."""
    cls.add_output_stream_operator()
    ## nstime.h (module 'core'): ns3::TimeWithUnit::TimeWithUnit(ns3::TimeWithUnit const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TimeWithUnit const &', 'arg0')])
    ## nstime.h (module
'core'): ns3::TimeWithUnit::TimeWithUnit(ns3::Time const time, ns3::Time::Unit const unit) [constructor] cls.add_constructor([param('ns3::Time const', 'time'), param('ns3::Time::Unit const', 'unit')]) return def register_Ns3Timer_methods(root_module, cls): ## timer.h (module 'core'): ns3::Timer::Timer(ns3::Timer const & arg0) [copy constructor] cls.add_constructor([param('ns3::Timer const &', 'arg0')]) ## timer.h (module 'core'): ns3::Timer::Timer() [constructor] cls.add_constructor([]) ## timer.h (module 'core'): ns3::Timer::Timer(ns3::Timer::DestroyPolicy destroyPolicy) [constructor] cls.add_constructor([param('ns3::Timer::DestroyPolicy', 'destroyPolicy')]) ## timer.h (module 'core'): void ns3::Timer::Cancel() [member function] cls.add_method('Cancel', 'void', []) ## timer.h (module 'core'): ns3::Time ns3::Timer::GetDelay() const [member function] cls.add_method('GetDelay', 'ns3::Time', [], is_const=True) ## timer.h (module 'core'): ns3::Time ns3::Timer::GetDelayLeft() const [member function] cls.add_method('GetDelayLeft', 'ns3::Time', [], is_const=True) ## timer.h (module 'core'): ns3::Timer::State ns3::Timer::GetState() const [member function] cls.add_method('GetState', 'ns3::Timer::State', [], is_const=True) ## timer.h (module 'core'): bool ns3::Timer::IsExpired() const [member function] cls.add_method('IsExpired', 'bool', [], is_const=True) ## timer.h (module 'core'): bool ns3::Timer::IsRunning() const [member function] cls.add_method('IsRunning', 'bool', [], is_const=True) ## timer.h (module 'core'): bool ns3::Timer::IsSuspended() const [member function] cls.add_method('IsSuspended', 'bool', [], is_const=True) ## timer.h (module 'core'): void ns3::Timer::Remove() [member function] cls.add_method('Remove', 'void', []) ## timer.h (module 'core'): void ns3::Timer::Resume() [member function] cls.add_method('Resume', 'void', []) ## timer.h (module 'core'): void ns3::Timer::Schedule() [member function] cls.add_method('Schedule', 'void', []) ## timer.h (module 
'core'): void ns3::Timer::Schedule(ns3::Time delay) [member function] cls.add_method('Schedule', 'void', [param('ns3::Time', 'delay')]) ## timer.h (module 'core'): void ns3::Timer::SetDelay(ns3::Time const & delay) [member function] cls.add_method('SetDelay', 'void', [param('ns3::Time const &', 'delay')]) ## timer.h (module 'core'): void ns3::Timer::Suspend() [member function] cls.add_method('Suspend', 'void', []) return def register_Ns3TimerImpl_methods(root_module, cls): ## timer-impl.h (module 'core'): ns3::TimerImpl::TimerImpl() [constructor] cls.add_constructor([]) ## timer-impl.h (module 'core'): ns3::TimerImpl::TimerImpl(ns3::TimerImpl const & arg0) [copy constructor] cls.add_constructor([param('ns3::TimerImpl const &', 'arg0')]) ## timer-impl.h (module 'core'): void ns3::TimerImpl::Invoke() [member function] cls.add_method('Invoke', 'void', [], is_pure_virtual=True, is_virtual=True) ## timer-impl.h (module 'core'): ns3::EventId ns3::TimerImpl::Schedule(ns3::Time const & delay) [member function] cls.add_method('Schedule', 'ns3::EventId', [param('ns3::Time const &', 'delay')], is_pure_virtual=True, is_virtual=True) return def register_Ns3TypeId_methods(root_module, cls): cls.add_binary_comparison_operator('<') cls.add_binary_comparison_operator('!=') cls.add_output_stream_operator() cls.add_binary_comparison_operator('==') ## type-id.h (module 'core'): ns3::TypeId::TypeId(char const * name) [constructor] cls.add_constructor([param('char const *', 'name')]) ## type-id.h (module 'core'): ns3::TypeId::TypeId() [constructor] cls.add_constructor([]) ## type-id.h (module 'core'): ns3::TypeId::TypeId(ns3::TypeId const & o) [copy constructor] cls.add_constructor([param('ns3::TypeId const &', 'o')]) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::TypeId::SupportLevel 
supportLevel=::ns3::TypeId::SUPPORTED, std::string const & supportMsg="") [member function] cls.add_method('AddAttribute', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')]) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, uint32_t flags, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::TypeId::SupportLevel supportLevel=::ns3::TypeId::SUPPORTED, std::string const & supportMsg="") [member function] cls.add_method('AddAttribute', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')]) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor) [member function] cls.add_method('AddTraceSource', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')], deprecated=True) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor, std::string callback, ns3::TypeId::SupportLevel supportLevel=::ns3::TypeId::SUPPORTED, 
std::string const & supportMsg="") [member function] cls.add_method('AddTraceSource', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor'), param('std::string', 'callback'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')]) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation ns3::TypeId::GetAttribute(uint32_t i) const [member function] cls.add_method('GetAttribute', 'ns3::TypeId::AttributeInformation', [param('uint32_t', 'i')], is_const=True) ## type-id.h (module 'core'): std::string ns3::TypeId::GetAttributeFullName(uint32_t i) const [member function] cls.add_method('GetAttributeFullName', 'std::string', [param('uint32_t', 'i')], is_const=True) ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetAttributeN() const [member function] cls.add_method('GetAttributeN', 'uint32_t', [], is_const=True) ## type-id.h (module 'core'): ns3::Callback<ns3::ObjectBase*,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ns3::TypeId::GetConstructor() const [member function] cls.add_method('GetConstructor', 'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', [], is_const=True) ## type-id.h (module 'core'): std::string ns3::TypeId::GetGroupName() const [member function] cls.add_method('GetGroupName', 'std::string', [], is_const=True) ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetHash() const [member function] cls.add_method('GetHash', 'uint32_t', [], is_const=True) ## type-id.h (module 'core'): std::string ns3::TypeId::GetName() const [member function] cls.add_method('GetName', 'std::string', [], is_const=True) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::GetParent() const [member function] cls.add_method('GetParent', 
'ns3::TypeId', [], is_const=True) ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::GetRegistered(uint32_t i) [member function] cls.add_method('GetRegistered', 'ns3::TypeId', [param('uint32_t', 'i')], is_static=True) ## type-id.h (module 'core'): static uint32_t ns3::TypeId::GetRegisteredN() [member function] cls.add_method('GetRegisteredN', 'uint32_t', [], is_static=True) ## type-id.h (module 'core'): std::size_t ns3::TypeId::GetSize() const [member function] cls.add_method('GetSize', 'std::size_t', [], is_const=True) ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation ns3::TypeId::GetTraceSource(uint32_t i) const [member function] cls.add_method('GetTraceSource', 'ns3::TypeId::TraceSourceInformation', [param('uint32_t', 'i')], is_const=True) ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetTraceSourceN() const [member function] cls.add_method('GetTraceSourceN', 'uint32_t', [], is_const=True) ## type-id.h (module 'core'): uint16_t ns3::TypeId::GetUid() const [member function] cls.add_method('GetUid', 'uint16_t', [], is_const=True) ## type-id.h (module 'core'): bool ns3::TypeId::HasConstructor() const [member function] cls.add_method('HasConstructor', 'bool', [], is_const=True) ## type-id.h (module 'core'): bool ns3::TypeId::HasParent() const [member function] cls.add_method('HasParent', 'bool', [], is_const=True) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::HideFromDocumentation() [member function] cls.add_method('HideFromDocumentation', 'ns3::TypeId', []) ## type-id.h (module 'core'): bool ns3::TypeId::IsChildOf(ns3::TypeId other) const [member function] cls.add_method('IsChildOf', 'bool', [param('ns3::TypeId', 'other')], is_const=True) ## type-id.h (module 'core'): bool ns3::TypeId::LookupAttributeByName(std::string name, ns3::TypeId::AttributeInformation * info) const [member function] cls.add_method('LookupAttributeByName', 'bool', [param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', 
transfer_ownership=False)], is_const=True) ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByHash(uint32_t hash) [member function] cls.add_method('LookupByHash', 'ns3::TypeId', [param('uint32_t', 'hash')], is_static=True) ## type-id.h (module 'core'): static bool ns3::TypeId::LookupByHashFailSafe(uint32_t hash, ns3::TypeId * tid) [member function] cls.add_method('LookupByHashFailSafe', 'bool', [param('uint32_t', 'hash'), param('ns3::TypeId *', 'tid')], is_static=True) ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByName(std::string name) [member function] cls.add_method('LookupByName', 'ns3::TypeId', [param('std::string', 'name')], is_static=True) ## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name) const [member function] cls.add_method('LookupTraceSourceByName', 'ns3::Ptr< ns3::TraceSourceAccessor const >', [param('std::string', 'name')], is_const=True) ## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name, ns3::TypeId::TraceSourceInformation * info) const [member function] cls.add_method('LookupTraceSourceByName', 'ns3::Ptr< ns3::TraceSourceAccessor const >', [param('std::string', 'name'), param('ns3::TypeId::TraceSourceInformation *', 'info')], is_const=True) ## type-id.h (module 'core'): bool ns3::TypeId::MustHideFromDocumentation() const [member function] cls.add_method('MustHideFromDocumentation', 'bool', [], is_const=True) ## type-id.h (module 'core'): bool ns3::TypeId::SetAttributeInitialValue(uint32_t i, ns3::Ptr<ns3::AttributeValue const> initialValue) [member function] cls.add_method('SetAttributeInitialValue', 'bool', [param('uint32_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')]) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetGroupName(std::string groupName) [member function] cls.add_method('SetGroupName', 'ns3::TypeId', 
[param('std::string', 'groupName')]) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetParent(ns3::TypeId tid) [member function] cls.add_method('SetParent', 'ns3::TypeId', [param('ns3::TypeId', 'tid')]) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetSize(std::size_t size) [member function] cls.add_method('SetSize', 'ns3::TypeId', [param('std::size_t', 'size')]) ## type-id.h (module 'core'): void ns3::TypeId::SetUid(uint16_t uid) [member function] cls.add_method('SetUid', 'void', [param('uint16_t', 'uid')]) return def register_Ns3TypeIdAttributeInformation_methods(root_module, cls): ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation() [constructor] cls.add_constructor([]) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation(ns3::TypeId::AttributeInformation const & arg0) [copy constructor] cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')]) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::accessor [variable] cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::checker [variable] cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::flags [variable] cls.add_instance_attribute('flags', 'uint32_t', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::help [variable] cls.add_instance_attribute('help', 'std::string', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::initialValue [variable] cls.add_instance_attribute('initialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::name [variable] cls.add_instance_attribute('name', 'std::string', is_const=False) ## type-id.h (module 'core'): 
ns3::TypeId::AttributeInformation::originalInitialValue [variable] cls.add_instance_attribute('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::supportLevel [variable] cls.add_instance_attribute('supportLevel', 'ns3::TypeId::SupportLevel', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::supportMsg [variable] cls.add_instance_attribute('supportMsg', 'std::string', is_const=False) return def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls): ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation() [constructor] cls.add_constructor([]) ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation(ns3::TypeId::TraceSourceInformation const & arg0) [copy constructor] cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')]) ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::accessor [variable] cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::callback [variable] cls.add_instance_attribute('callback', 'std::string', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::help [variable] cls.add_instance_attribute('help', 'std::string', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::name [variable] cls.add_instance_attribute('name', 'std::string', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::supportLevel [variable] cls.add_instance_attribute('supportLevel', 'ns3::TypeId::SupportLevel', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::supportMsg [variable] cls.add_instance_attribute('supportMsg', 'std::string', is_const=False) return def register_Ns3Vector2D_methods(root_module, 
cls): cls.add_output_stream_operator() ## vector.h (module 'core'): ns3::Vector2D::Vector2D(ns3::Vector2D const & arg0) [copy constructor] cls.add_constructor([param('ns3::Vector2D const &', 'arg0')]) ## vector.h (module 'core'): ns3::Vector2D::Vector2D(double _x, double _y) [constructor] cls.add_constructor([param('double', '_x'), param('double', '_y')]) ## vector.h (module 'core'): ns3::Vector2D::Vector2D() [constructor] cls.add_constructor([]) ## vector.h (module 'core'): ns3::Vector2D::x [variable] cls.add_instance_attribute('x', 'double', is_const=False) ## vector.h (module 'core'): ns3::Vector2D::y [variable] cls.add_instance_attribute('y', 'double', is_const=False) return def register_Ns3Vector3D_methods(root_module, cls): cls.add_output_stream_operator() ## vector.h (module 'core'): ns3::Vector3D::Vector3D(ns3::Vector3D const & arg0) [copy constructor] cls.add_constructor([param('ns3::Vector3D const &', 'arg0')]) ## vector.h (module 'core'): ns3::Vector3D::Vector3D(double _x, double _y, double _z) [constructor] cls.add_constructor([param('double', '_x'), param('double', '_y'), param('double', '_z')]) ## vector.h (module 'core'): ns3::Vector3D::Vector3D() [constructor] cls.add_constructor([]) ## vector.h (module 'core'): ns3::Vector3D::x [variable] cls.add_instance_attribute('x', 'double', is_const=False) ## vector.h (module 'core'): ns3::Vector3D::y [variable] cls.add_instance_attribute('y', 'double', is_const=False) ## vector.h (module 'core'): ns3::Vector3D::z [variable] cls.add_instance_attribute('z', 'double', is_const=False) return def register_Ns3Watchdog_methods(root_module, cls): ## watchdog.h (module 'core'): ns3::Watchdog::Watchdog(ns3::Watchdog const & arg0) [copy constructor] cls.add_constructor([param('ns3::Watchdog const &', 'arg0')]) ## watchdog.h (module 'core'): ns3::Watchdog::Watchdog() [constructor] cls.add_constructor([]) ## watchdog.h (module 'core'): void ns3::Watchdog::Ping(ns3::Time delay) [member function] cls.add_method('Ping', 
'void', [param('ns3::Time', 'delay')]) return def register_Ns3Empty_methods(root_module, cls): ## empty.h (module 'core'): ns3::empty::empty() [constructor] cls.add_constructor([]) ## empty.h (module 'core'): ns3::empty::empty(ns3::empty const & arg0) [copy constructor] cls.add_constructor([param('ns3::empty const &', 'arg0')]) return def register_Ns3Int64x64_t_methods(root_module, cls): cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right')) cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right')) cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right')) cls.add_unary_numeric_operator('-') cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right')) cls.add_binary_comparison_operator('<') cls.add_binary_comparison_operator('>') cls.add_binary_comparison_operator('!=') cls.add_inplace_numeric_operator('*=', param('ns3::int64x64_t const &', u'right')) cls.add_inplace_numeric_operator('+=', param('ns3::int64x64_t const &', u'right')) cls.add_inplace_numeric_operator('-=', param('ns3::int64x64_t const &', u'right')) cls.add_inplace_numeric_operator('/=', param('ns3::int64x64_t const &', u'right')) cls.add_output_stream_operator() cls.add_binary_comparison_operator('<=') cls.add_binary_comparison_operator('==') cls.add_binary_comparison_operator('>=') ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t() [constructor] cls.add_constructor([]) ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(double v) [constructor] cls.add_constructor([param('double', 'v')]) ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long double v) [constructor] cls.add_constructor([param('long double', 'v')]) 
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int v) [constructor] cls.add_constructor([param('int', 'v')]) ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long int v) [constructor] cls.add_constructor([param('long int', 'v')]) ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long int v) [constructor] cls.add_constructor([param('long long int', 'v')]) ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(unsigned int v) [constructor] cls.add_constructor([param('unsigned int', 'v')]) ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long unsigned int v) [constructor] cls.add_constructor([param('long unsigned int', 'v')]) ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long unsigned int v) [constructor] cls.add_constructor([param('long long unsigned int', 'v')]) ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int64_t hi, uint64_t lo) [constructor] cls.add_constructor([param('int64_t', 'hi'), param('uint64_t', 'lo')]) ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(ns3::int64x64_t const & o) [copy constructor] cls.add_constructor([param('ns3::int64x64_t const &', 'o')]) ## int64x64-double.h (module 'core'): double ns3::int64x64_t::GetDouble() const [member function] cls.add_method('GetDouble', 'double', [], is_const=True) ## int64x64-double.h (module 'core'): int64_t ns3::int64x64_t::GetHigh() const [member function] cls.add_method('GetHigh', 'int64_t', [], is_const=True) ## int64x64-double.h (module 'core'): uint64_t ns3::int64x64_t::GetLow() const [member function] cls.add_method('GetLow', 'uint64_t', [], is_const=True) ## int64x64-double.h (module 'core'): static ns3::int64x64_t ns3::int64x64_t::Invert(uint64_t v) [member function] cls.add_method('Invert', 'ns3::int64x64_t', [param('uint64_t', 'v')], is_static=True) ## int64x64-double.h (module 'core'): void ns3::int64x64_t::MulByInvert(ns3::int64x64_t const & o) [member 
function] cls.add_method('MulByInvert', 'void', [param('ns3::int64x64_t const &', 'o')]) ## int64x64-double.h (module 'core'): ns3::int64x64_t::implementation [variable] cls.add_static_attribute('implementation', 'ns3::int64x64_t::impl_type const', is_const=True) return def register_Ns3DesMetrics_methods(root_module, cls): ## des-metrics.h (module 'core'): ns3::DesMetrics::DesMetrics() [constructor] cls.add_constructor([]) ## des-metrics.h (module 'core'): void ns3::DesMetrics::Initialize(int argc, char * * argv, std::string outDir="") [member function] cls.add_method('Initialize', 'void', [param('int', 'argc'), param('char * *', 'argv'), param('std::string', 'outDir', default_value='""')]) ## des-metrics.h (module 'core'): void ns3::DesMetrics::Trace(ns3::Time const & now, ns3::Time const & delay) [member function] cls.add_method('Trace', 'void', [param('ns3::Time const &', 'now'), param('ns3::Time const &', 'delay')]) ## des-metrics.h (module 'core'): void ns3::DesMetrics::TraceWithContext(uint32_t context, ns3::Time const & now, ns3::Time const & delay) [member function] cls.add_method('TraceWithContext', 'void', [param('uint32_t', 'context'), param('ns3::Time const &', 'now'), param('ns3::Time const &', 'delay')]) return def register_Ns3Object_methods(root_module, cls): ## object.h (module 'core'): ns3::Object::Object() [constructor] cls.add_constructor([]) ## object.h (module 'core'): void ns3::Object::AggregateObject(ns3::Ptr<ns3::Object> other) [member function] cls.add_method('AggregateObject', 'void', [param('ns3::Ptr< ns3::Object >', 'other')]) ## object.h (module 'core'): void ns3::Object::Dispose() [member function] cls.add_method('Dispose', 'void', []) ## object.h (module 'core'): ns3::Object::AggregateIterator ns3::Object::GetAggregateIterator() const [member function] cls.add_method('GetAggregateIterator', 'ns3::Object::AggregateIterator', [], is_const=True) ## object.h (module 'core'): ns3::TypeId ns3::Object::GetInstanceTypeId() const [member 
function] cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True) ## object.h (module 'core'): ns3::Ptr<ns3::Object> ns3::Object::GetObject(ns3::TypeId tid) const [member function] cls.add_method('GetObject', 'ns3::Ptr< ns3::Object >', [param('ns3::TypeId', 'tid')], is_const=True, template_parameters=['ns3::Object'], custom_template_method_name=u'GetObject') ## object.h (module 'core'): static ns3::TypeId ns3::Object::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## object.h (module 'core'): void ns3::Object::Initialize() [member function] cls.add_method('Initialize', 'void', []) ## object.h (module 'core'): bool ns3::Object::IsInitialized() const [member function] cls.add_method('IsInitialized', 'bool', [], is_const=True) ## object.h (module 'core'): ns3::Object::Object(ns3::Object const & o) [copy constructor] cls.add_constructor([param('ns3::Object const &', 'o')], visibility='protected') ## object.h (module 'core'): void ns3::Object::DoDispose() [member function] cls.add_method('DoDispose', 'void', [], visibility='protected', is_virtual=True) ## object.h (module 'core'): void ns3::Object::DoInitialize() [member function] cls.add_method('DoInitialize', 'void', [], visibility='protected', is_virtual=True) ## object.h (module 'core'): void ns3::Object::NotifyNewAggregate() [member function] cls.add_method('NotifyNewAggregate', 'void', [], visibility='protected', is_virtual=True) return def register_Ns3ObjectAggregateIterator_methods(root_module, cls): ## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator(ns3::Object::AggregateIterator const & arg0) [copy constructor] cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')]) ## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator() [constructor] cls.add_constructor([]) ## object.h (module 'core'): bool ns3::Object::AggregateIterator::HasNext() const [member function] 
cls.add_method('HasNext', 'bool', [], is_const=True) ## object.h (module 'core'): ns3::Ptr<ns3::Object const> ns3::Object::AggregateIterator::Next() [member function] cls.add_method('Next', 'ns3::Ptr< ns3::Object const >', []) return def register_Ns3RandomVariableStream_methods(root_module, cls): ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::RandomVariableStream::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## random-variable-stream.h (module 'core'): ns3::RandomVariableStream::RandomVariableStream() [constructor] cls.add_constructor([]) ## random-variable-stream.h (module 'core'): void ns3::RandomVariableStream::SetStream(int64_t stream) [member function] cls.add_method('SetStream', 'void', [param('int64_t', 'stream')]) ## random-variable-stream.h (module 'core'): int64_t ns3::RandomVariableStream::GetStream() const [member function] cls.add_method('GetStream', 'int64_t', [], is_const=True) ## random-variable-stream.h (module 'core'): void ns3::RandomVariableStream::SetAntithetic(bool isAntithetic) [member function] cls.add_method('SetAntithetic', 'void', [param('bool', 'isAntithetic')]) ## random-variable-stream.h (module 'core'): bool ns3::RandomVariableStream::IsAntithetic() const [member function] cls.add_method('IsAntithetic', 'bool', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::RandomVariableStream::GetValue() [member function] cls.add_method('GetValue', 'double', [], is_pure_virtual=True, is_virtual=True) ## random-variable-stream.h (module 'core'): uint32_t ns3::RandomVariableStream::GetInteger() [member function] cls.add_method('GetInteger', 'uint32_t', [], is_pure_virtual=True, is_virtual=True) ## random-variable-stream.h (module 'core'): ns3::RngStream * ns3::RandomVariableStream::Peek() const [member function] cls.add_method('Peek', 'ns3::RngStream *', [], is_const=True, visibility='protected') return def register_Ns3Scheduler_methods(root_module, 
cls): ## scheduler.h (module 'core'): ns3::Scheduler::Scheduler() [constructor] cls.add_constructor([]) ## scheduler.h (module 'core'): ns3::Scheduler::Scheduler(ns3::Scheduler const & arg0) [copy constructor] cls.add_constructor([param('ns3::Scheduler const &', 'arg0')]) ## scheduler.h (module 'core'): static ns3::TypeId ns3::Scheduler::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## scheduler.h (module 'core'): void ns3::Scheduler::Insert(ns3::Scheduler::Event const & ev) [member function] cls.add_method('Insert', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_pure_virtual=True, is_virtual=True) ## scheduler.h (module 'core'): bool ns3::Scheduler::IsEmpty() const [member function] cls.add_method('IsEmpty', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True) ## scheduler.h (module 'core'): ns3::Scheduler::Event ns3::Scheduler::PeekNext() const [member function] cls.add_method('PeekNext', 'ns3::Scheduler::Event', [], is_pure_virtual=True, is_const=True, is_virtual=True) ## scheduler.h (module 'core'): void ns3::Scheduler::Remove(ns3::Scheduler::Event const & ev) [member function] cls.add_method('Remove', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_pure_virtual=True, is_virtual=True) ## scheduler.h (module 'core'): ns3::Scheduler::Event ns3::Scheduler::RemoveNext() [member function] cls.add_method('RemoveNext', 'ns3::Scheduler::Event', [], is_pure_virtual=True, is_virtual=True) return def register_Ns3SchedulerEvent_methods(root_module, cls): cls.add_binary_comparison_operator('<') ## scheduler.h (module 'core'): ns3::Scheduler::Event::Event() [constructor] cls.add_constructor([]) ## scheduler.h (module 'core'): ns3::Scheduler::Event::Event(ns3::Scheduler::Event const & arg0) [copy constructor] cls.add_constructor([param('ns3::Scheduler::Event const &', 'arg0')]) ## scheduler.h (module 'core'): ns3::Scheduler::Event::impl [variable] cls.add_instance_attribute('impl', 
'ns3::EventImpl *', is_const=False) ## scheduler.h (module 'core'): ns3::Scheduler::Event::key [variable] cls.add_instance_attribute('key', 'ns3::Scheduler::EventKey', is_const=False) return def register_Ns3SchedulerEventKey_methods(root_module, cls): cls.add_binary_comparison_operator('<') cls.add_binary_comparison_operator('>') cls.add_binary_comparison_operator('!=') ## scheduler.h (module 'core'): ns3::Scheduler::EventKey::EventKey() [constructor] cls.add_constructor([]) ## scheduler.h (module 'core'): ns3::Scheduler::EventKey::EventKey(ns3::Scheduler::EventKey const & arg0) [copy constructor] cls.add_constructor([param('ns3::Scheduler::EventKey const &', 'arg0')]) ## scheduler.h (module 'core'): ns3::Scheduler::EventKey::m_context [variable] cls.add_instance_attribute('m_context', 'uint32_t', is_const=False) ## scheduler.h (module 'core'): ns3::Scheduler::EventKey::m_ts [variable] cls.add_instance_attribute('m_ts', 'uint64_t', is_const=False) ## scheduler.h (module 'core'): ns3::Scheduler::EventKey::m_uid [variable] cls.add_instance_attribute('m_uid', 'uint32_t', is_const=False) return def register_Ns3SequentialRandomVariable_methods(root_module, cls): ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::SequentialRandomVariable::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## random-variable-stream.h (module 'core'): ns3::SequentialRandomVariable::SequentialRandomVariable() [constructor] cls.add_constructor([]) ## random-variable-stream.h (module 'core'): double ns3::SequentialRandomVariable::GetMin() const [member function] cls.add_method('GetMin', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::SequentialRandomVariable::GetMax() const [member function] cls.add_method('GetMax', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): ns3::Ptr<ns3::RandomVariableStream> ns3::SequentialRandomVariable::GetIncrement() const [member function] 
cls.add_method('GetIncrement', 'ns3::Ptr< ns3::RandomVariableStream >', [], is_const=True) ## random-variable-stream.h (module 'core'): uint32_t ns3::SequentialRandomVariable::GetConsecutive() const [member function] cls.add_method('GetConsecutive', 'uint32_t', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::SequentialRandomVariable::GetValue() [member function] cls.add_method('GetValue', 'double', [], is_virtual=True) ## random-variable-stream.h (module 'core'): uint32_t ns3::SequentialRandomVariable::GetInteger() [member function] cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True) return def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls): ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount() [constructor] cls.add_constructor([]) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > const & o) [copy constructor] cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')]) ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::Cleanup() [member function] cls.add_method('Cleanup', 'void', [], is_static=True) return def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls): ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount() [constructor] cls.add_constructor([]) ## 
simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > const & o) [copy constructor] cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')]) ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::Cleanup() [member function] cls.add_method('Cleanup', 'void', [], is_static=True) return def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls): ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount() [constructor] cls.add_constructor([]) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > const & o) [copy constructor] cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')]) ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::Cleanup() [member function] cls.add_method('Cleanup', 'void', [], is_static=True) return def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls): ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount() [constructor] cls.add_constructor([]) ## 
simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount(ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > const & o) [copy constructor] cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')]) ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::Cleanup() [member function] cls.add_method('Cleanup', 'void', [], is_static=True) return def register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, cls): ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::SimpleRefCount() [constructor] cls.add_constructor([]) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::SimpleRefCount(ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > const & o) [copy constructor] cls.add_constructor([param('ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter< ns3::EventImpl > > const &', 'o')]) ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::Cleanup() [member function] cls.add_method('Cleanup', 'void', [], is_static=True) return def register_Ns3SimpleRefCount__Ns3FdReader_Ns3Empty_Ns3DefaultDeleter__lt__ns3FdReader__gt___methods(root_module, cls): ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::FdReader, ns3::empty, ns3::DefaultDeleter<ns3::FdReader> >::SimpleRefCount() [constructor] cls.add_constructor([]) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::FdReader, ns3::empty, 
ns3::DefaultDeleter<ns3::FdReader> >::SimpleRefCount(ns3::SimpleRefCount<ns3::FdReader, ns3::empty, ns3::DefaultDeleter<ns3::FdReader> > const & o) [copy constructor] cls.add_constructor([param('ns3::SimpleRefCount< ns3::FdReader, ns3::empty, ns3::DefaultDeleter< ns3::FdReader > > const &', 'o')]) ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::FdReader, ns3::empty, ns3::DefaultDeleter<ns3::FdReader> >::Cleanup() [member function] cls.add_method('Cleanup', 'void', [], is_static=True) return def register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, cls): ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::SimpleRefCount() [constructor] cls.add_constructor([]) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > const & o) [copy constructor] cls.add_constructor([param('ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter< ns3::Hash::Implementation > > const &', 'o')]) ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::Cleanup() [member function] cls.add_method('Cleanup', 'void', [], is_static=True) return def register_Ns3SimpleRefCount__Ns3RefCountBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3RefCountBase__gt___methods(root_module, cls): ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::RefCountBase, ns3::empty, ns3::DefaultDeleter<ns3::RefCountBase> >::SimpleRefCount() [constructor] cls.add_constructor([]) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::RefCountBase, ns3::empty, 
ns3::DefaultDeleter<ns3::RefCountBase> >::SimpleRefCount(ns3::SimpleRefCount<ns3::RefCountBase, ns3::empty, ns3::DefaultDeleter<ns3::RefCountBase> > const & o) [copy constructor] cls.add_constructor([param('ns3::SimpleRefCount< ns3::RefCountBase, ns3::empty, ns3::DefaultDeleter< ns3::RefCountBase > > const &', 'o')]) ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::RefCountBase, ns3::empty, ns3::DefaultDeleter<ns3::RefCountBase> >::Cleanup() [member function] cls.add_method('Cleanup', 'void', [], is_static=True) return def register_Ns3SimpleRefCount__Ns3SystemThread_Ns3Empty_Ns3DefaultDeleter__lt__ns3SystemThread__gt___methods(root_module, cls): ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::SystemThread, ns3::empty, ns3::DefaultDeleter<ns3::SystemThread> >::SimpleRefCount() [constructor] cls.add_constructor([]) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::SystemThread, ns3::empty, ns3::DefaultDeleter<ns3::SystemThread> >::SimpleRefCount(ns3::SimpleRefCount<ns3::SystemThread, ns3::empty, ns3::DefaultDeleter<ns3::SystemThread> > const & o) [copy constructor] cls.add_constructor([param('ns3::SimpleRefCount< ns3::SystemThread, ns3::empty, ns3::DefaultDeleter< ns3::SystemThread > > const &', 'o')]) ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::SystemThread, ns3::empty, ns3::DefaultDeleter<ns3::SystemThread> >::Cleanup() [member function] cls.add_method('Cleanup', 'void', [], is_static=True) return def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls): ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount() [constructor] cls.add_constructor([]) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> 
>::SimpleRefCount(ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > const & o) [copy constructor] cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')]) ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::Cleanup() [member function] cls.add_method('Cleanup', 'void', [], is_static=True) return def register_Ns3SimulatorImpl_methods(root_module, cls): ## simulator-impl.h (module 'core'): ns3::SimulatorImpl::SimulatorImpl() [constructor] cls.add_constructor([]) ## simulator-impl.h (module 'core'): ns3::SimulatorImpl::SimulatorImpl(ns3::SimulatorImpl const & arg0) [copy constructor] cls.add_constructor([param('ns3::SimulatorImpl const &', 'arg0')]) ## simulator-impl.h (module 'core'): void ns3::SimulatorImpl::Cancel(ns3::EventId const & id) [member function] cls.add_method('Cancel', 'void', [param('ns3::EventId const &', 'id')], is_pure_virtual=True, is_virtual=True) ## simulator-impl.h (module 'core'): void ns3::SimulatorImpl::Destroy() [member function] cls.add_method('Destroy', 'void', [], is_pure_virtual=True, is_virtual=True) ## simulator-impl.h (module 'core'): uint32_t ns3::SimulatorImpl::GetContext() const [member function] cls.add_method('GetContext', 'uint32_t', [], is_pure_virtual=True, is_const=True, is_virtual=True) ## simulator-impl.h (module 'core'): ns3::Time ns3::SimulatorImpl::GetDelayLeft(ns3::EventId const & id) const [member function] cls.add_method('GetDelayLeft', 'ns3::Time', [param('ns3::EventId const &', 'id')], is_pure_virtual=True, is_const=True, is_virtual=True) ## simulator-impl.h (module 'core'): ns3::Time ns3::SimulatorImpl::GetMaximumSimulationTime() const [member function] cls.add_method('GetMaximumSimulationTime', 'ns3::Time', [], is_pure_virtual=True, is_const=True, 
is_virtual=True) ## simulator-impl.h (module 'core'): uint32_t ns3::SimulatorImpl::GetSystemId() const [member function] cls.add_method('GetSystemId', 'uint32_t', [], is_pure_virtual=True, is_const=True, is_virtual=True) ## simulator-impl.h (module 'core'): static ns3::TypeId ns3::SimulatorImpl::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## simulator-impl.h (module 'core'): bool ns3::SimulatorImpl::IsExpired(ns3::EventId const & id) const [member function] cls.add_method('IsExpired', 'bool', [param('ns3::EventId const &', 'id')], is_pure_virtual=True, is_const=True, is_virtual=True) ## simulator-impl.h (module 'core'): bool ns3::SimulatorImpl::IsFinished() const [member function] cls.add_method('IsFinished', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True) ## simulator-impl.h (module 'core'): ns3::Time ns3::SimulatorImpl::Now() const [member function] cls.add_method('Now', 'ns3::Time', [], is_pure_virtual=True, is_const=True, is_virtual=True) ## simulator-impl.h (module 'core'): void ns3::SimulatorImpl::Remove(ns3::EventId const & id) [member function] cls.add_method('Remove', 'void', [param('ns3::EventId const &', 'id')], is_pure_virtual=True, is_virtual=True) ## simulator-impl.h (module 'core'): void ns3::SimulatorImpl::Run() [member function] cls.add_method('Run', 'void', [], is_pure_virtual=True, is_virtual=True) ## simulator-impl.h (module 'core'): ns3::EventId ns3::SimulatorImpl::Schedule(ns3::Time const & delay, ns3::EventImpl * event) [member function] cls.add_method('Schedule', 'ns3::EventId', [param('ns3::Time const &', 'delay'), param('ns3::EventImpl *', 'event')], is_pure_virtual=True, is_virtual=True) ## simulator-impl.h (module 'core'): ns3::EventId ns3::SimulatorImpl::ScheduleDestroy(ns3::EventImpl * event) [member function] cls.add_method('ScheduleDestroy', 'ns3::EventId', [param('ns3::EventImpl *', 'event')], is_pure_virtual=True, is_virtual=True) ## simulator-impl.h (module 
'core'): ns3::EventId ns3::SimulatorImpl::ScheduleNow(ns3::EventImpl * event) [member function] cls.add_method('ScheduleNow', 'ns3::EventId', [param('ns3::EventImpl *', 'event')], is_pure_virtual=True, is_virtual=True) ## simulator-impl.h (module 'core'): void ns3::SimulatorImpl::ScheduleWithContext(uint32_t context, ns3::Time const & delay, ns3::EventImpl * event) [member function] cls.add_method('ScheduleWithContext', 'void', [param('uint32_t', 'context'), param('ns3::Time const &', 'delay'), param('ns3::EventImpl *', 'event')], is_pure_virtual=True, is_virtual=True) ## simulator-impl.h (module 'core'): void ns3::SimulatorImpl::SetScheduler(ns3::ObjectFactory schedulerFactory) [member function] cls.add_method('SetScheduler', 'void', [param('ns3::ObjectFactory', 'schedulerFactory')], is_pure_virtual=True, is_virtual=True) ## simulator-impl.h (module 'core'): void ns3::SimulatorImpl::Stop() [member function] cls.add_method('Stop', 'void', [], is_pure_virtual=True, is_virtual=True) ## simulator-impl.h (module 'core'): void ns3::SimulatorImpl::Stop(ns3::Time const & delay) [member function] cls.add_method('Stop', 'void', [param('ns3::Time const &', 'delay')], is_pure_virtual=True, is_virtual=True) return def register_Ns3Synchronizer_methods(root_module, cls): ## synchronizer.h (module 'core'): ns3::Synchronizer::Synchronizer(ns3::Synchronizer const & arg0) [copy constructor] cls.add_constructor([param('ns3::Synchronizer const &', 'arg0')]) ## synchronizer.h (module 'core'): ns3::Synchronizer::Synchronizer() [constructor] cls.add_constructor([]) ## synchronizer.h (module 'core'): uint64_t ns3::Synchronizer::EventEnd() [member function] cls.add_method('EventEnd', 'uint64_t', []) ## synchronizer.h (module 'core'): void ns3::Synchronizer::EventStart() [member function] cls.add_method('EventStart', 'void', []) ## synchronizer.h (module 'core'): uint64_t ns3::Synchronizer::GetCurrentRealtime() [member function] cls.add_method('GetCurrentRealtime', 'uint64_t', []) ## 
synchronizer.h (module 'core'): int64_t ns3::Synchronizer::GetDrift(uint64_t ts) [member function] cls.add_method('GetDrift', 'int64_t', [param('uint64_t', 'ts')]) ## synchronizer.h (module 'core'): uint64_t ns3::Synchronizer::GetOrigin() [member function] cls.add_method('GetOrigin', 'uint64_t', []) ## synchronizer.h (module 'core'): static ns3::TypeId ns3::Synchronizer::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## synchronizer.h (module 'core'): bool ns3::Synchronizer::Realtime() [member function] cls.add_method('Realtime', 'bool', []) ## synchronizer.h (module 'core'): void ns3::Synchronizer::SetCondition(bool arg0) [member function] cls.add_method('SetCondition', 'void', [param('bool', 'arg0')]) ## synchronizer.h (module 'core'): void ns3::Synchronizer::SetOrigin(uint64_t ts) [member function] cls.add_method('SetOrigin', 'void', [param('uint64_t', 'ts')]) ## synchronizer.h (module 'core'): void ns3::Synchronizer::Signal() [member function] cls.add_method('Signal', 'void', []) ## synchronizer.h (module 'core'): bool ns3::Synchronizer::Synchronize(uint64_t tsCurrent, uint64_t tsDelay) [member function] cls.add_method('Synchronize', 'bool', [param('uint64_t', 'tsCurrent'), param('uint64_t', 'tsDelay')]) ## synchronizer.h (module 'core'): uint64_t ns3::Synchronizer::DoEventEnd() [member function] cls.add_method('DoEventEnd', 'uint64_t', [], is_pure_virtual=True, visibility='protected', is_virtual=True) ## synchronizer.h (module 'core'): void ns3::Synchronizer::DoEventStart() [member function] cls.add_method('DoEventStart', 'void', [], is_pure_virtual=True, visibility='protected', is_virtual=True) ## synchronizer.h (module 'core'): uint64_t ns3::Synchronizer::DoGetCurrentRealtime() [member function] cls.add_method('DoGetCurrentRealtime', 'uint64_t', [], is_pure_virtual=True, visibility='protected', is_virtual=True) ## synchronizer.h (module 'core'): int64_t ns3::Synchronizer::DoGetDrift(uint64_t ns) [member function] 
cls.add_method('DoGetDrift', 'int64_t', [param('uint64_t', 'ns')], is_pure_virtual=True, visibility='protected', is_virtual=True) ## synchronizer.h (module 'core'): bool ns3::Synchronizer::DoRealtime() [member function] cls.add_method('DoRealtime', 'bool', [], is_pure_virtual=True, visibility='protected', is_virtual=True) ## synchronizer.h (module 'core'): void ns3::Synchronizer::DoSetCondition(bool arg0) [member function] cls.add_method('DoSetCondition', 'void', [param('bool', 'arg0')], is_pure_virtual=True, visibility='protected', is_virtual=True) ## synchronizer.h (module 'core'): void ns3::Synchronizer::DoSetOrigin(uint64_t ns) [member function] cls.add_method('DoSetOrigin', 'void', [param('uint64_t', 'ns')], is_pure_virtual=True, visibility='protected', is_virtual=True) ## synchronizer.h (module 'core'): void ns3::Synchronizer::DoSignal() [member function] cls.add_method('DoSignal', 'void', [], is_pure_virtual=True, visibility='protected', is_virtual=True) ## synchronizer.h (module 'core'): bool ns3::Synchronizer::DoSynchronize(uint64_t nsCurrent, uint64_t nsDelay) [member function] cls.add_method('DoSynchronize', 'bool', [param('uint64_t', 'nsCurrent'), param('uint64_t', 'nsDelay')], is_pure_virtual=True, visibility='protected', is_virtual=True) return def register_Ns3SystemThread_methods(root_module, cls): ## system-thread.h (module 'core'): ns3::SystemThread::SystemThread(ns3::SystemThread const & arg0) [copy constructor] cls.add_constructor([param('ns3::SystemThread const &', 'arg0')]) ## system-thread.h (module 'core'): ns3::SystemThread::SystemThread(ns3::Callback<void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [constructor] cls.add_constructor([param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')]) ## system-thread.h (module 'core'): static bool ns3::SystemThread::Equals(pthread_t 
id) [member function] cls.add_method('Equals', 'bool', [param('pthread_t', 'id')], is_static=True) ## system-thread.h (module 'core'): void ns3::SystemThread::Join() [member function] cls.add_method('Join', 'void', []) ## system-thread.h (module 'core'): static pthread_t ns3::SystemThread::Self() [member function] cls.add_method('Self', 'pthread_t', [], is_static=True) ## system-thread.h (module 'core'): void ns3::SystemThread::Start() [member function] cls.add_method('Start', 'void', []) return def register_Ns3Time_methods(root_module, cls): cls.add_binary_numeric_operator('*', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right')) cls.add_binary_numeric_operator('+', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right')) cls.add_binary_numeric_operator('-', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right')) cls.add_binary_numeric_operator('/', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right')) cls.add_binary_comparison_operator('<') cls.add_binary_comparison_operator('>') cls.add_binary_comparison_operator('!=') cls.add_inplace_numeric_operator('+=', param('ns3::Time const &', u'right')) cls.add_inplace_numeric_operator('-=', param('ns3::Time const &', u'right')) cls.add_output_stream_operator() cls.add_binary_comparison_operator('<=') cls.add_binary_comparison_operator('==') cls.add_binary_comparison_operator('>=') ## nstime.h (module 'core'): ns3::Time::Time() [constructor] cls.add_constructor([]) ## nstime.h (module 'core'): ns3::Time::Time(ns3::Time const & o) [copy constructor] cls.add_constructor([param('ns3::Time const &', 'o')]) ## nstime.h (module 'core'): ns3::Time::Time(double v) [constructor] cls.add_constructor([param('double', 'v')]) ## nstime.h (module 'core'): ns3::Time::Time(int v) [constructor] cls.add_constructor([param('int', 'v')]) ## nstime.h (module 'core'): ns3::Time::Time(long int v) [constructor] 
# NOTE(review): PyBindGen-generated ns-3 'core' binding registrations. Do not edit
# the string arguments by hand — they must match the C++ declarations exactly and
# this file is normally regenerated. This chunk begins mid-way through
# register_Ns3Time_methods (its 'def' line is above this chunk).
    cls.add_constructor([param('long int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long long int v) [constructor]
    cls.add_constructor([param('long long int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(unsigned int v) [constructor]
    cls.add_constructor([param('unsigned int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long unsigned int v) [constructor]
    cls.add_constructor([param('long unsigned int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long long unsigned int v) [constructor]
    cls.add_constructor([param('long long unsigned int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(ns3::int64x64_t const & v) [constructor]
    cls.add_constructor([param('ns3::int64x64_t const &', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(std::string const & s) [constructor]
    cls.add_constructor([param('std::string const &', 's')])
    ## nstime.h (module 'core'): ns3::TimeWithUnit ns3::Time::As(ns3::Time::Unit const unit) const [member function]
    cls.add_method('As', 'ns3::TimeWithUnit', [param('ns3::Time::Unit const', 'unit')], is_const=True)
    ## nstime.h (module 'core'): int ns3::Time::Compare(ns3::Time const & o) const [member function]
    cls.add_method('Compare', 'int', [param('ns3::Time const &', 'o')], is_const=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & value) [member function]
    cls.add_method('From', 'ns3::Time', [param('ns3::int64x64_t const &', 'value')], is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & value, ns3::Time::Unit unit) [member function]
    cls.add_method('From', 'ns3::Time', [param('ns3::int64x64_t const &', 'value'), param('ns3::Time::Unit', 'unit')], is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::FromDouble(double value, ns3::Time::Unit unit) [member function]
    cls.add_method('FromDouble', 'ns3::Time', [param('double', 'value'), param('ns3::Time::Unit', 'unit')], is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::FromInteger(uint64_t value, ns3::Time::Unit unit) [member function]
    cls.add_method('FromInteger', 'ns3::Time', [param('uint64_t', 'value'), param('ns3::Time::Unit', 'unit')], is_static=True)
    ## nstime.h (module 'core'): double ns3::Time::GetDays() const [member function]
    cls.add_method('GetDays', 'double', [], is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetDouble() const [member function]
    cls.add_method('GetDouble', 'double', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetFemtoSeconds() const [member function]
    cls.add_method('GetFemtoSeconds', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetHours() const [member function]
    cls.add_method('GetHours', 'double', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetInteger() const [member function]
    cls.add_method('GetInteger', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetMicroSeconds() const [member function]
    cls.add_method('GetMicroSeconds', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetMilliSeconds() const [member function]
    cls.add_method('GetMilliSeconds', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetMinutes() const [member function]
    cls.add_method('GetMinutes', 'double', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetNanoSeconds() const [member function]
    cls.add_method('GetNanoSeconds', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetPicoSeconds() const [member function]
    cls.add_method('GetPicoSeconds', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): static ns3::Time::Unit ns3::Time::GetResolution() [member function]
    cls.add_method('GetResolution', 'ns3::Time::Unit', [], is_static=True)
    ## nstime.h (module 'core'): double ns3::Time::GetSeconds() const [member function]
    cls.add_method('GetSeconds', 'double', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetTimeStep() const [member function]
    cls.add_method('GetTimeStep', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetYears() const [member function]
    cls.add_method('GetYears', 'double', [], is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsNegative() const [member function]
    cls.add_method('IsNegative', 'bool', [], is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsPositive() const [member function]
    cls.add_method('IsPositive', 'bool', [], is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsStrictlyNegative() const [member function]
    cls.add_method('IsStrictlyNegative', 'bool', [], is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsStrictlyPositive() const [member function]
    cls.add_method('IsStrictlyPositive', 'bool', [], is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsZero() const [member function]
    cls.add_method('IsZero', 'bool', [], is_const=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::Max() [member function]
    cls.add_method('Max', 'ns3::Time', [], is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::Min() [member function]
    cls.add_method('Min', 'ns3::Time', [], is_static=True)
    ## nstime.h (module 'core'): static void ns3::Time::SetResolution(ns3::Time::Unit resolution) [member function]
    cls.add_method('SetResolution', 'void', [param('ns3::Time::Unit', 'resolution')], is_static=True)
    ## nstime.h (module 'core'): static bool ns3::Time::StaticInit() [member function]
    cls.add_method('StaticInit', 'bool', [], is_static=True)
    ## nstime.h (module 'core'): ns3::int64x64_t ns3::Time::To(ns3::Time::Unit unit) const [member function]
    cls.add_method('To', 'ns3::int64x64_t', [param('ns3::Time::Unit', 'unit')], is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::ToDouble(ns3::Time::Unit unit) const [member function]
    cls.add_method('ToDouble', 'double', [param('ns3::Time::Unit', 'unit')], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::ToInteger(ns3::Time::Unit unit) const [member function]
    cls.add_method('ToInteger', 'int64_t', [param('ns3::Time::Unit', 'unit')], is_const=True)
    return

def register_Ns3TraceSourceAccessor_methods(root_module, cls):
    """Register Python bindings for the abstract ns3::TraceSourceAccessor interface."""
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor(ns3::TraceSourceAccessor const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor() [constructor]
    cls.add_constructor([])
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Connect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('Connect', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::ConnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('ConnectWithoutContext', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Disconnect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('Disconnect', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::DisconnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('DisconnectWithoutContext', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True)
    return

def register_Ns3TriangularRandomVariable_methods(root_module, cls):
    """Register Python bindings for ns3::TriangularRandomVariable."""
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::TriangularRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::TriangularRandomVariable::TriangularRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetMean() const [member function]
    cls.add_method('GetMean', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetMin() const [member function]
    cls.add_method('GetMin', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetMax() const [member function]
    cls.add_method('GetMax', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetValue(double mean, double min, double max) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'mean'), param('double', 'min'), param('double', 'max')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::TriangularRandomVariable::GetInteger(uint32_t mean, uint32_t min, uint32_t max) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'mean'), param('uint32_t', 'min'), param('uint32_t', 'max')])
    ## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::TriangularRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3UniformRandomVariable_methods(root_module, cls):
    """Register Python bindings for ns3::UniformRandomVariable."""
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::UniformRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::UniformRandomVariable::UniformRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::UniformRandomVariable::GetMin() const [member function]
    cls.add_method('GetMin', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::UniformRandomVariable::GetMax() const [member function]
    cls.add_method('GetMax', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::UniformRandomVariable::GetValue(double min, double max) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'min'), param('double', 'max')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::UniformRandomVariable::GetInteger(uint32_t min, uint32_t max) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'min'), param('uint32_t', 'max')])
    ## random-variable-stream.h (module 'core'): double ns3::UniformRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::UniformRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3WallClockSynchronizer_methods(root_module, cls):
    """Register Python bindings for ns3::WallClockSynchronizer (realtime scheduler support)."""
    ## wall-clock-synchronizer.h (module 'core'): ns3::WallClockSynchronizer::WallClockSynchronizer(ns3::WallClockSynchronizer const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::WallClockSynchronizer const &', 'arg0')])
    ## wall-clock-synchronizer.h (module 'core'): ns3::WallClockSynchronizer::WallClockSynchronizer() [constructor]
    cls.add_constructor([])
    ## wall-clock-synchronizer.h (module 'core'): static ns3::TypeId ns3::WallClockSynchronizer::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## wall-clock-synchronizer.h (module 'core'): ns3::WallClockSynchronizer::NS_PER_SEC [variable]
    cls.add_static_attribute('NS_PER_SEC', 'uint64_t const', is_const=True)
    ## wall-clock-synchronizer.h (module 'core'): ns3::WallClockSynchronizer::US_PER_NS [variable]
    cls.add_static_attribute('US_PER_NS', 'uint64_t const', is_const=True)
    ## wall-clock-synchronizer.h (module 'core'): ns3::WallClockSynchronizer::US_PER_SEC [variable]
    cls.add_static_attribute('US_PER_SEC', 'uint64_t const', is_const=True)
    ## wall-clock-synchronizer.h (module 'core'): uint64_t ns3::WallClockSynchronizer::DoEventEnd() [member function]
    cls.add_method('DoEventEnd', 'uint64_t', [], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): void ns3::WallClockSynchronizer::DoEventStart() [member function]
    cls.add_method('DoEventStart', 'void', [], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): uint64_t ns3::WallClockSynchronizer::DoGetCurrentRealtime() [member function]
    cls.add_method('DoGetCurrentRealtime', 'uint64_t', [], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): int64_t ns3::WallClockSynchronizer::DoGetDrift(uint64_t ns) [member function]
    cls.add_method('DoGetDrift', 'int64_t', [param('uint64_t', 'ns')], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): bool ns3::WallClockSynchronizer::DoRealtime() [member function]
    cls.add_method('DoRealtime', 'bool', [], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): void ns3::WallClockSynchronizer::DoSetCondition(bool cond) [member function]
    cls.add_method('DoSetCondition', 'void', [param('bool', 'cond')], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): void ns3::WallClockSynchronizer::DoSetOrigin(uint64_t ns) [member function]
    cls.add_method('DoSetOrigin', 'void', [param('uint64_t', 'ns')], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): void ns3::WallClockSynchronizer::DoSignal() [member function]
    cls.add_method('DoSignal', 'void', [], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): bool ns3::WallClockSynchronizer::DoSynchronize(uint64_t nsCurrent, uint64_t nsDelay) [member function]
    cls.add_method('DoSynchronize', 'bool', [param('uint64_t', 'nsCurrent'), param('uint64_t', 'nsDelay')], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): uint64_t ns3::WallClockSynchronizer::DriftCorrect(uint64_t nsNow, uint64_t nsDelay) [member function]
    cls.add_method('DriftCorrect', 'uint64_t', [param('uint64_t', 'nsNow'), param('uint64_t', 'nsDelay')], visibility='protected')
    ## wall-clock-synchronizer.h (module 'core'): uint64_t ns3::WallClockSynchronizer::GetNormalizedRealtime() [member function]
    cls.add_method('GetNormalizedRealtime', 'uint64_t', [], visibility='protected')
    ## wall-clock-synchronizer.h (module 'core'): uint64_t ns3::WallClockSynchronizer::GetRealtime() [member function]
    cls.add_method('GetRealtime', 'uint64_t', [], visibility='protected')
    ## wall-clock-synchronizer.h (module 'core'): void ns3::WallClockSynchronizer::NsToTimeval(int64_t ns, timeval * tv) [member function]
    cls.add_method('NsToTimeval', 'void', [param('int64_t', 'ns'), param('timeval *', 'tv')], visibility='protected')
    ## wall-clock-synchronizer.h (module 'core'): bool ns3::WallClockSynchronizer::SleepWait(uint64_t ns) [member function]
    cls.add_method('SleepWait', 'bool', [param('uint64_t', 'ns')], visibility='protected')
    ## wall-clock-synchronizer.h (module 'core'): bool ns3::WallClockSynchronizer::SpinWait(uint64_t ns) [member function]
    cls.add_method('SpinWait', 'bool', [param('uint64_t', 'ns')], visibility='protected')
    ## wall-clock-synchronizer.h (module 'core'): void ns3::WallClockSynchronizer::TimevalAdd(timeval * tv1, timeval * tv2, timeval * result) [member function]
    cls.add_method('TimevalAdd', 'void', [param('timeval *', 'tv1'), param('timeval *', 'tv2'), param('timeval *', 'result')], visibility='protected')
    ## wall-clock-synchronizer.h (module 'core'): uint64_t ns3::WallClockSynchronizer::TimevalToNs(timeval * tv) [member function]
    cls.add_method('TimevalToNs', 'uint64_t', [param('timeval *', 'tv')], visibility='protected')
    return

def register_Ns3WeibullRandomVariable_methods(root_module, cls):
    """Register Python bindings for ns3::WeibullRandomVariable."""
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::WeibullRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::WeibullRandomVariable::WeibullRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetScale() const [member function]
    cls.add_method('GetScale', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetShape() const [member function]
    cls.add_method('GetShape', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetBound() const [member function]
    cls.add_method('GetBound', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetValue(double scale, double shape, double bound) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'scale'), param('double', 'shape'), param('double', 'bound')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::WeibullRandomVariable::GetInteger(uint32_t scale, uint32_t shape, uint32_t bound) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'scale'), param('uint32_t', 'shape'), param('uint32_t', 'bound')])
    ## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::WeibullRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3ZetaRandomVariable_methods(root_module, cls):
    """Register Python bindings for ns3::ZetaRandomVariable."""
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ZetaRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::ZetaRandomVariable::ZetaRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::ZetaRandomVariable::GetAlpha() const [member function]
    cls.add_method('GetAlpha', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ZetaRandomVariable::GetValue(double alpha) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'alpha')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ZetaRandomVariable::GetInteger(uint32_t alpha) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'alpha')])
    ## random-variable-stream.h (module 'core'): double ns3::ZetaRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ZetaRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3ZipfRandomVariable_methods(root_module, cls):
    """Register Python bindings for ns3::ZipfRandomVariable."""
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ZipfRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::ZipfRandomVariable::ZipfRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ZipfRandomVariable::GetN() const [member function]
    cls.add_method('GetN', 'uint32_t', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ZipfRandomVariable::GetAlpha() const [member function]
    cls.add_method('GetAlpha', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ZipfRandomVariable::GetValue(uint32_t n, double alpha) [member function]
    cls.add_method('GetValue', 'double', [param('uint32_t', 'n'), param('double', 'alpha')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ZipfRandomVariable::GetInteger(uint32_t n, uint32_t alpha) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'n'), param('uint32_t', 'alpha')])
    ## random-variable-stream.h (module 'core'): double ns3::ZipfRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ZipfRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3AttributeAccessor_methods(root_module, cls):
    """Register Python bindings for the abstract ns3::AttributeAccessor interface."""
    ## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor(ns3::AttributeAccessor const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
    cls.add_method('Get', 'bool', [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasGetter() const [member function]
    cls.add_method('HasGetter', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasSetter() const [member function]
    cls.add_method('HasSetter', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
    cls.add_method('Set', 'bool', [param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')], is_pure_virtual=True, is_const=True, is_virtual=True)
    return

def register_Ns3AttributeChecker_methods(root_module, cls):
    """Register Python bindings for the abstract ns3::AttributeChecker interface."""
    ## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker(ns3::AttributeChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
    cls.add_method('Check', 'bool', [param('ns3::AttributeValue const &', 'value')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
    cls.add_method('Copy', 'bool', [param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::Create() const [member function]
    cls.add_method('Create', 'ns3::Ptr< ns3::AttributeValue >', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::CreateValidValue(ns3::AttributeValue const & value) const [member function]
    cls.add_method('CreateValidValue', 'ns3::Ptr< ns3::AttributeValue >', [param('ns3::AttributeValue const &', 'value')], is_const=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetUnderlyingTypeInformation() const [member function]
    cls.add_method('GetUnderlyingTypeInformation', 'std::string', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetValueTypeName() const [member function]
    cls.add_method('GetValueTypeName', 'std::string', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::HasUnderlyingTypeInformation() const [member function]
    cls.add_method('HasUnderlyingTypeInformation', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    return

def register_Ns3AttributeValue_methods(root_module, cls):
    """Register Python bindings for the abstract ns3::AttributeValue base class."""
    ## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue(ns3::AttributeValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_pure_virtual=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_pure_virtual=True, is_const=True, is_virtual=True)
    return

def register_Ns3BooleanChecker_methods(root_module, cls):
    """Register Python bindings for ns3::BooleanChecker."""
    ## boolean.h (module 'core'): ns3::BooleanChecker::BooleanChecker() [constructor]
    cls.add_constructor([])
    ## boolean.h (module 'core'): ns3::BooleanChecker::BooleanChecker(ns3::BooleanChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::BooleanChecker const &', 'arg0')])
    return

def register_Ns3BooleanValue_methods(root_module, cls):
    """Register Python bindings for ns3::BooleanValue (bool attribute wrapper)."""
    cls.add_output_stream_operator()
    ## boolean.h (module 'core'): ns3::BooleanValue::BooleanValue(ns3::BooleanValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::BooleanValue const &', 'arg0')])
    ## boolean.h (module 'core'): ns3::BooleanValue::BooleanValue() [constructor]
    cls.add_constructor([])
    ## boolean.h (module 'core'): ns3::BooleanValue::BooleanValue(bool value) [constructor]
    cls.add_constructor([param('bool', 'value')])
    ## boolean.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::BooleanValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## boolean.h (module 'core'): bool ns3::BooleanValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    ## boolean.h (module 'core'): bool ns3::BooleanValue::Get() const [member function]
    cls.add_method('Get', 'bool', [], is_const=True)
    ## boolean.h (module 'core'): std::string ns3::BooleanValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    ## boolean.h (module 'core'): void ns3::BooleanValue::Set(bool value) [member function]
    cls.add_method('Set', 'void', [param('bool', 'value')])
    return

def register_Ns3CalendarScheduler_methods(root_module, cls):
    """Register Python bindings for ns3::CalendarScheduler (calendar-queue event scheduler)."""
    ## calendar-scheduler.h (module 'core'): ns3::CalendarScheduler::CalendarScheduler(ns3::CalendarScheduler const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CalendarScheduler const &', 'arg0')])
    ## calendar-scheduler.h (module 'core'): ns3::CalendarScheduler::CalendarScheduler() [constructor]
    cls.add_constructor([])
    ## calendar-scheduler.h (module 'core'): static ns3::TypeId ns3::CalendarScheduler::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## calendar-scheduler.h (module 'core'): void ns3::CalendarScheduler::Insert(ns3::Scheduler::Event const & ev) [member function]
    cls.add_method('Insert', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_virtual=True)
    ## calendar-scheduler.h (module 'core'): bool ns3::CalendarScheduler::IsEmpty() const [member function]
    cls.add_method('IsEmpty', 'bool', [], is_const=True, is_virtual=True)
    ## calendar-scheduler.h (module 'core'): ns3::Scheduler::Event ns3::CalendarScheduler::PeekNext() const [member function]
    cls.add_method('PeekNext', 'ns3::Scheduler::Event', [], is_const=True, is_virtual=True)
    ## calendar-scheduler.h (module 'core'): void ns3::CalendarScheduler::Remove(ns3::Scheduler::Event const & ev) [member function]
    cls.add_method('Remove', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_virtual=True)
    ## calendar-scheduler.h (module 'core'): ns3::Scheduler::Event ns3::CalendarScheduler::RemoveNext() [member function]
    cls.add_method('RemoveNext', 'ns3::Scheduler::Event', [], is_virtual=True)
    return

def register_Ns3CallbackChecker_methods(root_module, cls):
    """Register Python bindings for ns3::CallbackChecker."""
    ## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker(ns3::CallbackChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')])
    return

def register_Ns3CallbackImplBase_methods(root_module, cls):
    """Register Python bindings for the abstract ns3::CallbackImplBase."""
    ## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase(ns3::CallbackImplBase const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
    ## callback.h (module 'core'): std::string ns3::CallbackImplBase::GetTypeid() const [member function]
    cls.add_method('GetTypeid', 'std::string', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## callback.h (module 'core'): bool ns3::CallbackImplBase::IsEqual(ns3::Ptr<ns3::CallbackImplBase const> other) const [member function]
    cls.add_method('IsEqual', 'bool', [param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::Demangle(std::string const & mangled) [member function]
    cls.add_method('Demangle', 'std::string', [param('std::string const &', 'mangled')], is_static=True, visibility='protected')
    return

def register_Ns3CallbackValue_methods(root_module, cls):
    """Register Python bindings for ns3::CallbackValue (callback attribute wrapper)."""
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackBase const & base) [constructor]
    cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
    ## callback.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::CallbackValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## callback.h (module 'core'): bool ns3::CallbackValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    ## callback.h (module 'core'): std::string ns3::CallbackValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackValue::Set(ns3::CallbackBase base) [member function]
    cls.add_method('Set', 'void', [param('ns3::CallbackBase', 'base')])
    return

def register_Ns3ConstantRandomVariable_methods(root_module, cls):
    """Register Python bindings for ns3::ConstantRandomVariable."""
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ConstantRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::ConstantRandomVariable::ConstantRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::ConstantRandomVariable::GetConstant() const [member function]
    cls.add_method('GetConstant', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ConstantRandomVariable::GetValue(double constant) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'constant')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ConstantRandomVariable::GetInteger(uint32_t constant) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'constant')])
    ## random-variable-stream.h (module 'core'): double ns3::ConstantRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ConstantRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3DefaultSimulatorImpl_methods(root_module, cls):
    """Register Python bindings for ns3::DefaultSimulatorImpl (the default event-driven simulator backend)."""
    ## default-simulator-impl.h (module 'core'): ns3::DefaultSimulatorImpl::DefaultSimulatorImpl(ns3::DefaultSimulatorImpl const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::DefaultSimulatorImpl const &', 'arg0')])
    ## default-simulator-impl.h (module 'core'): ns3::DefaultSimulatorImpl::DefaultSimulatorImpl() [constructor]
    cls.add_constructor([])
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::Cancel(ns3::EventId const & id) [member function]
    cls.add_method('Cancel', 'void', [param('ns3::EventId const &', 'id')], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::Destroy() [member function]
    cls.add_method('Destroy', 'void', [], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): uint32_t ns3::DefaultSimulatorImpl::GetContext() const [member function]
    cls.add_method('GetContext', 'uint32_t', [], is_const=True, is_virtual=True)
    ## default-simulator-impl.h (module 'core'): ns3::Time ns3::DefaultSimulatorImpl::GetDelayLeft(ns3::EventId const & id) const [member function]
    cls.add_method('GetDelayLeft', 'ns3::Time', [param('ns3::EventId const &', 'id')], is_const=True, is_virtual=True)
    ## default-simulator-impl.h (module 'core'): ns3::Time ns3::DefaultSimulatorImpl::GetMaximumSimulationTime() const [member function]
    cls.add_method('GetMaximumSimulationTime', 'ns3::Time', [], is_const=True, is_virtual=True)
    ## default-simulator-impl.h (module 'core'): uint32_t ns3::DefaultSimulatorImpl::GetSystemId() const [member function]
    cls.add_method('GetSystemId', 'uint32_t', [], is_const=True, is_virtual=True)
    ## default-simulator-impl.h (module 'core'): static ns3::TypeId ns3::DefaultSimulatorImpl::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## default-simulator-impl.h (module 'core'): bool ns3::DefaultSimulatorImpl::IsExpired(ns3::EventId const & id) const [member function]
    cls.add_method('IsExpired', 'bool', [param('ns3::EventId const &', 'id')], is_const=True, is_virtual=True)
    ## default-simulator-impl.h (module 'core'): bool ns3::DefaultSimulatorImpl::IsFinished() const [member function]
    cls.add_method('IsFinished', 'bool', [], is_const=True, is_virtual=True)
    ## default-simulator-impl.h (module 'core'): ns3::Time ns3::DefaultSimulatorImpl::Now() const [member function]
    cls.add_method('Now', 'ns3::Time', [], is_const=True, is_virtual=True)
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::Remove(ns3::EventId const & id) [member function]
    cls.add_method('Remove', 'void', [param('ns3::EventId const &', 'id')], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::Run() [member function]
    cls.add_method('Run', 'void', [], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): ns3::EventId ns3::DefaultSimulatorImpl::Schedule(ns3::Time const & delay, ns3::EventImpl * event) [member function]
    cls.add_method('Schedule', 'ns3::EventId', [param('ns3::Time const &', 'delay'), param('ns3::EventImpl *', 'event')], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): ns3::EventId ns3::DefaultSimulatorImpl::ScheduleDestroy(ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleDestroy', 'ns3::EventId', [param('ns3::EventImpl *', 'event')], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): ns3::EventId ns3::DefaultSimulatorImpl::ScheduleNow(ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleNow', 'ns3::EventId', [param('ns3::EventImpl *', 'event')], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::ScheduleWithContext(uint32_t context, ns3::Time const & delay, ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleWithContext', 'void', [param('uint32_t', 'context'), param('ns3::Time const &', 'delay'), param('ns3::EventImpl *', 'event')], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::SetScheduler(ns3::ObjectFactory schedulerFactory) [member function]
    cls.add_method('SetScheduler', 'void', [param('ns3::ObjectFactory', 'schedulerFactory')], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::Stop() [member function]
    cls.add_method('Stop', 'void', [], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::Stop(ns3::Time const & delay) [member function]
    cls.add_method('Stop', 'void', [param('ns3::Time const &', 'delay')], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::DoDispose() [member function]
    cls.add_method('DoDispose', 'void', [], visibility='private', is_virtual=True)
    return

def register_Ns3DeterministicRandomVariable_methods(root_module, cls):
    """Register Python bindings for ns3::DeterministicRandomVariable."""
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::DeterministicRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::DeterministicRandomVariable::DeterministicRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): void ns3::DeterministicRandomVariable::SetValueArray(double * values, uint64_t length) [member function]
    cls.add_method('SetValueArray', 'void', [param('double *', 'values'), param('uint64_t', 'length')])
    ## random-variable-stream.h (module 'core'): double ns3::DeterministicRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::DeterministicRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3DoubleValue_methods(root_module, cls):
    """Register Python bindings for ns3::DoubleValue (continues past this chunk)."""
    ## double.h (module 'core'): ns3::DoubleValue::DoubleValue() [constructor]
    cls.add_constructor([])
    ## double.h
(module 'core'): ns3::DoubleValue::DoubleValue(ns3::DoubleValue const & arg0) [copy constructor] cls.add_constructor([param('ns3::DoubleValue const &', 'arg0')]) ## double.h (module 'core'): ns3::DoubleValue::DoubleValue(double const & value) [constructor] cls.add_constructor([param('double const &', 'value')]) ## double.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::DoubleValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## double.h (module 'core'): bool ns3::DoubleValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## double.h (module 'core'): double ns3::DoubleValue::Get() const [member function] cls.add_method('Get', 'double', [], is_const=True) ## double.h (module 'core'): std::string ns3::DoubleValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## double.h (module 'core'): void ns3::DoubleValue::Set(double const & value) [member function] cls.add_method('Set', 'void', [param('double const &', 'value')]) return def register_Ns3EmpiricalRandomVariable_methods(root_module, cls): ## random-variable-stream.h (module 'core'): ns3::EmpiricalRandomVariable::EmpiricalRandomVariable() [constructor] cls.add_constructor([]) ## random-variable-stream.h (module 'core'): void ns3::EmpiricalRandomVariable::CDF(double v, double c) [member function] cls.add_method('CDF', 'void', [param('double', 'v'), param('double', 'c')]) ## random-variable-stream.h (module 'core'): uint32_t ns3::EmpiricalRandomVariable::GetInteger() [member function] cls.add_method('GetInteger', 'uint32_t', [], 
is_virtual=True) ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::EmpiricalRandomVariable::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## random-variable-stream.h (module 'core'): double ns3::EmpiricalRandomVariable::GetValue() [member function] cls.add_method('GetValue', 'double', [], is_virtual=True) ## random-variable-stream.h (module 'core'): double ns3::EmpiricalRandomVariable::Interpolate(double c1, double c2, double v1, double v2, double r) [member function] cls.add_method('Interpolate', 'double', [param('double', 'c1'), param('double', 'c2'), param('double', 'v1'), param('double', 'v2'), param('double', 'r')], visibility='private', is_virtual=True) ## random-variable-stream.h (module 'core'): void ns3::EmpiricalRandomVariable::Validate() [member function] cls.add_method('Validate', 'void', [], visibility='private', is_virtual=True) return def register_Ns3EmptyAttributeAccessor_methods(root_module, cls): ## attribute.h (module 'core'): ns3::EmptyAttributeAccessor::EmptyAttributeAccessor(ns3::EmptyAttributeAccessor const & arg0) [copy constructor] cls.add_constructor([param('ns3::EmptyAttributeAccessor const &', 'arg0')]) ## attribute.h (module 'core'): ns3::EmptyAttributeAccessor::EmptyAttributeAccessor() [constructor] cls.add_constructor([]) ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function] cls.add_method('Get', 'bool', [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')], is_const=True, is_virtual=True) ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::HasGetter() const [member function] cls.add_method('HasGetter', 'bool', [], is_const=True, is_virtual=True) ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::HasSetter() const [member function] cls.add_method('HasSetter', 'bool', [], is_const=True, is_virtual=True) 
## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function] cls.add_method('Set', 'bool', [param('ns3::ObjectBase *', 'object'), param('ns3::AttributeValue const &', 'value')], is_const=True, is_virtual=True) return def register_Ns3EmptyAttributeChecker_methods(root_module, cls): ## attribute.h (module 'core'): ns3::EmptyAttributeChecker::EmptyAttributeChecker(ns3::EmptyAttributeChecker const & arg0) [copy constructor] cls.add_constructor([param('ns3::EmptyAttributeChecker const &', 'arg0')]) ## attribute.h (module 'core'): ns3::EmptyAttributeChecker::EmptyAttributeChecker() [constructor] cls.add_constructor([]) ## attribute.h (module 'core'): bool ns3::EmptyAttributeChecker::Check(ns3::AttributeValue const & value) const [member function] cls.add_method('Check', 'bool', [param('ns3::AttributeValue const &', 'value')], is_const=True, is_virtual=True) ## attribute.h (module 'core'): bool ns3::EmptyAttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function] cls.add_method('Copy', 'bool', [param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')], is_const=True, is_virtual=True) ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeChecker::Create() const [member function] cls.add_method('Create', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## attribute.h (module 'core'): std::string ns3::EmptyAttributeChecker::GetUnderlyingTypeInformation() const [member function] cls.add_method('GetUnderlyingTypeInformation', 'std::string', [], is_const=True, is_virtual=True) ## attribute.h (module 'core'): std::string ns3::EmptyAttributeChecker::GetValueTypeName() const [member function] cls.add_method('GetValueTypeName', 'std::string', [], is_const=True, is_virtual=True) ## attribute.h (module 'core'): bool 
ns3::EmptyAttributeChecker::HasUnderlyingTypeInformation() const [member function] cls.add_method('HasUnderlyingTypeInformation', 'bool', [], is_const=True, is_virtual=True) return def register_Ns3EmptyAttributeValue_methods(root_module, cls): ## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue(ns3::EmptyAttributeValue const & arg0) [copy constructor] cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')]) ## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue() [constructor] cls.add_constructor([]) ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, visibility='private', is_virtual=True) ## attribute.h (module 'core'): bool ns3::EmptyAttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], visibility='private', is_virtual=True) ## attribute.h (module 'core'): std::string ns3::EmptyAttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, visibility='private', is_virtual=True) return def register_Ns3EnumChecker_methods(root_module, cls): ## enum.h (module 'core'): ns3::EnumChecker::EnumChecker(ns3::EnumChecker const & arg0) [copy constructor] cls.add_constructor([param('ns3::EnumChecker const &', 'arg0')]) ## enum.h (module 'core'): ns3::EnumChecker::EnumChecker() [constructor] cls.add_constructor([]) ## enum.h (module 'core'): void ns3::EnumChecker::Add(int value, std::string name) [member function] cls.add_method('Add', 'void', [param('int', 'value'), param('std::string', 'name')]) 
## enum.h (module 'core'): void ns3::EnumChecker::AddDefault(int value, std::string name) [member function] cls.add_method('AddDefault', 'void', [param('int', 'value'), param('std::string', 'name')]) ## enum.h (module 'core'): bool ns3::EnumChecker::Check(ns3::AttributeValue const & value) const [member function] cls.add_method('Check', 'bool', [param('ns3::AttributeValue const &', 'value')], is_const=True, is_virtual=True) ## enum.h (module 'core'): bool ns3::EnumChecker::Copy(ns3::AttributeValue const & src, ns3::AttributeValue & dst) const [member function] cls.add_method('Copy', 'bool', [param('ns3::AttributeValue const &', 'src'), param('ns3::AttributeValue &', 'dst')], is_const=True, is_virtual=True) ## enum.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EnumChecker::Create() const [member function] cls.add_method('Create', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## enum.h (module 'core'): std::string ns3::EnumChecker::GetUnderlyingTypeInformation() const [member function] cls.add_method('GetUnderlyingTypeInformation', 'std::string', [], is_const=True, is_virtual=True) ## enum.h (module 'core'): std::string ns3::EnumChecker::GetValueTypeName() const [member function] cls.add_method('GetValueTypeName', 'std::string', [], is_const=True, is_virtual=True) ## enum.h (module 'core'): bool ns3::EnumChecker::HasUnderlyingTypeInformation() const [member function] cls.add_method('HasUnderlyingTypeInformation', 'bool', [], is_const=True, is_virtual=True) return def register_Ns3EnumValue_methods(root_module, cls): ## enum.h (module 'core'): ns3::EnumValue::EnumValue(ns3::EnumValue const & arg0) [copy constructor] cls.add_constructor([param('ns3::EnumValue const &', 'arg0')]) ## enum.h (module 'core'): ns3::EnumValue::EnumValue() [constructor] cls.add_constructor([]) ## enum.h (module 'core'): ns3::EnumValue::EnumValue(int value) [constructor] cls.add_constructor([param('int', 'value')]) ## enum.h (module 'core'): 
ns3::Ptr<ns3::AttributeValue> ns3::EnumValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## enum.h (module 'core'): bool ns3::EnumValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## enum.h (module 'core'): int ns3::EnumValue::Get() const [member function] cls.add_method('Get', 'int', [], is_const=True) ## enum.h (module 'core'): std::string ns3::EnumValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## enum.h (module 'core'): void ns3::EnumValue::Set(int value) [member function] cls.add_method('Set', 'void', [param('int', 'value')]) return def register_Ns3ErlangRandomVariable_methods(root_module, cls): ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ErlangRandomVariable::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## random-variable-stream.h (module 'core'): ns3::ErlangRandomVariable::ErlangRandomVariable() [constructor] cls.add_constructor([]) ## random-variable-stream.h (module 'core'): uint32_t ns3::ErlangRandomVariable::GetK() const [member function] cls.add_method('GetK', 'uint32_t', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::ErlangRandomVariable::GetLambda() const [member function] cls.add_method('GetLambda', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::ErlangRandomVariable::GetValue(uint32_t k, double lambda) [member function] cls.add_method('GetValue', 'double', [param('uint32_t', 'k'), param('double', 'lambda')]) ## 
random-variable-stream.h (module 'core'): uint32_t ns3::ErlangRandomVariable::GetInteger(uint32_t k, uint32_t lambda) [member function] cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'k'), param('uint32_t', 'lambda')]) ## random-variable-stream.h (module 'core'): double ns3::ErlangRandomVariable::GetValue() [member function] cls.add_method('GetValue', 'double', [], is_virtual=True) ## random-variable-stream.h (module 'core'): uint32_t ns3::ErlangRandomVariable::GetInteger() [member function] cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True) return def register_Ns3EventImpl_methods(root_module, cls): ## event-impl.h (module 'core'): ns3::EventImpl::EventImpl(ns3::EventImpl const & arg0) [copy constructor] cls.add_constructor([param('ns3::EventImpl const &', 'arg0')]) ## event-impl.h (module 'core'): ns3::EventImpl::EventImpl() [constructor] cls.add_constructor([]) ## event-impl.h (module 'core'): void ns3::EventImpl::Cancel() [member function] cls.add_method('Cancel', 'void', []) ## event-impl.h (module 'core'): void ns3::EventImpl::Invoke() [member function] cls.add_method('Invoke', 'void', []) ## event-impl.h (module 'core'): bool ns3::EventImpl::IsCancelled() [member function] cls.add_method('IsCancelled', 'bool', []) ## event-impl.h (module 'core'): void ns3::EventImpl::Notify() [member function] cls.add_method('Notify', 'void', [], is_pure_virtual=True, visibility='protected', is_virtual=True) return def register_Ns3ExponentialRandomVariable_methods(root_module, cls): ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ExponentialRandomVariable::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## random-variable-stream.h (module 'core'): ns3::ExponentialRandomVariable::ExponentialRandomVariable() [constructor] cls.add_constructor([]) ## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetMean() const [member function] cls.add_method('GetMean', 
'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetBound() const [member function] cls.add_method('GetBound', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetValue(double mean, double bound) [member function] cls.add_method('GetValue', 'double', [param('double', 'mean'), param('double', 'bound')]) ## random-variable-stream.h (module 'core'): uint32_t ns3::ExponentialRandomVariable::GetInteger(uint32_t mean, uint32_t bound) [member function] cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'mean'), param('uint32_t', 'bound')]) ## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetValue() [member function] cls.add_method('GetValue', 'double', [], is_virtual=True) ## random-variable-stream.h (module 'core'): uint32_t ns3::ExponentialRandomVariable::GetInteger() [member function] cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True) return def register_Ns3FdReader_methods(root_module, cls): ## unix-fd-reader.h (module 'core'): ns3::FdReader::FdReader(ns3::FdReader const & arg0) [copy constructor] cls.add_constructor([param('ns3::FdReader const &', 'arg0')]) ## unix-fd-reader.h (module 'core'): ns3::FdReader::FdReader() [constructor] cls.add_constructor([]) ## unix-fd-reader.h (module 'core'): void ns3::FdReader::Start(int fd, ns3::Callback<void, unsigned char*, int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> readCallback) [member function] cls.add_method('Start', 'void', [param('int', 'fd'), param('ns3::Callback< void, unsigned char *, int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'readCallback')]) ## unix-fd-reader.h (module 'core'): void ns3::FdReader::Stop() [member function] cls.add_method('Stop', 'void', []) ## unix-fd-reader.h (module 'core'): ns3::FdReader::Data ns3::FdReader::DoRead() [member 
function] cls.add_method('DoRead', 'ns3::FdReader::Data', [], is_pure_virtual=True, visibility='protected', is_virtual=True) return def register_Ns3GammaRandomVariable_methods(root_module, cls): ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::GammaRandomVariable::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## random-variable-stream.h (module 'core'): ns3::GammaRandomVariable::GammaRandomVariable() [constructor] cls.add_constructor([]) ## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetAlpha() const [member function] cls.add_method('GetAlpha', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetBeta() const [member function] cls.add_method('GetBeta', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetValue(double alpha, double beta) [member function] cls.add_method('GetValue', 'double', [param('double', 'alpha'), param('double', 'beta')]) ## random-variable-stream.h (module 'core'): uint32_t ns3::GammaRandomVariable::GetInteger(uint32_t alpha, uint32_t beta) [member function] cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'alpha'), param('uint32_t', 'beta')]) ## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetValue() [member function] cls.add_method('GetValue', 'double', [], is_virtual=True) ## random-variable-stream.h (module 'core'): uint32_t ns3::GammaRandomVariable::GetInteger() [member function] cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True) return def register_Ns3HeapScheduler_methods(root_module, cls): ## heap-scheduler.h (module 'core'): ns3::HeapScheduler::HeapScheduler(ns3::HeapScheduler const & arg0) [copy constructor] cls.add_constructor([param('ns3::HeapScheduler const &', 'arg0')]) ## heap-scheduler.h (module 'core'): ns3::HeapScheduler::HeapScheduler() [constructor] 
cls.add_constructor([]) ## heap-scheduler.h (module 'core'): static ns3::TypeId ns3::HeapScheduler::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## heap-scheduler.h (module 'core'): void ns3::HeapScheduler::Insert(ns3::Scheduler::Event const & ev) [member function] cls.add_method('Insert', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_virtual=True) ## heap-scheduler.h (module 'core'): bool ns3::HeapScheduler::IsEmpty() const [member function] cls.add_method('IsEmpty', 'bool', [], is_const=True, is_virtual=True) ## heap-scheduler.h (module 'core'): ns3::Scheduler::Event ns3::HeapScheduler::PeekNext() const [member function] cls.add_method('PeekNext', 'ns3::Scheduler::Event', [], is_const=True, is_virtual=True) ## heap-scheduler.h (module 'core'): void ns3::HeapScheduler::Remove(ns3::Scheduler::Event const & ev) [member function] cls.add_method('Remove', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_virtual=True) ## heap-scheduler.h (module 'core'): ns3::Scheduler::Event ns3::HeapScheduler::RemoveNext() [member function] cls.add_method('RemoveNext', 'ns3::Scheduler::Event', [], is_virtual=True) return def register_Ns3IntegerValue_methods(root_module, cls): ## integer.h (module 'core'): ns3::IntegerValue::IntegerValue() [constructor] cls.add_constructor([]) ## integer.h (module 'core'): ns3::IntegerValue::IntegerValue(ns3::IntegerValue const & arg0) [copy constructor] cls.add_constructor([param('ns3::IntegerValue const &', 'arg0')]) ## integer.h (module 'core'): ns3::IntegerValue::IntegerValue(int64_t const & value) [constructor] cls.add_constructor([param('int64_t const &', 'value')]) ## integer.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::IntegerValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## integer.h (module 'core'): bool ns3::IntegerValue::DeserializeFromString(std::string value, 
ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## integer.h (module 'core'): int64_t ns3::IntegerValue::Get() const [member function] cls.add_method('Get', 'int64_t', [], is_const=True) ## integer.h (module 'core'): std::string ns3::IntegerValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## integer.h (module 'core'): void ns3::IntegerValue::Set(int64_t const & value) [member function] cls.add_method('Set', 'void', [param('int64_t const &', 'value')]) return def register_Ns3ListScheduler_methods(root_module, cls): ## list-scheduler.h (module 'core'): ns3::ListScheduler::ListScheduler(ns3::ListScheduler const & arg0) [copy constructor] cls.add_constructor([param('ns3::ListScheduler const &', 'arg0')]) ## list-scheduler.h (module 'core'): ns3::ListScheduler::ListScheduler() [constructor] cls.add_constructor([]) ## list-scheduler.h (module 'core'): static ns3::TypeId ns3::ListScheduler::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## list-scheduler.h (module 'core'): void ns3::ListScheduler::Insert(ns3::Scheduler::Event const & ev) [member function] cls.add_method('Insert', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_virtual=True) ## list-scheduler.h (module 'core'): bool ns3::ListScheduler::IsEmpty() const [member function] cls.add_method('IsEmpty', 'bool', [], is_const=True, is_virtual=True) ## list-scheduler.h (module 'core'): ns3::Scheduler::Event ns3::ListScheduler::PeekNext() const [member function] cls.add_method('PeekNext', 'ns3::Scheduler::Event', [], is_const=True, is_virtual=True) ## list-scheduler.h (module 'core'): void 
ns3::ListScheduler::Remove(ns3::Scheduler::Event const & ev) [member function] cls.add_method('Remove', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_virtual=True) ## list-scheduler.h (module 'core'): ns3::Scheduler::Event ns3::ListScheduler::RemoveNext() [member function] cls.add_method('RemoveNext', 'ns3::Scheduler::Event', [], is_virtual=True) return def register_Ns3LogNormalRandomVariable_methods(root_module, cls): ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::LogNormalRandomVariable::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## random-variable-stream.h (module 'core'): ns3::LogNormalRandomVariable::LogNormalRandomVariable() [constructor] cls.add_constructor([]) ## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetMu() const [member function] cls.add_method('GetMu', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetSigma() const [member function] cls.add_method('GetSigma', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetValue(double mu, double sigma) [member function] cls.add_method('GetValue', 'double', [param('double', 'mu'), param('double', 'sigma')]) ## random-variable-stream.h (module 'core'): uint32_t ns3::LogNormalRandomVariable::GetInteger(uint32_t mu, uint32_t sigma) [member function] cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'mu'), param('uint32_t', 'sigma')]) ## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetValue() [member function] cls.add_method('GetValue', 'double', [], is_virtual=True) ## random-variable-stream.h (module 'core'): uint32_t ns3::LogNormalRandomVariable::GetInteger() [member function] cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True) return def register_Ns3MapScheduler_methods(root_module, cls): ## map-scheduler.h (module 
'core'): ns3::MapScheduler::MapScheduler(ns3::MapScheduler const & arg0) [copy constructor] cls.add_constructor([param('ns3::MapScheduler const &', 'arg0')]) ## map-scheduler.h (module 'core'): ns3::MapScheduler::MapScheduler() [constructor] cls.add_constructor([]) ## map-scheduler.h (module 'core'): static ns3::TypeId ns3::MapScheduler::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## map-scheduler.h (module 'core'): void ns3::MapScheduler::Insert(ns3::Scheduler::Event const & ev) [member function] cls.add_method('Insert', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_virtual=True) ## map-scheduler.h (module 'core'): bool ns3::MapScheduler::IsEmpty() const [member function] cls.add_method('IsEmpty', 'bool', [], is_const=True, is_virtual=True) ## map-scheduler.h (module 'core'): ns3::Scheduler::Event ns3::MapScheduler::PeekNext() const [member function] cls.add_method('PeekNext', 'ns3::Scheduler::Event', [], is_const=True, is_virtual=True) ## map-scheduler.h (module 'core'): void ns3::MapScheduler::Remove(ns3::Scheduler::Event const & ev) [member function] cls.add_method('Remove', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_virtual=True) ## map-scheduler.h (module 'core'): ns3::Scheduler::Event ns3::MapScheduler::RemoveNext() [member function] cls.add_method('RemoveNext', 'ns3::Scheduler::Event', [], is_virtual=True) return def register_Ns3NormalRandomVariable_methods(root_module, cls): ## random-variable-stream.h (module 'core'): ns3::NormalRandomVariable::INFINITE_VALUE [variable] cls.add_static_attribute('INFINITE_VALUE', 'double const', is_const=True) ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::NormalRandomVariable::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## random-variable-stream.h (module 'core'): ns3::NormalRandomVariable::NormalRandomVariable() [constructor] cls.add_constructor([]) ## 
random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetMean() const [member function] cls.add_method('GetMean', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetVariance() const [member function] cls.add_method('GetVariance', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetBound() const [member function] cls.add_method('GetBound', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetValue(double mean, double variance, double bound=ns3::NormalRandomVariable::INFINITE_VALUE) [member function] cls.add_method('GetValue', 'double', [param('double', 'mean'), param('double', 'variance'), param('double', 'bound', default_value='ns3::NormalRandomVariable::INFINITE_VALUE')]) ## random-variable-stream.h (module 'core'): uint32_t ns3::NormalRandomVariable::GetInteger(uint32_t mean, uint32_t variance, uint32_t bound) [member function] cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'mean'), param('uint32_t', 'variance'), param('uint32_t', 'bound')]) ## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetValue() [member function] cls.add_method('GetValue', 'double', [], is_virtual=True) ## random-variable-stream.h (module 'core'): uint32_t ns3::NormalRandomVariable::GetInteger() [member function] cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True) return def register_Ns3ObjectFactoryChecker_methods(root_module, cls): ## object-factory.h (module 'core'): ns3::ObjectFactoryChecker::ObjectFactoryChecker() [constructor] cls.add_constructor([]) ## object-factory.h (module 'core'): ns3::ObjectFactoryChecker::ObjectFactoryChecker(ns3::ObjectFactoryChecker const & arg0) [copy constructor] cls.add_constructor([param('ns3::ObjectFactoryChecker const &', 'arg0')]) return def register_Ns3ObjectFactoryValue_methods(root_module, cls): ## 
object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue() [constructor] cls.add_constructor([]) ## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactoryValue const & arg0) [copy constructor] cls.add_constructor([param('ns3::ObjectFactoryValue const &', 'arg0')]) ## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactory const & value) [constructor] cls.add_constructor([param('ns3::ObjectFactory const &', 'value')]) ## object-factory.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::ObjectFactoryValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## object-factory.h (module 'core'): bool ns3::ObjectFactoryValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## object-factory.h (module 'core'): ns3::ObjectFactory ns3::ObjectFactoryValue::Get() const [member function] cls.add_method('Get', 'ns3::ObjectFactory', [], is_const=True) ## object-factory.h (module 'core'): std::string ns3::ObjectFactoryValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## object-factory.h (module 'core'): void ns3::ObjectFactoryValue::Set(ns3::ObjectFactory const & value) [member function] cls.add_method('Set', 'void', [param('ns3::ObjectFactory const &', 'value')]) return def register_Ns3ObjectPtrContainerAccessor_methods(root_module, cls): ## object-ptr-container.h (module 'core'): ns3::ObjectPtrContainerAccessor::ObjectPtrContainerAccessor() [constructor] cls.add_constructor([]) ## 
object-ptr-container.h (module 'core'): ns3::ObjectPtrContainerAccessor::ObjectPtrContainerAccessor(ns3::ObjectPtrContainerAccessor const & arg0) [copy constructor] cls.add_constructor([param('ns3::ObjectPtrContainerAccessor const &', 'arg0')]) ## object-ptr-container.h (module 'core'): bool ns3::ObjectPtrContainerAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & value) const [member function] cls.add_method('Get', 'bool', [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'value')], is_const=True, is_virtual=True) ## object-ptr-container.h (module 'core'): bool ns3::ObjectPtrContainerAccessor::HasGetter() const [member function] cls.add_method('HasGetter', 'bool', [], is_const=True, is_virtual=True) ## object-ptr-container.h (module 'core'): bool ns3::ObjectPtrContainerAccessor::HasSetter() const [member function] cls.add_method('HasSetter', 'bool', [], is_const=True, is_virtual=True) ## object-ptr-container.h (module 'core'): bool ns3::ObjectPtrContainerAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function] cls.add_method('Set', 'bool', [param('ns3::ObjectBase *', 'object'), param('ns3::AttributeValue const &', 'value')], is_const=True, is_virtual=True) ## object-ptr-container.h (module 'core'): ns3::Ptr<ns3::Object> ns3::ObjectPtrContainerAccessor::DoGet(ns3::ObjectBase const * object, uint32_t i, uint32_t * index) const [member function] cls.add_method('DoGet', 'ns3::Ptr< ns3::Object >', [param('ns3::ObjectBase const *', 'object'), param('uint32_t', 'i'), param('uint32_t *', 'index')], is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True) ## object-ptr-container.h (module 'core'): bool ns3::ObjectPtrContainerAccessor::DoGetN(ns3::ObjectBase const * object, uint32_t * n) const [member function] cls.add_method('DoGetN', 'bool', [param('ns3::ObjectBase const *', 'object'), param('uint32_t *', 'n')], is_pure_virtual=True, is_const=True, visibility='private', 
is_virtual=True) return def register_Ns3ObjectPtrContainerChecker_methods(root_module, cls): ## object-ptr-container.h (module 'core'): ns3::ObjectPtrContainerChecker::ObjectPtrContainerChecker() [constructor] cls.add_constructor([]) ## object-ptr-container.h (module 'core'): ns3::ObjectPtrContainerChecker::ObjectPtrContainerChecker(ns3::ObjectPtrContainerChecker const & arg0) [copy constructor] cls.add_constructor([param('ns3::ObjectPtrContainerChecker const &', 'arg0')]) ## object-ptr-container.h (module 'core'): ns3::TypeId ns3::ObjectPtrContainerChecker::GetItemTypeId() const [member function] cls.add_method('GetItemTypeId', 'ns3::TypeId', [], is_pure_virtual=True, is_const=True, is_virtual=True) return def register_Ns3ObjectPtrContainerValue_methods(root_module, cls): ## object-ptr-container.h (module 'core'): ns3::ObjectPtrContainerValue::ObjectPtrContainerValue(ns3::ObjectPtrContainerValue const & arg0) [copy constructor] cls.add_constructor([param('ns3::ObjectPtrContainerValue const &', 'arg0')]) ## object-ptr-container.h (module 'core'): ns3::ObjectPtrContainerValue::ObjectPtrContainerValue() [constructor] cls.add_constructor([]) ## object-ptr-container.h (module 'core'): std::_Rb_tree_const_iterator<std::pair<const unsigned int, ns3::Ptr<ns3::Object> > > ns3::ObjectPtrContainerValue::Begin() const [member function] cls.add_method('Begin', 'std::_Rb_tree_const_iterator< std::pair< unsigned int const, ns3::Ptr< ns3::Object > > >', [], is_const=True) ## object-ptr-container.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::ObjectPtrContainerValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## object-ptr-container.h (module 'core'): bool ns3::ObjectPtrContainerValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< 
ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## object-ptr-container.h (module 'core'): std::_Rb_tree_const_iterator<std::pair<const unsigned int, ns3::Ptr<ns3::Object> > > ns3::ObjectPtrContainerValue::End() const [member function] cls.add_method('End', 'std::_Rb_tree_const_iterator< std::pair< unsigned int const, ns3::Ptr< ns3::Object > > >', [], is_const=True) ## object-ptr-container.h (module 'core'): ns3::Ptr<ns3::Object> ns3::ObjectPtrContainerValue::Get(uint32_t i) const [member function] cls.add_method('Get', 'ns3::Ptr< ns3::Object >', [param('uint32_t', 'i')], is_const=True) ## object-ptr-container.h (module 'core'): uint32_t ns3::ObjectPtrContainerValue::GetN() const [member function] cls.add_method('GetN', 'uint32_t', [], is_const=True) ## object-ptr-container.h (module 'core'): std::string ns3::ObjectPtrContainerValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) return def register_Ns3ParetoRandomVariable_methods(root_module, cls): ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ParetoRandomVariable::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## random-variable-stream.h (module 'core'): ns3::ParetoRandomVariable::ParetoRandomVariable() [constructor] cls.add_constructor([]) ## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetMean() const [member function] cls.add_method('GetMean', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetShape() const [member function] cls.add_method('GetShape', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetBound() const [member function] cls.add_method('GetBound', 'double', [], is_const=True) ## 
random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetValue(double mean, double shape, double bound) [member function] cls.add_method('GetValue', 'double', [param('double', 'mean'), param('double', 'shape'), param('double', 'bound')]) ## random-variable-stream.h (module 'core'): uint32_t ns3::ParetoRandomVariable::GetInteger(uint32_t mean, uint32_t shape, uint32_t bound) [member function] cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'mean'), param('uint32_t', 'shape'), param('uint32_t', 'bound')]) ## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetValue() [member function] cls.add_method('GetValue', 'double', [], is_virtual=True) ## random-variable-stream.h (module 'core'): uint32_t ns3::ParetoRandomVariable::GetInteger() [member function] cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True) return def register_Ns3PointerChecker_methods(root_module, cls): ## pointer.h (module 'core'): ns3::PointerChecker::PointerChecker() [constructor] cls.add_constructor([]) ## pointer.h (module 'core'): ns3::PointerChecker::PointerChecker(ns3::PointerChecker const & arg0) [copy constructor] cls.add_constructor([param('ns3::PointerChecker const &', 'arg0')]) ## pointer.h (module 'core'): ns3::TypeId ns3::PointerChecker::GetPointeeTypeId() const [member function] cls.add_method('GetPointeeTypeId', 'ns3::TypeId', [], is_pure_virtual=True, is_const=True, is_virtual=True) return def register_Ns3PointerValue_methods(root_module, cls): ## pointer.h (module 'core'): ns3::PointerValue::PointerValue(ns3::PointerValue const & arg0) [copy constructor] cls.add_constructor([param('ns3::PointerValue const &', 'arg0')]) ## pointer.h (module 'core'): ns3::PointerValue::PointerValue() [constructor] cls.add_constructor([]) ## pointer.h (module 'core'): ns3::PointerValue::PointerValue(ns3::Ptr<ns3::Object> object) [constructor] cls.add_constructor([param('ns3::Ptr< ns3::Object >', 'object')]) ## pointer.h (module 
'core'): ns3::Ptr<ns3::AttributeValue> ns3::PointerValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## pointer.h (module 'core'): bool ns3::PointerValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## pointer.h (module 'core'): ns3::Ptr<ns3::Object> ns3::PointerValue::GetObject() const [member function] cls.add_method('GetObject', 'ns3::Ptr< ns3::Object >', [], is_const=True) ## pointer.h (module 'core'): std::string ns3::PointerValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## pointer.h (module 'core'): void ns3::PointerValue::SetObject(ns3::Ptr<ns3::Object> object) [member function] cls.add_method('SetObject', 'void', [param('ns3::Ptr< ns3::Object >', 'object')]) return def register_Ns3RealtimeSimulatorImpl_methods(root_module, cls): ## realtime-simulator-impl.h (module 'core'): ns3::RealtimeSimulatorImpl::RealtimeSimulatorImpl(ns3::RealtimeSimulatorImpl const & arg0) [copy constructor] cls.add_constructor([param('ns3::RealtimeSimulatorImpl const &', 'arg0')]) ## realtime-simulator-impl.h (module 'core'): ns3::RealtimeSimulatorImpl::RealtimeSimulatorImpl() [constructor] cls.add_constructor([]) ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::Cancel(ns3::EventId const & ev) [member function] cls.add_method('Cancel', 'void', [param('ns3::EventId const &', 'ev')], is_virtual=True) ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::Destroy() [member function] cls.add_method('Destroy', 'void', [], is_virtual=True) ## 
realtime-simulator-impl.h (module 'core'): uint32_t ns3::RealtimeSimulatorImpl::GetContext() const [member function] cls.add_method('GetContext', 'uint32_t', [], is_const=True, is_virtual=True) ## realtime-simulator-impl.h (module 'core'): ns3::Time ns3::RealtimeSimulatorImpl::GetDelayLeft(ns3::EventId const & id) const [member function] cls.add_method('GetDelayLeft', 'ns3::Time', [param('ns3::EventId const &', 'id')], is_const=True, is_virtual=True) ## realtime-simulator-impl.h (module 'core'): ns3::Time ns3::RealtimeSimulatorImpl::GetHardLimit() const [member function] cls.add_method('GetHardLimit', 'ns3::Time', [], is_const=True) ## realtime-simulator-impl.h (module 'core'): ns3::Time ns3::RealtimeSimulatorImpl::GetMaximumSimulationTime() const [member function] cls.add_method('GetMaximumSimulationTime', 'ns3::Time', [], is_const=True, is_virtual=True) ## realtime-simulator-impl.h (module 'core'): ns3::RealtimeSimulatorImpl::SynchronizationMode ns3::RealtimeSimulatorImpl::GetSynchronizationMode() const [member function] cls.add_method('GetSynchronizationMode', 'ns3::RealtimeSimulatorImpl::SynchronizationMode', [], is_const=True) ## realtime-simulator-impl.h (module 'core'): uint32_t ns3::RealtimeSimulatorImpl::GetSystemId() const [member function] cls.add_method('GetSystemId', 'uint32_t', [], is_const=True, is_virtual=True) ## realtime-simulator-impl.h (module 'core'): static ns3::TypeId ns3::RealtimeSimulatorImpl::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## realtime-simulator-impl.h (module 'core'): bool ns3::RealtimeSimulatorImpl::IsExpired(ns3::EventId const & ev) const [member function] cls.add_method('IsExpired', 'bool', [param('ns3::EventId const &', 'ev')], is_const=True, is_virtual=True) ## realtime-simulator-impl.h (module 'core'): bool ns3::RealtimeSimulatorImpl::IsFinished() const [member function] cls.add_method('IsFinished', 'bool', [], is_const=True, is_virtual=True) ## realtime-simulator-impl.h 
(module 'core'): ns3::Time ns3::RealtimeSimulatorImpl::Now() const [member function] cls.add_method('Now', 'ns3::Time', [], is_const=True, is_virtual=True) ## realtime-simulator-impl.h (module 'core'): ns3::Time ns3::RealtimeSimulatorImpl::RealtimeNow() const [member function] cls.add_method('RealtimeNow', 'ns3::Time', [], is_const=True) ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::Remove(ns3::EventId const & ev) [member function] cls.add_method('Remove', 'void', [param('ns3::EventId const &', 'ev')], is_virtual=True) ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::Run() [member function] cls.add_method('Run', 'void', [], is_virtual=True) ## realtime-simulator-impl.h (module 'core'): ns3::EventId ns3::RealtimeSimulatorImpl::Schedule(ns3::Time const & delay, ns3::EventImpl * event) [member function] cls.add_method('Schedule', 'ns3::EventId', [param('ns3::Time const &', 'delay'), param('ns3::EventImpl *', 'event')], is_virtual=True) ## realtime-simulator-impl.h (module 'core'): ns3::EventId ns3::RealtimeSimulatorImpl::ScheduleDestroy(ns3::EventImpl * event) [member function] cls.add_method('ScheduleDestroy', 'ns3::EventId', [param('ns3::EventImpl *', 'event')], is_virtual=True) ## realtime-simulator-impl.h (module 'core'): ns3::EventId ns3::RealtimeSimulatorImpl::ScheduleNow(ns3::EventImpl * event) [member function] cls.add_method('ScheduleNow', 'ns3::EventId', [param('ns3::EventImpl *', 'event')], is_virtual=True) ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::ScheduleRealtime(ns3::Time const & delay, ns3::EventImpl * event) [member function] cls.add_method('ScheduleRealtime', 'void', [param('ns3::Time const &', 'delay'), param('ns3::EventImpl *', 'event')]) ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::ScheduleRealtimeNow(ns3::EventImpl * event) [member function] cls.add_method('ScheduleRealtimeNow', 'void', [param('ns3::EventImpl *', 
'event')]) ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::ScheduleRealtimeNowWithContext(uint32_t context, ns3::EventImpl * event) [member function] cls.add_method('ScheduleRealtimeNowWithContext', 'void', [param('uint32_t', 'context'), param('ns3::EventImpl *', 'event')]) ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::ScheduleRealtimeWithContext(uint32_t context, ns3::Time const & delay, ns3::EventImpl * event) [member function] cls.add_method('ScheduleRealtimeWithContext', 'void', [param('uint32_t', 'context'), param('ns3::Time const &', 'delay'), param('ns3::EventImpl *', 'event')]) ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::ScheduleWithContext(uint32_t context, ns3::Time const & delay, ns3::EventImpl * event) [member function] cls.add_method('ScheduleWithContext', 'void', [param('uint32_t', 'context'), param('ns3::Time const &', 'delay'), param('ns3::EventImpl *', 'event')], is_virtual=True) ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::SetHardLimit(ns3::Time limit) [member function] cls.add_method('SetHardLimit', 'void', [param('ns3::Time', 'limit')]) ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::SetScheduler(ns3::ObjectFactory schedulerFactory) [member function] cls.add_method('SetScheduler', 'void', [param('ns3::ObjectFactory', 'schedulerFactory')], is_virtual=True) ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::SetSynchronizationMode(ns3::RealtimeSimulatorImpl::SynchronizationMode mode) [member function] cls.add_method('SetSynchronizationMode', 'void', [param('ns3::RealtimeSimulatorImpl::SynchronizationMode', 'mode')]) ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::Stop() [member function] cls.add_method('Stop', 'void', [], is_virtual=True) ## realtime-simulator-impl.h (module 'core'): void 
ns3::RealtimeSimulatorImpl::Stop(ns3::Time const & delay) [member function] cls.add_method('Stop', 'void', [param('ns3::Time const &', 'delay')], is_virtual=True) ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::DoDispose() [member function] cls.add_method('DoDispose', 'void', [], visibility='private', is_virtual=True) return def register_Ns3RefCountBase_methods(root_module, cls): ## ref-count-base.h (module 'core'): ns3::RefCountBase::RefCountBase() [constructor] cls.add_constructor([]) ## ref-count-base.h (module 'core'): ns3::RefCountBase::RefCountBase(ns3::RefCountBase const & arg0) [copy constructor] cls.add_constructor([param('ns3::RefCountBase const &', 'arg0')]) return def register_Ns3StringChecker_methods(root_module, cls): ## string.h (module 'core'): ns3::StringChecker::StringChecker() [constructor] cls.add_constructor([]) ## string.h (module 'core'): ns3::StringChecker::StringChecker(ns3::StringChecker const & arg0) [copy constructor] cls.add_constructor([param('ns3::StringChecker const &', 'arg0')]) return def register_Ns3StringValue_methods(root_module, cls): ## string.h (module 'core'): ns3::StringValue::StringValue() [constructor] cls.add_constructor([]) ## string.h (module 'core'): ns3::StringValue::StringValue(ns3::StringValue const & arg0) [copy constructor] cls.add_constructor([param('ns3::StringValue const &', 'arg0')]) ## string.h (module 'core'): ns3::StringValue::StringValue(std::string const & value) [constructor] cls.add_constructor([param('std::string const &', 'value')]) ## string.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::StringValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## string.h (module 'core'): bool ns3::StringValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), 
param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## string.h (module 'core'): std::string ns3::StringValue::Get() const [member function] cls.add_method('Get', 'std::string', [], is_const=True) ## string.h (module 'core'): std::string ns3::StringValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## string.h (module 'core'): void ns3::StringValue::Set(std::string const & value) [member function] cls.add_method('Set', 'void', [param('std::string const &', 'value')]) return def register_Ns3TimeValue_methods(root_module, cls): ## nstime.h (module 'core'): ns3::TimeValue::TimeValue() [constructor] cls.add_constructor([]) ## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::TimeValue const & arg0) [copy constructor] cls.add_constructor([param('ns3::TimeValue const &', 'arg0')]) ## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::Time const & value) [constructor] cls.add_constructor([param('ns3::Time const &', 'value')]) ## nstime.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TimeValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## nstime.h (module 'core'): bool ns3::TimeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## nstime.h (module 'core'): ns3::Time ns3::TimeValue::Get() const [member function] cls.add_method('Get', 'ns3::Time', [], is_const=True) ## nstime.h (module 'core'): std::string ns3::TimeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 
'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## nstime.h (module 'core'): void ns3::TimeValue::Set(ns3::Time const & value) [member function] cls.add_method('Set', 'void', [param('ns3::Time const &', 'value')]) return def register_Ns3TypeIdChecker_methods(root_module, cls): ## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker() [constructor] cls.add_constructor([]) ## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker(ns3::TypeIdChecker const & arg0) [copy constructor] cls.add_constructor([param('ns3::TypeIdChecker const &', 'arg0')]) return def register_Ns3TypeIdValue_methods(root_module, cls): ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue() [constructor] cls.add_constructor([]) ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeIdValue const & arg0) [copy constructor] cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')]) ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeId const & value) [constructor] cls.add_constructor([param('ns3::TypeId const &', 'value')]) ## type-id.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TypeIdValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## type-id.h (module 'core'): bool ns3::TypeIdValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeIdValue::Get() const [member function] cls.add_method('Get', 'ns3::TypeId', [], is_const=True) ## type-id.h (module 'core'): std::string ns3::TypeIdValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', 
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## type-id.h (module 'core'): void ns3::TypeIdValue::Set(ns3::TypeId const & value) [member function] cls.add_method('Set', 'void', [param('ns3::TypeId const &', 'value')]) return def register_Ns3UintegerValue_methods(root_module, cls): ## uinteger.h (module 'core'): ns3::UintegerValue::UintegerValue() [constructor] cls.add_constructor([]) ## uinteger.h (module 'core'): ns3::UintegerValue::UintegerValue(ns3::UintegerValue const & arg0) [copy constructor] cls.add_constructor([param('ns3::UintegerValue const &', 'arg0')]) ## uinteger.h (module 'core'): ns3::UintegerValue::UintegerValue(uint64_t const & value) [constructor] cls.add_constructor([param('uint64_t const &', 'value')]) ## uinteger.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::UintegerValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## uinteger.h (module 'core'): bool ns3::UintegerValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## uinteger.h (module 'core'): uint64_t ns3::UintegerValue::Get() const [member function] cls.add_method('Get', 'uint64_t', [], is_const=True) ## uinteger.h (module 'core'): std::string ns3::UintegerValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## uinteger.h (module 'core'): void ns3::UintegerValue::Set(uint64_t const & value) [member function] cls.add_method('Set', 'void', [param('uint64_t const &', 'value')]) return def register_Ns3Vector2DChecker_methods(root_module, cls): ## vector.h 
(module 'core'): ns3::Vector2DChecker::Vector2DChecker() [constructor] cls.add_constructor([]) ## vector.h (module 'core'): ns3::Vector2DChecker::Vector2DChecker(ns3::Vector2DChecker const & arg0) [copy constructor] cls.add_constructor([param('ns3::Vector2DChecker const &', 'arg0')]) return def register_Ns3Vector2DValue_methods(root_module, cls): ## vector.h (module 'core'): ns3::Vector2DValue::Vector2DValue() [constructor] cls.add_constructor([]) ## vector.h (module 'core'): ns3::Vector2DValue::Vector2DValue(ns3::Vector2DValue const & arg0) [copy constructor] cls.add_constructor([param('ns3::Vector2DValue const &', 'arg0')]) ## vector.h (module 'core'): ns3::Vector2DValue::Vector2DValue(ns3::Vector2D const & value) [constructor] cls.add_constructor([param('ns3::Vector2D const &', 'value')]) ## vector.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::Vector2DValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## vector.h (module 'core'): bool ns3::Vector2DValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## vector.h (module 'core'): ns3::Vector2D ns3::Vector2DValue::Get() const [member function] cls.add_method('Get', 'ns3::Vector2D', [], is_const=True) ## vector.h (module 'core'): std::string ns3::Vector2DValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## vector.h (module 'core'): void ns3::Vector2DValue::Set(ns3::Vector2D const & value) [member function] cls.add_method('Set', 'void', [param('ns3::Vector2D const &', 'value')]) return def 
register_Ns3Vector3DChecker_methods(root_module, cls): ## vector.h (module 'core'): ns3::Vector3DChecker::Vector3DChecker() [constructor] cls.add_constructor([]) ## vector.h (module 'core'): ns3::Vector3DChecker::Vector3DChecker(ns3::Vector3DChecker const & arg0) [copy constructor] cls.add_constructor([param('ns3::Vector3DChecker const &', 'arg0')]) return def register_Ns3Vector3DValue_methods(root_module, cls): ## vector.h (module 'core'): ns3::Vector3DValue::Vector3DValue() [constructor] cls.add_constructor([]) ## vector.h (module 'core'): ns3::Vector3DValue::Vector3DValue(ns3::Vector3DValue const & arg0) [copy constructor] cls.add_constructor([param('ns3::Vector3DValue const &', 'arg0')]) ## vector.h (module 'core'): ns3::Vector3DValue::Vector3DValue(ns3::Vector3D const & value) [constructor] cls.add_constructor([param('ns3::Vector3D const &', 'value')]) ## vector.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::Vector3DValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## vector.h (module 'core'): bool ns3::Vector3DValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## vector.h (module 'core'): ns3::Vector3D ns3::Vector3DValue::Get() const [member function] cls.add_method('Get', 'ns3::Vector3D', [], is_const=True) ## vector.h (module 'core'): std::string ns3::Vector3DValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## vector.h (module 'core'): void ns3::Vector3DValue::Set(ns3::Vector3D const & value) [member function] cls.add_method('Set', 'void', [param('ns3::Vector3D 
const &', 'value')]) return def register_Ns3ConfigMatchContainer_methods(root_module, cls): ## config.h (module 'core'): ns3::Config::MatchContainer::MatchContainer(ns3::Config::MatchContainer const & arg0) [copy constructor] cls.add_constructor([param('ns3::Config::MatchContainer const &', 'arg0')]) ## config.h (module 'core'): ns3::Config::MatchContainer::MatchContainer() [constructor] cls.add_constructor([]) ## config.h (module 'core'): ns3::Config::MatchContainer::MatchContainer(std::vector<ns3::Ptr<ns3::Object>, std::allocator<ns3::Ptr<ns3::Object> > > const & objects, std::vector<std::string, std::allocator<std::string> > const & contexts, std::string path) [constructor] cls.add_constructor([param('std::vector< ns3::Ptr< ns3::Object > > const &', 'objects'), param('std::vector< std::string > const &', 'contexts'), param('std::string', 'path')]) ## config.h (module 'core'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::Object>*,std::vector<ns3::Ptr<ns3::Object>, std::allocator<ns3::Ptr<ns3::Object> > > > ns3::Config::MatchContainer::Begin() const [member function] cls.add_method('Begin', '__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Object > const, std::vector< ns3::Ptr< ns3::Object > > >', [], is_const=True) ## config.h (module 'core'): void ns3::Config::MatchContainer::Connect(std::string name, ns3::CallbackBase const & cb) [member function] cls.add_method('Connect', 'void', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')]) ## config.h (module 'core'): void ns3::Config::MatchContainer::ConnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function] cls.add_method('ConnectWithoutContext', 'void', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')]) ## config.h (module 'core'): void ns3::Config::MatchContainer::Disconnect(std::string name, ns3::CallbackBase const & cb) [member function] cls.add_method('Disconnect', 'void', [param('std::string', 'name'), param('ns3::CallbackBase const &', 
'cb')]) ## config.h (module 'core'): void ns3::Config::MatchContainer::DisconnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function] cls.add_method('DisconnectWithoutContext', 'void', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')]) ## config.h (module 'core'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::Object>*,std::vector<ns3::Ptr<ns3::Object>, std::allocator<ns3::Ptr<ns3::Object> > > > ns3::Config::MatchContainer::End() const [member function] cls.add_method('End', '__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Object > const, std::vector< ns3::Ptr< ns3::Object > > >', [], is_const=True) ## config.h (module 'core'): ns3::Ptr<ns3::Object> ns3::Config::MatchContainer::Get(uint32_t i) const [member function] cls.add_method('Get', 'ns3::Ptr< ns3::Object >', [param('uint32_t', 'i')], is_const=True) ## config.h (module 'core'): std::string ns3::Config::MatchContainer::GetMatchedPath(uint32_t i) const [member function] cls.add_method('GetMatchedPath', 'std::string', [param('uint32_t', 'i')], is_const=True) ## config.h (module 'core'): uint32_t ns3::Config::MatchContainer::GetN() const [member function] cls.add_method('GetN', 'uint32_t', [], is_const=True) ## config.h (module 'core'): std::string ns3::Config::MatchContainer::GetPath() const [member function] cls.add_method('GetPath', 'std::string', [], is_const=True) ## config.h (module 'core'): void ns3::Config::MatchContainer::Set(std::string name, ns3::AttributeValue const & value) [member function] cls.add_method('Set', 'void', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')]) return def register_Ns3HashImplementation_methods(root_module, cls): ## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation(ns3::Hash::Implementation const & arg0) [copy constructor] cls.add_constructor([param('ns3::Hash::Implementation const &', 'arg0')]) ## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation() 
[constructor] cls.add_constructor([]) ## hash-function.h (module 'core'): uint32_t ns3::Hash::Implementation::GetHash32(char const * buffer, size_t const size) [member function] cls.add_method('GetHash32', 'uint32_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_pure_virtual=True, is_virtual=True) ## hash-function.h (module 'core'): uint64_t ns3::Hash::Implementation::GetHash64(char const * buffer, size_t const size) [member function] cls.add_method('GetHash64', 'uint64_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-function.h (module 'core'): void ns3::Hash::Implementation::clear() [member function] cls.add_method('clear', 'void', [], is_pure_virtual=True, is_virtual=True) return def register_Ns3HashFunctionFnv1a_methods(root_module, cls): ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a(ns3::Hash::Function::Fnv1a const & arg0) [copy constructor] cls.add_constructor([param('ns3::Hash::Function::Fnv1a const &', 'arg0')]) ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a() [constructor] cls.add_constructor([]) ## hash-fnv.h (module 'core'): uint32_t ns3::Hash::Function::Fnv1a::GetHash32(char const * buffer, size_t const size) [member function] cls.add_method('GetHash32', 'uint32_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-fnv.h (module 'core'): uint64_t ns3::Hash::Function::Fnv1a::GetHash64(char const * buffer, size_t const size) [member function] cls.add_method('GetHash64', 'uint64_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-fnv.h (module 'core'): void ns3::Hash::Function::Fnv1a::clear() [member function] cls.add_method('clear', 'void', [], is_virtual=True) return def register_Ns3HashFunctionHash32_methods(root_module, cls): ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Function::Hash32 const & arg0) [copy constructor] 
cls.add_constructor([param('ns3::Hash::Function::Hash32 const &', 'arg0')]) ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Hash32Function_ptr hp) [constructor] cls.add_constructor([param('ns3::Hash::Hash32Function_ptr', 'hp')]) ## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash32::GetHash32(char const * buffer, size_t const size) [member function] cls.add_method('GetHash32', 'uint32_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-function.h (module 'core'): void ns3::Hash::Function::Hash32::clear() [member function] cls.add_method('clear', 'void', [], is_virtual=True) return def register_Ns3HashFunctionHash64_methods(root_module, cls): ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Function::Hash64 const & arg0) [copy constructor] cls.add_constructor([param('ns3::Hash::Function::Hash64 const &', 'arg0')]) ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Hash64Function_ptr hp) [constructor] cls.add_constructor([param('ns3::Hash::Hash64Function_ptr', 'hp')]) ## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash64::GetHash32(char const * buffer, size_t const size) [member function] cls.add_method('GetHash32', 'uint32_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-function.h (module 'core'): uint64_t ns3::Hash::Function::Hash64::GetHash64(char const * buffer, size_t const size) [member function] cls.add_method('GetHash64', 'uint64_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-function.h (module 'core'): void ns3::Hash::Function::Hash64::clear() [member function] cls.add_method('clear', 'void', [], is_virtual=True) return def register_Ns3HashFunctionMurmur3_methods(root_module, cls): ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3(ns3::Hash::Function::Murmur3 
const & arg0) [copy constructor] cls.add_constructor([param('ns3::Hash::Function::Murmur3 const &', 'arg0')]) ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3() [constructor] cls.add_constructor([]) ## hash-murmur3.h (module 'core'): uint32_t ns3::Hash::Function::Murmur3::GetHash32(char const * buffer, size_t const size) [member function] cls.add_method('GetHash32', 'uint32_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-murmur3.h (module 'core'): uint64_t ns3::Hash::Function::Murmur3::GetHash64(char const * buffer, size_t const size) [member function] cls.add_method('GetHash64', 'uint64_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-murmur3.h (module 'core'): void ns3::Hash::Function::Murmur3::clear() [member function] cls.add_method('clear', 'void', [], is_virtual=True) return def register_functions(root_module): module = root_module ## nstime.h (module 'core'): ns3::Time ns3::Abs(ns3::Time const & time) [free function] module.add_function('Abs', 'ns3::Time', [param('ns3::Time const &', 'time')]) ## int64x64.h (module 'core'): ns3::int64x64_t ns3::Abs(ns3::int64x64_t const & value) [free function] module.add_function('Abs', 'ns3::int64x64_t', [param('ns3::int64x64_t const &', 'value')]) ## breakpoint.h (module 'core'): extern void ns3::BreakpointFallback() [free function] module.add_function('BreakpointFallback', 'void', []) ## vector.h (module 'core'): extern double ns3::CalculateDistance(ns3::Vector2D const & a, ns3::Vector2D const & b) [free function] module.add_function('CalculateDistance', 'double', [param('ns3::Vector2D const &', 'a'), param('ns3::Vector2D const &', 'b')]) ## vector.h (module 'core'): extern double ns3::CalculateDistance(ns3::Vector3D const & a, ns3::Vector3D const & b) [free function] module.add_function('CalculateDistance', 'double', [param('ns3::Vector3D const &', 'a'), param('ns3::Vector3D const &', 'b')]) ## ptr.h 
(module 'core'): extern ns3::Ptr<ns3::ObjectPtrContainerValue> ns3::Create() [free function] module.add_function('Create', 'ns3::Ptr< ns3::ObjectPtrContainerValue >', [], template_parameters=['ns3::ObjectPtrContainerValue']) ## ptr.h (module 'core'): extern ns3::Ptr<ns3::PointerValue> ns3::Create() [free function] module.add_function('Create', 'ns3::Ptr< ns3::PointerValue >', [], template_parameters=['ns3::PointerValue']) ## nstime.h (module 'core'): ns3::Time ns3::Days(ns3::int64x64_t value) [free function] module.add_function('Days', 'ns3::Time', [param('ns3::int64x64_t', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::Days(double value) [free function] module.add_function('Days', 'ns3::Time', [param('double', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::FemtoSeconds(ns3::int64x64_t value) [free function] module.add_function('FemtoSeconds', 'ns3::Time', [param('ns3::int64x64_t', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::FemtoSeconds(uint64_t value) [free function] module.add_function('FemtoSeconds', 'ns3::Time', [param('uint64_t', 'value')]) ## hash.h (module 'core'): uint32_t ns3::Hash32(std::string const s) [free function] module.add_function('Hash32', 'uint32_t', [param('std::string const', 's')]) ## hash.h (module 'core'): uint32_t ns3::Hash32(char const * buffer, size_t const size) [free function] module.add_function('Hash32', 'uint32_t', [param('char const *', 'buffer'), param('size_t const', 'size')]) ## hash.h (module 'core'): uint64_t ns3::Hash64(std::string const s) [free function] module.add_function('Hash64', 'uint64_t', [param('std::string const', 's')]) ## hash.h (module 'core'): uint64_t ns3::Hash64(char const * buffer, size_t const size) [free function] module.add_function('Hash64', 'uint64_t', [param('char const *', 'buffer'), param('size_t const', 'size')]) ## nstime.h (module 'core'): ns3::Time ns3::Hours(ns3::int64x64_t value) [free function] module.add_function('Hours', 'ns3::Time', [param('ns3::int64x64_t', 
'value')]) ## nstime.h (module 'core'): ns3::Time ns3::Hours(double value) [free function] module.add_function('Hours', 'ns3::Time', [param('double', 'value')]) ## log.h (module 'core'): extern void ns3::LogComponentDisable(char const * name, ns3::LogLevel level) [free function] module.add_function('LogComponentDisable', 'void', [param('char const *', 'name'), param('ns3::LogLevel', 'level')]) ## log.h (module 'core'): extern void ns3::LogComponentDisableAll(ns3::LogLevel level) [free function] module.add_function('LogComponentDisableAll', 'void', [param('ns3::LogLevel', 'level')]) ## log.h (module 'core'): extern void ns3::LogComponentEnable(char const * name, ns3::LogLevel level) [free function] module.add_function('LogComponentEnable', 'void', [param('char const *', 'name'), param('ns3::LogLevel', 'level')]) ## log.h (module 'core'): extern void ns3::LogComponentEnableAll(ns3::LogLevel level) [free function] module.add_function('LogComponentEnableAll', 'void', [param('ns3::LogLevel', 'level')]) ## log.h (module 'core'): extern void ns3::LogComponentPrintList() [free function] module.add_function('LogComponentPrintList', 'void', []) ## log.h (module 'core'): extern ns3::LogNodePrinter ns3::LogGetNodePrinter() [free function] module.add_function('LogGetNodePrinter', 'ns3::LogNodePrinter', []) ## log.h (module 'core'): extern ns3::LogTimePrinter ns3::LogGetTimePrinter() [free function] module.add_function('LogGetTimePrinter', 'ns3::LogTimePrinter', []) ## log.h (module 'core'): extern void ns3::LogSetNodePrinter(ns3::LogNodePrinter np) [free function] module.add_function('LogSetNodePrinter', 'void', [param('ns3::LogNodePrinter', 'np')]) ## log.h (module 'core'): extern void ns3::LogSetTimePrinter(ns3::LogTimePrinter lp) [free function] module.add_function('LogSetTimePrinter', 'void', [param('ns3::LogTimePrinter', 'lp')]) ## boolean.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeBooleanChecker() [free function] 
module.add_function('MakeBooleanChecker', 'ns3::Ptr< ns3::AttributeChecker const >', []) ## callback.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeCallbackChecker() [free function] module.add_function('MakeCallbackChecker', 'ns3::Ptr< ns3::AttributeChecker const >', []) ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeAccessor const> ns3::MakeEmptyAttributeAccessor() [free function] module.add_function('MakeEmptyAttributeAccessor', 'ns3::Ptr< ns3::AttributeAccessor const >', []) ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeChecker> ns3::MakeEmptyAttributeChecker() [free function] module.add_function('MakeEmptyAttributeChecker', 'ns3::Ptr< ns3::AttributeChecker >', []) ## trace-source-accessor.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::MakeEmptyTraceSourceAccessor() [free function] module.add_function('MakeEmptyTraceSourceAccessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', []) ## enum.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeEnumChecker(int v1, std::string n1, int v2=0, std::string n2="", int v3=0, std::string n3="", int v4=0, std::string n4="", int v5=0, std::string n5="", int v6=0, std::string n6="", int v7=0, std::string n7="", int v8=0, std::string n8="", int v9=0, std::string n9="", int v10=0, std::string n10="", int v11=0, std::string n11="", int v12=0, std::string n12="", int v13=0, std::string n13="", int v14=0, std::string n14="", int v15=0, std::string n15="", int v16=0, std::string n16="", int v17=0, std::string n17="", int v18=0, std::string n18="", int v19=0, std::string n19="", int v20=0, std::string n20="", int v21=0, std::string n21="", int v22=0, std::string n22="") [free function] module.add_function('MakeEnumChecker', 'ns3::Ptr< ns3::AttributeChecker const >', [param('int', 'v1'), param('std::string', 'n1'), param('int', 'v2', default_value='0'), param('std::string', 'n2', default_value='""'), param('int', 'v3', default_value='0'), 
param('std::string', 'n3', default_value='""'), param('int', 'v4', default_value='0'), param('std::string', 'n4', default_value='""'), param('int', 'v5', default_value='0'), param('std::string', 'n5', default_value='""'), param('int', 'v6', default_value='0'), param('std::string', 'n6', default_value='""'), param('int', 'v7', default_value='0'), param('std::string', 'n7', default_value='""'), param('int', 'v8', default_value='0'), param('std::string', 'n8', default_value='""'), param('int', 'v9', default_value='0'), param('std::string', 'n9', default_value='""'), param('int', 'v10', default_value='0'), param('std::string', 'n10', default_value='""'), param('int', 'v11', default_value='0'), param('std::string', 'n11', default_value='""'), param('int', 'v12', default_value='0'), param('std::string', 'n12', default_value='""'), param('int', 'v13', default_value='0'), param('std::string', 'n13', default_value='""'), param('int', 'v14', default_value='0'), param('std::string', 'n14', default_value='""'), param('int', 'v15', default_value='0'), param('std::string', 'n15', default_value='""'), param('int', 'v16', default_value='0'), param('std::string', 'n16', default_value='""'), param('int', 'v17', default_value='0'), param('std::string', 'n17', default_value='""'), param('int', 'v18', default_value='0'), param('std::string', 'n18', default_value='""'), param('int', 'v19', default_value='0'), param('std::string', 'n19', default_value='""'), param('int', 'v20', default_value='0'), param('std::string', 'n20', default_value='""'), param('int', 'v21', default_value='0'), param('std::string', 'n21', default_value='""'), param('int', 'v22', default_value='0'), param('std::string', 'n22', default_value='""')]) ## make-event.h (module 'core'): extern ns3::EventImpl * ns3::MakeEvent(void (*)( ) * f) [free function] module.add_function('MakeEvent', 'ns3::EventImpl *', [param('void ( * ) ( ) *', 'f')]) ## object-factory.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker 
const> ns3::MakeObjectFactoryChecker() [free function] module.add_function('MakeObjectFactoryChecker', 'ns3::Ptr< ns3::AttributeChecker const >', []) ## string.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeStringChecker() [free function] module.add_function('MakeStringChecker', 'ns3::Ptr< ns3::AttributeChecker const >', []) ## nstime.h (module 'core'): ns3::Ptr<ns3::AttributeChecker const> ns3::MakeTimeChecker() [free function] module.add_function('MakeTimeChecker', 'ns3::Ptr< ns3::AttributeChecker const >', []) ## nstime.h (module 'core'): ns3::Ptr<ns3::AttributeChecker const> ns3::MakeTimeChecker(ns3::Time const min) [free function] module.add_function('MakeTimeChecker', 'ns3::Ptr< ns3::AttributeChecker const >', [param('ns3::Time const', 'min')]) ## nstime.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeTimeChecker(ns3::Time const min, ns3::Time const max) [free function] module.add_function('MakeTimeChecker', 'ns3::Ptr< ns3::AttributeChecker const >', [param('ns3::Time const', 'min'), param('ns3::Time const', 'max')]) ## type-id.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeTypeIdChecker() [free function] module.add_function('MakeTypeIdChecker', 'ns3::Ptr< ns3::AttributeChecker const >', []) ## vector.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeVector2DChecker() [free function] module.add_function('MakeVector2DChecker', 'ns3::Ptr< ns3::AttributeChecker const >', []) ## vector.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeVector3DChecker() [free function] module.add_function('MakeVector3DChecker', 'ns3::Ptr< ns3::AttributeChecker const >', []) ## vector.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeVectorChecker() [free function] module.add_function('MakeVectorChecker', 'ns3::Ptr< ns3::AttributeChecker const >', []) ## nstime.h (module 'core'): ns3::Time ns3::Max(ns3::Time const & ta, ns3::Time const & 
tb) [free function] module.add_function('Max', 'ns3::Time', [param('ns3::Time const &', 'ta'), param('ns3::Time const &', 'tb')]) ## int64x64.h (module 'core'): ns3::int64x64_t ns3::Max(ns3::int64x64_t const & a, ns3::int64x64_t const & b) [free function] module.add_function('Max', 'ns3::int64x64_t', [param('ns3::int64x64_t const &', 'a'), param('ns3::int64x64_t const &', 'b')]) ## nstime.h (module 'core'): ns3::Time ns3::MicroSeconds(ns3::int64x64_t value) [free function] module.add_function('MicroSeconds', 'ns3::Time', [param('ns3::int64x64_t', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::MicroSeconds(uint64_t value) [free function] module.add_function('MicroSeconds', 'ns3::Time', [param('uint64_t', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::MilliSeconds(ns3::int64x64_t value) [free function] module.add_function('MilliSeconds', 'ns3::Time', [param('ns3::int64x64_t', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::MilliSeconds(uint64_t value) [free function] module.add_function('MilliSeconds', 'ns3::Time', [param('uint64_t', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::Min(ns3::Time const & ta, ns3::Time const & tb) [free function] module.add_function('Min', 'ns3::Time', [param('ns3::Time const &', 'ta'), param('ns3::Time const &', 'tb')]) ## int64x64.h (module 'core'): ns3::int64x64_t ns3::Min(ns3::int64x64_t const & a, ns3::int64x64_t const & b) [free function] module.add_function('Min', 'ns3::int64x64_t', [param('ns3::int64x64_t const &', 'a'), param('ns3::int64x64_t const &', 'b')]) ## nstime.h (module 'core'): ns3::Time ns3::Minutes(ns3::int64x64_t value) [free function] module.add_function('Minutes', 'ns3::Time', [param('ns3::int64x64_t', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::Minutes(double value) [free function] module.add_function('Minutes', 'ns3::Time', [param('double', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::NanoSeconds(ns3::int64x64_t value) [free function] 
module.add_function('NanoSeconds', 'ns3::Time', [param('ns3::int64x64_t', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::NanoSeconds(uint64_t value) [free function] module.add_function('NanoSeconds', 'ns3::Time', [param('uint64_t', 'value')]) ## simulator.h (module 'core'): extern ns3::Time ns3::Now() [free function] module.add_function('Now', 'ns3::Time', []) ## nstime.h (module 'core'): ns3::Time ns3::PicoSeconds(ns3::int64x64_t value) [free function] module.add_function('PicoSeconds', 'ns3::Time', [param('ns3::int64x64_t', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::PicoSeconds(uint64_t value) [free function] module.add_function('PicoSeconds', 'ns3::Time', [param('uint64_t', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::Seconds(ns3::int64x64_t value) [free function] module.add_function('Seconds', 'ns3::Time', [param('ns3::int64x64_t', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::Seconds(double value) [free function] module.add_function('Seconds', 'ns3::Time', [param('double', 'value')]) ## test.h (module 'core'): extern bool ns3::TestDoubleIsEqual(double const a, double const b, double const epsilon=std::numeric_limits<double>::epsilon()) [free function] module.add_function('TestDoubleIsEqual', 'bool', [param('double const', 'a'), param('double const', 'b'), param('double const', 'epsilon', default_value='std::numeric_limits<double>::epsilon()')]) ## nstime.h (module 'core'): ns3::Time ns3::TimeStep(uint64_t ts) [free function] module.add_function('TimeStep', 'ns3::Time', [param('uint64_t', 'ts')]) ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function] module.add_function('TypeNameGet', 'std::string', [], template_parameters=['double']) ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function] module.add_function('TypeNameGet', 'std::string', [], template_parameters=['float']) ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function] 
module.add_function('TypeNameGet', 'std::string', [], template_parameters=['unsigned long long']) ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function] module.add_function('TypeNameGet', 'std::string', [], template_parameters=['unsigned int']) ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function] module.add_function('TypeNameGet', 'std::string', [], template_parameters=['unsigned short']) ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function] module.add_function('TypeNameGet', 'std::string', [], template_parameters=['unsigned char']) ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function] module.add_function('TypeNameGet', 'std::string', [], template_parameters=['long']) ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function] module.add_function('TypeNameGet', 'std::string', [], template_parameters=['int']) ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function] module.add_function('TypeNameGet', 'std::string', [], template_parameters=['short']) ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function] module.add_function('TypeNameGet', 'std::string', [], template_parameters=['signed char']) ## nstime.h (module 'core'): ns3::Time ns3::Years(ns3::int64x64_t value) [free function] module.add_function('Years', 'ns3::Time', [param('ns3::int64x64_t', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::Years(double value) [free function] module.add_function('Years', 'ns3::Time', [param('double', 'value')]) register_functions_ns3_CommandLineHelper(module.get_submodule('CommandLineHelper'), root_module) register_functions_ns3_Config(module.get_submodule('Config'), root_module) register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module) register_functions_ns3_Hash(module.get_submodule('Hash'), root_module) 
register_functions_ns3_SystemPath(module.get_submodule('SystemPath'), root_module) register_functions_ns3_TracedValueCallback(module.get_submodule('TracedValueCallback'), root_module) register_functions_ns3_internal(module.get_submodule('internal'), root_module) return def register_functions_ns3_CommandLineHelper(module, root_module): ## command-line.h (module 'core'): extern std::string ns3::CommandLineHelper::GetDefault(bool const & val) [free function] module.add_function('GetDefault', 'std::string', [param('bool const &', 'val')], template_parameters=['bool']) ## command-line.h (module 'core'): extern bool ns3::CommandLineHelper::UserItemParse(std::string const value, bool & val) [free function] module.add_function('UserItemParse', 'bool', [param('std::string const', 'value'), param('bool &', 'val')], template_parameters=['bool']) return def register_functions_ns3_Config(module, root_module): ## config.h (module 'core'): extern void ns3::Config::Connect(std::string path, ns3::CallbackBase const & cb) [free function] module.add_function('Connect', 'void', [param('std::string', 'path'), param('ns3::CallbackBase const &', 'cb')]) ## config.h (module 'core'): extern void ns3::Config::ConnectWithoutContext(std::string path, ns3::CallbackBase const & cb) [free function] module.add_function('ConnectWithoutContext', 'void', [param('std::string', 'path'), param('ns3::CallbackBase const &', 'cb')]) ## config.h (module 'core'): extern void ns3::Config::Disconnect(std::string path, ns3::CallbackBase const & cb) [free function] module.add_function('Disconnect', 'void', [param('std::string', 'path'), param('ns3::CallbackBase const &', 'cb')]) ## config.h (module 'core'): extern void ns3::Config::DisconnectWithoutContext(std::string path, ns3::CallbackBase const & cb) [free function] module.add_function('DisconnectWithoutContext', 'void', [param('std::string', 'path'), param('ns3::CallbackBase const &', 'cb')]) ## config.h (module 'core'): extern ns3::Ptr<ns3::Object> 
ns3::Config::GetRootNamespaceObject(uint32_t i) [free function] module.add_function('GetRootNamespaceObject', 'ns3::Ptr< ns3::Object >', [param('uint32_t', 'i')]) ## config.h (module 'core'): extern uint32_t ns3::Config::GetRootNamespaceObjectN() [free function] module.add_function('GetRootNamespaceObjectN', 'uint32_t', []) ## config.h (module 'core'): extern ns3::Config::MatchContainer ns3::Config::LookupMatches(std::string path) [free function] module.add_function('LookupMatches', 'ns3::Config::MatchContainer', [param('std::string', 'path')]) ## config.h (module 'core'): extern void ns3::Config::RegisterRootNamespaceObject(ns3::Ptr<ns3::Object> obj) [free function] module.add_function('RegisterRootNamespaceObject', 'void', [param('ns3::Ptr< ns3::Object >', 'obj')]) ## config.h (module 'core'): extern void ns3::Config::Reset() [free function] module.add_function('Reset', 'void', []) ## config.h (module 'core'): extern void ns3::Config::Set(std::string path, ns3::AttributeValue const & value) [free function] module.add_function('Set', 'void', [param('std::string', 'path'), param('ns3::AttributeValue const &', 'value')]) ## config.h (module 'core'): extern void ns3::Config::SetDefault(std::string name, ns3::AttributeValue const & value) [free function] module.add_function('SetDefault', 'void', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')]) ## config.h (module 'core'): extern bool ns3::Config::SetDefaultFailSafe(std::string name, ns3::AttributeValue const & value) [free function] module.add_function('SetDefaultFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')]) ## config.h (module 'core'): extern void ns3::Config::SetGlobal(std::string name, ns3::AttributeValue const & value) [free function] module.add_function('SetGlobal', 'void', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')]) ## config.h (module 'core'): extern bool 
ns3::Config::SetGlobalFailSafe(std::string name, ns3::AttributeValue const & value) [free function] module.add_function('SetGlobalFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')]) ## config.h (module 'core'): extern void ns3::Config::UnregisterRootNamespaceObject(ns3::Ptr<ns3::Object> obj) [free function] module.add_function('UnregisterRootNamespaceObject', 'void', [param('ns3::Ptr< ns3::Object >', 'obj')]) return def register_functions_ns3_FatalImpl(module, root_module): ## fatal-impl.h (module 'core'): extern void ns3::FatalImpl::FlushStreams() [free function] module.add_function('FlushStreams', 'void', []) ## fatal-impl.h (module 'core'): extern void ns3::FatalImpl::RegisterStream(std::ostream * stream) [free function] module.add_function('RegisterStream', 'void', [param('std::ostream *', 'stream')]) ## fatal-impl.h (module 'core'): extern void ns3::FatalImpl::UnregisterStream(std::ostream * stream) [free function] module.add_function('UnregisterStream', 'void', [param('std::ostream *', 'stream')]) return def register_functions_ns3_Hash(module, root_module): register_functions_ns3_Hash_Function(module.get_submodule('Function'), root_module) return def register_functions_ns3_Hash_Function(module, root_module): return def register_functions_ns3_SystemPath(module, root_module): ## system-path.h (module 'core'): extern std::string ns3::SystemPath::Append(std::string left, std::string right) [free function] module.add_function('Append', 'std::string', [param('std::string', 'left'), param('std::string', 'right')]) ## system-path.h (module 'core'): extern std::string ns3::SystemPath::FindSelfDirectory() [free function] module.add_function('FindSelfDirectory', 'std::string', []) ## system-path.h (module 'core'): extern std::string ns3::SystemPath::Join(std::_List_const_iterator<std::basic_string<char, std::char_traits<char>, std::allocator<char> > > begin, std::_List_const_iterator<std::basic_string<char, 
std::char_traits<char>, std::allocator<char> > > end) [free function] module.add_function('Join', 'std::string', [param('std::_List_const_iterator< std::basic_string< char, std::char_traits< char >, std::allocator< char > > >', 'begin'), param('std::_List_const_iterator< std::basic_string< char, std::char_traits< char >, std::allocator< char > > >', 'end')]) ## system-path.h (module 'core'): extern void ns3::SystemPath::MakeDirectories(std::string path) [free function] module.add_function('MakeDirectories', 'void', [param('std::string', 'path')]) ## system-path.h (module 'core'): extern std::string ns3::SystemPath::MakeTemporaryDirectoryName() [free function] module.add_function('MakeTemporaryDirectoryName', 'std::string', []) ## system-path.h (module 'core'): extern std::list<std::string, std::allocator<std::string> > ns3::SystemPath::ReadFiles(std::string path) [free function] module.add_function('ReadFiles', 'std::list< std::string >', [param('std::string', 'path')]) ## system-path.h (module 'core'): extern std::list<std::string, std::allocator<std::string> > ns3::SystemPath::Split(std::string path) [free function] module.add_function('Split', 'std::list< std::string >', [param('std::string', 'path')]) return def register_functions_ns3_TracedValueCallback(module, root_module): return def register_functions_ns3_internal(module, root_module): ## double.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::internal::MakeDoubleChecker(double min, double max, std::string name) [free function] module.add_function('MakeDoubleChecker', 'ns3::Ptr< ns3::AttributeChecker const >', [param('double', 'min'), param('double', 'max'), param('std::string', 'name')]) ## integer.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::internal::MakeIntegerChecker(int64_t min, int64_t max, std::string name) [free function] module.add_function('MakeIntegerChecker', 'ns3::Ptr< ns3::AttributeChecker const >', [param('int64_t', 'min'), param('int64_t', 'max'), 
param('std::string', 'name')]) ## uinteger.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::internal::MakeUintegerChecker(uint64_t min, uint64_t max, std::string name) [free function] module.add_function('MakeUintegerChecker', 'ns3::Ptr< ns3::AttributeChecker const >', [param('uint64_t', 'min'), param('uint64_t', 'max'), param('std::string', 'name')]) return def main(): out = FileCodeSink(sys.stdout) root_module = module_init() register_types(root_module) register_methods(root_module) register_functions(root_module) root_module.generate(out) if __name__ == '__main__': main()
gpl-2.0
ds0nt/or-tools
examples/python/hidato.py
34
6160
# Copyright 2010 Hakan Kjellerstrand [email protected]
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Hidato puzzle in Google CP Solver.

http://www.shockwave.com/gamelanding/hidato.jsp
http://www.hidato.com/

'''
Puzzles start semi-filled with numbered tiles.
The first and last numbers are circled.
Connect the numbers together to win. Consecutive
number must touch horizontally, vertically, or
diagonally.
'''

Compare with the following models:
* MiniZinc: http://www.hakank.org/minizinc/hidato.mzn
* Gecode  : http://www.hakank.org/gecode/hidato.cpp
* Comet   : http://www.hakank.org/comet/hidato.co
* Tailor/Essence': http://hakank.org/tailor/hidato.eprime
* ECLiPSe: http://hakank.org/eclipse/hidato.ecl
* SICStus: http://hakank.org/sicstus/hidato.pl

Note: This model is very slow. Please see Laurent Perron's much faster
(and more elegant) model: hidato_table.py .

This model was created by Hakan Kjellerstrand ([email protected])
Also see my other Google CP Solver models:
http://www.hakank.org/google_or_tools/
"""
from __future__ import print_function  # Python 2/3 compatible print()

from ortools.constraint_solver import pywrapcp


def main():
  """Build and solve a Hidato instance with the or-tools CP solver.

  Prints the puzzle, every solution found, and the search statistics.
  """
  # Create the solver.
  # NOTE: the original model named this solver "n-queens", a copy-paste
  # slip from another example; the name is only a label for the solver.
  solver = pywrapcp.Solver("hidato")

  #
  # data
  #
  # Problem 2 (Practice) from Gyora Benedek:
  # "Hidato: 2000 Pure Logic Puzzles".
  # 0 marks an empty cell; a positive entry is a fixed clue.
  # More instances (problems 1, 3, 15, ...) are available in the original
  # model at http://www.hakank.org/google_or_tools/
  r = 5
  c = r
  puzzle = [
      [0, 0, 0, 0, 14],
      [0, 18, 12, 0, 0],
      [0, 0, 17, 4, 5],
      [0, 0, 7, 0, 0],
      [9, 8, 25, 1, 0],
  ]

  print_game(puzzle, r, c)

  #
  # declare variables
  #
  # x[(i, j)] is the number (1..r*c) placed at row i, column j.
  x = {}
  for i in range(r):
    for j in range(c):
      x[(i, j)] = solver.IntVar(1, r * c, "dice(%i,%i)" % (i, j))
  x_flat = [x[(i, j)] for i in range(r) for j in range(c)]

  #
  # constraints
  #
  # Every number 1..r*c is used exactly once.
  solver.Add(solver.AllDifferent(x_flat))

  # Fill in the clues.
  for i in range(r):
    for j in range(c):
      if puzzle[i][j] > 0:
        solver.Add(x[(i, j)] == puzzle[i][j])

  # From the numbers k = 1 to r*c-1, find this position,
  # and then the position of k+1: it must lie in a neighbouring cell
  # (i+a, j+b) with a, b in {-1, 0, 1}.
  for k in range(1, r * c):
    i = solver.IntVar(0, r)
    j = solver.IntVar(0, c)
    a = solver.IntVar(-1, 1)
    b = solver.IntVar(-1, 1)

    # 1) First: fix "this" k
    # 2) and then find the position of the next value (k+1)
    solver.Add(k == solver.Element(x_flat, i * c + j))
    solver.Add(k + 1 == solver.Element(x_flat, (i + a) * c + (j + b)))

    # The neighbour must stay on the board.
    solver.Add(i + a >= 0)
    solver.Add(j + b >= 0)
    solver.Add(i + a < r)
    solver.Add(j + b < c)

    # (a, b) != (0, 0): k+1 must occupy a *different* cell than k.
    a_nz = solver.BoolVar()
    b_nz = solver.BoolVar()
    solver.Add(a_nz == solver.IsDifferentCstVar(a, 0))
    solver.Add(b_nz == solver.IsDifferentCstVar(b, 0))
    solver.Add(a_nz + b_nz >= 1)

  #
  # solution and search
  #
  solution = solver.Assignment()
  solution.Add(x_flat)

  # db: DecisionBuilder
  db = solver.Phase(x_flat,
                    solver.CHOOSE_FIRST_UNBOUND,
                    solver.ASSIGN_MIN_VALUE)

  solver.NewSearch(db)
  num_solutions = 0
  while solver.NextSolution():
    num_solutions += 1
    print("\nSolution:", num_solutions)
    print_board(x, r, c)
    print()
  solver.EndSearch()

  print()
  print("num_solutions:", num_solutions)
  print("failures:", solver.Failures())
  print("branches:", solver.Branches())
  print("WallTime:", solver.WallTime())


def print_board(x, rows, cols):
  """Print the solved grid: each cell's variable value, 2 chars wide."""
  for i in range(rows):
    for j in range(cols):
      print("% 2s" % x[i, j].Value(), end=" ")
    print("")


def print_game(game, rows, cols):
  """Print the puzzle instance (0 = empty cell), 2 chars wide per cell."""
  for i in range(rows):
    for j in range(cols):
      print("% 2s" % game[i][j], end=" ")
    print("")


if __name__ == "__main__":
  main()
apache-2.0
AthinaB/synnefo
snf-cyclades-gtools/setup.py
9
1843
# Copyright (C) 2010-2014 GRNET S.A. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # import os from setuptools import setup HERE = os.path.abspath(os.path.normpath(os.path.dirname(__file__))) from synnefo.versions.ganeti import __version__ setup( name="snf-cyclades-gtools", version=__version__, description="Synnefo tools for interaction with Ganeti", url="http://www.synnefo.org/", author='Synnefo development team', author_email='[email protected]', maintainer='Synnefo development team', maintainer_email='[email protected]', license="GNU GPLv3", namespace_packages=["synnefo", "synnefo.versions"], packages=["synnefo", "synnefo.ganeti", "synnefo.versions"], dependency_links=['http://www.synnefo.org/packages/pypi'], install_requires=[ 'snf-common', 'python-daemon>=1.5.5', 'pyinotify>=0.8.9', 'puka', 'setproctitle>=1.0.1' ], entry_points={ 'console_scripts': [ 'snf-ganeti-eventd = synnefo.ganeti.eventd:main', 'snf-progress-monitor = synnefo.ganeti.progress_monitor:main' ], 'synnefo': [ 'default_settings = synnefo.ganeti.settings' ] }, )
gpl-3.0
PaulAYoung/f2014_iolab
pymongoProject/venv/lib/python2.7/site-packages/passlib/handlers/sha1_crypt.py
19
5325
"""passlib.handlers.sha1_crypt """ #============================================================================= # imports #============================================================================= # core from hmac import new as hmac from hashlib import sha1 import re import logging; log = logging.getLogger(__name__) from warnings import warn # site # pkg from passlib.utils import classproperty, h64, safe_crypt, test_crypt from passlib.utils.compat import b, bytes, u, uascii_to_str, unicode from passlib.utils.pbkdf2 import get_prf import passlib.utils.handlers as uh # local __all__ = [ ] #============================================================================= # sha1-crypt #============================================================================= _hmac_sha1 = get_prf("hmac-sha1")[0] _BNULL = b('\x00') class sha1_crypt(uh.HasManyBackends, uh.HasRounds, uh.HasSalt, uh.GenericHandler): """This class implements the SHA1-Crypt password hash, and follows the :ref:`password-hash-api`. It supports a variable-length salt, and a variable number of rounds. The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: :type salt: str :param salt: Optional salt string. If not specified, an 8 character one will be autogenerated (this is recommended). If specified, it must be 0-64 characters, drawn from the regexp range ``[./0-9A-Za-z]``. :type salt_size: int :param salt_size: Optional number of bytes to use when autogenerating new salts. Defaults to 8 bytes, but can be any value between 0 and 64. :type rounds: int :param rounds: Optional number of rounds to use. Defaults to 64000, must be between 1 and 4294967295, inclusive. :type relaxed: bool :param relaxed: By default, providing an invalid value for one of the other keywords will result in a :exc:`ValueError`. If ``relaxed=True``, and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` will be issued instead. 
Correctable errors include ``rounds`` that are too small or too large, and ``salt`` strings that are too long. .. versionadded:: 1.6 """ #=================================================================== # class attrs #=================================================================== #--GenericHandler-- name = "sha1_crypt" setting_kwds = ("salt", "salt_size", "rounds") ident = u("$sha1$") checksum_size = 28 checksum_chars = uh.HASH64_CHARS #--HasSalt-- default_salt_size = 8 min_salt_size = 0 max_salt_size = 64 salt_chars = uh.HASH64_CHARS #--HasRounds-- default_rounds = 64000 # current passlib default min_rounds = 1 # really, this should be higher. max_rounds = 4294967295 # 32-bit integer limit rounds_cost = "linear" #=================================================================== # formatting #=================================================================== @classmethod def from_string(cls, hash): rounds, salt, chk = uh.parse_mc3(hash, cls.ident, handler=cls) return cls(rounds=rounds, salt=salt, checksum=chk) def to_string(self, config=False): chk = None if config else self.checksum return uh.render_mc3(self.ident, self.rounds, self.salt, chk) #=================================================================== # backend #=================================================================== backends = ("os_crypt", "builtin") _has_backend_builtin = True @classproperty def _has_backend_os_crypt(cls): return test_crypt("test", '$sha1$1$Wq3GL2Vp$C8U25GvfHS8qGHim' 'ExLaiSFlGkAe') def _calc_checksum_builtin(self, secret): if isinstance(secret, unicode): secret = secret.encode("utf-8") if _BNULL in secret: raise uh.exc.NullPasswordError(self) rounds = self.rounds # NOTE: this seed value is NOT the same as the config string result = (u("%s$sha1$%s") % (self.salt, rounds)).encode("ascii") # NOTE: this algorithm is essentially PBKDF1, modified to use HMAC. 
r = 0 while r < rounds: result = _hmac_sha1(secret, result) r += 1 return h64.encode_transposed_bytes(result, self._chk_offsets).decode("ascii") _chk_offsets = [ 2,1,0, 5,4,3, 8,7,6, 11,10,9, 14,13,12, 17,16,15, 0,19,18, ] def _calc_checksum_os_crypt(self, secret): config = self.to_string(config=True) hash = safe_crypt(secret, config) if hash: assert hash.startswith(config) and len(hash) == len(config) + 29 return hash[-28:] else: return self._calc_checksum_builtin(secret) #=================================================================== # eoc #=================================================================== #============================================================================= # eof #=============================================================================
unlicense
bulletRush/QCloud_yunapi_wrapper
thirdparty/requests/exceptions.py
895
2517
# -*- coding: utf-8 -*- """ requests.exceptions ~~~~~~~~~~~~~~~~~~~ This module contains the set of Requests' exceptions. """ from .packages.urllib3.exceptions import HTTPError as BaseHTTPError class RequestException(IOError): """There was an ambiguous exception that occurred while handling your request.""" def __init__(self, *args, **kwargs): """ Initialize RequestException with `request` and `response` objects. """ response = kwargs.pop('response', None) self.response = response self.request = kwargs.pop('request', None) if (response is not None and not self.request and hasattr(response, 'request')): self.request = self.response.request super(RequestException, self).__init__(*args, **kwargs) class HTTPError(RequestException): """An HTTP error occurred.""" class ConnectionError(RequestException): """A Connection error occurred.""" class ProxyError(ConnectionError): """A proxy error occurred.""" class SSLError(ConnectionError): """An SSL error occurred.""" class Timeout(RequestException): """The request timed out. Catching this error will catch both :exc:`~requests.exceptions.ConnectTimeout` and :exc:`~requests.exceptions.ReadTimeout` errors. """ class ConnectTimeout(ConnectionError, Timeout): """The request timed out while trying to connect to the remote server. Requests that produced this error are safe to retry. """ class ReadTimeout(Timeout): """The server did not send any data in the allotted amount of time.""" class URLRequired(RequestException): """A valid URL is required to make a request.""" class TooManyRedirects(RequestException): """Too many redirects.""" class MissingSchema(RequestException, ValueError): """The URL schema (e.g. http or https) is missing.""" class InvalidSchema(RequestException, ValueError): """See defaults.py for valid schemas.""" class InvalidURL(RequestException, ValueError): """ The URL provided was somehow invalid. 
""" class ChunkedEncodingError(RequestException): """The server declared chunked encoding but sent an invalid chunk.""" class ContentDecodingError(RequestException, BaseHTTPError): """Failed to decode response content""" class StreamConsumedError(RequestException, TypeError): """The content for this response was already consumed""" class RetryError(RequestException): """Custom retries logic failed"""
apache-2.0
Mirantis/tempest
tempest/services/volume/xml/volumes_client.py
1
17467
# Copyright 2012 IBM Corp. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import time import urllib from xml.sax import saxutils from lxml import etree from tempest.common import rest_client from tempest.common import xml_utils as common from tempest import config from tempest import exceptions CONF = config.CONF VOLUME_NS_BASE = 'http://docs.openstack.org/volume/ext/' VOLUME_HOST_NS = VOLUME_NS_BASE + 'volume_host_attribute/api/v1' VOLUME_MIG_STATUS_NS = VOLUME_NS_BASE + 'volume_mig_status_attribute/api/v1' VOLUMES_TENANT_NS = VOLUME_NS_BASE + 'volume_tenant_attribute/api/v1' class BaseVolumesClientXML(rest_client.RestClient): """ Base client class to send CRUD Volume API requests to a Cinder endpoint """ TYPE = "xml" def __init__(self, auth_provider): super(BaseVolumesClientXML, self).__init__(auth_provider) self.service = CONF.volume.catalog_type self.build_interval = CONF.compute.build_interval self.build_timeout = CONF.compute.build_timeout def _translate_attributes_to_json(self, volume): volume_host_attr = '{' + VOLUME_HOST_NS + '}host' volume_mig_stat_attr = '{' + VOLUME_MIG_STATUS_NS + '}migstat' volume_mig_name_attr = '{' + VOLUME_MIG_STATUS_NS + '}name_id' volume_tenant_id_attr = '{' + VOLUMES_TENANT_NS + '}tenant_id' if volume_host_attr in volume: volume['os-vol-host-attr:host'] = volume.pop(volume_host_attr) if volume_mig_stat_attr in volume: volume['os-vol-mig-status-attr:migstat'] = volume.pop( volume_mig_stat_attr) if volume_mig_name_attr in 
volume: volume['os-vol-mig-status-attr:name_id'] = volume.pop( volume_mig_name_attr) if volume_tenant_id_attr in volume: volume['os-vol-tenant-attr:tenant_id'] = volume.pop( volume_tenant_id_attr) def _parse_volume(self, body): vol = dict((attr, body.get(attr)) for attr in body.keys()) for child in body.getchildren(): tag = child.tag if tag.startswith("{"): ns, tag = tag.split("}", 1) if tag == 'metadata': vol['metadata'] = dict((meta.get('key'), meta.text) for meta in child.getchildren()) else: vol[tag] = common.xml_to_json(child) self._translate_attributes_to_json(vol) self._check_if_bootable(vol) return vol def get_attachment_from_volume(self, volume): """Return the element 'attachment' from input volumes.""" return volume['attachments']['attachment'] def _check_if_bootable(self, volume): """ Check if the volume is bootable, also change the value of 'bootable' from string to boolean. """ # NOTE(jdg): Version 1 of Cinder API uses lc strings # We should consider being explicit in this check to # avoid introducing bugs like: LP #1227837 if volume['bootable'].lower() == 'true': volume['bootable'] = True elif volume['bootable'].lower() == 'false': volume['bootable'] = False else: raise ValueError( 'bootable flag is supposed to be either True or False,' 'it is %s' % volume['bootable']) return volume def list_volumes(self, params=None): """List all the volumes created.""" url = 'volumes' if params: url += '?%s' % urllib.urlencode(params) resp, body = self.get(url) body = etree.fromstring(body) volumes = [] if body is not None: volumes += [self._parse_volume(vol) for vol in list(body)] return resp, volumes def list_volumes_with_detail(self, params=None): """List all the details of volumes.""" url = 'volumes/detail' if params: url += '?%s' % urllib.urlencode(params) resp, body = self.get(url) body = etree.fromstring(body) volumes = [] if body is not None: volumes += [self._parse_volume(vol) for vol in list(body)] return resp, volumes def get_volume(self, volume_id): 
"""Returns the details of a single volume.""" url = "volumes/%s" % str(volume_id) resp, body = self.get(url) body = self._parse_volume(etree.fromstring(body)) return resp, body def create_volume(self, size=None, **kwargs): """Creates a new Volume. :param size: Size of volume in GB. :param display_name: Optional Volume Name(only for V1). :param name: Optional Volume Name(only for V2). :param display_name: Optional Volume Name. :param metadata: An optional dictionary of values for metadata. :param volume_type: Optional Name of volume_type for the volume :param snapshot_id: When specified the volume is created from this snapshot :param imageRef: When specified the volume is created from this image """ # for bug #1293885: # If no size specified, read volume size from CONF if size is None: size = CONF.volume.volume_size # NOTE(afazekas): it should use a volume namespace volume = common.Element("volume", xmlns=common.XMLNS_11, size=size) if 'metadata' in kwargs: _metadata = common.Element('metadata') volume.append(_metadata) for key, value in kwargs['metadata'].items(): meta = common.Element('meta') meta.add_attr('key', key) meta.append(common.Text(value)) _metadata.append(meta) attr_to_add = kwargs.copy() del attr_to_add['metadata'] else: attr_to_add = kwargs for key, value in attr_to_add.items(): volume.add_attr(key, value) resp, body = self.post('volumes', str(common.Document(volume))) body = common.xml_to_json(etree.fromstring(body)) return resp, body def update_volume(self, volume_id, **kwargs): """Updates the Specified Volume.""" put_body = common.Element("volume", xmlns=common.XMLNS_11, **kwargs) resp, body = self.put('volumes/%s' % volume_id, str(common.Document(put_body))) body = common.xml_to_json(etree.fromstring(body)) return resp, body def delete_volume(self, volume_id): """Deletes the Specified Volume.""" return self.delete("volumes/%s" % str(volume_id)) def wait_for_volume_status(self, volume_id, status): """Waits for a Volume to reach a given status.""" 
resp, body = self.get_volume(volume_id) volume_status = body['status'] start = int(time.time()) while volume_status != status: time.sleep(self.build_interval) resp, body = self.get_volume(volume_id) volume_status = body['status'] if volume_status == 'error': raise exceptions.VolumeBuildErrorException(volume_id=volume_id) if int(time.time()) - start >= self.build_timeout: message = 'Volume %s failed to reach %s status within '\ 'the required time (%s s).' % (volume_id, status, self.build_timeout) raise exceptions.TimeoutException(message) def is_resource_deleted(self, id): try: self.get_volume(id) except exceptions.NotFound: return True return False def attach_volume(self, volume_id, instance_uuid, mountpoint): """Attaches a volume to a given instance on a given mountpoint.""" post_body = common.Element("os-attach", instance_uuid=instance_uuid, mountpoint=mountpoint ) url = 'volumes/%s/action' % str(volume_id) resp, body = self.post(url, str(common.Document(post_body))) if body: body = common.xml_to_json(etree.fromstring(body)) return resp, body def detach_volume(self, volume_id): """Detaches a volume from an instance.""" post_body = common.Element("os-detach") url = 'volumes/%s/action' % str(volume_id) resp, body = self.post(url, str(common.Document(post_body))) if body: body = common.xml_to_json(etree.fromstring(body)) return resp, body def upload_volume(self, volume_id, image_name, disk_format): """Uploads a volume in Glance.""" post_body = common.Element("os-volume_upload_image", image_name=image_name, disk_format=disk_format) url = 'volumes/%s/action' % str(volume_id) resp, body = self.post(url, str(common.Document(post_body))) volume = common.xml_to_json(etree.fromstring(body)) return resp, volume def extend_volume(self, volume_id, extend_size): """Extend a volume.""" post_body = common.Element("os-extend", new_size=extend_size) url = 'volumes/%s/action' % str(volume_id) resp, body = self.post(url, str(common.Document(post_body))) if body: body = 
common.xml_to_json(etree.fromstring(body)) return resp, body def reset_volume_status(self, volume_id, status): """Reset the Specified Volume's Status.""" post_body = common.Element("os-reset_status", status=status ) url = 'volumes/%s/action' % str(volume_id) resp, body = self.post(url, str(common.Document(post_body))) if body: body = common.xml_to_json(etree.fromstring(body)) return resp, body def volume_begin_detaching(self, volume_id): """Volume Begin Detaching.""" post_body = common.Element("os-begin_detaching") url = 'volumes/%s/action' % str(volume_id) resp, body = self.post(url, str(common.Document(post_body))) if body: body = common.xml_to_json(etree.fromstring(body)) return resp, body def volume_roll_detaching(self, volume_id): """Volume Roll Detaching.""" post_body = common.Element("os-roll_detaching") url = 'volumes/%s/action' % str(volume_id) resp, body = self.post(url, str(common.Document(post_body))) if body: body = common.xml_to_json(etree.fromstring(body)) return resp, body def reserve_volume(self, volume_id): """Reserves a volume.""" post_body = common.Element("os-reserve") url = 'volumes/%s/action' % str(volume_id) resp, body = self.post(url, str(common.Document(post_body))) if body: body = common.xml_to_json(etree.fromstring(body)) return resp, body def unreserve_volume(self, volume_id): """Restore a reserved volume .""" post_body = common.Element("os-unreserve") url = 'volumes/%s/action' % str(volume_id) resp, body = self.post(url, str(common.Document(post_body))) if body: body = common.xml_to_json(etree.fromstring(body)) return resp, body def create_volume_transfer(self, vol_id, display_name=None): """Create a volume transfer.""" post_body = common.Element("transfer", volume_id=vol_id) if display_name: post_body.add_attr('name', display_name) resp, body = self.post('os-volume-transfer', str(common.Document(post_body))) volume = common.xml_to_json(etree.fromstring(body)) return resp, volume def get_volume_transfer(self, transfer_id): """Returns 
the details of a volume transfer.""" url = "os-volume-transfer/%s" % str(transfer_id) resp, body = self.get(url) volume = common.xml_to_json(etree.fromstring(body)) return resp, volume def list_volume_transfers(self, params=None): """List all the volume transfers created.""" url = 'os-volume-transfer' if params: url += '?%s' % urllib.urlencode(params) resp, body = self.get(url) body = etree.fromstring(body) volumes = [] if body is not None: volumes += [self._parse_volume_transfer(vol) for vol in list(body)] return resp, volumes def _parse_volume_transfer(self, body): vol = dict((attr, body.get(attr)) for attr in body.keys()) for child in body.getchildren(): tag = child.tag if tag.startswith("{"): tag = tag.split("}", 1) vol[tag] = common.xml_to_json(child) return vol def delete_volume_transfer(self, transfer_id): """Delete a volume transfer.""" return self.delete("os-volume-transfer/%s" % str(transfer_id)) def accept_volume_transfer(self, transfer_id, transfer_auth_key): """Accept a volume transfer.""" post_body = common.Element("accept", auth_key=transfer_auth_key) url = 'os-volume-transfer/%s/accept' % transfer_id resp, body = self.post(url, str(common.Document(post_body))) volume = common.xml_to_json(etree.fromstring(body)) return resp, volume def update_volume_readonly(self, volume_id, readonly): """Update the Specified Volume readonly.""" post_body = common.Element("os-update_readonly_flag", readonly=readonly) url = 'volumes/%s/action' % str(volume_id) resp, body = self.post(url, str(common.Document(post_body))) if body: body = common.xml_to_json(etree.fromstring(body)) return resp, body def force_delete_volume(self, volume_id): """Force Delete Volume.""" post_body = common.Element("os-force_delete") url = 'volumes/%s/action' % str(volume_id) resp, body = self.post(url, str(common.Document(post_body))) if body: body = common.xml_to_json(etree.fromstring(body)) return resp, body def _metadata_body(self, meta): post_body = common.Element('metadata') for k, v in 
meta.items(): data = common.Element('meta', key=k) # Escape value to allow for special XML chars data.append(common.Text(saxutils.escape(v))) post_body.append(data) return post_body def _parse_key_value(self, node): """Parse <foo key='key'>value</foo> data into {'key': 'value'}.""" data = {} for node in node.getchildren(): data[node.get('key')] = node.text return data def create_volume_metadata(self, volume_id, metadata): """Create metadata for the volume.""" post_body = self._metadata_body(metadata) resp, body = self.post('volumes/%s/metadata' % volume_id, str(common.Document(post_body))) body = self._parse_key_value(etree.fromstring(body)) return resp, body def get_volume_metadata(self, volume_id): """Get metadata of the volume.""" url = "volumes/%s/metadata" % str(volume_id) resp, body = self.get(url) body = self._parse_key_value(etree.fromstring(body)) return resp, body def update_volume_metadata(self, volume_id, metadata): """Update metadata for the volume.""" put_body = self._metadata_body(metadata) url = "volumes/%s/metadata" % str(volume_id) resp, body = self.put(url, str(common.Document(put_body))) body = self._parse_key_value(etree.fromstring(body)) return resp, body def update_volume_metadata_item(self, volume_id, id, meta_item): """Update metadata item for the volume.""" for k, v in meta_item.items(): put_body = common.Element('meta', key=k) put_body.append(common.Text(v)) url = "volumes/%s/metadata/%s" % (str(volume_id), str(id)) resp, body = self.put(url, str(common.Document(put_body))) body = common.xml_to_json(etree.fromstring(body)) return resp, body def delete_volume_metadata_item(self, volume_id, id): """Delete metadata item for the volume.""" url = "volumes/%s/metadata/%s" % (str(volume_id), str(id)) return self.delete(url) class VolumesClientXML(BaseVolumesClientXML): """ Client class to send CRUD Volume API V1 requests to a Cinder endpoint """
apache-2.0
maxfischer2781/cpy2py
cpy2py_unittests/test_descriptor.py
1
1295
from __future__ import print_function import unittest import time from cpy2py import kernel_state, TwinMaster, TwinObject class DescriptorObject(TwinObject): __twin_id__ = 'pypy' def __init__(self, numeric_value=0): self.numeric_value = numeric_value @property def prop(self): return self.numeric_value, kernel_state.TWIN_ID @prop.setter def prop(self, value): self.numeric_value = value @prop.deleter def prop(self): self.numeric_value = 0 class TestDescriptor(unittest.TestCase): """Test for object magic methods""" def setUp(self): self.twinterpreter = TwinMaster('pypy') self.twinterpreter.start() def tearDown(self): self.twinterpreter.destroy() time.sleep(0.1) def test_get(self): instance = DescriptorObject(2) self.assertEqual(instance.prop, (2, 'pypy')) def test_set(self): instance = DescriptorObject(2) self.assertEqual(instance.prop, (2, 'pypy')) instance.prop = 3 self.assertEqual(instance.prop, (3, 'pypy')) def test_del(self): instance = DescriptorObject(2) self.assertEqual(instance.prop, (2, 'pypy')) del instance.prop self.assertEqual(instance.prop, (0, 'pypy'))
apache-2.0
wmles/scholarium.at
Bibliothek/migrations/0005_auto_20171001_1105.py
1
1586
# -*- coding: utf-8 -*- # Generated by Django 1.9.11 on 2017-10-01 11:05 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('Bibliothek', '0004_altes_buch'), ] operations = [ migrations.AddField( model_name='buch', name='anzahl_druck', field=models.SmallIntegerField(blank=True, default=0), ), migrations.AddField( model_name='buch', name='ob_epub', field=models.BooleanField(default=0), ), migrations.AddField( model_name='buch', name='ob_mobi', field=models.BooleanField(default=0), ), migrations.AddField( model_name='buch', name='ob_pdf', field=models.BooleanField(default=0), ), migrations.AddField( model_name='buch', name='preis_druck', field=models.SmallIntegerField(blank=True, null=True), ), migrations.AddField( model_name='buch', name='preis_epub', field=models.SmallIntegerField(blank=True, null=True), ), migrations.AddField( model_name='buch', name='preis_mobi', field=models.SmallIntegerField(blank=True, null=True), ), migrations.AddField( model_name='buch', name='preis_pdf', field=models.SmallIntegerField(blank=True, null=True), ), ]
mit
alexmogavero/home-assistant
tests/components/test_logger.py
24
3191
"""The tests for the Logger component.""" from collections import namedtuple import logging import unittest from homeassistant.setup import setup_component from homeassistant.components import logger from tests.common import get_test_home_assistant RECORD = namedtuple('record', ('name', 'levelno')) NO_LOGS_CONFIG = {'logger': {'default': 'info'}} TEST_CONFIG = { 'logger': { 'default': 'warning', 'logs': {'test': 'info'} } } class TestUpdater(unittest.TestCase): """Test logger component.""" def setUp(self): """Setup things to be run when tests are started.""" self.hass = get_test_home_assistant() self.log_filter = None def tearDown(self): """Stop everything that was started.""" del logging.root.handlers[-1] self.hass.stop() def setup_logger(self, config): """Setup logger and save log filter.""" setup_component(self.hass, logger.DOMAIN, config) self.log_filter = logging.root.handlers[-1].filters[0] def assert_logged(self, name, level): """Assert that a certain record was logged.""" self.assertTrue(self.log_filter.filter(RECORD(name, level))) def assert_not_logged(self, name, level): """Assert that a certain record was not logged.""" self.assertFalse(self.log_filter.filter(RECORD(name, level))) def test_logger_setup(self): """Use logger to create a logging filter.""" self.setup_logger(TEST_CONFIG) self.assertTrue(len(logging.root.handlers) > 0) handler = logging.root.handlers[-1] self.assertEqual(len(handler.filters), 1) log_filter = handler.filters[0].logfilter self.assertEqual(log_filter['default'], logging.WARNING) self.assertEqual(log_filter['logs']['test'], logging.INFO) def test_logger_test_filters(self): """Test resulting filter operation.""" self.setup_logger(TEST_CONFIG) # Blocked default record self.assert_not_logged('asdf', logging.DEBUG) # Allowed default record self.assert_logged('asdf', logging.WARNING) # Blocked named record self.assert_not_logged('test', logging.DEBUG) # Allowed named record self.assert_logged('test', logging.INFO) def 
test_set_filter_empty_config(self): """Test change log level from empty configuration.""" self.setup_logger(NO_LOGS_CONFIG) self.assert_not_logged('test', logging.DEBUG) self.hass.services.call( logger.DOMAIN, 'set_level', {'test': 'debug'}) self.hass.block_till_done() self.assert_logged('test', logging.DEBUG) def test_set_filter(self): """Test change log level of existing filter.""" self.setup_logger(TEST_CONFIG) self.assert_not_logged('asdf', logging.DEBUG) self.assert_logged('dummy', logging.WARNING) self.hass.services.call(logger.DOMAIN, 'set_level', {'asdf': 'debug', 'dummy': 'info'}) self.hass.block_till_done() self.assert_logged('asdf', logging.DEBUG) self.assert_logged('dummy', logging.WARNING)
apache-2.0
besacier/WCE-LIG
tools/moses/scripts/analysis/extract-target-trees.py
2
5322
#!/usr/bin/env python # Usage: extract-target-trees.py [FILE] # # Reads moses-chart's -T output from FILE or standard input and writes trees to # standard output in Moses' XML tree format. import re import sys class Tree: def __init__(self, label, children): self.label = label self.children = children def is_leaf(self): return len(self.children) == 0 class Derivation(list): def find_root(self): assert len(self) > 0 root = None for hypothesis in self: if hypothesis.span[0] != 0: continue if root == None or hypothesis.span[1] > root.span[1]: root = hypothesis assert root return root def construct_target_tree(self): hypo_map = {} for hypothesis in self: hypo_map[hypothesis.span] = hypothesis root = self.find_root() return self._build_tree(root, hypo_map) def _build_tree(self, root, hypo_map): def escape_label(label): s = label.replace("&", "&amp;") s = s.replace("<", "&lt;") s = s.replace(">", "&gt;") return s # Build list of NT spans in source order... non_term_spans = [] for item in root.source_symbol_info: span = item[0] if span != root.span and span in hypo_map: # In hypo_map iff symbol is NT non_term_spans.append(span) non_term_spans.sort() # ... then convert to target order. 
alignment_pairs = root.nt_alignments[:] alignment_pairs.sort() target_order_non_term_spans = {} for i, pair in enumerate(alignment_pairs): target_order_non_term_spans[pair[1]] = non_term_spans[i] children = [] num_non_terms = 0 for i, symbol in enumerate(root.target_rhs): if i in target_order_non_term_spans: hyp = hypo_map[target_order_non_term_spans[i]] children.append(self._build_tree(hyp, hypo_map)) num_non_terms += 1 else: children.append(Tree(escape_label(symbol), [])) assert num_non_terms == len(root.nt_alignments) return Tree(root.target_lhs, children) class Hypothesis: def __init__(self): self.sentence_num = None self.span = None self.source_symbol_info = None self.target_lhs = None self.target_rhs = None self.nt_alignments = None def read_derivations(input): line_num = 0 start_line_num = None prev_sentence_num = None derivation = Derivation() for line in input: line_num += 1 hypothesis = parse_line(line) if hypothesis.sentence_num != prev_sentence_num: # We've started reading the next derivation... prev_sentence_num = hypothesis.sentence_num if len(derivation): yield derivation, start_line_num derivation = Derivation() start_line_num = line_num derivation.append(hypothesis) if len(derivation): yield derivation, start_line_num # Extract the hypothesis components and return a Hypothesis object. 
def parse_line(s): pattern = r"Trans Opt (\d+) " + \ r"\[(\d+)\.\.(\d+)\]:" + \ r"((?: \[\d+\.\.\d+\]=\S+ )+):" + \ r" (\S+) ->\S+ -> " + \ r"((?:\S+ )+):" + \ r"((?:\d+-\d+ )*): c=" regexp = re.compile(pattern) match = regexp.match(s) if not match: sys.stderr.write("%s\n" % s) assert match group = match.groups() hypothesis = Hypothesis() hypothesis.sentence_num = int(group[0]) + 1 hypothesis.span = (int(group[1]), int(group[2])) hypothesis.source_symbol_info = [] for item in group[3].split(): pattern = "\[(\d+)\.\.(\d+)\]=(\S+)" regexp = re.compile(pattern) match = regexp.match(item) assert(match) start, end, symbol = match.groups() span = (int(start), int(end)) hypothesis.source_symbol_info.append((span, symbol)) hypothesis.target_lhs = group[4] hypothesis.target_rhs = group[5].split() hypothesis.nt_alignments = [] for pair in group[6].split(): match = re.match(r'(\d+)-(\d+)', pair) assert match ai = (int(match.group(1)), int(match.group(2))) hypothesis.nt_alignments.append(ai) return hypothesis def tree_to_xml(tree): if tree.is_leaf(): return tree.label else: s = '<tree label="%s"> ' % tree.label for child in tree.children: s += tree_to_xml(child) s += " " s += '</tree>' return s def main(): if len(sys.argv) > 2: sys.stderr.write("usage: %s [FILE]\n" % sys.argv[0]) sys.exit(1) if len(sys.argv) == 1 or sys.argv[1] == "-": input = sys.stdin else: input = open(sys.argv[1]) for derivation, line_num in read_derivations(input): try: tree = derivation.construct_target_tree() except: msg = "error processing derivation starting at line %d\n" % line_num sys.stderr.write(msg) raise print tree_to_xml(tree) if __name__ == '__main__': main()
gpl-3.0
VinACE/nltk-trainer
analyze_tagger_coverage.py
5
5537
#!/usr/bin/env python
"""Command-line script: analyze part-of-speech tagger coverage over a tagged
corpus, printing per-tag counts and (with --metrics) accuracy plus per-tag
precision/recall.  Runs entirely at module level - it is meant to be executed,
not imported."""
import argparse, collections, math, os.path
import nltk.corpus, nltk.corpus.reader, nltk.data, nltk.tag, nltk.metrics
from nltk.corpus.util import LazyCorpusLoader
from nltk_trainer import load_corpus_reader, load_model, simplify_wsj_tag
from nltk_trainer.tagging import taggers

########################################
## command options & argument parsing ##
########################################

parser = argparse.ArgumentParser(description='Analyze a part-of-speech tagger on a tagged corpus',
    formatter_class=argparse.RawTextHelpFormatter)

parser.add_argument('corpus',
    help='''The name of a tagged corpus included with NLTK, such as
treebank, brown, cess_esp, floresta, or the root path to a corpus directory,
which can be either an absolute path or relative to a nltk_data directory.''')
parser.add_argument('--tagger', default=nltk.tag._POS_TAGGER,
    help='''pickled tagger filename/path relative to an nltk_data directory
default is NLTK's default tagger''')
parser.add_argument('--trace', default=1, type=int,
    help='How much trace output you want, defaults to 1. 0 is no trace output.')
parser.add_argument('--metrics', action='store_true', default=False,
    help='Use tagged sentences to determine tagger accuracy and tag precision & recall')

corpus_group = parser.add_argument_group('Corpus Reader Options')
corpus_group.add_argument('--reader', default=None,
    help='''Full module path to a corpus reader class, such as
nltk.corpus.reader.tagged.TaggedCorpusReader''')
corpus_group.add_argument('--fileids', default=None,
    help='Specify fileids to load from corpus')
corpus_group.add_argument('--fraction', default=1.0, type=float,
    help='''The fraction of the corpus to use for testing coverage''')

# simplify_wsj_tag is truthy on older NLTK versions; newer NLTK replaced tag
# simplification with tagset mapping, so the available option differs.
if simplify_wsj_tag:
    corpus_group.add_argument('--simplify_tags', action='store_true', default=False,
        help='Use simplified tags')
else:
    corpus_group.add_argument('--tagset', default=None,
        help='Map tags to a given tagset, such as "universal"')

args = parser.parse_args()

###################
## corpus reader ##
###################

corpus = load_corpus_reader(args.corpus, reader=args.reader, fileids=args.fileids)

# kwargs is forwarded to corpus.tagged_sents()/sents() below.
kwargs = {'fileids': args.fileids}

# Tag simplification / tagset mapping only makes sense when we compare against
# the corpus' own tags, i.e. with --metrics.
if simplify_wsj_tag and args.simplify_tags and not args.metrics:
    raise ValueError('simplify_tags can only be used with the --metrics option')
elif simplify_wsj_tag and args.simplify_tags and args.corpus not in ['conll2000', 'switchboard']:
    kwargs['simplify_tags'] = True
elif not simplify_wsj_tag and args.tagset and not args.metrics:
    raise ValueError('tagset can only be used with the --metrics option')
elif not simplify_wsj_tag and args.tagset:
    kwargs['tagset'] = args.tagset

# TODO: support corpora with alternatives to tagged_sents that work just as well
if args.metrics and not hasattr(corpus, 'tagged_sents'):
    raise ValueError('%s does not support metrics' % args.corpus)

############
## tagger ##
############

if args.trace:
    print('loading tagger %s' % args.tagger)

if args.tagger == 'pattern':
    tagger = taggers.PatternTagger()
else:
    tagger = load_model(args.tagger)

#######################
## coverage analysis ##
#######################

if args.trace:
    print('analyzing tag coverage of %s with %s\n' % (args.corpus, tagger.__class__.__name__))

tags_found = collections.defaultdict(int)
unknown_words = set()

if args.metrics:
    # Reference (corpus) vs test (tagger output) tallies, both as flat tag
    # sequences (for accuracy) and per-tag word sets (for precision/recall).
    tags_actual = collections.defaultdict(int)
    tag_refs = []
    tag_test = []
    tag_word_refs = collections.defaultdict(set)
    tag_word_test = collections.defaultdict(set)
    tagged_sents = corpus.tagged_sents(**kwargs)
    taglen = 7  # minimum tag column width for the report

    if args.fraction != 1.0:
        cutoff = int(math.ceil(len(tagged_sents) * args.fraction))
        tagged_sents = tagged_sents[:cutoff]

    for tagged_sent in tagged_sents:
        # First pass: record the corpus' reference tags.
        for word, tag in tagged_sent:
            tags_actual[tag] += 1
            tag_refs.append(tag)
            tag_word_refs[tag].add(word)

            if len(tag) > taglen:
                taglen = len(tag)

        # Second pass: re-tag the untagged sentence and record the output.
        for word, tag in tagger.tag(nltk.tag.untag(tagged_sent)):
            tags_found[tag] += 1
            tag_test.append(tag)
            tag_word_test[tag].add(word)

            # Words the tagger labels '-NONE-' are counted as unknown.
            if tag == '-NONE-':
                unknown_words.add(word)

    print('Accuracy: %f' % nltk.metrics.accuracy(tag_refs, tag_test))
    print('Unknown words: %d' % len(unknown_words))

    if args.trace and unknown_words:
        print(', '.join(sorted(unknown_words)))

    print('')
    print(' '.join(['Tag'.center(taglen), 'Found'.center(9), 'Actual'.center(10),
        'Precision'.center(13), 'Recall'.center(13)]))
    print(' '.join(['='*taglen, '='*9, '='*10, '='*13, '='*13]))

    for tag in sorted(set(tags_found.keys()) | set(tags_actual.keys())):
        found = tags_found[tag]
        actual = tags_actual[tag]
        # Precision/recall are computed over the word *sets* for each tag.
        precision = nltk.metrics.precision(tag_word_refs[tag], tag_word_test[tag])
        recall = nltk.metrics.recall(tag_word_refs[tag], tag_word_test[tag])
        print(' '.join([tag.ljust(taglen), str(found).rjust(9), str(actual).rjust(10),
            str(precision).ljust(13)[:13], str(recall).ljust(13)[:13]]))

    print(' '.join(['='*taglen, '='*9, '='*10, '='*13, '='*13]))
else:
    # Coverage-only mode: just count how often each tag is produced.
    sents = corpus.sents(**kwargs)
    taglen = 7  # minimum tag column width for the report

    if args.fraction != 1.0:
        cutoff = int(math.ceil(len(sents) * args.fraction))
        sents = sents[:cutoff]

    for sent in sents:
        for word, tag in tagger.tag(sent):
            tags_found[tag] += 1

            if len(tag) > taglen:
                taglen = len(tag)

    print(' '.join(['Tag'.center(taglen), 'Count'.center(9)]))
    print(' '.join(['='*taglen, '='*9]))

    for tag in sorted(tags_found.keys()):
        print(' '.join([tag.ljust(taglen), str(tags_found[tag]).rjust(9)]))

    print(' '.join(['='*taglen, '='*9]))
apache-2.0
macmania/klein
setup.py
2
1990
import codecs import os import re from setuptools import setup def read(*parts): """ Build an absolute path from *parts* and and return the contents of the resulting file. Assume UTF-8 encoding. """ here = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(here, *parts), 'r', 'utf-8') as f: return f.read() def find_version(*file_paths): """ Build a path from *file_paths* and search for a ``__version__`` string inside. """ version_file = read(*file_paths) version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M) if version_match: return version_match.group(1) raise RuntimeError("Unable to find version string.") if __name__ == "__main__": setup( classifiers=[ 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development :: Libraries :: Python Modules' ], description="werkzeug + twisted.web", long_description=read('README.rst'), install_requires=[ "Twisted>=12.1", "werkzeug" ], keywords="twisted flask werkzeug web", license="MIT", name="klein", packages=["klein", "klein.test"], url="https://github.com/twisted/klein", version=find_version('klein', '__init__.py'), maintainer='HawkOwl', maintainer_email='[email protected]', )
mit
pavels/pootle
pootle/apps/accounts/migrations/0003_remove_pootleprofile_id.py
15
2823
# -*- coding: utf-8 -*-

from __future__ import unicode_literals

from django.db import migrations


def fix_accounts_alt_src_langs(apps, schema_editor):
    """Remove pootleprofile_id column from accounts_user_alt_src_langs

    After migration from 2.5.x the pootleprofile_id column is left on the
    m2m table accounts_user_alt_src_langs causing uniqueness issues (#3856).

    This migration removes the problem column on mysql.
    """
    cur = schema_editor.connection.cursor()

    # Only MySQL connections expose mysql_version; every other backend is
    # unaffected, so bail out immediately.
    if not hasattr(cur.db, "mysql_version"):
        return

    # Resolve the schema name and the m2m table name for User.alt_src_langs.
    db_name = cur.db.get_connection_params()['db']
    user_model = apps.get_model("accounts.User")
    table_name = user_model._meta.local_many_to_many[0].m2m_db_table()

    # Nothing to do unless the leftover column is actually present.
    cur.execute("SELECT COLUMN_NAME"
                " FROM INFORMATION_SCHEMA.COLUMNS"
                " WHERE TABLE_SCHEMA = '%s'"
                " AND TABLE_NAME = '%s'"
                " AND COLUMN_NAME = 'pootleprofile_id';"
                % (db_name, table_name))
    if not cur.fetchone():
        return

    # Collect every constraint attached to the problem column.
    cur.execute("SELECT CONSTRAINT_NAME "
                " FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE "
                " WHERE TABLE_SCHEMA = '%s' "
                " AND TABLE_NAME = '%s' "
                " AND COLUMN_NAME = 'pootleprofile_id'"
                % (db_name, table_name))

    uniq = fk = None
    default = False
    for (constraint_name,) in cur.fetchall():
        if constraint_name.endswith("uniq"):
            uniq = constraint_name
        elif constraint_name.startswith("pootleprofile_id_refs"):
            fk = constraint_name
        elif constraint_name == "pootleprofile_id":
            default = True

    # Constraints have to be dropped in this order: unique key first,
    # then the foreign key, then any index from an older migrated db.
    if uniq:
        cur.execute("ALTER TABLE %s "
                    " DROP KEY %s" % (table_name, uniq))

    if fk:
        cur.execute("ALTER TABLE %s "
                    " DROP FOREIGN KEY %s" % (table_name, fk))

    if default:
        cur.execute("DROP INDEX pootleprofile_id"
                    " ON %s;" % (table_name))

    # Finally drop the column itself.
    cur.execute("ALTER TABLE %s "
                " DROP COLUMN pootleprofile_id" % (table_name))


class Migration(migrations.Migration):

    dependencies = [
        ('accounts', '0002_user_alt_src_langs'),
    ]

    operations = [
        migrations.RunPython(fix_accounts_alt_src_langs),
    ]
gpl-3.0
manuelcortez/socializer
doc/documentation_importer.py
1
1072
# -*- coding: utf-8 -*-
"""Convert the hold documentation (saved in markdown files) into python files
containing a list of strings, so it can be translated with gettext."""
from codecs import open


def prepare_documentation_in_file(fileSource, fileDest):
    """ This takes documentation written in a markdown file and put all the
    contents in a python file, to create a translatable documentation.

    @fileSource str: A markdown(.md) file.
    @fileDest str: A file where this will put the new strings."""
    # Context managers guarantee both handles are closed even if writing
    # fails part-way (the original leaked the handles on any exception).
    with open(fileSource, "r", encoding="utf-8") as f1, \
            open(fileDest, "w", encoding="utf-8") as f2:
        lns = f1.readlines()
        f2.write("# -*- coding: utf-8 -*-\n")
        f2.write("documentation = [\n")
        for i in lns:
            if "\n" == i or i.splitlines()[0] == "":
                # Blank lines become empty strings so paragraph breaks
                # survive the round trip through gettext.
                newvar = "\"\",\n"
            elif "\n" == i[-1]:
                # Strip the trailing newline before wrapping in _().
                newvar = "_(u\"\"\"%s\"\"\"),\n" % (i.splitlines()[0])
            else:
                # Final line of a file with no trailing newline.
                newvar = "_(u\"\"\"%s\"\"\"),\n" % (i)
            f2.write(newvar)
        f2.write("]")


if __name__ == "__main__":
    # Run the conversions only when executed as a script: the original
    # converted unconditionally at import time, so merely importing this
    # module crashed when manual.md/changelog.md were absent.
    prepare_documentation_in_file("manual.md", "strings.py")
    prepare_documentation_in_file("changelog.md", "changelog.py")
gpl-2.0
bcoca/ansible
test/units/playbook/test_block.py
58
2714
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from units.compat import unittest
from ansible.playbook.block import Block
from ansible.playbook.task import Task


class TestBlock(unittest.TestCase):
    """Unit tests for constructing, loading and deserializing Block objects."""

    def test_construct_empty_block(self):
        """A Block can be instantiated with no arguments."""
        b = Block()

    def test_construct_block_with_role(self):
        """Placeholder - role-based construction is not exercised yet."""
        pass

    def test_load_block_simple(self):
        """Loading an empty block/rescue/always structure yields empty lists."""
        ds = dict(
            block=[],
            rescue=[],
            always=[],
            # otherwise=[],
        )
        b = Block.load(ds)
        self.assertEqual(b.block, [])
        self.assertEqual(b.rescue, [])
        self.assertEqual(b.always, [])
        # not currently used
        # self.assertEqual(b.otherwise, [])

    def test_load_block_with_tasks(self):
        """Each section's task dicts are loaded into Task instances."""
        ds = dict(
            block=[dict(action='block')],
            rescue=[dict(action='rescue')],
            always=[dict(action='always')],
            # otherwise=[dict(action='otherwise')],
        )
        b = Block.load(ds)
        self.assertEqual(len(b.block), 1)
        self.assertIsInstance(b.block[0], Task)
        self.assertEqual(len(b.rescue), 1)
        self.assertIsInstance(b.rescue[0], Task)
        self.assertEqual(len(b.always), 1)
        self.assertIsInstance(b.always[0], Task)
        # not currently used
        # self.assertEqual(len(b.otherwise), 1)
        # self.assertIsInstance(b.otherwise[0], Task)

    def test_load_implicit_block(self):
        """A bare list of task dicts is wrapped into an implicit block."""
        ds = [dict(action='foo')]
        b = Block.load(ds)
        self.assertEqual(len(b.block), 1)
        self.assertIsInstance(b.block[0], Task)

    def test_deserialize(self):
        """deserialize() restores a Block parent from serialized data."""
        ds = dict(
            block=[dict(action='block')],
            rescue=[dict(action='rescue')],
            always=[dict(action='always')],
        )
        b = Block.load(ds)
        data = dict(parent=ds, parent_type='Block')
        b.deserialize(data)
        self.assertIsInstance(b._parent, Block)
gpl-3.0
tinchoss/Python_Android
python/src/Tools/scripts/linktree.py
101
2425
#! /usr/bin/env python # linktree # # Make a copy of a directory tree with symbolic links to all files in the # original tree. # All symbolic links go to a special symbolic link at the top, so you # can easily fix things if the original source tree moves. # See also "mkreal". # # usage: mklinks oldtree newtree import sys, os LINK = '.LINK' # Name of special symlink at the top. debug = 0 def main(): if not 3 <= len(sys.argv) <= 4: print 'usage:', sys.argv[0], 'oldtree newtree [linkto]' return 2 oldtree, newtree = sys.argv[1], sys.argv[2] if len(sys.argv) > 3: link = sys.argv[3] link_may_fail = 1 else: link = LINK link_may_fail = 0 if not os.path.isdir(oldtree): print oldtree + ': not a directory' return 1 try: os.mkdir(newtree, 0777) except os.error, msg: print newtree + ': cannot mkdir:', msg return 1 linkname = os.path.join(newtree, link) try: os.symlink(os.path.join(os.pardir, oldtree), linkname) except os.error, msg: if not link_may_fail: print linkname + ': cannot symlink:', msg return 1 else: print linkname + ': warning: cannot symlink:', msg linknames(oldtree, newtree, link) return 0 def linknames(old, new, link): if debug: print 'linknames', (old, new, link) try: names = os.listdir(old) except os.error, msg: print old + ': warning: cannot listdir:', msg return for name in names: if name not in (os.curdir, os.pardir): oldname = os.path.join(old, name) linkname = os.path.join(link, name) newname = os.path.join(new, name) if debug > 1: print oldname, newname, linkname if os.path.isdir(oldname) and \ not os.path.islink(oldname): try: os.mkdir(newname, 0777) ok = 1 except: print newname + \ ': warning: cannot mkdir:', msg ok = 0 if ok: linkname = os.path.join(os.pardir, linkname) linknames(oldname, newname, linkname) else: os.symlink(linkname, newname) if __name__ == '__main__': sys.exit(main())
apache-2.0
songfj/scrapy
scrapy/extensions/feedexport.py
109
7559
""" Feed Exports extension See documentation in docs/topics/feed-exports.rst """ import os import sys import logging import posixpath from tempfile import TemporaryFile from datetime import datetime from six.moves.urllib.parse import urlparse from ftplib import FTP from zope.interface import Interface, implementer from twisted.internet import defer, threads from w3lib.url import file_uri_to_path from scrapy import signals from scrapy.utils.ftp import ftp_makedirs_cwd from scrapy.exceptions import NotConfigured from scrapy.utils.misc import load_object from scrapy.utils.log import failure_to_exc_info logger = logging.getLogger(__name__) class IFeedStorage(Interface): """Interface that all Feed Storages must implement""" def __init__(uri): """Initialize the storage with the parameters given in the URI""" def open(spider): """Open the storage for the given spider. It must return a file-like object that will be used for the exporters""" def store(file): """Store the given file stream""" @implementer(IFeedStorage) class BlockingFeedStorage(object): def open(self, spider): return TemporaryFile(prefix='feed-') def store(self, file): return threads.deferToThread(self._store_in_thread, file) def _store_in_thread(self, file): raise NotImplementedError @implementer(IFeedStorage) class StdoutFeedStorage(object): def __init__(self, uri, _stdout=sys.stdout): self._stdout = _stdout def open(self, spider): return self._stdout def store(self, file): pass @implementer(IFeedStorage) class FileFeedStorage(object): def __init__(self, uri): self.path = file_uri_to_path(uri) def open(self, spider): dirname = os.path.dirname(self.path) if dirname and not os.path.exists(dirname): os.makedirs(dirname) return open(self.path, 'ab') def store(self, file): file.close() class S3FeedStorage(BlockingFeedStorage): def __init__(self, uri): from scrapy.conf import settings try: import boto except ImportError: raise NotConfigured self.connect_s3 = boto.connect_s3 u = urlparse(uri) self.bucketname = 
u.hostname self.access_key = u.username or settings['AWS_ACCESS_KEY_ID'] self.secret_key = u.password or settings['AWS_SECRET_ACCESS_KEY'] self.keyname = u.path def _store_in_thread(self, file): file.seek(0) conn = self.connect_s3(self.access_key, self.secret_key) bucket = conn.get_bucket(self.bucketname, validate=False) key = bucket.new_key(self.keyname) key.set_contents_from_file(file) key.close() class FTPFeedStorage(BlockingFeedStorage): def __init__(self, uri): u = urlparse(uri) self.host = u.hostname self.port = int(u.port or '21') self.username = u.username self.password = u.password self.path = u.path def _store_in_thread(self, file): file.seek(0) ftp = FTP() ftp.connect(self.host, self.port) ftp.login(self.username, self.password) dirname, filename = posixpath.split(self.path) ftp_makedirs_cwd(ftp, dirname) ftp.storbinary('STOR %s' % filename, file) ftp.quit() class SpiderSlot(object): def __init__(self, file, exporter, storage, uri): self.file = file self.exporter = exporter self.storage = storage self.uri = uri self.itemcount = 0 class FeedExporter(object): def __init__(self, settings): self.settings = settings self.urifmt = settings['FEED_URI'] if not self.urifmt: raise NotConfigured self.format = settings['FEED_FORMAT'].lower() self.storages = self._load_components('FEED_STORAGES') self.exporters = self._load_components('FEED_EXPORTERS') if not self._storage_supported(self.urifmt): raise NotConfigured if not self._exporter_supported(self.format): raise NotConfigured self.store_empty = settings.getbool('FEED_STORE_EMPTY') self.export_fields = settings.getlist('FEED_EXPORT_FIELDS') or None uripar = settings['FEED_URI_PARAMS'] self._uripar = load_object(uripar) if uripar else lambda x, y: None @classmethod def from_crawler(cls, crawler): o = cls(crawler.settings) crawler.signals.connect(o.open_spider, signals.spider_opened) crawler.signals.connect(o.close_spider, signals.spider_closed) crawler.signals.connect(o.item_scraped, signals.item_scraped) return o 
def open_spider(self, spider): uri = self.urifmt % self._get_uri_params(spider) storage = self._get_storage(uri) file = storage.open(spider) exporter = self._get_exporter(file, fields_to_export=self.export_fields) exporter.start_exporting() self.slot = SpiderSlot(file, exporter, storage, uri) def close_spider(self, spider): slot = self.slot if not slot.itemcount and not self.store_empty: return slot.exporter.finish_exporting() logfmt = "%s %%(format)s feed (%%(itemcount)d items) in: %%(uri)s" log_args = {'format': self.format, 'itemcount': slot.itemcount, 'uri': slot.uri} d = defer.maybeDeferred(slot.storage.store, slot.file) d.addCallback(lambda _: logger.info(logfmt % "Stored", log_args, extra={'spider': spider})) d.addErrback(lambda f: logger.error(logfmt % "Error storing", log_args, exc_info=failure_to_exc_info(f), extra={'spider': spider})) return d def item_scraped(self, item, spider): slot = self.slot slot.exporter.export_item(item) slot.itemcount += 1 return item def _load_components(self, setting_prefix): conf = dict(self.settings['%s_BASE' % setting_prefix]) conf.update(self.settings[setting_prefix]) d = {} for k, v in conf.items(): try: d[k] = load_object(v) except NotConfigured: pass return d def _exporter_supported(self, format): if format in self.exporters: return True logger.error("Unknown feed format: %(format)s", {'format': format}) def _storage_supported(self, uri): scheme = urlparse(uri).scheme if scheme in self.storages: try: self._get_storage(uri) return True except NotConfigured: logger.error("Disabled feed storage scheme: %(scheme)s", {'scheme': scheme}) else: logger.error("Unknown feed storage scheme: %(scheme)s", {'scheme': scheme}) def _get_exporter(self, *args, **kwargs): return self.exporters[self.format](*args, **kwargs) def _get_storage(self, uri): return self.storages[urlparse(uri).scheme](uri) def _get_uri_params(self, spider): params = {} for k in dir(spider): params[k] = getattr(spider, k) ts = 
datetime.utcnow().replace(microsecond=0).isoformat().replace(':', '-') params['time'] = ts self._uripar(params, spider) return params
bsd-3-clause
tanium/pytan
BUILD/doc/source/examples/create_package_code.py
1
4051
# Example script: create (and then delete) a Tanium package via the pytan API.
# NOTE(review): this script talks to a live Tanium server and has side effects;
# it is a generated API example, not a library module.

# import the basic python packages we need
import os
import sys
import tempfile
import pprint
import traceback

# disable python from generating a .pyc file
sys.dont_write_bytecode = True

# change me to the path of pytan if this script is not running from EXAMPLES/PYTAN_API
pytan_loc = "~/gh/pytan"
pytan_static_path = os.path.join(os.path.expanduser(pytan_loc), 'lib')

# Determine our script name, script dir
my_file = os.path.abspath(sys.argv[0])
my_dir = os.path.dirname(my_file)

# try to automatically determine the pytan lib directory by assuming it is in '../../lib/'
parent_dir = os.path.dirname(my_dir)
pytan_root_dir = os.path.dirname(parent_dir)
lib_dir = os.path.join(pytan_root_dir, 'lib')

# add pytan_loc and lib_dir to the PYTHONPATH variable
path_adds = [lib_dir, pytan_static_path]
[sys.path.append(aa) for aa in path_adds if aa not in sys.path]

# import pytan
import pytan

# create a dictionary of arguments for the pytan handler
handler_args = {}

# establish our connection info for the Tanium Server
# NOTE(review): credentials are hard-coded for the example lab environment.
handler_args['username'] = "Administrator"
handler_args['password'] = "Tanium2015!"
handler_args['host'] = "10.0.1.240"
handler_args['port'] = "443"  # optional

# optional, level 0 is no output except warnings/errors
# level 1 through 12 are more and more verbose
handler_args['loglevel'] = 1

# optional, use a debug format for the logging output (uses two lines per log entry)
handler_args['debugformat'] = False

# optional, this saves all response objects to handler.session.ALL_REQUESTS_RESPONSES
# very useful for capturing the full exchange of XML requests and responses
handler_args['record_all_requests'] = True

# instantiate a handler using all of the arguments in the handler_args dictionary
print "...CALLING: pytan.handler() with args: {}".format(handler_args)
handler = pytan.Handler(**handler_args)

# print out the handler string
print "...OUTPUT: handler string: {}".format(handler)

# setup the arguments for the handler.delete() method
delete_kwargs = {}
delete_kwargs["objtype"] = u'package'
delete_kwargs["name"] = u'package49'

# setup the arguments for the handler() class
kwargs = {}
kwargs["expire_seconds"] = 1500
kwargs["display_name"] = u'package49 API test'
kwargs["name"] = u'package49'
kwargs["parameters_json_file"] = u'../doc/example_of_all_package_parameters.json'
kwargs["verify_expire_seconds"] = 3600
kwargs["command"] = u'package49 $1 $2 $3 $4 $5 $6 $7 $8'
kwargs["file_urls"] = [u'3600::testing.vbs||https://content.tanium.com/files/initialcontent/bundles/2014-10-01_11-32-15-7844/custom_tagging_-_remove_tags_[non-windows]/CustomTagRemove.sh']
kwargs["verify_filter_options"] = [u'and']
kwargs["verify_filters"] = [u'Custom Tags, that contains:tag']
kwargs["command_timeout_seconds"] = 9999

# delete the object in case it already exists
# catch and print the exception error if it does not exist
print "...CALLING: handler.delete() with args: {}".format(delete_kwargs)
try:
    handler.delete(**delete_kwargs)
except Exception as e:
    print "...EXCEPTION: {}".format(e)

print "...CALLING: handler.create_package() with args: {}".format(kwargs)
response = handler.create_package(**kwargs)

print "...OUTPUT: Type of response: ", type(response)

print "...OUTPUT: print of response:"
print response

# call the export_obj() method to convert response to JSON and store it in out
export_kwargs = {}
export_kwargs['obj'] = response
export_kwargs['export_format'] = 'json'

print "...CALLING: handler.export_obj() with args {}".format(export_kwargs)
out = handler.export_obj(**export_kwargs)

# trim the output if it is more than 15 lines long
if len(out.splitlines()) > 15:
    out = out.splitlines()[0:15]
    out.append('..trimmed for brevity..')
    out = '\n'.join(out)

print "...OUTPUT: print the objects returned in JSON format:"
print out

# delete the object, we are done with it now
print "...CALLING: handler.delete() with args: {}".format(delete_kwargs)
delete_response = handler.delete(**delete_kwargs)

print "...OUTPUT: print the delete response"
print delete_response
mit
fengalin/gstation-edit
gstation_edit/messages/utility_settings_resp.py
1
2960
""" gstation-edit UtilitySettingsResponse definition """ # this file is part of gstation-edit # Copyright (C) F LAIGNEL 2009-2017 <[email protected]> # # gstation-edit is free software: you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by the # Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # gstation-edit is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program. If not, see <http://www.gnu.org/licenses/>. from gstation_edit.messages.jstation_sysex_event import JStationSysexEvent class UtilitySettingsResponse(JStationSysexEvent): PROCEDURE_ID = 0x12 VERSION = 1 def __init__(self, channel=-1, sysex_buffer=None, stereo_mono=-1, dry_track=-1, digital_out_level=-1, global_cabinet=-1, midi_merge=-1, midi_channel=-1): self.stereo_mono = stereo_mono self.dry_track = dry_track self.digital_out_level = digital_out_level self.global_cabinet = global_cabinet self.midi_merge = midi_merge self.midi_channel = midi_channel JStationSysexEvent.__init__(self, channel, sysex_buffer=sysex_buffer) def parse_data_buffer(self): JStationSysexEvent.parse_data_buffer(self) data_len = self.read_next_bytes(4) if self.is_valid(): if data_len == 6: self.stereo_mono = self.read_next_bytes(2) self.dry_track = self.read_next_bytes(2) self.digital_out_level = self.read_next_bytes(2) self.global_cabinet = self.read_next_bytes(2) self.midi_merge = self.read_next_bytes(2) self.midi_channel = self.read_next_bytes(2) else: print('UtilitySettingsResponse: data len error: '\ 'got %d expected 6'%(data_len)) # Build to send def build_data_buffer(self): data = list() data.append(self.stereo_mono) data.append(self.dry_track) 
data.append(self.digital_out_level) data.append(self.global_cabinet) data.append(self.midi_merge) data.append(self.midi_channel) JStationSysexEvent.build_data_buffer(self, post_len_data=data) def __str__( self ): return '%s, stereo mono: %d, dry track: %d, '\ 'digital out level: %d, global cabinet: %d, '\ 'midi merge: %d, midi channel: %d'\ %(JStationSysexEvent.__str__(self), self.stereo_mono, self.dry_track, self.digital_out_level, self.global_cabinet, self.midi_merge, self.midi_channel)
lgpl-3.0
aduric/crossfit
nonrel/django/forms/fields.py
103
38484
""" Field classes. """ import datetime import os import re import time import urlparse import warnings from decimal import Decimal, DecimalException try: from cStringIO import StringIO except ImportError: from StringIO import StringIO from django.core.exceptions import ValidationError from django.core import validators import django.utils.copycompat as copy from django.utils import formats from django.utils.translation import ugettext_lazy as _ from django.utils.encoding import smart_unicode, smart_str from django.utils.functional import lazy # Provide this import for backwards compatibility. from django.core.validators import EMPTY_VALUES from util import ErrorList from widgets import TextInput, PasswordInput, HiddenInput, MultipleHiddenInput, \ ClearableFileInput, CheckboxInput, Select, NullBooleanSelect, SelectMultiple, \ DateInput, DateTimeInput, TimeInput, SplitDateTimeWidget, SplitHiddenDateTimeWidget, \ FILE_INPUT_CONTRADICTION __all__ = ( 'Field', 'CharField', 'IntegerField', 'DEFAULT_DATE_INPUT_FORMATS', 'DateField', 'DEFAULT_TIME_INPUT_FORMATS', 'TimeField', 'DEFAULT_DATETIME_INPUT_FORMATS', 'DateTimeField', 'TimeField', 'RegexField', 'EmailField', 'FileField', 'ImageField', 'URLField', 'BooleanField', 'NullBooleanField', 'ChoiceField', 'MultipleChoiceField', 'ComboField', 'MultiValueField', 'FloatField', 'DecimalField', 'SplitDateTimeField', 'IPAddressField', 'FilePathField', 'SlugField', 'TypedChoiceField', 'TypedMultipleChoiceField' ) def en_format(name): """ Helper function to stay backward compatible. """ from django.conf.locale.en import formats warnings.warn( "`django.forms.fields.DEFAULT_%s` is deprecated; use `django.utils.formats.get_format('%s')` instead." 
% (name, name), DeprecationWarning ) return getattr(formats, name) DEFAULT_DATE_INPUT_FORMATS = lazy(lambda: en_format('DATE_INPUT_FORMATS'), tuple, list)() DEFAULT_TIME_INPUT_FORMATS = lazy(lambda: en_format('TIME_INPUT_FORMATS'), tuple, list)() DEFAULT_DATETIME_INPUT_FORMATS = lazy(lambda: en_format('DATETIME_INPUT_FORMATS'), tuple, list)() class Field(object): widget = TextInput # Default widget to use when rendering this type of Field. hidden_widget = HiddenInput # Default widget to use when rendering this as "hidden". default_validators = [] # Default set of validators default_error_messages = { 'required': _(u'This field is required.'), 'invalid': _(u'Enter a valid value.'), } # Tracks each time a Field instance is created. Used to retain order. creation_counter = 0 def __init__(self, required=True, widget=None, label=None, initial=None, help_text=None, error_messages=None, show_hidden_initial=False, validators=[], localize=False): # required -- Boolean that specifies whether the field is required. # True by default. # widget -- A Widget class, or instance of a Widget class, that should # be used for this Field when displaying it. Each Field has a # default Widget that it'll use if you don't specify this. In # most cases, the default widget is TextInput. # label -- A verbose name for this field, for use in displaying this # field in a form. By default, Django will use a "pretty" # version of the form field name, if the Field is part of a # Form. # initial -- A value to use in this Field's initial display. This value # is *not* used as a fallback if data isn't given. # help_text -- An optional string to use as "help text" for this Field. # error_messages -- An optional dictionary to override the default # messages that the field will raise. # show_hidden_initial -- Boolean that specifies if it is needed to render a # hidden widget with initial value after widget. 
# validators -- List of addtional validators to use # localize -- Boolean that specifies if the field should be localized. if label is not None: label = smart_unicode(label) self.required, self.label, self.initial = required, label, initial self.show_hidden_initial = show_hidden_initial if help_text is None: self.help_text = u'' else: self.help_text = smart_unicode(help_text) widget = widget or self.widget if isinstance(widget, type): widget = widget() # Trigger the localization machinery if needed. self.localize = localize if self.localize: widget.is_localized = True # Let the widget know whether it should display as required. widget.is_required = self.required # Hook into self.widget_attrs() for any Field-specific HTML attributes. extra_attrs = self.widget_attrs(widget) if extra_attrs: widget.attrs.update(extra_attrs) self.widget = widget # Increase the creation counter, and save our local copy. self.creation_counter = Field.creation_counter Field.creation_counter += 1 messages = {} for c in reversed(self.__class__.__mro__): messages.update(getattr(c, 'default_error_messages', {})) messages.update(error_messages or {}) self.error_messages = messages self.validators = self.default_validators + validators def prepare_value(self, value): return value def to_python(self, value): return value def validate(self, value): if value in validators.EMPTY_VALUES and self.required: raise ValidationError(self.error_messages['required']) def run_validators(self, value): if value in validators.EMPTY_VALUES: return errors = [] for v in self.validators: try: v(value) except ValidationError, e: if hasattr(e, 'code') and e.code in self.error_messages: message = self.error_messages[e.code] if e.params: message = message % e.params errors.append(message) else: errors.extend(e.messages) if errors: raise ValidationError(errors) def clean(self, value): """ Validates the given value and returns its "cleaned" value as an appropriate Python object. Raises ValidationError for any errors. 
""" value = self.to_python(value) self.validate(value) self.run_validators(value) return value def bound_data(self, data, initial): """ Return the value that should be shown for this field on render of a bound form, given the submitted POST data for the field and the initial data, if any. For most fields, this will simply be data; FileFields need to handle it a bit differently. """ return data def widget_attrs(self, widget): """ Given a Widget instance (*not* a Widget class), returns a dictionary of any HTML attributes that should be added to the Widget, based on this Field. """ return {} def __deepcopy__(self, memo): result = copy.copy(self) memo[id(self)] = result result.widget = copy.deepcopy(self.widget, memo) return result class CharField(Field): def __init__(self, max_length=None, min_length=None, *args, **kwargs): self.max_length, self.min_length = max_length, min_length super(CharField, self).__init__(*args, **kwargs) if min_length is not None: self.validators.append(validators.MinLengthValidator(min_length)) if max_length is not None: self.validators.append(validators.MaxLengthValidator(max_length)) def to_python(self, value): "Returns a Unicode object." if value in validators.EMPTY_VALUES: return u'' return smart_unicode(value) def widget_attrs(self, widget): if self.max_length is not None and isinstance(widget, (TextInput, PasswordInput)): # The HTML attribute is maxlength, not max_length. 
return {'maxlength': str(self.max_length)} class IntegerField(Field): default_error_messages = { 'invalid': _(u'Enter a whole number.'), 'max_value': _(u'Ensure this value is less than or equal to %(limit_value)s.'), 'min_value': _(u'Ensure this value is greater than or equal to %(limit_value)s.'), } def __init__(self, max_value=None, min_value=None, *args, **kwargs): self.max_value, self.min_value = max_value, min_value super(IntegerField, self).__init__(*args, **kwargs) if max_value is not None: self.validators.append(validators.MaxValueValidator(max_value)) if min_value is not None: self.validators.append(validators.MinValueValidator(min_value)) def to_python(self, value): """ Validates that int() can be called on the input. Returns the result of int(). Returns None for empty values. """ value = super(IntegerField, self).to_python(value) if value in validators.EMPTY_VALUES: return None if self.localize: value = formats.sanitize_separators(value) try: value = int(str(value)) except (ValueError, TypeError): raise ValidationError(self.error_messages['invalid']) return value class FloatField(IntegerField): default_error_messages = { 'invalid': _(u'Enter a number.'), } def to_python(self, value): """ Validates that float() can be called on the input. Returns the result of float(). Returns None for empty values. 
""" value = super(IntegerField, self).to_python(value) if value in validators.EMPTY_VALUES: return None if self.localize: value = formats.sanitize_separators(value) try: value = float(value) except (ValueError, TypeError): raise ValidationError(self.error_messages['invalid']) return value class DecimalField(Field): default_error_messages = { 'invalid': _(u'Enter a number.'), 'max_value': _(u'Ensure this value is less than or equal to %(limit_value)s.'), 'min_value': _(u'Ensure this value is greater than or equal to %(limit_value)s.'), 'max_digits': _('Ensure that there are no more than %s digits in total.'), 'max_decimal_places': _('Ensure that there are no more than %s decimal places.'), 'max_whole_digits': _('Ensure that there are no more than %s digits before the decimal point.') } def __init__(self, max_value=None, min_value=None, max_digits=None, decimal_places=None, *args, **kwargs): self.max_value, self.min_value = max_value, min_value self.max_digits, self.decimal_places = max_digits, decimal_places Field.__init__(self, *args, **kwargs) if max_value is not None: self.validators.append(validators.MaxValueValidator(max_value)) if min_value is not None: self.validators.append(validators.MinValueValidator(min_value)) def to_python(self, value): """ Validates that the input is a decimal number. Returns a Decimal instance. Returns None for empty values. Ensures that there are no more than max_digits in the number, and no more than decimal_places digits after the decimal point. """ if value in validators.EMPTY_VALUES: return None if self.localize: value = formats.sanitize_separators(value) value = smart_str(value).strip() try: value = Decimal(value) except DecimalException: raise ValidationError(self.error_messages['invalid']) return value def validate(self, value): super(DecimalField, self).validate(value) if value in validators.EMPTY_VALUES: return # Check for NaN, Inf and -Inf values. We can't compare directly for NaN, # since it is never equal to itself. 
However, NaN is the only value that # isn't equal to itself, so we can use this to identify NaN if value != value or value == Decimal("Inf") or value == Decimal("-Inf"): raise ValidationError(self.error_messages['invalid']) sign, digittuple, exponent = value.as_tuple() decimals = abs(exponent) # digittuple doesn't include any leading zeros. digits = len(digittuple) if decimals > digits: # We have leading zeros up to or past the decimal point. Count # everything past the decimal point as a digit. We do not count # 0 before the decimal point as a digit since that would mean # we would not allow max_digits = decimal_places. digits = decimals whole_digits = digits - decimals if self.max_digits is not None and digits > self.max_digits: raise ValidationError(self.error_messages['max_digits'] % self.max_digits) if self.decimal_places is not None and decimals > self.decimal_places: raise ValidationError(self.error_messages['max_decimal_places'] % self.decimal_places) if self.max_digits is not None and self.decimal_places is not None and whole_digits > (self.max_digits - self.decimal_places): raise ValidationError(self.error_messages['max_whole_digits'] % (self.max_digits - self.decimal_places)) return value class DateField(Field): widget = DateInput default_error_messages = { 'invalid': _(u'Enter a valid date.'), } def __init__(self, input_formats=None, *args, **kwargs): super(DateField, self).__init__(*args, **kwargs) self.input_formats = input_formats def to_python(self, value): """ Validates that the input can be converted to a date. Returns a Python datetime.date object. 
""" if value in validators.EMPTY_VALUES: return None if isinstance(value, datetime.datetime): return value.date() if isinstance(value, datetime.date): return value for format in self.input_formats or formats.get_format('DATE_INPUT_FORMATS'): try: return datetime.date(*time.strptime(value, format)[:3]) except ValueError: continue raise ValidationError(self.error_messages['invalid']) class TimeField(Field): widget = TimeInput default_error_messages = { 'invalid': _(u'Enter a valid time.') } def __init__(self, input_formats=None, *args, **kwargs): super(TimeField, self).__init__(*args, **kwargs) self.input_formats = input_formats def to_python(self, value): """ Validates that the input can be converted to a time. Returns a Python datetime.time object. """ if value in validators.EMPTY_VALUES: return None if isinstance(value, datetime.time): return value for format in self.input_formats or formats.get_format('TIME_INPUT_FORMATS'): try: return datetime.time(*time.strptime(value, format)[3:6]) except ValueError: continue raise ValidationError(self.error_messages['invalid']) class DateTimeField(Field): widget = DateTimeInput default_error_messages = { 'invalid': _(u'Enter a valid date/time.'), } def __init__(self, input_formats=None, *args, **kwargs): super(DateTimeField, self).__init__(*args, **kwargs) self.input_formats = input_formats def to_python(self, value): """ Validates that the input can be converted to a datetime. Returns a Python datetime.datetime object. """ if value in validators.EMPTY_VALUES: return None if isinstance(value, datetime.datetime): return value if isinstance(value, datetime.date): return datetime.datetime(value.year, value.month, value.day) if isinstance(value, list): # Input comes from a SplitDateTimeWidget, for example. So, it's two # components: date and time. 
if len(value) != 2: raise ValidationError(self.error_messages['invalid']) if value[0] in validators.EMPTY_VALUES and value[1] in validators.EMPTY_VALUES: return None value = '%s %s' % tuple(value) for format in self.input_formats or formats.get_format('DATETIME_INPUT_FORMATS'): try: return datetime.datetime(*time.strptime(value, format)[:6]) except ValueError: continue raise ValidationError(self.error_messages['invalid']) class RegexField(CharField): def __init__(self, regex, max_length=None, min_length=None, error_message=None, *args, **kwargs): """ regex can be either a string or a compiled regular expression object. error_message is an optional error message to use, if 'Enter a valid value' is too generic for you. """ # error_message is just kept for backwards compatibility: if error_message: error_messages = kwargs.get('error_messages') or {} error_messages['invalid'] = error_message kwargs['error_messages'] = error_messages super(RegexField, self).__init__(max_length, min_length, *args, **kwargs) if isinstance(regex, basestring): regex = re.compile(regex) self.regex = regex self.validators.append(validators.RegexValidator(regex=regex)) class EmailField(CharField): default_error_messages = { 'invalid': _(u'Enter a valid e-mail address.'), } default_validators = [validators.validate_email] def clean(self, value): value = self.to_python(value).strip() return super(EmailField, self).clean(value) class FileField(Field): widget = ClearableFileInput default_error_messages = { 'invalid': _(u"No file was submitted. 
Check the encoding type on the form."), 'missing': _(u"No file was submitted."), 'empty': _(u"The submitted file is empty."), 'max_length': _(u'Ensure this filename has at most %(max)d characters (it has %(length)d).'), 'contradiction': _(u'Please either submit a file or check the clear checkbox, not both.') } def __init__(self, *args, **kwargs): self.max_length = kwargs.pop('max_length', None) super(FileField, self).__init__(*args, **kwargs) def to_python(self, data): if data in validators.EMPTY_VALUES: return None # UploadedFile objects should have name and size attributes. try: file_name = data.name file_size = data.size except AttributeError: raise ValidationError(self.error_messages['invalid']) if self.max_length is not None and len(file_name) > self.max_length: error_values = {'max': self.max_length, 'length': len(file_name)} raise ValidationError(self.error_messages['max_length'] % error_values) if not file_name: raise ValidationError(self.error_messages['invalid']) if not file_size: raise ValidationError(self.error_messages['empty']) return data def clean(self, data, initial=None): # If the widget got contradictory inputs, we raise a validation error if data is FILE_INPUT_CONTRADICTION: raise ValidationError(self.error_messages['contradiction']) # False means the field value should be cleared; further validation is # not needed. if data is False: if not self.required: return False # If the field is required, clearing is not possible (the widget # shouldn't return False data in that case anyway). False is not # in validators.EMPTY_VALUES; if a False value makes it this far # it should be validated from here on out as None (so it will be # caught by the required check). 
data = None if not data and initial: return initial return super(FileField, self).clean(data) def bound_data(self, data, initial): if data in (None, FILE_INPUT_CONTRADICTION): return initial return data class ImageField(FileField): default_error_messages = { 'invalid_image': _(u"Upload a valid image. The file you uploaded was either not an image or a corrupted image."), } def to_python(self, data): """ Checks that the file-upload field data contains a valid image (GIF, JPG, PNG, possibly others -- whatever the Python Imaging Library supports). """ f = super(ImageField, self).to_python(data) if f is None: return None # Try to import PIL in either of the two ways it can end up installed. try: from PIL import Image except ImportError: import Image # We need to get a file object for PIL. We might have a path or we might # have to read the data into memory. if hasattr(data, 'temporary_file_path'): file = data.temporary_file_path() else: if hasattr(data, 'read'): file = StringIO(data.read()) else: file = StringIO(data['content']) try: # load() is the only method that can spot a truncated JPEG, # but it cannot be called sanely after verify() trial_image = Image.open(file) trial_image.load() # Since we're about to use the file again we have to reset the # file object if possible. if hasattr(file, 'reset'): file.reset() # verify() is the only method that can spot a corrupt PNG, # but it must be called immediately after the constructor trial_image = Image.open(file) trial_image.verify() except ImportError: # Under PyPy, it is possible to import PIL. However, the underlying # _imaging C module isn't available, so an ImportError will be # raised. Catch and re-raise. 
raise except Exception: # Python Imaging Library doesn't recognize it as an image raise ValidationError(self.error_messages['invalid_image']) if hasattr(f, 'seek') and callable(f.seek): f.seek(0) return f class URLField(CharField): default_error_messages = { 'invalid': _(u'Enter a valid URL.'), 'invalid_link': _(u'This URL appears to be a broken link.'), } def __init__(self, max_length=None, min_length=None, verify_exists=False, validator_user_agent=validators.URL_VALIDATOR_USER_AGENT, *args, **kwargs): super(URLField, self).__init__(max_length, min_length, *args, **kwargs) self.validators.append(validators.URLValidator(verify_exists=verify_exists, validator_user_agent=validator_user_agent)) def to_python(self, value): if value: url_fields = list(urlparse.urlsplit(value)) if not url_fields[0]: # If no URL scheme given, assume http:// url_fields[0] = 'http' if not url_fields[1]: # Assume that if no domain is provided, that the path segment # contains the domain. url_fields[1] = url_fields[2] url_fields[2] = '' # Rebuild the url_fields list, since the domain segment may now # contain the path too. value = urlparse.urlunsplit(url_fields) url_fields = list(urlparse.urlsplit(value)) if not url_fields[2]: # the path portion may need to be added before query params url_fields[2] = '/' value = urlparse.urlunsplit(url_fields) return super(URLField, self).to_python(value) class BooleanField(Field): widget = CheckboxInput def to_python(self, value): """Returns a Python boolean object.""" # Explicitly check for the string 'False', which is what a hidden field # will submit for False. Also check for '0', since this is what # RadioSelect will provide. Because bool("True") == bool('1') == True, # we don't need to handle that explicitly. 
if value in ('False', '0'): value = False else: value = bool(value) value = super(BooleanField, self).to_python(value) if not value and self.required: raise ValidationError(self.error_messages['required']) return value class NullBooleanField(BooleanField): """ A field whose valid values are None, True and False. Invalid values are cleaned to None. """ widget = NullBooleanSelect def to_python(self, value): """ Explicitly checks for the string 'True' and 'False', which is what a hidden field will submit for True and False, and for '1' and '0', which is what a RadioField will submit. Unlike the Booleanfield we need to explicitly check for True, because we are not using the bool() function """ if value in (True, 'True', '1'): return True elif value in (False, 'False', '0'): return False else: return None def validate(self, value): pass class ChoiceField(Field): widget = Select default_error_messages = { 'invalid_choice': _(u'Select a valid choice. %(value)s is not one of the available choices.'), } def __init__(self, choices=(), required=True, widget=None, label=None, initial=None, help_text=None, *args, **kwargs): super(ChoiceField, self).__init__(required=required, widget=widget, label=label, initial=initial, help_text=help_text, *args, **kwargs) self.choices = choices def _get_choices(self): return self._choices def _set_choices(self, value): # Setting choices also sets the choices on the widget. # choices can be any iterable, but we call list() on it because # it will be consumed more than once. self._choices = self.widget.choices = list(value) choices = property(_get_choices, _set_choices) def to_python(self, value): "Returns a Unicode object." if value in validators.EMPTY_VALUES: return u'' return smart_unicode(value) def validate(self, value): """ Validates that the input is in self.choices. 
""" super(ChoiceField, self).validate(value) if value and not self.valid_value(value): raise ValidationError(self.error_messages['invalid_choice'] % {'value': value}) def valid_value(self, value): "Check to see if the provided value is a valid choice" for k, v in self.choices: if isinstance(v, (list, tuple)): # This is an optgroup, so look inside the group for options for k2, v2 in v: if value == smart_unicode(k2): return True else: if value == smart_unicode(k): return True return False class TypedChoiceField(ChoiceField): def __init__(self, *args, **kwargs): self.coerce = kwargs.pop('coerce', lambda val: val) self.empty_value = kwargs.pop('empty_value', '') super(TypedChoiceField, self).__init__(*args, **kwargs) def to_python(self, value): """ Validates that the value is in self.choices and can be coerced to the right type. """ value = super(TypedChoiceField, self).to_python(value) super(TypedChoiceField, self).validate(value) if value == self.empty_value or value in validators.EMPTY_VALUES: return self.empty_value try: value = self.coerce(value) except (ValueError, TypeError, ValidationError): raise ValidationError(self.error_messages['invalid_choice'] % {'value': value}) return value def validate(self, value): pass class MultipleChoiceField(ChoiceField): hidden_widget = MultipleHiddenInput widget = SelectMultiple default_error_messages = { 'invalid_choice': _(u'Select a valid choice. %(value)s is not one of the available choices.'), 'invalid_list': _(u'Enter a list of values.'), } def to_python(self, value): if not value: return [] elif not isinstance(value, (list, tuple)): raise ValidationError(self.error_messages['invalid_list']) return [smart_unicode(val) for val in value] def validate(self, value): """ Validates that the input is a list or tuple. """ if self.required and not value: raise ValidationError(self.error_messages['required']) # Validate that each value in the value list is in self.choices. 
for val in value: if not self.valid_value(val): raise ValidationError(self.error_messages['invalid_choice'] % {'value': val}) class TypedMultipleChoiceField(MultipleChoiceField): def __init__(self, *args, **kwargs): self.coerce = kwargs.pop('coerce', lambda val: val) self.empty_value = kwargs.pop('empty_value', []) super(TypedMultipleChoiceField, self).__init__(*args, **kwargs) def to_python(self, value): """ Validates that the values are in self.choices and can be coerced to the right type. """ value = super(TypedMultipleChoiceField, self).to_python(value) super(TypedMultipleChoiceField, self).validate(value) if value == self.empty_value or value in validators.EMPTY_VALUES: return self.empty_value new_value = [] for choice in value: try: new_value.append(self.coerce(choice)) except (ValueError, TypeError, ValidationError): raise ValidationError(self.error_messages['invalid_choice'] % {'value': choice}) return new_value def validate(self, value): pass class ComboField(Field): """ A Field whose clean() method calls multiple Field clean() methods. """ def __init__(self, fields=(), *args, **kwargs): super(ComboField, self).__init__(*args, **kwargs) # Set 'required' to False on the individual fields, because the # required validation will be handled by ComboField, not by those # individual fields. for f in fields: f.required = False self.fields = fields def clean(self, value): """ Validates the given value against all of self.fields, which is a list of Field instances. """ super(ComboField, self).clean(value) for field in self.fields: value = field.clean(value) return value class MultiValueField(Field): """ A Field that aggregates the logic of multiple Fields. Its clean() method takes a "decompressed" list of values, which are then cleaned into a single value according to self.fields. Each value in this list is cleaned by the corresponding field -- the first value is cleaned by the first field, the second value is cleaned by the second field, etc. 
Once all fields are cleaned, the list of clean values is "compressed" into a single value. Subclasses should not have to implement clean(). Instead, they must implement compress(), which takes a list of valid values and returns a "compressed" version of those values -- a single value. You'll probably want to use this with MultiWidget. """ default_error_messages = { 'invalid': _(u'Enter a list of values.'), } def __init__(self, fields=(), *args, **kwargs): super(MultiValueField, self).__init__(*args, **kwargs) # Set 'required' to False on the individual fields, because the # required validation will be handled by MultiValueField, not by those # individual fields. for f in fields: f.required = False self.fields = fields def validate(self, value): pass def clean(self, value): """ Validates every value in the given list. A value is validated against the corresponding Field in self.fields. For example, if this MultiValueField was instantiated with fields=(DateField(), TimeField()), clean() would call DateField.clean(value[0]) and TimeField.clean(value[1]). """ clean_data = [] errors = ErrorList() if not value or isinstance(value, (list, tuple)): if not value or not [v for v in value if v not in validators.EMPTY_VALUES]: if self.required: raise ValidationError(self.error_messages['required']) else: return self.compress([]) else: raise ValidationError(self.error_messages['invalid']) for i, field in enumerate(self.fields): try: field_value = value[i] except IndexError: field_value = None if self.required and field_value in validators.EMPTY_VALUES: raise ValidationError(self.error_messages['required']) try: clean_data.append(field.clean(field_value)) except ValidationError, e: # Collect all validation errors in a single list, which we'll # raise at the end of clean(), rather than raising a single # exception for the first error we encounter. 
errors.extend(e.messages) if errors: raise ValidationError(errors) out = self.compress(clean_data) self.validate(out) return out def compress(self, data_list): """ Returns a single value for the given list of values. The values can be assumed to be valid. For example, if this MultiValueField was instantiated with fields=(DateField(), TimeField()), this might return a datetime object created by combining the date and time in data_list. """ raise NotImplementedError('Subclasses must implement this method.') class FilePathField(ChoiceField): def __init__(self, path, match=None, recursive=False, required=True, widget=None, label=None, initial=None, help_text=None, *args, **kwargs): self.path, self.match, self.recursive = path, match, recursive super(FilePathField, self).__init__(choices=(), required=required, widget=widget, label=label, initial=initial, help_text=help_text, *args, **kwargs) if self.required: self.choices = [] else: self.choices = [("", "---------")] if self.match is not None: self.match_re = re.compile(self.match) if recursive: for root, dirs, files in sorted(os.walk(self.path)): for f in files: if self.match is None or self.match_re.search(f): f = os.path.join(root, f) self.choices.append((f, f.replace(path, "", 1))) else: try: for f in sorted(os.listdir(self.path)): full_file = os.path.join(self.path, f) if os.path.isfile(full_file) and (self.match is None or self.match_re.search(f)): self.choices.append((full_file, f)) except OSError: pass self.widget.choices = self.choices class SplitDateTimeField(MultiValueField): widget = SplitDateTimeWidget hidden_widget = SplitHiddenDateTimeWidget default_error_messages = { 'invalid_date': _(u'Enter a valid date.'), 'invalid_time': _(u'Enter a valid time.'), } def __init__(self, input_date_formats=None, input_time_formats=None, *args, **kwargs): errors = self.default_error_messages.copy() if 'error_messages' in kwargs: errors.update(kwargs['error_messages']) localize = kwargs.get('localize', False) fields = ( 
DateField(input_formats=input_date_formats, error_messages={'invalid': errors['invalid_date']}, localize=localize), TimeField(input_formats=input_time_formats, error_messages={'invalid': errors['invalid_time']}, localize=localize), ) super(SplitDateTimeField, self).__init__(fields, *args, **kwargs) def compress(self, data_list): if data_list: # Raise a validation error if time or date is empty # (possible if SplitDateTimeField has required=False). if data_list[0] in validators.EMPTY_VALUES: raise ValidationError(self.error_messages['invalid_date']) if data_list[1] in validators.EMPTY_VALUES: raise ValidationError(self.error_messages['invalid_time']) return datetime.datetime.combine(*data_list) return None class IPAddressField(CharField): default_error_messages = { 'invalid': _(u'Enter a valid IPv4 address.'), } default_validators = [validators.validate_ipv4_address] class SlugField(CharField): default_error_messages = { 'invalid': _(u"Enter a valid 'slug' consisting of letters, numbers," u" underscores or hyphens."), } default_validators = [validators.validate_slug]
bsd-3-clause
marty331/jakesclock
flask/lib/python2.7/site-packages/sqlalchemy/dialects/mssql/pymssql.py
48
3019
# mssql/pymssql.py
# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: mssql+pymssql
    :name: pymssql
    :dbapi: pymssql
    :connectstring: mssql+pymssql://<username>:<password>@<freetds_name>?\
charset=utf8
    :url: http://pymssql.org/

pymssql is a Python module that provides a Python DBAPI interface around
`FreeTDS <http://www.freetds.org/>`_. Compatible builds are available for
Linux, MacOSX and Windows platforms.

"""
from .base import MSDialect
from ... import types as sqltypes, util, processors
import re


class _MSNumeric_pymssql(sqltypes.Numeric):
    # Numeric type override for pymssql: when the column was configured with
    # asdecimal=False, coerce the driver's Decimal results to plain floats.

    def result_processor(self, dialect, type_):
        if not self.asdecimal:
            # processors.to_float converts each fetched value to float.
            return processors.to_float
        else:
            # Otherwise defer to the stock Numeric processing.
            return sqltypes.Numeric.result_processor(self, dialect, type_)


class MSDialect_pymssql(MSDialect):
    """MS SQL Server dialect that talks to the database through the
    pymssql (FreeTDS-based) DBAPI."""

    # pymssql does not return reliable rowcounts, so SQLAlchemy must not
    # depend on cursor.rowcount for ORM versioning etc.
    supports_sane_rowcount = False
    driver = 'pymssql'

    colspecs = util.update_copy(
        MSDialect.colspecs,
        {
            sqltypes.Numeric: _MSNumeric_pymssql,
            sqltypes.Float: sqltypes.Float,
        }
    )

    @classmethod
    def dbapi(cls):
        """Import and return the pymssql module, monkeypatching very old
        releases that lack a ``Binary`` constructor."""
        module = __import__('pymssql')
        # pymmsql < 2.1.1 doesn't have a Binary method.  we use string
        client_ver = tuple(int(x) for x in module.__version__.split("."))
        if client_ver < (2, 1, 1):
            # TODO: monkeypatching here is less than ideal
            module.Binary = lambda x: x if hasattr(x, 'decode') else str(x)

        if client_ver < (1, ):
            util.warn("The pymssql dialect expects at least "
                      "the 1.0 series of the pymssql DBAPI.")
        return module

    def __init__(self, **params):
        super(MSDialect_pymssql, self).__init__(**params)
        # SCOPE_IDENTITY() is usable through pymssql, so prefer it for
        # fetching last-inserted identities.
        self.use_scope_identity = True

    def _get_server_version_info(self, connection):
        # Parse e.g. "Microsoft SQL Server 2012 - 11.0.2100.60 ..." into a
        # (major, minor, build, revision) tuple of ints; None if unparseable.
        # NOTE(review): the '.' separators in the pattern are unescaped and
        # so match any character; harmless for real @@version strings, but
        # r"\." would be stricter — confirm before tightening.
        vers = connection.scalar("select @@version")
        m = re.match(
            r"Microsoft .*? - (\d+).(\d+).(\d+).(\d+)", vers)
        if m:
            return tuple(int(x) for x in m.group(1, 2, 3, 4))
        else:
            return None

    def create_connect_args(self, url):
        # Translate a SQLAlchemy URL into pymssql.connect() keyword args.
        # pymssql expects the port folded into the host as "host:port"
        # rather than as a separate argument.
        opts = url.translate_connect_args(username='user')
        opts.update(url.query)
        port = opts.pop('port', None)
        if port and 'host' in opts:
            opts['host'] = "%s:%s" % (opts['host'], port)
        return [[], opts]

    def is_disconnect(self, e, connection, cursor):
        # Heuristic: an error whose text contains one of these known
        # FreeTDS/pymssql messages indicates a dropped connection.
        for msg in (
            "Adaptive Server connection timed out",
            "Net-Lib error during Connection reset by peer",
            "message 20003",  # connection timeout
            "Error 10054",
            "Not connected to any MS SQL server",
            "Connection is closed"
        ):
            if msg in str(e):
                return True
        else:
            return False

dialect = MSDialect_pymssql
gpl-2.0
matterker/csvkit
csvkit/convert/__init__.py
17
2264
#!/usr/bin/env python

import six

from csvkit.convert.csvitself import csv2csv
from csvkit.convert.fixed import fixed2csv
from csvkit.convert.geojs import geojson2csv
from csvkit.convert.js import json2csv
from csvkit.convert.ndjs import ndjson2csv
from csvkit.convert.xls import xls2csv
from csvkit.convert.xlsx import xlsx2csv

SUPPORTED_FORMATS = ['fixed', 'xls', 'xlsx', 'csv', 'json', 'geojson', 'ndjson']

# DBF is supported for Python 2 only
if six.PY2:
    from csvkit.convert.dbase import dbf2csv

    SUPPORTED_FORMATS.append('dbf')


def convert(f, format, schema=None, key=None, **kwargs):
    """
    Convert a file of a specified format to CSV.

    :param f: A file-like object containing the data to convert.
    :param format: One of :data:`SUPPORTED_FORMATS`.
    :param schema: Fixed-width schema file; required when ``format`` is
        ``'fixed'``.
    :param key: Top-level key to look up in a JSON document; only used when
        ``format`` is ``'json'``.
    :raises ValueError: If ``f`` or ``format`` is falsy, if ``schema`` is
        missing for fixed-width input, or if ``format`` is unsupported.
    """
    if not f:
        raise ValueError('f must not be None')

    if not format:
        raise ValueError('format must not be None')

    if format == 'fixed':
        if not schema:
            raise ValueError('schema must not be null when format is "fixed"')

        return fixed2csv(f, schema, **kwargs)
    elif format == 'xls':
        return xls2csv(f, **kwargs)
    elif format == 'xlsx':
        return xlsx2csv(f, **kwargs)
    elif format == 'json':
        return json2csv(f, key, **kwargs)
    elif format == 'ndjson':
        return ndjson2csv(f, **kwargs)
    elif format == 'geojson':
        return geojson2csv(f, **kwargs)
    elif format == 'csv':
        return csv2csv(f, **kwargs)
    elif format == 'dbf':
        if six.PY3:
            # Fixed typo in the message: "forthis" -> "for this".
            raise ValueError('format "dbf" is not supported for this version of Python.')

        return dbf2csv(f, **kwargs)
    else:
        raise ValueError('format "%s" is not supported' % format)


def guess_format(filename):
    """
    Try to guess a file's format based on its extension (or lack thereof).

    Returns a format name from :data:`SUPPORTED_FORMATS`, or ``None`` when
    the extension is not recognized.
    """
    last_period = filename.rfind('.')

    if last_period == -1:
        # No extension: assume fixed-width
        return 'fixed'

    extension = filename[last_period + 1:]

    # Extensions that map directly onto a format name.
    if extension in ('xls', 'xlsx', 'csv', 'fixed', 'dbf'):
        return extension
    # Both .json and .js are treated as JSON input.
    elif extension in ('json', 'js'):
        return 'json'

    return None
mit
jorgebastida/gordon
tests/kinesisstream/tests.py
1
1149
import os

from gordon.utils_tests import BaseIntegrationTest, BaseBuildTest
from gordon.utils import valid_cloudformation_name
from gordon import utils
# NOTE(review): `os` and `utils` are imported but unused in this module —
# confirm before removing (they may be referenced by test tooling).


class IntegrationTest(BaseIntegrationTest):
    # End-to-end deployment test: provisions a real Kinesis stream, deploys
    # the sample project against it, and inspects the resulting stacks and
    # Lambda function.

    def setUp(self):
        # Create the stream first and expose its ARN to the project under
        # test through the KINESIS_INTEGRATION environment variable, which
        # the base class propagates when deploying.
        self.stream = self.create_kinesis_stream()
        self.extra_env['KINESIS_INTEGRATION'] = self.stream['StreamDescription']['StreamARN']
        super(IntegrationTest, self).setUp()

    def test_0001_project(self):
        # Deploy step 0001 and verify both the project ('p') and resources
        # ('r') CloudFormation stacks succeed.
        self._test_project_step('0001_project')
        self.assert_stack_succeed('p')
        self.assert_stack_succeed('r')

        # The consumer Lambda must exist, run on python2.7, and expose
        # exactly one alias named 'current'.
        lambda_ = self.get_lambda(valid_cloudformation_name('kinesisconsumer:consumer'))
        self.assertEqual(lambda_['Runtime'], 'python2.7')

        aliases = self.get_lambda_aliases(function_name=lambda_['FunctionName'])
        self.assertEqual(list(aliases.keys()), ['current'])


class BuildTest(BaseBuildTest):
    # Offline test: builds step 0001 and compares the generated
    # CloudFormation templates against the expected JSON fixtures.

    def test_0001_project(self):
        self._test_project_step('0001_project')
        self.assertBuild('0001_project', '0001_p.json')
        self.assertBuild('0001_project', '0002_pr_r.json')
        self.assertBuild('0001_project', '0003_r.json')
bsd-3-clause
lootr/netzob
netzob/src/netzob/Common/CommandLine.py
2
4169
# -*- coding: utf-8 -*-

# +---------------------------------------------------------------------------+
# |          01001110 01100101 01110100 01111010 01101111 01100010            |
# |                                                                           |
# |               Netzob : Inferring communication protocols                  |
# +---------------------------------------------------------------------------+
# | Copyright (C) 2011-2017 Georges Bossert and Frédéric Guihéry              |
# | This program is free software: you can redistribute it and/or modify      |
# | it under the terms of the GNU General Public License as published by      |
# | the Free Software Foundation, either version 3 of the License, or         |
# | (at your option) any later version.                                       |
# |                                                                           |
# | This program is distributed in the hope that it will be useful,           |
# | but WITHOUT ANY WARRANTY; without even the implied warranty of            |
# | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the              |
# | GNU General Public License for more details.                              |
# |                                                                           |
# | You should have received a copy of the GNU General Public License         |
# | along with this program. If not, see <http://www.gnu.org/licenses/>.      |
# +---------------------------------------------------------------------------+
# | @url      : http://www.netzob.org                                         |
# | @contact  : [email protected]                                        |
# | @sponsors : Amossys, http://www.amossys.fr                                |
# |             Supélec, http://www.rennes.supelec.fr/ren/rd/cidre/           |
# +---------------------------------------------------------------------------+

# +---------------------------------------------------------------------------+
# | Standard library imports
# +---------------------------------------------------------------------------+
import optparse

# +---------------------------------------------------------------------------+
# | Local imports
# +---------------------------------------------------------------------------+
from netzob import release


# +----------------------------------------------
# | CommandLine
# +----------------------------------------------
class CommandLine(object):
    """Reads, validates and parses the command line arguments provided by
    users."""

    def __init__(self):
        # Lazily populated by parse(); None until then.
        self.parser = None
        self.providedOptions = None
        self.providedArguments = None
        self.configure()

    def configure(self):
        """Configure the parser based on Netzob's usage and the definition
        of its options and arguments."""
        self.usage = "usage: %prog [options]"
        self.parser = optparse.OptionParser(
            self.usage, prog=release.appname, version=release.version)
        self.parser.add_option(
            "-d",
            "--debugLevel",
            dest="debugLevel",
            help="Activate debug information ('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL')"
        )
        # self.parser.add_option("-i", "--interactive", action="store_true", dest="interactive", help="Starts an interactive Netzob session")

    def parse(self):
        """Read and parse the provided arguments and options."""
        (self.providedOptions, self.providedArguments) = self.parser.parse_args()

    def isInteractiveConsoleRequested(self):
        """Compute and return whether the user has requested the initiation
        of an interactive session.

        Returns False when no options have been parsed yet or when the
        interactive flag is absent.
        """
        if self.parser is None:
            self.parse()
        if self.providedOptions is None:
            return False
        # Bug fix: the "-i/--interactive" option registration above is
        # commented out, so the optparse Values object never carries an
        # 'interactive' attribute and the previous direct attribute access
        # raised AttributeError.  getattr() with a False default is safe
        # both now and if the option is re-enabled.
        return getattr(self.providedOptions, 'interactive', False)

    def getOptions(self):
        """Return the options parsed from the command line (or None if
        parse() has not been called)."""
        return self.providedOptions

    def getConfiguredParser(self):
        """Return (if available) the parser configured to manage provided
        arguments and options by user.

        @return: the parser"""
        return self.parser
gpl-3.0
mstriemer/zamboni
mkt/developers/views_payments.py
4
19817
import functools import json import urllib from django import http from django.conf import settings from django.contrib import messages from django.core.urlresolvers import reverse from django.shortcuts import get_object_or_404, redirect, render from django.views.decorators.http import require_POST import commonware import jinja2 import waffle from slumber.exceptions import HttpClientError from tower import ugettext as _ from waffle.decorators import waffle_switch import mkt from lib.crypto import generate_key from lib.pay_server import client from mkt.access import acl from mkt.constants import PAID_PLATFORMS, PLATFORMS_NAMES from mkt.constants.payments import (PAYMENT_METHOD_ALL, PAYMENT_METHOD_CARD, PAYMENT_METHOD_OPERATOR, PROVIDER_BANGO, PROVIDER_CHOICES) from mkt.developers import forms, forms_payments from mkt.developers.decorators import dev_required from mkt.developers.models import CantCancel, PaymentAccount, UserInappKey from mkt.developers.providers import get_provider, get_providers from mkt.inapp.models import InAppProduct from mkt.inapp.serializers import InAppProductForm from mkt.prices.models import Price from mkt.site.decorators import json_view, login_required, write from mkt.webapps.models import Webapp log = commonware.log.getLogger('z.devhub') @dev_required @require_POST def disable_payments(request, addon_id, addon): return redirect(addon.get_dev_url('payments')) @dev_required(owner_for_post=True, webapp=True) def payments(request, addon_id, addon, webapp=False): premium_form = forms_payments.PremiumForm( request.POST or None, request=request, addon=addon, user=request.user) region_form = forms.RegionForm( request.POST or None, product=addon, request=request) upsell_form = forms_payments.UpsellForm( request.POST or None, addon=addon, user=request.user) providers = get_providers() if 'form-TOTAL_FORMS' in request.POST: formset_data = request.POST else: formset_data = None account_list_formset = forms_payments.AccountListFormSet( 
data=formset_data, provider_data=[ {'addon': addon, 'user': request.user, 'provider': provider} for provider in providers]) if request.method == 'POST': active_forms = [premium_form, region_form, upsell_form] if formset_data is not None: active_forms.append(account_list_formset) success = all(form.is_valid() for form in active_forms) if success: region_form.save() try: premium_form.save() except client.Error as err: success = False log.error('Error setting payment information (%s)' % err) messages.error( request, _(u'We encountered a problem connecting to the ' u'payment server.')) raise # We want to see these exceptions! is_free_inapp = addon.premium_type == mkt.ADDON_FREE_INAPP is_now_paid = (addon.premium_type in mkt.ADDON_PREMIUMS or is_free_inapp) # If we haven't changed to a free app, check the upsell. if is_now_paid and success: try: if not is_free_inapp: upsell_form.save() if formset_data is not None: account_list_formset.save() except client.Error as err: log.error('Error saving payment information (%s)' % err) messages.error( request, _(u'We encountered a problem connecting to ' u'the payment server.')) success = False raise # We want to see all the solitude errors now. # If everything happened successfully, give the user a pat on the back. if success: messages.success(request, _('Changes successfully saved.')) return redirect(addon.get_dev_url('payments')) # TODO: refactor this (bug 945267) android_pay = waffle.flag_is_active(request, 'android-payments') desktop_pay = waffle.flag_is_active(request, 'desktop-payments') # If android payments is not allowed then firefox os must # be 'checked' and android-mobile and android-tablet should not be. invalid_paid_platform_state = [] if not android_pay: # When android-payments is off... invalid_paid_platform_state += [('android-mobile', True), ('android-tablet', True), ('firefoxos', False)] if not desktop_pay: # When desktop-payments is off... 
invalid_paid_platform_state += [('desktop', True)] cannot_be_paid = ( addon.premium_type == mkt.ADDON_FREE and any(premium_form.device_data['free-%s' % x] == y for x, y in invalid_paid_platform_state)) try: tier_zero = Price.objects.get(price='0.00', active=True) tier_zero_id = tier_zero.pk except Price.DoesNotExist: tier_zero = None tier_zero_id = '' # Get the regions based on tier zero. This should be all the # regions with payments enabled. paid_region_ids_by_name = [] if tier_zero: paid_region_ids_by_name = tier_zero.region_ids_by_name() platforms = PAID_PLATFORMS(request) paid_platform_names = [unicode(platform[1]) for platform in platforms] provider_regions = {} if tier_zero: provider_regions = tier_zero.provider_regions() return render(request, 'developers/payments/premium.html', {'addon': addon, 'webapp': webapp, 'premium': addon.premium, 'form': premium_form, 'upsell_form': upsell_form, 'tier_zero_id': tier_zero_id, 'region_form': region_form, 'PLATFORMS_NAMES': PLATFORMS_NAMES, 'is_paid': (addon.premium_type in mkt.ADDON_PREMIUMS or addon.premium_type == mkt.ADDON_FREE_INAPP), 'cannot_be_paid': cannot_be_paid, 'paid_platform_names': paid_platform_names, 'is_packaged': addon.is_packaged, # Bango values 'account_list_forms': account_list_formset.forms, 'account_list_formset': account_list_formset, # Waffles 'api_pricelist_url': reverse('price-list'), 'payment_methods': { PAYMENT_METHOD_ALL: _('All'), PAYMENT_METHOD_CARD: _('Credit card'), PAYMENT_METHOD_OPERATOR: _('Carrier'), }, 'provider_lookup': dict(PROVIDER_CHOICES), 'all_paid_region_ids_by_name': paid_region_ids_by_name, 'providers': providers, 'provider_regions': provider_regions, 'enabled_provider_ids': [acct.payment_account.provider for acct in addon.all_payment_accounts()] }) @login_required @json_view def payment_accounts(request): app_slug = request.GET.get('app-slug', '') if app_slug: app = Webapp.objects.get(app_slug=app_slug) app_name = app.name else: app_name = '' accounts = 
PaymentAccount.objects.filter( user=request.user, provider__in=[p.provider for p in get_providers()], inactive=False) def account(acc): def payment_account_names(app): account_names = [unicode(acc.payment_account) for acc in app.all_payment_accounts()] return (unicode(app.name), account_names) addon_payment_accounts = acc.addonpaymentaccount_set.all() associated_apps = [apa.addon for apa in addon_payment_accounts if hasattr(apa, 'addon')] app_names = u', '.join(unicode(app.name) for app in associated_apps) app_payment_accounts = json.dumps(dict([payment_account_names(app) for app in associated_apps])) provider = acc.get_provider() data = { 'account-url': reverse('mkt.developers.provider.payment_account', args=[acc.pk]), 'agreement-url': acc.get_agreement_url(), 'agreement': 'accepted' if acc.agreed_tos else 'rejected', 'current-app-name': jinja2.escape(app_name), 'app-names': jinja2.escape(app_names), 'app-payment-accounts': jinja2.escape(app_payment_accounts), 'delete-url': reverse( 'mkt.developers.provider.delete_payment_account', args=[acc.pk]), 'id': acc.pk, 'name': jinja2.escape(unicode(acc)), 'provider': provider.name, 'provider-full': unicode(provider.full), 'shared': acc.shared, 'portal-url': provider.get_portal_url(app_slug) } return data return map(account, accounts) @login_required def payment_accounts_form(request): webapp = get_object_or_404(Webapp, app_slug=request.GET.get('app_slug')) provider = get_provider(name=request.GET.get('provider')) account_list_formset = forms_payments.AccountListFormSet( provider_data=[ {'user': request.user, 'addon': webapp, 'provider': p} for p in get_providers()]) account_list_form = next(form for form in account_list_formset.forms if form.provider.name == provider.name) return render(request, 'developers/payments/includes/bango_accounts_form.html', {'account_list_form': account_list_form}) @write @require_POST @login_required @json_view def payments_accounts_add(request): provider = 
get_provider(name=request.POST.get('provider')) form = provider.forms['account'](request.POST) if not form.is_valid(): return json_view.error(form.errors) try: obj = provider.account_create(request.user, form.cleaned_data) except HttpClientError as e: log.error('Client error create {0} account: {1}'.format( provider.name, e)) return http.HttpResponseBadRequest(json.dumps(e.content)) return {'pk': obj.pk, 'agreement-url': obj.get_agreement_url()} @write @login_required @json_view def payments_account(request, id): account = get_object_or_404(PaymentAccount, pk=id, user=request.user) provider = account.get_provider() if request.POST: form = provider.forms['account'](request.POST, account=account) if form.is_valid(): form.save() else: return json_view.error(form.errors) return provider.account_retrieve(account) @write @require_POST @login_required def payments_accounts_delete(request, id): account = get_object_or_404(PaymentAccount, pk=id, user=request.user) try: account.cancel(disable_refs=True) except CantCancel: log.info('Could not cancel account.') return http.HttpResponse('Cannot cancel account', status=409) log.info('Account cancelled: %s' % id) return http.HttpResponse('success') @login_required def in_app_keys(request): """ Allows developers to get a simulation-only key for in-app payments. This key cannot be used for real payments. """ keys = UserInappKey.objects.no_cache().filter( solitude_seller__user=request.user ) # TODO(Kumar) support multiple test keys. For now there's only one. 
key = None key_public_id = None if keys.exists(): key = keys.get() # Attempt to retrieve the public id from solitude try: key_public_id = key.public_id() except HttpClientError, e: messages.error(request, _('A server error occurred ' 'when retrieving the application key.')) log.exception('Solitude connection error: {0}'.format(e.message)) if request.method == 'POST': if key: key.reset() messages.success(request, _('Secret was reset successfully.')) else: UserInappKey.create(request.user) messages.success(request, _('Key and secret were created successfully.')) return redirect(reverse('mkt.developers.apps.in_app_keys')) return render(request, 'developers/payments/in-app-keys.html', {'key': key, 'key_public_id': key_public_id}) @login_required def in_app_key_secret(request, pk): key = (UserInappKey.objects.no_cache() .filter(solitude_seller__user=request.user, pk=pk)) if not key.count(): # Either the record does not exist or it's not owned by the # logged in user. return http.HttpResponseForbidden() return http.HttpResponse(key.get().secret()) def require_in_app_payments(render_view): @functools.wraps(render_view) def inner(request, addon_id, addon, *args, **kwargs): setup_url = reverse('mkt.developers.apps.payments', args=[addon.app_slug]) if addon.premium_type not in mkt.ADDON_INAPPS: messages.error( request, _('Your app is not configured for in-app payments.')) return redirect(setup_url) if not addon.has_payment_account(): messages.error(request, _('No payment account for this app.')) return redirect(setup_url) # App is set up for payments; render the view. 
return render_view(request, addon_id, addon, *args, **kwargs) return inner @login_required @dev_required(webapp=True) @require_in_app_payments def in_app_payments(request, addon_id, addon, webapp=True, account=None): return render(request, 'developers/payments/in-app-payments.html', {'addon': addon}) @waffle_switch('in-app-products') @login_required @dev_required(webapp=True) @require_in_app_payments def in_app_products(request, addon_id, addon, webapp=True, account=None): owner = acl.check_addon_ownership(request, addon) products = addon.inappproduct_set.all() new_product = InAppProduct(webapp=addon) form = InAppProductForm() list_url = None detail_url = None if addon.origin: list_url = _fix_origin_link(reverse('in-app-products-list', kwargs={'origin': addon.origin})) detail_url = _fix_origin_link(reverse('in-app-products-detail', # {guid} is replaced in JS. kwargs={'origin': addon.origin, 'guid': "{guid}"})) return render(request, 'developers/payments/in-app-products.html', {'addon': addon, 'form': form, 'new_product': new_product, 'owner': owner, 'products': products, 'form': form, 'list_url': list_url, 'detail_url': detail_url, 'active_lang': request.LANG.lower()}) def _fix_origin_link(link): """ Return a properly URL encoded link that might contain an app origin. App origins look like ``app://fxpay.allizom.org`` but Django does not encode the double slashes. This seems to cause a problem on our production web servers maybe because double slashes are normalized. See https://bugzilla.mozilla.org/show_bug.cgi?id=1065006 """ return link.replace('//', '%2F%2F') @login_required @dev_required(owner_for_post=True, webapp=True) @require_in_app_payments def in_app_config(request, addon_id, addon, webapp=True): """ Allows developers to get a key/secret for doing in-app payments. """ config = get_inapp_config(addon) owner = acl.check_addon_ownership(request, addon) if request.method == 'POST': # Reset the in-app secret for the app. 
(client.api.generic .product(config['resource_pk']) .patch(data={'secret': generate_key(48)})) messages.success(request, _('Changes successfully saved.')) return redirect(reverse('mkt.developers.apps.in_app_config', args=[addon.app_slug])) return render(request, 'developers/payments/in-app-config.html', {'addon': addon, 'owner': owner, 'seller_config': config}) @login_required @dev_required(webapp=True) @require_in_app_payments def in_app_secret(request, addon_id, addon, webapp=True): config = get_inapp_config(addon) return http.HttpResponse(config['secret']) def get_inapp_config(addon): """ Returns a generic Solitude product, the app's in-app configuration. We use generic products in Solitude to represent an "app" that is enabled for in-app purchases. """ if not addon.solitude_public_id: # If the view accessing this method uses all the right # decorators then this error won't be raised. raise ValueError('The app {a} has not yet been configured ' 'for payments'.format(a=addon)) return client.api.generic.product.get_object( public_id=addon.solitude_public_id) @dev_required(webapp=True) def bango_portal_from_addon(request, addon_id, addon, webapp=True): try: bango = addon.payment_account(PROVIDER_BANGO) except addon.PayAccountDoesNotExist: log.error('Bango portal not available for app {app} ' 'with accounts {acct}' .format(app=addon, acct=list(addon.all_payment_accounts()))) return http.HttpResponseForbidden() else: account = bango.payment_account if not ((addon.authors.filter( pk=request.user.pk, addonuser__role=mkt.AUTHOR_ROLE_OWNER).exists()) and (account.solitude_seller.user.id == request.user.id)): log.error(('User not allowed to reach the Bango portal; ' 'pk=%s') % request.user.pk) return http.HttpResponseForbidden() return _redirect_to_bango_portal(account.account_id, 'addon_id: %s' % addon_id) def _redirect_to_bango_portal(package_id, source): try: bango_token = client.api.bango.login.post({'packageId': int(package_id)}) except HttpClientError as e: 
log.error('Failed to authenticate against Bango portal; %s' % source, exc_info=True) return http.HttpResponseBadRequest(json.dumps(e.content)) bango_url = '{base_url}{parameters}'.format(**{ 'base_url': settings.BANGO_BASE_PORTAL_URL, 'parameters': urllib.urlencode({ 'authenticationToken': bango_token['authentication_token'], 'emailAddress': bango_token['email_address'], 'packageId': package_id, 'personId': bango_token['person_id'], }) }) response = http.HttpResponse(status=204) response['Location'] = bango_url return response # TODO(andym): move these into a DRF API. @login_required @json_view def agreement(request, id): account = get_object_or_404(PaymentAccount, pk=id, user=request.user) provider = account.get_provider() if request.method == 'POST': return provider.terms_update(account) return provider.terms_retrieve(account)
bsd-3-clause
jreback/pandas
pandas/tests/arrays/boolean/test_arithmetic.py
6
3586
import operator import numpy as np import pytest import pandas as pd import pandas._testing as tm from pandas.arrays import FloatingArray @pytest.fixture def data(): return pd.array( [True, False] * 4 + [np.nan] + [True, False] * 44 + [np.nan] + [True, False], dtype="boolean", ) @pytest.fixture def left_array(): return pd.array([True] * 3 + [False] * 3 + [None] * 3, dtype="boolean") @pytest.fixture def right_array(): return pd.array([True, False, None] * 3, dtype="boolean") # Basic test for the arithmetic array ops # ----------------------------------------------------------------------------- @pytest.mark.parametrize( "opname, exp", [ ("add", [True, True, None, True, False, None, None, None, None]), ("mul", [True, False, None, False, False, None, None, None, None]), ], ids=["add", "mul"], ) def test_add_mul(left_array, right_array, opname, exp): op = getattr(operator, opname) result = op(left_array, right_array) expected = pd.array(exp, dtype="boolean") tm.assert_extension_array_equal(result, expected) def test_sub(left_array, right_array): msg = ( r"numpy boolean subtract, the `-` operator, is (?:deprecated|not supported), " r"use the bitwise_xor, the `\^` operator, or the logical_xor function instead\." ) with pytest.raises(TypeError, match=msg): left_array - right_array def test_div(left_array, right_array): result = left_array / right_array expected = FloatingArray( np.array( [1.0, np.inf, np.nan, 0.0, np.nan, np.nan, np.nan, np.nan, np.nan], dtype="float64", ), np.array([False, False, True, False, False, True, True, True, True]), ) tm.assert_extension_array_equal(result, expected) @pytest.mark.parametrize( "opname", [ "floordiv", "mod", pytest.param( "pow", marks=pytest.mark.xfail(reason="TODO follow int8 behaviour? 
GH34686") ), ], ) def test_op_int8(left_array, right_array, opname): op = getattr(operator, opname) result = op(left_array, right_array) expected = op(left_array.astype("Int8"), right_array.astype("Int8")) tm.assert_extension_array_equal(result, expected) # Test generic characteristics / errors # ----------------------------------------------------------------------------- def test_error_invalid_values(data, all_arithmetic_operators): # invalid ops op = all_arithmetic_operators s = pd.Series(data) ops = getattr(s, op) # invalid scalars msg = ( "did not contain a loop with signature matching types|" "BooleanArray cannot perform the operation|" "not supported for the input types, and the inputs could not be safely coerced " "to any supported types according to the casting rule ''safe''" ) with pytest.raises(TypeError, match=msg): ops("foo") msg = ( r"unsupported operand type\(s\) for|" "Concatenation operation is not implemented for NumPy arrays" ) with pytest.raises(TypeError, match=msg): ops(pd.Timestamp("20180101")) # invalid array-likes if op not in ("__mul__", "__rmul__"): # TODO(extension) numpy's mul with object array sees booleans as numbers msg = ( r"unsupported operand type\(s\) for|can only concatenate str|" "not all arguments converted during string formatting" ) with pytest.raises(TypeError, match=msg): ops(pd.Series("foo", index=s.index))
bsd-3-clause
zubair-arbi/edx-platform
common/djangoapps/performance/tests/test_logs.py
110
6444
"""Tests that performance data is successfully logged.""" import datetime import dateutil import json import logging from StringIO import StringIO from django.test import TestCase from django.test.client import RequestFactory from performance.views import performance_log class PerformanceTrackingTest(TestCase): """ Tests that performance logs correctly handle events """ def setUp(self): super(PerformanceTrackingTest, self).setUp() self.request_factory = RequestFactory() self.stream = StringIO() self.handler = logging.StreamHandler(self.stream) self.log = logging.getLogger() self.log.setLevel(logging.INFO) for handler in self.log.handlers: self.log.removeHandler(handler) self.log.addHandler(self.handler) self.addCleanup(self.log.removeHandler, self.handler) self.addCleanup(self.handler.close) def test_empty_get(self): request = self.request_factory.get('/performance') pre_time = datetime.datetime.utcnow() performance_log(request) post_time = datetime.datetime.utcnow() self.handler.flush() logged_value = json.loads(self.stream.getvalue().strip()) self.assertEqual(logged_value['accept_language'], '') self.assertEqual(logged_value['agent'], '') self.assertEqual(logged_value['event'], '') self.assertEqual(logged_value['event_source'], 'browser') self.assertEqual(logged_value['expgroup'], '') self.assertEqual(logged_value['id'], '') self.assertEqual(logged_value['page'], '') self.assertEqual(logged_value['referer'], '') self.assertEqual(logged_value['value'], '') logged_time = dateutil.parser.parse(logged_value['time']).replace(tzinfo=None) self.assertTrue(pre_time <= logged_time) self.assertTrue(post_time >= logged_time) def test_empty_post(self): request = self.request_factory.post('/performance') pre_time = datetime.datetime.utcnow() performance_log(request) post_time = datetime.datetime.utcnow() self.handler.flush() logged_value = json.loads(self.stream.getvalue().strip()) self.assertEqual(logged_value['accept_language'], '') self.assertEqual(logged_value['agent'], 
'') self.assertEqual(logged_value['event'], '') self.assertEqual(logged_value['event_source'], 'browser') self.assertEqual(logged_value['expgroup'], '') self.assertEqual(logged_value['id'], '') self.assertEqual(logged_value['page'], '') self.assertEqual(logged_value['referer'], '') self.assertEqual(logged_value['value'], '') logged_time = dateutil.parser.parse(logged_value['time']).replace(tzinfo=None) self.assertTrue(pre_time <= logged_time) self.assertTrue(post_time >= logged_time) def test_populated_get(self): request = self.request_factory.get('/performance', {'event': "a_great_event", 'id': "12345012345", 'expgroup': "17", 'page': "atestpage", 'value': "100234"}) request.META['HTTP_ACCEPT_LANGUAGE'] = "en" request.META['HTTP_REFERER'] = "https://www.edx.org/evilpage" request.META['HTTP_USER_AGENT'] = "Mozilla/5.0" request.META['REMOTE_ADDR'] = "18.19.20.21" request.META['SERVER_NAME'] = "some-aws-server" pre_time = datetime.datetime.utcnow() performance_log(request) post_time = datetime.datetime.utcnow() self.handler.flush() logged_value = json.loads(self.stream.getvalue().strip()) self.assertEqual(logged_value['accept_language'], 'en') self.assertEqual(logged_value['agent'], 'Mozilla/5.0') self.assertEqual(logged_value['event'], 'a_great_event') self.assertEqual(logged_value['event_source'], 'browser') self.assertEqual(logged_value['expgroup'], '17') self.assertEqual(logged_value['host'], 'some-aws-server') self.assertEqual(logged_value['id'], '12345012345') self.assertEqual(logged_value['ip'], '18.19.20.21') self.assertEqual(logged_value['page'], 'atestpage') self.assertEqual(logged_value['referer'], 'https://www.edx.org/evilpage') self.assertEqual(logged_value['value'], '100234') logged_time = dateutil.parser.parse(logged_value['time']).replace(tzinfo=None) self.assertTrue(pre_time <= logged_time) self.assertTrue(post_time >= logged_time) def test_populated_post(self): request = self.request_factory.post('/performance', {'event': "a_great_event", 'id': 
"12345012345", 'expgroup': "17", 'page': "atestpage", 'value': "100234"}) request.META['HTTP_ACCEPT_LANGUAGE'] = "en" request.META['HTTP_REFERER'] = "https://www.edx.org/evilpage" request.META['HTTP_USER_AGENT'] = "Mozilla/5.0" request.META['REMOTE_ADDR'] = "18.19.20.21" request.META['SERVER_NAME'] = "some-aws-server" pre_time = datetime.datetime.utcnow() performance_log(request) post_time = datetime.datetime.utcnow() self.handler.flush() logged_value = json.loads(self.stream.getvalue().strip()) self.assertEqual(logged_value['accept_language'], 'en') self.assertEqual(logged_value['agent'], 'Mozilla/5.0') self.assertEqual(logged_value['event'], 'a_great_event') self.assertEqual(logged_value['event_source'], 'browser') self.assertEqual(logged_value['expgroup'], '17') self.assertEqual(logged_value['host'], 'some-aws-server') self.assertEqual(logged_value['id'], '12345012345') self.assertEqual(logged_value['ip'], '18.19.20.21') self.assertEqual(logged_value['page'], 'atestpage') self.assertEqual(logged_value['referer'], 'https://www.edx.org/evilpage') self.assertEqual(logged_value['value'], '100234') logged_time = dateutil.parser.parse(logged_value['time']).replace(tzinfo=None) self.assertTrue(pre_time <= logged_time) self.assertTrue(post_time >= logged_time)
agpl-3.0
Princu7/open-event-orga-server
app/views/users/settings.py
4
4570
import unicodedata from flask import Blueprint, render_template from flask import request, url_for, redirect, jsonify, flash from flask.ext import login from flask.ext.scrypt import generate_password_hash, generate_random_salt from app.helpers.data import DataManager, save_to_db from app.helpers.data_getter import DataGetter from app.models.email_notifications import EmailNotification from app.views.home import record_user_login_logout def get_or_create_notification_settings(event_id): email_notification = DataGetter \ .get_email_notification_settings_by_event_id(login.current_user.id, event_id) if email_notification: return email_notification else: email_notification = EmailNotification(next_event=1, new_paper=1, session_schedule=1, session_accept_reject=1, after_ticket_purchase=1, user_id=login.current_user.id, event_id=event_id) return email_notification settings = Blueprint('settings', __name__, url_prefix='/settings') @settings.route('/') def index_view(): return redirect(url_for('.contact_info_view')) @settings.route('/password/', methods=('POST', 'GET')) def password_view(): if request.method == 'POST': user = login.current_user if user.password == generate_password_hash(request.form['current_password'], user.salt): if request.form['new_password'] == request.form['repeat_password']: salt = generate_random_salt() user.password = generate_password_hash(request.form['new_password'], salt) user.salt = salt save_to_db(user, "password changed") record_user_login_logout('user_logout', login.current_user) login.logout_user() flash('Your password has been changed. 
Please login with your new password now.', 'success') return redirect(url_for('admin.login_view')) else: flash('The new password and the repeat don\'t match.', 'danger') else: flash('The current password is incorrect.', 'danger') return render_template('gentelella/users/settings/pages/password.html') @settings.route('/email-preferences/') def email_preferences_view(): events = DataGetter.get_all_events() message_settings = DataGetter.get_all_message_setting() settings = DataGetter.get_email_notification_settings(login.current_user.id) user = DataGetter.get_user(login.current_user.id) return render_template('gentelella/users/settings/pages/email_preferences.html', settings=settings, events=events, message_settings=message_settings, user=user) @settings.route('/applications/') def applications_view(): user = DataGetter.get_user(login.current_user.id) return render_template('gentelella/users/settings/pages/applications.html', user=user) @settings.route('/contact-info/', methods=('POST', 'GET')) def contact_info_view(): user_id = login.current_user.id if request.method == 'POST': DataManager.update_user(request.form, int(user_id), contacts_only_update=True) flash("Your contact info has been updated.", "success") return redirect(url_for('.contact_info_view')) profile = DataGetter.get_user(int(user_id)) return render_template('gentelella/users/settings/pages/contact_info.html', user=login.current_user) @settings.route('/email/toggle/', methods=('POST',)) def email_toggle_view(): if request.method == 'POST': name = request.form.get('name') value = int(request.form.get('value')) event_id = request.form.get('event_id') message = '' if name == 'global_email': ids = DataManager.toggle_email_notification_settings(login.current_user.id, value) else: name = unicodedata.normalize('NFKD', name).encode('ascii', 'ignore') email_notification = get_or_create_notification_settings(event_id) setattr(email_notification, name, value) save_to_db(email_notification, "EmailSettings Toggled") 
ids = [email_notification.id] return jsonify({ 'status': 'ok', 'message': message, 'notification_setting_ids': ids })
gpl-3.0
0x90sled/catapult
third_party/gsutil/third_party/boto/boto/s3/multidelete.py
244
4757
# Copyright (c) 2011 Mitch Garnaat http://garnaat.org/ # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. from boto import handler import xml.sax class Deleted(object): """ A successfully deleted object in a multi-object delete request. :ivar key: Key name of the object that was deleted. :ivar version_id: Version id of the object that was deleted. :ivar delete_marker: If True, indicates the object deleted was a DeleteMarker. :ivar delete_marker_version_id: Version ID of the delete marker deleted. 
""" def __init__(self, key=None, version_id=None, delete_marker=False, delete_marker_version_id=None): self.key = key self.version_id = version_id self.delete_marker = delete_marker self.delete_marker_version_id = delete_marker_version_id def __repr__(self): if self.version_id: return '<Deleted: %s.%s>' % (self.key, self.version_id) else: return '<Deleted: %s>' % self.key def startElement(self, name, attrs, connection): return None def endElement(self, name, value, connection): if name == 'Key': self.key = value elif name == 'VersionId': self.version_id = value elif name == 'DeleteMarker': if value.lower() == 'true': self.delete_marker = True elif name == 'DeleteMarkerVersionId': self.delete_marker_version_id = value else: setattr(self, name, value) class Error(object): """ An unsuccessful deleted object in a multi-object delete request. :ivar key: Key name of the object that was not deleted. :ivar version_id: Version id of the object that was not deleted. :ivar code: Status code of the failed delete operation. :ivar message: Status message of the failed delete operation. """ def __init__(self, key=None, version_id=None, code=None, message=None): self.key = key self.version_id = version_id self.code = code self.message = message def __repr__(self): if self.version_id: return '<Error: %s.%s(%s)>' % (self.key, self.version_id, self.code) else: return '<Error: %s(%s)>' % (self.key, self.code) def startElement(self, name, attrs, connection): return None def endElement(self, name, value, connection): if name == 'Key': self.key = value elif name == 'VersionId': self.version_id = value elif name == 'Code': self.code = value elif name == 'Message': self.message = value else: setattr(self, name, value) class MultiDeleteResult(object): """ The status returned from a MultiObject Delete request. :ivar deleted: A list of successfully deleted objects. Note that if the quiet flag was specified in the request, this list will be empty because only error responses would be returned. 
:ivar errors: A list of unsuccessfully deleted objects. """ def __init__(self, bucket=None): self.bucket = None self.deleted = [] self.errors = [] def startElement(self, name, attrs, connection): if name == 'Deleted': d = Deleted() self.deleted.append(d) return d elif name == 'Error': e = Error() self.errors.append(e) return e return None def endElement(self, name, value, connection): setattr(self, name, value)
bsd-3-clause
fernandolopez/xremotebot
tests/user_test.py
1
2114
import unittest from datetime import datetime, timedelta from xremotebot.models.user import User, UsernameAlreadyTaken from .test_helper import db from xremotebot.lib import db as prod_db import xremotebot.models.user class UserTest(unittest.TestCase): def setUp(self): (engine, self.session) = db() prod_db.Base.metadata.create_all(engine) self.user = User( username='test', password='magic', api_key='random', api_key_expiration=datetime.now() + timedelta(5) ) self.session.add(self.user) self.session.commit() def test_key_valid(self): self.assertFalse(self.user.api_key_expired()) def test_key_expired(self): self.user.api_key_expiration = datetime.now() - timedelta(5) self.assertTrue(self.user.api_key_expired()) def test_login_with_invalid_user(self): user = User.login(username='tst', password='magic', session=self.session) self.assertIsNone(user) def test_login_with_invalid_password(self): user = User.login(username='test', password='mgic', session=self.session) self.assertIsNone(user) def test_login_valid(self): user = User.login(username='test', password='magic', session=self.session) self.assertIsInstance(user, User) def test_renewed_api_key_is_different(self): previous = self.user.api_key self.user.renew_api_key() self.assertNotEqual(previous, self.user.api_key) def test_renewed_api_key_is_not_expired(self): self.user.api_key_expiration = datetime.now() - timedelta(5) self.user.renew_api_key() self.assertFalse(self.user.api_key_expired()) def test_create_user_with_taken_username(self): with self.assertRaises(UsernameAlreadyTaken): User.create('test', 'asd', self.session) def test_create_user_with_free_username(self): user = User.create('tset', 'asd', self.session) self.assertIsInstance(user, User)
mit
cpaulik/scipy
scipy/weave/swig2_spec.py
98
14256
""" This module allows one to use SWIG2 (SWIG version >= 1.3) wrapped objects from Weave. SWIG-1.3 wraps objects differently from SWIG-1.1. This module is a template for a SWIG2 wrapped converter. To wrap any special code that uses SWIG the user simply needs to override the defaults in the swig2_converter class. These special circumstances arise when one has wrapped code that uses C++ namespaces. However, for most straightforward SWIG wrappers this converter should work fine out of the box. Newer versions of SWIG (>=1.3.22) represent the wrapped object using a PyCObject and also a PySwigObject (>=1.3.24). This code supports all of these options transparently. Since SWIG-1.3.x is under intense development there are several issues to consider when using the swig2_converter. 1. For SWIG versions <= 1.3.19, the runtime code was built either into the module or into a separate library called libswigpy (or something like that). In the latter case, the users Python modules were linked to this library and shared type information (this was common for large projects with several modules that needed to share type information). If you are using multiple inheritance and want to be certain that type coercions from a derived class to a base class are done correctly, you will need to link to the libswigpy library. You will then need to add these to the keyword arguments passed along to `weave.inline`: a. Add a define_macros=[('SWIG_NOINCLUDE', None)] b. Add the swigpy library to the libraries like so: libraries=['swigpy'] c. If the libswigpy is in a non-standard location add the path to the library_dirs argument as `library_dirs=['/usr/local/lib']` or whatever. OTOH if you do not need to link to libswigpy (this is likely if you are not using multiple inheritance), then you do not need the above. However you are likely to get an annoying message of the form:: WARNING: swig_type_info is NULL. for each SWIG object you are inlining (during each call). 
To avoid this add a define_macros=[('NO_SWIG_WARN', None)]. 2. Since keeping track of a separate runtime is a pain, for SWIG versions >= 1.3.23 the type information was stored inside a special module. Thus in these versions there is no need to link to this special SWIG runtime library. This module handles these cases automatically and nothing special need be done. Using modules wrapped with different SWIG versions simultaneously. Lets say you have library 'A' that is wrapped using SWIG version 1.3.20. Then lets say you have a library 'B' wrapped using version 1.3.24. Now if you want to use both in weave.inline, we have a serious problem. The trouble is that both 'A' and 'B' may use different and incompatible runtime layouts. It is impossible to get the type conversions right in these cases. Thus it is strongly advised that you use one version of SWIG to wrap all of the code that you intend to inline using weave. Note that you can certainly use SWIG-1.3.23 for everything and do not have to use the latest and greatest SWIG to use weave.inline. Just make sure that when inlining SWIG wrapped objects that all such objects use the same runtime layout. By default, if you are using different versions and do need to inline these objects, the latest layout will be assumed. This might leave you with holes in your feet, but you have been warned. You can force the converter to use a specific runtime version if you want (see the `swig2_converter.__init__` method and its documentation). Prabhu Ramachandran <[email protected]> """ from __future__ import absolute_import, print_function import sys from .c_spec import common_base_converter from . import swigptr2 #---------------------------------------------------------------------- # Commonly used functions for the type query. This is done mainly to # avoid code duplication. 
#---------------------------------------------------------------------- swig2_common_code = \ ''' swig_type_info * Weave_SWIG_TypeQuery(const char *name) { swig_type_info *ty = SWIG_TypeQuery(name); #ifndef NO_SWIG_WARN if (ty == NULL) { printf("WARNING: swig_type_info is NULL.\\n"); } #endif return ty; } ''' #---------------------------------------------------------------------- # This code obtains the C++ pointer given a a SWIG2 wrapped C++ object # in Python. #---------------------------------------------------------------------- swig2_py_to_c_template = \ """ class %(type_name)s_handler { public: %(c_type)s convert_to_%(type_name)s(PyObject* py_obj, const char* name) { %(c_type)s c_ptr; swig_type_info *ty = Weave_SWIG_TypeQuery("%(c_type)s"); // work on this error reporting... if (SWIG_ConvertPtr(py_obj, (void **) &c_ptr, ty, SWIG_POINTER_EXCEPTION | 0) == -1) { handle_conversion_error(py_obj,"%(type_name)s", name); } %(inc_ref_count)s return c_ptr; } %(c_type)s py_to_%(type_name)s(PyObject* py_obj,const char* name) { %(c_type)s c_ptr; swig_type_info *ty = Weave_SWIG_TypeQuery("%(c_type)s"); // work on this error reporting... if (SWIG_ConvertPtr(py_obj, (void **) &c_ptr, ty, SWIG_POINTER_EXCEPTION | 0) == -1) { handle_bad_type(py_obj,"%(type_name)s", name); } %(inc_ref_count)s return c_ptr; } }; %(type_name)s_handler x__%(type_name)s_handler = %(type_name)s_handler(); #define convert_to_%(type_name)s(py_obj,name) \\ x__%(type_name)s_handler.convert_to_%(type_name)s(py_obj,name) #define py_to_%(type_name)s(py_obj,name) \\ x__%(type_name)s_handler.py_to_%(type_name)s(py_obj,name) """ #---------------------------------------------------------------------- # This code generates a new SWIG pointer object given a C++ pointer. # # Important note: The thisown flag of the returned object is set to 0 # by default. 
#---------------------------------------------------------------------- swig2_c_to_py_template = """ PyObject* %(type_name)s_to_py(void *obj) { swig_type_info *ty = Weave_SWIG_TypeQuery("%(c_type)s"); return SWIG_NewPointerObj(obj, ty, 0); } """ class swig2_converter(common_base_converter): """ A converter for SWIG >= 1.3 wrapped objects.""" def __init__(self, class_name="undefined", pycobj=0, runtime_version=None): """Initializes the instance. Parameters ---------- - class_name : `string` Name of class, this is set dynamically at build time by the `type_spec` method. - pycobj : `int` If `pycobj` is 0 then code is generated to deal with string representations of the SWIG wrapped pointer. If it is 1, then code is generated to deal with a PyCObject. If it is 2 then code is generated to deal with a PySwigObject. - runtime_version : `int` Specifies the SWIG_RUNTIME_VERSION to use. Defaults to `None`. In this case the runtime is automatically determined. This option is useful if you want to force the runtime_version to be a specific one and override the auto-detected one. """ self.class_name = class_name self.pycobj = pycobj # This is on if a PyCObject has been used. self.runtime_version = runtime_version common_base_converter.__init__(self) def _get_swig_runtime_version(self): """This method tries to deduce the SWIG runtime version. If the SWIG runtime layout changes, the `SWIG_TypeQuery` function will not work properly. """ versions = [] for key in sys.modules: idx = key.find('swig_runtime_data') if idx > -1: ver = int(key[idx+17:]) if ver not in versions: versions.append(ver) nver = len(versions) if nver == 0: return 0 elif nver == 1: return versions[0] else: print("WARNING: Multiple SWIG versions detected. No version was") print("explicitly specified. Using the highest possible version.") return max(versions) def init_info(self, runtime=0): """Keyword arguments: runtime -- If false (default), the user does not need to link to the swig runtime (libswipy). 
Newer versions of SWIG (>=1.3.23) do not need to build a SWIG runtime library at all. In these versions of SWIG the swig_type_info is stored in a common module. swig_type_info stores the type information and the type converters to cast pointers correctly. With earlier versions of SWIG (<1.3.22) one has to either link the weave module with a SWIG runtime library (libswigpy) in order to get the swig_type_info. Thus, if `runtime` is True, the user must link to the swipy runtime library and in this case type checking will be performed. With these versions of SWIG, if runtime is `False`, no type checking is done. """ common_base_converter.init_info(self) # These are generated on the fly instead of defined at # the class level. self.type_name = self.class_name self.c_type = self.class_name + "*" self.return_type = self.class_name + "*" self.to_c_return = None # not used self.check_func = None # not used if self.pycobj == 1: self.define_macros.append(("SWIG_COBJECT_TYPES", None)) self.define_macros.append(("SWIG_COBJECT_PYTHON", None)) elif self.pycobj == 2: self.define_macros.append(("SWIG_COBJECT_TYPES", None)) if self.runtime_version is None: self.runtime_version = self._get_swig_runtime_version() rv = self.runtime_version if rv == 0: # The runtime option is only useful for older versions of # SWIG. if runtime: self.define_macros.append(("SWIG_NOINCLUDE", None)) self.support_code.append(swigptr2.swigptr2_code_v0) elif rv == 1: self.support_code.append(swigptr2.swigptr2_code_v1) elif rv == 2: self.support_code.append(swigptr2.swigptr2_code_v2) elif rv == 3: self.support_code.append(swigptr2.swigptr2_code_v3) else: raise AssertionError("Unsupported version of the SWIG runtime: %s" % rv) self.support_code.append(swig2_common_code) def _get_swig_type(self, value): """Given the object in the form of `value`, this method returns information on the SWIG internal object repesentation type. Different versions of SWIG use different object representations. 
This method provides information on the type of internal representation. Currently returns one of ['', 'str', 'pycobj', 'pyswig']. """ swig_typ = '' if hasattr(value, 'this'): type_this = type(value.this) type_str = str(type_this) if isinstance(type_this, str): try: data = value.this.split('_') if data[2] == 'p': swig_typ = 'str' except AttributeError: pass elif type_str == "<type 'PyCObject'>": swig_typ = 'pycobj' elif type_str.find('PySwig') > -1: swig_typ = 'pyswig' return swig_typ def type_match(self,value): """ This is a generic type matcher for SWIG-1.3 objects. For specific instances, override this method. The method also handles cases where SWIG uses a PyCObject for the `this` attribute and not a string. """ if self._get_swig_type(value): return 1 else: return 0 def generate_build_info(self): if self.class_name != "undefined": res = common_base_converter.generate_build_info(self) else: # if there isn't a class_name, we don't want the # support_code to be included from . import base_info res = base_info.base_info() return res def py_to_c_code(self): return swig2_py_to_c_template % self.template_vars() def c_to_py_code(self): return swig2_c_to_py_template % self.template_vars() def type_spec(self,name,value): """ This returns a generic type converter for SWIG-1.3 objects. 
For specific instances, override this function if necessary.""" # factory swig_ob_type = self._get_swig_type(value) pycobj = 0 if swig_ob_type == 'str': class_name = value.this.split('_')[-1] elif swig_ob_type == 'pycobj': pycobj = 1 elif swig_ob_type == 'pyswig': pycobj = 2 else: raise AssertionError("Does not look like a SWIG object: %s" % value) if pycobj: class_name = value.__class__.__name__ if class_name[-3:] == 'Ptr': class_name = class_name[:-3] new_spec = self.__class__(class_name, pycobj, self.runtime_version) new_spec.name = name return new_spec def __cmp__(self,other): # only works for equal res = -1 try: res = cmp(self.name,other.name) or \ cmp(self.__class__, other.__class__) or \ cmp(self.class_name, other.class_name) or \ cmp(self.type_name,other.type_name) except: pass return res #---------------------------------------------------------------------- # Uncomment the next line if you want this to be a default converter # that is magically invoked by inline. #---------------------------------------------------------------------- # converters.default.insert(0, swig2_converter())
bsd-3-clause
tboyce021/home-assistant
homeassistant/components/onboarding/views.py
7
6996
"""Onboarding views.""" import asyncio import voluptuous as vol from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components.auth import indieauth from homeassistant.components.http.data_validator import RequestDataValidator from homeassistant.components.http.view import HomeAssistantView from homeassistant.const import HTTP_BAD_REQUEST, HTTP_FORBIDDEN from homeassistant.core import callback from .const import ( DEFAULT_AREAS, DOMAIN, STEP_CORE_CONFIG, STEP_INTEGRATION, STEP_USER, STEPS, ) async def async_setup(hass, data, store): """Set up the onboarding view.""" hass.http.register_view(OnboardingView(data, store)) hass.http.register_view(UserOnboardingView(data, store)) hass.http.register_view(CoreConfigOnboardingView(data, store)) hass.http.register_view(IntegrationOnboardingView(data, store)) class OnboardingView(HomeAssistantView): """Return the onboarding status.""" requires_auth = False url = "/api/onboarding" name = "api:onboarding" def __init__(self, data, store): """Initialize the onboarding view.""" self._store = store self._data = data async def get(self, request): """Return the onboarding status.""" return self.json( [{"step": key, "done": key in self._data["done"]} for key in STEPS] ) class _BaseOnboardingView(HomeAssistantView): """Base class for onboarding.""" step = None def __init__(self, data, store): """Initialize the onboarding view.""" self._store = store self._data = data self._lock = asyncio.Lock() @callback def _async_is_done(self): """Return if this step is done.""" return self.step in self._data["done"] async def _async_mark_done(self, hass): """Mark step as done.""" self._data["done"].append(self.step) await self._store.async_save(self._data) if set(self._data["done"]) == set(STEPS): hass.data[DOMAIN] = True class UserOnboardingView(_BaseOnboardingView): """View to handle create user onboarding step.""" url = "/api/onboarding/users" name = "api:onboarding:users" requires_auth = False step = STEP_USER 
@RequestDataValidator( vol.Schema( { vol.Required("name"): str, vol.Required("username"): str, vol.Required("password"): str, vol.Required("client_id"): str, vol.Required("language"): str, } ) ) async def post(self, request, data): """Handle user creation, area creation.""" hass = request.app["hass"] async with self._lock: if self._async_is_done(): return self.json_message("User step already done", HTTP_FORBIDDEN) provider = _async_get_hass_provider(hass) await provider.async_initialize() user = await hass.auth.async_create_user(data["name"], [GROUP_ID_ADMIN]) await hass.async_add_executor_job( provider.data.add_auth, data["username"], data["password"] ) credentials = await provider.async_get_or_create_credentials( {"username": data["username"]} ) await provider.data.async_save() await hass.auth.async_link_user(user, credentials) if "person" in hass.config.components: await hass.components.person.async_create_person( data["name"], user_id=user.id ) # Create default areas using the users supplied language. translations = await hass.helpers.translation.async_get_translations( data["language"], "area", DOMAIN ) area_registry = await hass.helpers.area_registry.async_get_registry() for area in DEFAULT_AREAS: area_registry.async_create( translations[f"component.onboarding.area.{area}"] ) await self._async_mark_done(hass) # Return authorization code for fetching tokens and connect # during onboarding. 
auth_code = hass.components.auth.create_auth_code(data["client_id"], user) return self.json({"auth_code": auth_code}) class CoreConfigOnboardingView(_BaseOnboardingView): """View to finish core config onboarding step.""" url = "/api/onboarding/core_config" name = "api:onboarding:core_config" step = STEP_CORE_CONFIG async def post(self, request): """Handle finishing core config step.""" hass = request.app["hass"] async with self._lock: if self._async_is_done(): return self.json_message( "Core config step already done", HTTP_FORBIDDEN ) await self._async_mark_done(hass) await hass.config_entries.flow.async_init( "met", context={"source": "onboarding"} ) if ( hass.components.hassio.is_hassio() and "raspberrypi" in hass.components.hassio.get_core_info()["machine"] ): await hass.config_entries.flow.async_init( "rpi_power", context={"source": "onboarding"} ) return self.json({}) class IntegrationOnboardingView(_BaseOnboardingView): """View to finish integration onboarding step.""" url = "/api/onboarding/integration" name = "api:onboarding:integration" step = STEP_INTEGRATION @RequestDataValidator( vol.Schema({vol.Required("client_id"): str, vol.Required("redirect_uri"): str}) ) async def post(self, request, data): """Handle token creation.""" hass = request.app["hass"] user = request["hass_user"] async with self._lock: if self._async_is_done(): return self.json_message( "Integration step already done", HTTP_FORBIDDEN ) await self._async_mark_done(hass) # Validate client ID and redirect uri if not await indieauth.verify_redirect_uri( request.app["hass"], data["client_id"], data["redirect_uri"] ): return self.json_message( "invalid client id or redirect uri", HTTP_BAD_REQUEST ) # Return authorization code so we can redirect user and log them in auth_code = hass.components.auth.create_auth_code(data["client_id"], user) return self.json({"auth_code": auth_code}) @callback def _async_get_hass_provider(hass): """Get the Home Assistant auth provider.""" for prv in 
hass.auth.auth_providers: if prv.type == "homeassistant": return prv raise RuntimeError("No Home Assistant provider found")
apache-2.0
jeamland/wsproto
wsproto/connection.py
1
16193
# -*- coding: utf-8 -*- """ wsproto/connection ~~~~~~~~~~~~~~ An implementation of a WebSocket connection. """ import os import base64 import hashlib from collections import deque from enum import Enum import h11 from .events import ( ConnectionRequested, ConnectionEstablished, ConnectionClosed, ConnectionFailed, TextReceived, BytesReceived ) from .frame_protocol import FrameProtocol, ParseFailed, CloseReason, Opcode # RFC6455, Section 1.3 - Opening Handshake ACCEPT_GUID = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11" class ConnectionState(Enum): """ RFC 6455, Section 4 - Opening Handshake """ CONNECTING = 0 OPEN = 1 CLOSING = 2 CLOSED = 3 class ConnectionType(Enum): CLIENT = 1 SERVER = 2 CLIENT = ConnectionType.CLIENT SERVER = ConnectionType.SERVER # Some convenience utilities for working with HTTP headers def _normed_header_dict(h11_headers): # This mangles Set-Cookie headers. But it happens that we don't care about # any of those, so it's OK. For every other HTTP header, if there are # multiple instances then you're allowed to join them together with # commas. name_to_values = {} for name, value in h11_headers: name_to_values.setdefault(name, []).append(value) name_to_normed_value = {} for name, values in name_to_values.items(): name_to_normed_value[name] = b", ".join(values) return name_to_normed_value # We use this for parsing the proposed protocol list, and for parsing the # proposed and accepted extension lists. For the proposed protocol list it's # fine, because the ABNF is just 1#token. But for the extension lists, it's # wrong, because those can contain quoted strings, which can in turn contain # commas. XX FIXME def _split_comma_header(value): return [piece.strip() for piece in value.split(b',')] class WSConnection(object): """ A low-level WebSocket connection object. 
This wraps two other protocol objects, an HTTP/1.1 protocol object used to do the initial HTTP upgrade handshake and a WebSocket frame protocol object used to exchange messages and other control frames. :param conn_type: Whether this object is on the client- or server-side of a connection. To initialise as a client pass ``CLIENT`` otherwise pass ``SERVER``. :type conn_type: ``ConnectionType`` :param host: The hostname to pass to the server when acting as a client. :type host: ``str`` :param resource: The resource (aka path) to pass to the server when acting as a client. :type resource: ``str`` :param extensions: A list of extensions to use on this connection. Extensions should be instances of a subclass of :class:`Extension <wsproto.extensions.Extension>`. :param subprotocols: A list of subprotocols to request when acting as a client, ordered by preference. This has no impact on the connection itself. :type subprotocol: ``list`` of ``str`` """ def __init__(self, conn_type, host=None, resource=None, extensions=None, subprotocols=[]): self.client = conn_type is ConnectionType.CLIENT self.host = host self.resource = resource self.subprotocols = subprotocols self.extensions = extensions or [] self.version = b'13' self._state = ConnectionState.CONNECTING self._close_reason = None self._nonce = None self._outgoing = b'' self._events = deque() self._proto = None if self.client: self._upgrade_connection = h11.Connection(h11.CLIENT) else: self._upgrade_connection = h11.Connection(h11.SERVER) if self.client: self.initiate_connection() def initiate_connection(self): self._generate_nonce() headers = { b"Host": self.host.encode('ascii'), b"Upgrade": b'WebSocket', b"Connection": b'Upgrade', b"Sec-WebSocket-Key": self._nonce, b"Sec-WebSocket-Version": self.version, } if self.subprotocols: headers[b"Sec-WebSocket-Protocol"] = ", ".join(self.subprotocols) if self.extensions: offers = {e.name: e.offer(self) for e in self.extensions} extensions = [] for name, params in 
offers.items(): if params is True: extensions.append(name.encode('ascii')) elif params: # py34 annoyance: doesn't support bytestring formatting extensions.append(('%s; %s' % (name, params)) .encode("ascii")) if extensions: headers[b'Sec-WebSocket-Extensions'] = b', '.join(extensions) upgrade = h11.Request(method=b'GET', target=self.resource, headers=headers.items()) self._outgoing += self._upgrade_connection.send(upgrade) def send_data(self, payload, final=True): """ Send a message or part of a message to the remote peer. If ``final`` is ``False`` it indicates that this is part of a longer message. If ``final`` is ``True`` it indicates that this is either a self-contained message or the last part of a longer message. If ``payload`` is of type ``bytes`` then the message is flagged as being binary If it is of type ``str`` encoded as UTF-8 and sent as text. :param payload: The message body to send. :type payload: ``bytes`` or ``str`` :param final: Whether there are more parts to this message to be sent. :type final: ``bool`` """ self._outgoing += self._proto.send_data(payload, final) def close(self, code=CloseReason.NORMAL_CLOSURE, reason=None): self._outgoing += self._proto.close(code, reason) self._state = ConnectionState.CLOSING @property def closed(self): return self._state is ConnectionState.CLOSED def bytes_to_send(self, amount=None): """ Return any data that is to be sent to the remote peer. :param amount: (optional) The maximum number of bytes to be provided. If ``None`` or not provided it will return all available bytes. :type amount: ``int`` """ if amount is None: data = self._outgoing self._outgoing = b'' else: data = self._outgoing[:amount] self._outgoing = self._outgoing[amount:] return data def receive_bytes(self, data): """ Pass some received bytes to the connection for processing. :param data: The data received from the remote peer. 
:type data: ``bytes`` """ if data is None and self._state is ConnectionState.OPEN: # "If _The WebSocket Connection is Closed_ and no Close control # frame was received by the endpoint (such as could occur if the # underlying transport connection is lost), _The WebSocket # Connection Close Code_ is considered to be 1006." self._events.append(ConnectionClosed(CloseReason.ABNORMAL_CLOSURE)) self._state = ConnectionState.CLOSED return elif data is None: self._state = ConnectionState.CLOSED return if self._state is ConnectionState.CONNECTING: event, data = self._process_upgrade(data) if event is not None: self._events.append(event) if self._state is ConnectionState.OPEN: self._proto.receive_bytes(data) def _process_upgrade(self, data): self._upgrade_connection.receive_data(data) while True: event = self._upgrade_connection.next_event() if event is h11.NEED_DATA: break elif self.client and isinstance(event, h11.InformationalResponse): data = self._upgrade_connection.trailing_data[0] return self._establish_client_connection(event), data elif not self.client and isinstance(event, h11.Request): return self._process_connection_request(event), None self._incoming = b'' return None, None def events(self): """ Return a generator that provides any events that have been generated by protocol activity. 
:returns: generator """ while self._events: yield self._events.popleft() if self._proto is None: return try: for frame in self._proto.received_frames(): if frame.opcode is Opcode.PING: assert frame.frame_finished and frame.message_finished self._outgoing += self._proto.pong(frame.payload) elif frame.opcode is Opcode.CLOSE: code, reason = frame.payload self.close(code, reason) yield ConnectionClosed(code, reason) elif frame.opcode is Opcode.TEXT: yield TextReceived(frame.payload, frame.frame_finished, frame.message_finished) elif frame.opcode is Opcode.BINARY: yield BytesReceived(frame.payload, frame.frame_finished, frame.message_finished) except ParseFailed as exc: # XX FIXME: apparently autobahn intentionally deviates from the # spec in that on protocol errors it just closes the connection # rather than trying to send a CLOSE frame. Investigate whether we # should do the same. self.close(code=exc.code, reason=str(exc)) yield ConnectionClosed(exc.code, reason=str(exc)) def _generate_nonce(self): # os.urandom may be overkill for this use case, but I don't think this # is a bottleneck, and better safe than sorry... 
self._nonce = base64.b64encode(os.urandom(16)) def _generate_accept_token(self, token): accept_token = token + ACCEPT_GUID accept_token = hashlib.sha1(accept_token).digest() return base64.b64encode(accept_token) def _establish_client_connection(self, event): if event.status_code != 101: return ConnectionFailed(CloseReason.PROTOCOL_ERROR, "Bad status code from server") headers = _normed_header_dict(event.headers) if headers[b'connection'].lower() != b'upgrade': return ConnectionFailed(CloseReason.PROTOCOL_ERROR, "Missing Connection: Upgrade header") if headers[b'upgrade'].lower() != b'websocket': return ConnectionFailed(CloseReason.PROTOCOL_ERROR, "Missing Upgrade: WebSocket header") accept_token = self._generate_accept_token(self._nonce) if headers[b'sec-websocket-accept'] != accept_token: return ConnectionFailed(CloseReason.PROTOCOL_ERROR, "Bad accept token") subprotocol = headers.get(b'sec-websocket-protocol', None) if subprotocol is not None: if subprotocol not in self.subprotocols: return ConnectionFailed(CloseReason.PROTOCOL_ERROR, "unrecognized subprotocol {!r}" .format(subprotocol)) extensions = headers.get(b'sec-websocket-extensions', None) if extensions: accepts = _split_comma_header(extensions) for accept in accepts: accept = accept.decode('ascii') name = accept.split(';', 1)[0].strip() for extension in self.extensions: if extension.name == name: extension.finalize(self, accept) break else: return ConnectionFailed(CloseReason.PROTOCOL_ERROR, "unrecognized extension {!r}" .format(name)) self._proto = FrameProtocol(self.client, self.extensions) self._state = ConnectionState.OPEN return ConnectionEstablished(subprotocol, extensions) def _process_connection_request(self, event): if event.method != b'GET': return ConnectionFailed(CloseReason.PROTOCOL_ERROR, "Request method must be GET") headers = _normed_header_dict(event.headers) if headers[b'connection'].lower() != b'upgrade': return ConnectionFailed(CloseReason.PROTOCOL_ERROR, "Missing Connection: Upgrade 
header") if headers[b'upgrade'].lower() != b'websocket': return ConnectionFailed(CloseReason.PROTOCOL_ERROR, "Missing Upgrade: WebSocket header") if b'sec-websocket-version' not in headers: return ConnectionFailed(CloseReason.PROTOCOL_ERROR, "Missing Sec-WebSocket-Version header") # XX FIXME: need to check Sec-Websocket-Version, and respond with a # 400 if it's not what we expect if b'sec-websocket-protocol' in headers: proposed_subprotocols = _split_comma_header( headers[b'sec-websocket-protocol']) else: proposed_subprotocols = [] if b'sec-websocket-key' not in headers: return ConnectionFailed(CloseReason.PROTOCOL_ERROR, "Missing Sec-WebSocket-Key header") return ConnectionRequested(proposed_subprotocols, event) def accept(self, event, subprotocol=None): request = event.h11request request_headers = _normed_header_dict(request.headers) nonce = request_headers[b'sec-websocket-key'] accept_token = self._generate_accept_token(nonce) headers = { b"Upgrade": b'WebSocket', b"Connection": b'Upgrade', b"Sec-WebSocket-Accept": accept_token, } if subprotocol is not None: if subprotocol not in event.proposed_subprotocols: raise ValueError( "unexpected subprotocol {!r}".format(subprotocol)) headers[b'Sec-WebSocket-Protocol'] = subprotocol extensions = request_headers.get(b'sec-websocket-extensions', None) accepts = {} if extensions is not None: offers = _split_comma_header(extensions) for offer in offers: offer = offer.decode('ascii') name = offer.split(';', 1)[0].strip() for extension in self.extensions: if extension.name == name: accept = extension.accept(self, offer) if accept is True: accepts[extension.name] = True elif accept: accepts[extension.name] = accept.encode('ascii') if accepts: extensions = [] for name, params in accepts.items(): if params is True: extensions.append(name.encode('ascii')) else: # py34 annoyance: doesn't support bytestring formatting params = params.decode("ascii") extensions.append(('%s; %s' % (name, params)) .encode("ascii")) 
headers[b"Sec-WebSocket-Extensions"] = b', '.join(extensions) response = h11.InformationalResponse(status_code=101, headers=headers.items()) self._outgoing += self._upgrade_connection.send(response) self._proto = FrameProtocol(self.client, self.extensions) self._state = ConnectionState.OPEN
mit
debugger22/sympy
sympy/matrices/expressions/tests/test_indexing.py
85
2177
from sympy import (symbols, MatrixSymbol, MatPow, BlockMatrix, Identity, ZeroMatrix, ImmutableMatrix, eye, Sum) from sympy.utilities.pytest import raises k, l, m, n = symbols('k l m n', integer=True) i, j = symbols('i j', integer=True) W = MatrixSymbol('W', k, l) X = MatrixSymbol('X', l, m) Y = MatrixSymbol('Y', l, m) Z = MatrixSymbol('Z', m, n) A = MatrixSymbol('A', 2, 2) B = MatrixSymbol('B', 2, 2) x = MatrixSymbol('x', 1, 2) y = MatrixSymbol('x', 2, 1) def test_symbolic_indexing(): x12 = X[1, 2] assert all(s in str(x12) for s in ['1', '2', X.name]) # We don't care about the exact form of this. We do want to make sure # that all of these features are present def test_add_index(): assert (X + Y)[i, j] == X[i, j] + Y[i, j] def test_mul_index(): assert (A*y)[0, 0] == A[0, 0]*y[0, 0] + A[0, 1]*y[1, 0] assert (A*B).as_mutable() == (A.as_mutable() * B.as_mutable()) X = MatrixSymbol('X', n, m) Y = MatrixSymbol('Y', m, k) result = (X*Y)[4,2] expected = Sum(X[4, i]*Y[i, 2], (i, 0, m - 1)) assert result.args[0].dummy_eq(expected.args[0], i) assert result.args[1][1:] == expected.args[1][1:] def test_pow_index(): Q = MatPow(A, 2) assert Q[0, 0] == A[0, 0]**2 + A[0, 1]*A[1, 0] def test_transpose_index(): assert X.T[i, j] == X[j, i] def test_Identity_index(): I = Identity(3) assert I[0, 0] == I[1, 1] == I[2, 2] == 1 assert I[1, 0] == I[0, 1] == I[2, 1] == 0 raises(IndexError, lambda: I[3, 3]) def test_block_index(): I = Identity(3) Z = ZeroMatrix(3, 3) B = BlockMatrix([[I, I], [I, I]]) e3 = ImmutableMatrix(eye(3)) BB = BlockMatrix([[e3, e3], [e3, e3]]) assert B[0, 0] == B[3, 0] == B[0, 3] == B[3, 3] == 1 assert B[4, 3] == B[5, 1] == 0 BB = BlockMatrix([[e3, e3], [e3, e3]]) assert B.as_explicit() == BB.as_explicit() BI = BlockMatrix([[I, Z], [Z, I]]) assert BI.as_explicit().equals(eye(6)) def test_slicing(): A.as_explicit()[0, :] # does not raise an error def test_errors(): raises(IndexError, lambda: Identity(2)[1, 2, 3, 4, 5]) raises(IndexError, lambda: Identity(2)[[1, 2, 3, 
4, 5]])
bsd-3-clause
40223136/-2015cd_midterm
static/Brython3.1.1-20150328-091302/Lib/difflib.py
737
82544
#! /usr/bin/env python3 """ Module difflib -- helpers for computing deltas between objects. Function get_close_matches(word, possibilities, n=3, cutoff=0.6): Use SequenceMatcher to return list of the best "good enough" matches. Function context_diff(a, b): For two lists of strings, return a delta in context diff format. Function ndiff(a, b): Return a delta: the difference between `a` and `b` (lists of strings). Function restore(delta, which): Return one of the two sequences that generated an ndiff delta. Function unified_diff(a, b): For two lists of strings, return a delta in unified diff format. Class SequenceMatcher: A flexible class for comparing pairs of sequences of any type. Class Differ: For producing human-readable deltas from sequences of lines of text. Class HtmlDiff: For producing HTML side by side comparison with change highlights. """ __all__ = ['get_close_matches', 'ndiff', 'restore', 'SequenceMatcher', 'Differ','IS_CHARACTER_JUNK', 'IS_LINE_JUNK', 'context_diff', 'unified_diff', 'HtmlDiff', 'Match'] import warnings import heapq from collections import namedtuple as _namedtuple Match = _namedtuple('Match', 'a b size') def _calculate_ratio(matches, length): if length: return 2.0 * matches / length return 1.0 class SequenceMatcher: """ SequenceMatcher is a flexible class for comparing pairs of sequences of any type, so long as the sequence elements are hashable. The basic algorithm predates, and is a little fancier than, an algorithm published in the late 1980's by Ratcliff and Obershelp under the hyperbolic name "gestalt pattern matching". The basic idea is to find the longest contiguous matching subsequence that contains no "junk" elements (R-O doesn't address junk). The same idea is then applied recursively to the pieces of the sequences to the left and to the right of the matching subsequence. This does not yield minimal edit sequences, but does tend to yield matches that "look right" to people. 
SequenceMatcher tries to compute a "human-friendly diff" between two sequences. Unlike e.g. UNIX(tm) diff, the fundamental notion is the longest *contiguous* & junk-free matching subsequence. That's what catches peoples' eyes. The Windows(tm) windiff has another interesting notion, pairing up elements that appear uniquely in each sequence. That, and the method here, appear to yield more intuitive difference reports than does diff. This method appears to be the least vulnerable to synching up on blocks of "junk lines", though (like blank lines in ordinary text files, or maybe "<P>" lines in HTML files). That may be because this is the only method of the 3 that has a *concept* of "junk" <wink>. Example, comparing two strings, and considering blanks to be "junk": >>> s = SequenceMatcher(lambda x: x == " ", ... "private Thread currentThread;", ... "private volatile Thread currentThread;") >>> .ratio() returns a float in [0, 1], measuring the "similarity" of the sequences. As a rule of thumb, a .ratio() value over 0.6 means the sequences are close matches: >>> print(round(s.ratio(), 3)) 0.866 >>> If you're only interested in where the sequences match, .get_matching_blocks() is handy: >>> for block in s.get_matching_blocks(): ... print("a[%d] and b[%d] match for %d elements" % block) a[0] and b[0] match for 8 elements a[8] and b[17] match for 21 elements a[29] and b[38] match for 0 elements Note that the last tuple returned by .get_matching_blocks() is always a dummy, (len(a), len(b), 0), and this is the only case in which the last tuple element (number of elements matched) is 0. If you want to know how to change the first sequence into the second, use .get_opcodes(): >>> for opcode in s.get_opcodes(): ... 
print("%6s a[%d:%d] b[%d:%d]" % opcode) equal a[0:8] b[0:8] insert a[8:8] b[8:17] equal a[8:29] b[17:38] See the Differ class for a fancy human-friendly file differencer, which uses SequenceMatcher both to compare sequences of lines, and to compare sequences of characters within similar (near-matching) lines. See also function get_close_matches() in this module, which shows how simple code building on SequenceMatcher can be used to do useful work. Timing: Basic R-O is cubic time worst case and quadratic time expected case. SequenceMatcher is quadratic time for the worst case and has expected-case behavior dependent in a complicated way on how many elements the sequences have in common; best case time is linear. Methods: __init__(isjunk=None, a='', b='') Construct a SequenceMatcher. set_seqs(a, b) Set the two sequences to be compared. set_seq1(a) Set the first sequence to be compared. set_seq2(b) Set the second sequence to be compared. find_longest_match(alo, ahi, blo, bhi) Find longest matching block in a[alo:ahi] and b[blo:bhi]. get_matching_blocks() Return list of triples describing matching subsequences. get_opcodes() Return list of 5-tuples describing how to turn a into b. ratio() Return a measure of the sequences' similarity (float in [0,1]). quick_ratio() Return an upper bound on .ratio() relatively quickly. real_quick_ratio() Return an upper bound on ratio() very quickly. """ def __init__(self, isjunk=None, a='', b='', autojunk=True): """Construct a SequenceMatcher. Optional arg isjunk is None (the default), or a one-argument function that takes a sequence element and returns true iff the element is junk. None is equivalent to passing "lambda x: 0", i.e. no elements are considered to be junk. For example, pass lambda x: x in " \\t" if you're comparing lines as sequences of characters, and don't want to synch up on blanks or hard tabs. Optional arg a is the first of two sequences to be compared. By default, an empty string. The elements of a must be hashable. 
See also .set_seqs() and .set_seq1(). Optional arg b is the second of two sequences to be compared. By default, an empty string. The elements of b must be hashable. See also .set_seqs() and .set_seq2(). Optional arg autojunk should be set to False to disable the "automatic junk heuristic" that treats popular elements as junk (see module documentation for more information). """ # Members: # a # first sequence # b # second sequence; differences are computed as "what do # we need to do to 'a' to change it into 'b'?" # b2j # for x in b, b2j[x] is a list of the indices (into b) # at which x appears; junk and popular elements do not appear # fullbcount # for x in b, fullbcount[x] == the number of times x # appears in b; only materialized if really needed (used # only for computing quick_ratio()) # matching_blocks # a list of (i, j, k) triples, where a[i:i+k] == b[j:j+k]; # ascending & non-overlapping in i and in j; terminated by # a dummy (len(a), len(b), 0) sentinel # opcodes # a list of (tag, i1, i2, j1, j2) tuples, where tag is # one of # 'replace' a[i1:i2] should be replaced by b[j1:j2] # 'delete' a[i1:i2] should be deleted # 'insert' b[j1:j2] should be inserted # 'equal' a[i1:i2] == b[j1:j2] # isjunk # a user-supplied function taking a sequence element and # returning true iff the element is "junk" -- this has # subtle but helpful effects on the algorithm, which I'll # get around to writing up someday <0.9 wink>. # DON'T USE! Only __chain_b uses this. Use "in self.bjunk". # bjunk # the items in b for which isjunk is True. # bpopular # nonjunk items in b treated as junk by the heuristic (if used). self.isjunk = isjunk self.a = self.b = None self.autojunk = autojunk self.set_seqs(a, b) def set_seqs(self, a, b): """Set the two sequences to be compared. >>> s = SequenceMatcher() >>> s.set_seqs("abcd", "bcde") >>> s.ratio() 0.75 """ self.set_seq1(a) self.set_seq2(b) def set_seq1(self, a): """Set the first sequence to be compared. 
The second sequence to be compared is not changed. >>> s = SequenceMatcher(None, "abcd", "bcde") >>> s.ratio() 0.75 >>> s.set_seq1("bcde") >>> s.ratio() 1.0 >>> SequenceMatcher computes and caches detailed information about the second sequence, so if you want to compare one sequence S against many sequences, use .set_seq2(S) once and call .set_seq1(x) repeatedly for each of the other sequences. See also set_seqs() and set_seq2(). """ if a is self.a: return self.a = a self.matching_blocks = self.opcodes = None def set_seq2(self, b): """Set the second sequence to be compared. The first sequence to be compared is not changed. >>> s = SequenceMatcher(None, "abcd", "bcde") >>> s.ratio() 0.75 >>> s.set_seq2("abcd") >>> s.ratio() 1.0 >>> SequenceMatcher computes and caches detailed information about the second sequence, so if you want to compare one sequence S against many sequences, use .set_seq2(S) once and call .set_seq1(x) repeatedly for each of the other sequences. See also set_seqs() and set_seq1(). """ if b is self.b: return self.b = b self.matching_blocks = self.opcodes = None self.fullbcount = None self.__chain_b() # For each element x in b, set b2j[x] to a list of the indices in # b where x appears; the indices are in increasing order; note that # the number of times x appears in b is len(b2j[x]) ... # when self.isjunk is defined, junk elements don't show up in this # map at all, which stops the central find_longest_match method # from starting any matching block at a junk element ... # b2j also does not contain entries for "popular" elements, meaning # elements that account for more than 1 + 1% of the total elements, and # when the sequence is reasonably large (>= 200 elements); this can # be viewed as an adaptive notion of semi-junk, and yields an enormous # speedup when, e.g., comparing program files with hundreds of # instances of "return NULL;" ... 
# note that this is only called when b changes; so for cross-product # kinds of matches, it's best to call set_seq2 once, then set_seq1 # repeatedly def __chain_b(self): # Because isjunk is a user-defined (not C) function, and we test # for junk a LOT, it's important to minimize the number of calls. # Before the tricks described here, __chain_b was by far the most # time-consuming routine in the whole module! If anyone sees # Jim Roskind, thank him again for profile.py -- I never would # have guessed that. # The first trick is to build b2j ignoring the possibility # of junk. I.e., we don't call isjunk at all yet. Throwing # out the junk later is much cheaper than building b2j "right" # from the start. b = self.b self.b2j = b2j = {} for i, elt in enumerate(b): indices = b2j.setdefault(elt, []) indices.append(i) # Purge junk elements self.bjunk = junk = set() isjunk = self.isjunk if isjunk: for elt in b2j.keys(): if isjunk(elt): junk.add(elt) for elt in junk: # separate loop avoids separate list of keys del b2j[elt] # Purge popular elements that are not junk self.bpopular = popular = set() n = len(b) if self.autojunk and n >= 200: ntest = n // 100 + 1 for elt, idxs in b2j.items(): if len(idxs) > ntest: popular.add(elt) for elt in popular: # ditto; as fast for 1% deletion del b2j[elt] def isbjunk(self, item): "Deprecated; use 'item in SequenceMatcher().bjunk'." warnings.warn("'SequenceMatcher().isbjunk(item)' is deprecated;\n" "use 'item in SMinstance.bjunk' instead.", DeprecationWarning, 2) return item in self.bjunk def isbpopular(self, item): "Deprecated; use 'item in SequenceMatcher().bpopular'." warnings.warn("'SequenceMatcher().isbpopular(item)' is deprecated;\n" "use 'item in SMinstance.bpopular' instead.", DeprecationWarning, 2) return item in self.bpopular def find_longest_match(self, alo, ahi, blo, bhi): """Find longest matching block in a[alo:ahi] and b[blo:bhi]. 
If isjunk is not defined: Return (i,j,k) such that a[i:i+k] is equal to b[j:j+k], where alo <= i <= i+k <= ahi blo <= j <= j+k <= bhi and for all (i',j',k') meeting those conditions, k >= k' i <= i' and if i == i', j <= j' In other words, of all maximal matching blocks, return one that starts earliest in a, and of all those maximal matching blocks that start earliest in a, return the one that starts earliest in b. >>> s = SequenceMatcher(None, " abcd", "abcd abcd") >>> s.find_longest_match(0, 5, 0, 9) Match(a=0, b=4, size=5) If isjunk is defined, first the longest matching block is determined as above, but with the additional restriction that no junk element appears in the block. Then that block is extended as far as possible by matching (only) junk elements on both sides. So the resulting block never matches on junk except as identical junk happens to be adjacent to an "interesting" match. Here's the same example as before, but considering blanks to be junk. That prevents " abcd" from matching the " abcd" at the tail end of the second sequence directly. Instead only the "abcd" can match, and matches the leftmost "abcd" in the second sequence: >>> s = SequenceMatcher(lambda x: x==" ", " abcd", "abcd abcd") >>> s.find_longest_match(0, 5, 0, 9) Match(a=1, b=0, size=4) If no blocks match, return (alo, blo, 0). >>> s = SequenceMatcher(None, "ab", "c") >>> s.find_longest_match(0, 2, 0, 1) Match(a=0, b=0, size=0) """ # CAUTION: stripping common prefix or suffix would be incorrect. # E.g., # ab # acab # Longest matching block is "ab", but if common prefix is # stripped, it's "a" (tied with "b"). UNIX(tm) diff does so # strip, so ends up claiming that ab is changed to acab by # inserting "ca" in the middle. That's minimal but unintuitive: # "it's obvious" that someone inserted "ac" at the front. # Windiff ends up at the same place as diff, but by pairing up # the unique 'b's and then matching the first two 'a's. 
a, b, b2j, isbjunk = self.a, self.b, self.b2j, self.bjunk.__contains__ besti, bestj, bestsize = alo, blo, 0 # find longest junk-free match # during an iteration of the loop, j2len[j] = length of longest # junk-free match ending with a[i-1] and b[j] j2len = {} nothing = [] for i in range(alo, ahi): # look at all instances of a[i] in b; note that because # b2j has no junk keys, the loop is skipped if a[i] is junk j2lenget = j2len.get newj2len = {} for j in b2j.get(a[i], nothing): # a[i] matches b[j] if j < blo: continue if j >= bhi: break k = newj2len[j] = j2lenget(j-1, 0) + 1 if k > bestsize: besti, bestj, bestsize = i-k+1, j-k+1, k j2len = newj2len # Extend the best by non-junk elements on each end. In particular, # "popular" non-junk elements aren't in b2j, which greatly speeds # the inner loop above, but also means "the best" match so far # doesn't contain any junk *or* popular non-junk elements. while besti > alo and bestj > blo and \ not isbjunk(b[bestj-1]) and \ a[besti-1] == b[bestj-1]: besti, bestj, bestsize = besti-1, bestj-1, bestsize+1 while besti+bestsize < ahi and bestj+bestsize < bhi and \ not isbjunk(b[bestj+bestsize]) and \ a[besti+bestsize] == b[bestj+bestsize]: bestsize += 1 # Now that we have a wholly interesting match (albeit possibly # empty!), we may as well suck up the matching junk on each # side of it too. Can't think of a good reason not to, and it # saves post-processing the (possibly considerable) expense of # figuring out what to do with it. In the case of an empty # interesting match, this is clearly the right thing to do, # because no other kind of match is possible in the regions. 
while besti > alo and bestj > blo and \ isbjunk(b[bestj-1]) and \ a[besti-1] == b[bestj-1]: besti, bestj, bestsize = besti-1, bestj-1, bestsize+1 while besti+bestsize < ahi and bestj+bestsize < bhi and \ isbjunk(b[bestj+bestsize]) and \ a[besti+bestsize] == b[bestj+bestsize]: bestsize = bestsize + 1 return Match(besti, bestj, bestsize) def get_matching_blocks(self): """Return list of triples describing matching subsequences. Each triple is of the form (i, j, n), and means that a[i:i+n] == b[j:j+n]. The triples are monotonically increasing in i and in j. New in Python 2.5, it's also guaranteed that if (i, j, n) and (i', j', n') are adjacent triples in the list, and the second is not the last triple in the list, then i+n != i' or j+n != j'. IOW, adjacent triples never describe adjacent equal blocks. The last triple is a dummy, (len(a), len(b), 0), and is the only triple with n==0. >>> s = SequenceMatcher(None, "abxcd", "abcd") >>> list(s.get_matching_blocks()) [Match(a=0, b=0, size=2), Match(a=3, b=2, size=2), Match(a=5, b=4, size=0)] """ if self.matching_blocks is not None: return self.matching_blocks la, lb = len(self.a), len(self.b) # This is most naturally expressed as a recursive algorithm, but # at least one user bumped into extreme use cases that exceeded # the recursion limit on their box. So, now we maintain a list # ('queue`) of blocks we still need to look at, and append partial # results to `matching_blocks` in a loop; the matches are sorted # at the end. 
queue = [(0, la, 0, lb)] matching_blocks = [] while queue: alo, ahi, blo, bhi = queue.pop() i, j, k = x = self.find_longest_match(alo, ahi, blo, bhi) # a[alo:i] vs b[blo:j] unknown # a[i:i+k] same as b[j:j+k] # a[i+k:ahi] vs b[j+k:bhi] unknown if k: # if k is 0, there was no matching block matching_blocks.append(x) if alo < i and blo < j: queue.append((alo, i, blo, j)) if i+k < ahi and j+k < bhi: queue.append((i+k, ahi, j+k, bhi)) matching_blocks.sort() # It's possible that we have adjacent equal blocks in the # matching_blocks list now. Starting with 2.5, this code was added # to collapse them. i1 = j1 = k1 = 0 non_adjacent = [] for i2, j2, k2 in matching_blocks: # Is this block adjacent to i1, j1, k1? if i1 + k1 == i2 and j1 + k1 == j2: # Yes, so collapse them -- this just increases the length of # the first block by the length of the second, and the first # block so lengthened remains the block to compare against. k1 += k2 else: # Not adjacent. Remember the first block (k1==0 means it's # the dummy we started with), and make the second block the # new block to compare against. if k1: non_adjacent.append((i1, j1, k1)) i1, j1, k1 = i2, j2, k2 if k1: non_adjacent.append((i1, j1, k1)) non_adjacent.append( (la, lb, 0) ) self.matching_blocks = non_adjacent return map(Match._make, self.matching_blocks) def get_opcodes(self): """Return list of 5-tuples describing how to turn a into b. Each tuple is of the form (tag, i1, i2, j1, j2). The first tuple has i1 == j1 == 0, and remaining tuples have i1 == the i2 from the tuple preceding it, and likewise for j1 == the previous j2. The tags are strings, with these meanings: 'replace': a[i1:i2] should be replaced by b[j1:j2] 'delete': a[i1:i2] should be deleted. Note that j1==j2 in this case. 'insert': b[j1:j2] should be inserted at a[i1:i1]. Note that i1==i2 in this case. 'equal': a[i1:i2] == b[j1:j2] >>> a = "qabxcd" >>> b = "abycdf" >>> s = SequenceMatcher(None, a, b) >>> for tag, i1, i2, j1, j2 in s.get_opcodes(): ... 
print(("%7s a[%d:%d] (%s) b[%d:%d] (%s)" % ... (tag, i1, i2, a[i1:i2], j1, j2, b[j1:j2]))) delete a[0:1] (q) b[0:0] () equal a[1:3] (ab) b[0:2] (ab) replace a[3:4] (x) b[2:3] (y) equal a[4:6] (cd) b[3:5] (cd) insert a[6:6] () b[5:6] (f) """ if self.opcodes is not None: return self.opcodes i = j = 0 self.opcodes = answer = [] for ai, bj, size in self.get_matching_blocks(): # invariant: we've pumped out correct diffs to change # a[:i] into b[:j], and the next matching block is # a[ai:ai+size] == b[bj:bj+size]. So we need to pump # out a diff to change a[i:ai] into b[j:bj], pump out # the matching block, and move (i,j) beyond the match tag = '' if i < ai and j < bj: tag = 'replace' elif i < ai: tag = 'delete' elif j < bj: tag = 'insert' if tag: answer.append( (tag, i, ai, j, bj) ) i, j = ai+size, bj+size # the list of matching blocks is terminated by a # sentinel with size 0 if size: answer.append( ('equal', ai, i, bj, j) ) return answer def get_grouped_opcodes(self, n=3): """ Isolate change clusters by eliminating ranges with no changes. Return a generator of groups with up to n lines of context. Each group is in the same format as returned by get_opcodes(). >>> from pprint import pprint >>> a = list(map(str, range(1,40))) >>> b = a[:] >>> b[8:8] = ['i'] # Make an insertion >>> b[20] += 'x' # Make a replacement >>> b[23:28] = [] # Make a deletion >>> b[30] += 'y' # Make another replacement >>> pprint(list(SequenceMatcher(None,a,b).get_grouped_opcodes())) [[('equal', 5, 8, 5, 8), ('insert', 8, 8, 8, 9), ('equal', 8, 11, 9, 12)], [('equal', 16, 19, 17, 20), ('replace', 19, 20, 20, 21), ('equal', 20, 22, 21, 23), ('delete', 22, 27, 23, 23), ('equal', 27, 30, 23, 26)], [('equal', 31, 34, 27, 30), ('replace', 34, 35, 30, 31), ('equal', 35, 38, 31, 34)]] """ codes = self.get_opcodes() if not codes: codes = [("equal", 0, 1, 0, 1)] # Fixup leading and trailing groups if they show no changes. 
if codes[0][0] == 'equal': tag, i1, i2, j1, j2 = codes[0] codes[0] = tag, max(i1, i2-n), i2, max(j1, j2-n), j2 if codes[-1][0] == 'equal': tag, i1, i2, j1, j2 = codes[-1] codes[-1] = tag, i1, min(i2, i1+n), j1, min(j2, j1+n) nn = n + n group = [] for tag, i1, i2, j1, j2 in codes: # End the current group and start a new one whenever # there is a large range with no changes. if tag == 'equal' and i2-i1 > nn: group.append((tag, i1, min(i2, i1+n), j1, min(j2, j1+n))) yield group group = [] i1, j1 = max(i1, i2-n), max(j1, j2-n) group.append((tag, i1, i2, j1 ,j2)) if group and not (len(group)==1 and group[0][0] == 'equal'): yield group def ratio(self): """Return a measure of the sequences' similarity (float in [0,1]). Where T is the total number of elements in both sequences, and M is the number of matches, this is 2.0*M / T. Note that this is 1 if the sequences are identical, and 0 if they have nothing in common. .ratio() is expensive to compute if you haven't already computed .get_matching_blocks() or .get_opcodes(), in which case you may want to try .quick_ratio() or .real_quick_ratio() first to get an upper bound. >>> s = SequenceMatcher(None, "abcd", "bcde") >>> s.ratio() 0.75 >>> s.quick_ratio() 0.75 >>> s.real_quick_ratio() 1.0 """ matches = sum(triple[-1] for triple in self.get_matching_blocks()) return _calculate_ratio(matches, len(self.a) + len(self.b)) def quick_ratio(self): """Return an upper bound on ratio() relatively quickly. This isn't defined beyond that it is an upper bound on .ratio(), and is faster to compute. """ # viewing a and b as multisets, set matches to the cardinality # of their intersection; this counts the number of matches # without regard to order, so is clearly an upper bound if self.fullbcount is None: self.fullbcount = fullbcount = {} for elt in self.b: fullbcount[elt] = fullbcount.get(elt, 0) + 1 fullbcount = self.fullbcount # avail[x] is the number of times x appears in 'b' less the # number of times we've seen it in 'a' so far ... 
kinda avail = {} availhas, matches = avail.__contains__, 0 for elt in self.a: if availhas(elt): numb = avail[elt] else: numb = fullbcount.get(elt, 0) avail[elt] = numb - 1 if numb > 0: matches = matches + 1 return _calculate_ratio(matches, len(self.a) + len(self.b)) def real_quick_ratio(self): """Return an upper bound on ratio() very quickly. This isn't defined beyond that it is an upper bound on .ratio(), and is faster to compute than either .ratio() or .quick_ratio(). """ la, lb = len(self.a), len(self.b) # can't have more matches than the number of elements in the # shorter sequence return _calculate_ratio(min(la, lb), la + lb) def get_close_matches(word, possibilities, n=3, cutoff=0.6): """Use SequenceMatcher to return list of the best "good enough" matches. word is a sequence for which close matches are desired (typically a string). possibilities is a list of sequences against which to match word (typically a list of strings). Optional arg n (default 3) is the maximum number of close matches to return. n must be > 0. Optional arg cutoff (default 0.6) is a float in [0, 1]. Possibilities that don't score at least that similar to word are ignored. The best (no more than n) matches among the possibilities are returned in a list, sorted by similarity score, most similar first. 
>>> get_close_matches("appel", ["ape", "apple", "peach", "puppy"]) ['apple', 'ape'] >>> import keyword as _keyword >>> get_close_matches("wheel", _keyword.kwlist) ['while'] >>> get_close_matches("Apple", _keyword.kwlist) [] >>> get_close_matches("accept", _keyword.kwlist) ['except'] """ if not n > 0: raise ValueError("n must be > 0: %r" % (n,)) if not 0.0 <= cutoff <= 1.0: raise ValueError("cutoff must be in [0.0, 1.0]: %r" % (cutoff,)) result = [] s = SequenceMatcher() s.set_seq2(word) for x in possibilities: s.set_seq1(x) if s.real_quick_ratio() >= cutoff and \ s.quick_ratio() >= cutoff and \ s.ratio() >= cutoff: result.append((s.ratio(), x)) # Move the best scorers to head of list result = heapq.nlargest(n, result) # Strip scores for the best n matches return [x for score, x in result] def _count_leading(line, ch): """ Return number of `ch` characters at the start of `line`. Example: >>> _count_leading(' abc', ' ') 3 """ i, n = 0, len(line) while i < n and line[i] == ch: i += 1 return i class Differ: r""" Differ is a class for comparing sequences of lines of text, and producing human-readable differences or deltas. Differ uses SequenceMatcher both to compare sequences of lines, and to compare sequences of characters within similar (near-matching) lines. Each line of a Differ delta begins with a two-letter code: '- ' line unique to sequence 1 '+ ' line unique to sequence 2 ' ' line common to both sequences '? ' line not present in either input sequence Lines beginning with '? ' attempt to guide the eye to intraline differences, and were not present in either input sequence. These lines can be confusing if the sequences contain tab characters. Note that Differ makes no claim to produce a *minimal* diff. To the contrary, minimal diffs are often counter-intuitive, because they synch up anywhere possible, sometimes accidental matches 100 pages apart. 
Restricting synch points to contiguous matches preserves some notion of locality, at the occasional cost of producing a longer diff. Example: Comparing two texts. First we set up the texts, sequences of individual single-line strings ending with newlines (such sequences can also be obtained from the `readlines()` method of file-like objects): >>> text1 = ''' 1. Beautiful is better than ugly. ... 2. Explicit is better than implicit. ... 3. Simple is better than complex. ... 4. Complex is better than complicated. ... '''.splitlines(keepends=True) >>> len(text1) 4 >>> text1[0][-1] '\n' >>> text2 = ''' 1. Beautiful is better than ugly. ... 3. Simple is better than complex. ... 4. Complicated is better than complex. ... 5. Flat is better than nested. ... '''.splitlines(keepends=True) Next we instantiate a Differ object: >>> d = Differ() Note that when instantiating a Differ object we may pass functions to filter out line and character 'junk'. See Differ.__init__ for details. Finally, we compare the two: >>> result = list(d.compare(text1, text2)) 'result' is a list of strings, so let's pretty-print it: >>> from pprint import pprint as _pprint >>> _pprint(result) [' 1. Beautiful is better than ugly.\n', '- 2. Explicit is better than implicit.\n', '- 3. Simple is better than complex.\n', '+ 3. Simple is better than complex.\n', '? ++\n', '- 4. Complex is better than complicated.\n', '? ^ ---- ^\n', '+ 4. Complicated is better than complex.\n', '? ++++ ^ ^\n', '+ 5. Flat is better than nested.\n'] As a single multi-line string it looks like this: >>> print(''.join(result), end="") 1. Beautiful is better than ugly. - 2. Explicit is better than implicit. - 3. Simple is better than complex. + 3. Simple is better than complex. ? ++ - 4. Complex is better than complicated. ? ^ ---- ^ + 4. Complicated is better than complex. ? ++++ ^ ^ + 5. Flat is better than nested. Methods: __init__(linejunk=None, charjunk=None) Construct a text differencer, with optional filters. 
    compare(a, b)
        Compare two sequences of lines; generate the resulting delta.
    """

    def __init__(self, linejunk=None, charjunk=None):
        """
        Construct a text differencer, with optional filters.

        The two optional keyword parameters are for filter functions:

        - `linejunk`: A function that should accept a single string argument,
          and return true iff the string is junk. The module-level function
          `IS_LINE_JUNK` may be used to filter out lines without visible
          characters, except for at most one splat ('#').  It is recommended
          to leave linejunk None; as of Python 2.3, the underlying
          SequenceMatcher class has grown an adaptive notion of "noise" lines
          that's better than any static definition the author has ever been
          able to craft.

        - `charjunk`: A function that should accept a string of length 1.
          The module-level function `IS_CHARACTER_JUNK` may be used to
          filter out whitespace characters (a blank or tab; **note**: bad
          idea to include newline in this!).  Use of IS_CHARACTER_JUNK is
          recommended.
        """

        self.linejunk = linejunk
        self.charjunk = charjunk

    def compare(self, a, b):
        r"""
        Compare two sequences of lines; generate the resulting delta.

        Each sequence must contain individual single-line strings ending with
        newlines. Such sequences can be obtained from the `readlines()` method
        of file-like objects.  The delta generated also consists of newline-
        terminated strings, ready to be printed as-is via the writeline()
        method of a file-like object.

        Example:

        >>> print(''.join(Differ().compare('one\ntwo\nthree\n'.splitlines(True),
        ...                                'ore\ntree\nemu\n'.splitlines(True))),
        ...       end="")
        - one
        ?  ^
        + ore
        ?  ^
        - two
        - three
        ?  -
        + tree
        + emu
        """

        cruncher = SequenceMatcher(self.linejunk, a, b)
        # Dispatch each top-level opcode to the generator that renders it;
        # 'replace' gets the expensive intraline treatment, the rest are
        # simple tagged dumps.
        for tag, alo, ahi, blo, bhi in cruncher.get_opcodes():
            if tag == 'replace':
                g = self._fancy_replace(a, alo, ahi, b, blo, bhi)
            elif tag == 'delete':
                g = self._dump('-', a, alo, ahi)
            elif tag == 'insert':
                g = self._dump('+', b, blo, bhi)
            elif tag == 'equal':
                g = self._dump(' ', a, alo, ahi)
            else:
                raise ValueError('unknown tag %r' % (tag,))

            for line in g:
                yield line

    def _dump(self, tag, x, lo, hi):
        """Generate comparison results for a same-tagged range."""
        for i in range(lo, hi):
            yield '%s %s' % (tag, x[i])

    def _plain_replace(self, a, alo, ahi, b, blo, bhi):
        # Render a replace block with no intraline analysis: just the
        # deleted lines and the inserted lines, shorter block first.
        assert alo < ahi and blo < bhi
        # dump the shorter block first -- reduces the burden on short-term
        # memory if the blocks are of very different sizes
        if bhi - blo < ahi - alo:
            first  = self._dump('+', b, blo, bhi)
            second = self._dump('-', a, alo, ahi)
        else:
            first  = self._dump('-', a, alo, ahi)
            second = self._dump('+', b, blo, bhi)

        for g in first, second:
            for line in g:
                yield line

    def _fancy_replace(self, a, alo, ahi, b, blo, bhi):
        r"""
        When replacing one block of lines with another, search the blocks
        for *similar* lines; the best-matching pair (if any) is used as a
        synch point, and intraline difference marking is done on the
        similar pair. Lots of work, but often worth it.

        Example:

        >>> d = Differ()
        >>> results = d._fancy_replace(['abcDefghiJkl\n'], 0, 1,
        ...                            ['abcdefGhijkl\n'], 0, 1)
        >>> print(''.join(results), end="")
        - abcDefghiJkl
        ?    ^  ^  ^
        + abcdefGhijkl
        ?    ^  ^  ^
        """

        # don't synch up unless the lines have a similarity score of at
        # least cutoff; best_ratio tracks the best score seen so far
        best_ratio, cutoff = 0.74, 0.75
        cruncher = SequenceMatcher(self.charjunk)
        eqi, eqj = None, None   # 1st indices of equal lines (if any)

        # search for the pair that matches best without being identical
        # (identical lines must be junk lines, & we don't want to synch up
        # on junk -- unless we have to)
        for j in range(blo, bhi):
            bj = b[j]
            cruncher.set_seq2(bj)
            for i in range(alo, ahi):
                ai = a[i]
                if ai == bj:
                    if eqi is None:
                        eqi, eqj = i, j
                    continue
                cruncher.set_seq1(ai)
                # computing similarity is expensive, so use the quick
                # upper bounds first -- have seen this speed up messy
                # compares by a factor of 3.
                # note that ratio() is only expensive to compute the first
                # time it's called on a sequence pair; the expensive part
                # of the computation is cached by cruncher
                if cruncher.real_quick_ratio() > best_ratio and \
                      cruncher.quick_ratio() > best_ratio and \
                      cruncher.ratio() > best_ratio:
                    best_ratio, best_i, best_j = cruncher.ratio(), i, j
        if best_ratio < cutoff:
            # no non-identical "pretty close" pair
            if eqi is None:
                # no identical pair either -- treat it as a straight replace
                for line in self._plain_replace(a, alo, ahi, b, blo, bhi):
                    yield line
                return
            # no close pair, but an identical pair -- synch up on that
            best_i, best_j, best_ratio = eqi, eqj, 1.0
        else:
            # there's a close pair, so forget the identical pair (if any)
            eqi = None

        # a[best_i] very similar to b[best_j]; eqi is None iff they're not
        # identical

        # pump out diffs from before the synch point
        for line in self._fancy_helper(a, alo, best_i, b, blo, best_j):
            yield line

        # do intraline marking on the synch pair
        aelt, belt = a[best_i], b[best_j]
        if eqi is None:
            # pump out a '-', '?', '+', '?' quad for the synched lines
            atags = btags = ""
            cruncher.set_seqs(aelt, belt)
            for tag, ai1, ai2, bj1, bj2 in cruncher.get_opcodes():
                la, lb = ai2 - ai1, bj2 - bj1
                if tag == 'replace':
                    atags += '^' * la
                    btags += '^' * lb
                elif tag == 'delete':
                    atags += '-' * la
                elif tag == 'insert':
                    btags += '+' * lb
                elif tag == 'equal':
                    atags += ' ' * la
                    btags += ' ' * lb
                else:
                    raise ValueError('unknown tag %r' % (tag,))
            for line in self._qformat(aelt, belt, atags, btags):
                yield line
        else:
            # the synch pair is identical
            yield '  ' + aelt

        # pump out diffs from after the synch point
        for line in self._fancy_helper(a, best_i+1, ahi, b, best_j+1, bhi):
            yield line

    def _fancy_helper(self, a, alo, ahi, b, blo, bhi):
        # Emit the leftover one-sided (or two-sided) ranges surrounding a
        # synch point chosen by _fancy_replace.
        g = []
        if alo < ahi:
            if blo < bhi:
                g = self._fancy_replace(a, alo, ahi, b, blo, bhi)
            else:
                g = self._dump('-', a, alo, ahi)
        elif blo < bhi:
            g = self._dump('+', b, blo, bhi)

        for line in g:
            yield line

    def _qformat(self, aline, bline, atags, btags):
        r"""
        Format "?" output and deal with leading tabs.

        Example:

        >>> d = Differ()
        >>> results = d._qformat('\tabcDefghiJkl\n', '\tabcdefGhijkl\n',
        ...                      '  ^ ^  ^      ', '  ^ ^  ^      ')
        >>> for line in results: print(repr(line))
        ...
        '- \tabcDefghiJkl\n'
        '? \t ^ ^  ^\n'
        '+ \tabcdefGhijkl\n'
        '? \t ^ ^  ^\n'
        """

        # Can hurt, but will probably help most of the time.
        common = min(_count_leading(aline, "\t"),
                     _count_leading(bline, "\t"))
        common = min(common, _count_leading(atags[:common], " "))
        common = min(common, _count_leading(btags[:common], " "))
        atags = atags[common:].rstrip()
        btags = btags[common:].rstrip()

        yield "- " + aline
        if atags:
            yield "? %s%s\n" % ("\t" * common, atags)

        yield "+ " + bline
        if btags:
            yield "? %s%s\n" % ("\t" * common, btags)

# With respect to junk, an earlier version of ndiff simply refused to
# *start* a match with a junk element.  The result was cases like this:
#     before: private Thread currentThread;
#     after:  private volatile Thread currentThread;
# If you consider whitespace to be junk, the longest contiguous match
# not starting with junk is "e Thread currentThread".  So ndiff reported
# that "e volatil" was inserted between the 't' and the 'e' in "private".
# While an accurate view, to people that's absurd.  The current version
# looks for matching blocks that are entirely junk-free, then extends the
# longest one of those as far as possible but only with matching junk.
# So now "currentThread" is matched, then extended to suck up the
# preceding blank; then "private" is matched, and extended to suck up the
# following blank; then "Thread" is matched; and finally ndiff reports
# that "volatile " was inserted before "Thread".  The only quibble
# remaining is that perhaps it was really the case that " volatile"
# was inserted after "private".  I can live with that <wink>.

import re

def IS_LINE_JUNK(line, pat=re.compile(r"\s*#?\s*$").match):
    r"""
    Return 1 for ignorable line: iff `line` is blank or contains a single '#'.

    Examples:

    >>> IS_LINE_JUNK('\n')
    True
    >>> IS_LINE_JUNK('  #   \n')
    True
    >>> IS_LINE_JUNK('hello\n')
    False
    """

    return pat(line) is not None

def IS_CHARACTER_JUNK(ch, ws=" \t"):
    r"""
    Return 1 for ignorable character: iff `ch` is a space or tab.
Examples: >>> IS_CHARACTER_JUNK(' ') True >>> IS_CHARACTER_JUNK('\t') True >>> IS_CHARACTER_JUNK('\n') False >>> IS_CHARACTER_JUNK('x') False """ return ch in ws ######################################################################## ### Unified Diff ######################################################################## def _format_range_unified(start, stop): 'Convert range to the "ed" format' # Per the diff spec at http://www.unix.org/single_unix_specification/ beginning = start + 1 # lines start numbering with one length = stop - start if length == 1: return '{}'.format(beginning) if not length: beginning -= 1 # empty ranges begin at line just before the range return '{},{}'.format(beginning, length) def unified_diff(a, b, fromfile='', tofile='', fromfiledate='', tofiledate='', n=3, lineterm='\n'): r""" Compare two sequences of lines; generate the delta as a unified diff. Unified diffs are a compact way of showing line changes and a few lines of context. The number of context lines is set by 'n' which defaults to three. By default, the diff control lines (those with ---, +++, or @@) are created with a trailing newline. This is helpful so that inputs created from file.readlines() result in diffs that are suitable for file.writelines() since both the inputs and outputs have trailing newlines. For inputs that do not have trailing newlines, set the lineterm argument to "" so that the output will be uniformly newline free. The unidiff format normally has a header for filenames and modification times. Any or all of these may be specified using strings for 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'. The modification times are normally expressed in the ISO 8601 format. Example: >>> for line in unified_diff('one two three four'.split(), ... 'zero one tree four'.split(), 'Original', 'Current', ... '2005-01-26 23:30:50', '2010-04-02 10:20:52', ... lineterm=''): ... 
print(line) # doctest: +NORMALIZE_WHITESPACE --- Original 2005-01-26 23:30:50 +++ Current 2010-04-02 10:20:52 @@ -1,4 +1,4 @@ +zero one -two -three +tree four """ started = False for group in SequenceMatcher(None,a,b).get_grouped_opcodes(n): if not started: started = True fromdate = '\t{}'.format(fromfiledate) if fromfiledate else '' todate = '\t{}'.format(tofiledate) if tofiledate else '' yield '--- {}{}{}'.format(fromfile, fromdate, lineterm) yield '+++ {}{}{}'.format(tofile, todate, lineterm) first, last = group[0], group[-1] file1_range = _format_range_unified(first[1], last[2]) file2_range = _format_range_unified(first[3], last[4]) yield '@@ -{} +{} @@{}'.format(file1_range, file2_range, lineterm) for tag, i1, i2, j1, j2 in group: if tag == 'equal': for line in a[i1:i2]: yield ' ' + line continue if tag in {'replace', 'delete'}: for line in a[i1:i2]: yield '-' + line if tag in {'replace', 'insert'}: for line in b[j1:j2]: yield '+' + line ######################################################################## ### Context Diff ######################################################################## def _format_range_context(start, stop): 'Convert range to the "ed" format' # Per the diff spec at http://www.unix.org/single_unix_specification/ beginning = start + 1 # lines start numbering with one length = stop - start if not length: beginning -= 1 # empty ranges begin at line just before the range if length <= 1: return '{}'.format(beginning) return '{},{}'.format(beginning, beginning + length - 1) # See http://www.unix.org/single_unix_specification/ def context_diff(a, b, fromfile='', tofile='', fromfiledate='', tofiledate='', n=3, lineterm='\n'): r""" Compare two sequences of lines; generate the delta as a context diff. Context diffs are a compact way of showing line changes and a few lines of context. The number of context lines is set by 'n' which defaults to three. 
By default, the diff control lines (those with *** or ---) are created with a trailing newline. This is helpful so that inputs created from file.readlines() result in diffs that are suitable for file.writelines() since both the inputs and outputs have trailing newlines. For inputs that do not have trailing newlines, set the lineterm argument to "" so that the output will be uniformly newline free. The context diff format normally has a header for filenames and modification times. Any or all of these may be specified using strings for 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'. The modification times are normally expressed in the ISO 8601 format. If not specified, the strings default to blanks. Example: >>> print(''.join(context_diff('one\ntwo\nthree\nfour\n'.splitlines(True), ... 'zero\none\ntree\nfour\n'.splitlines(True), 'Original', 'Current')), ... end="") *** Original --- Current *************** *** 1,4 **** one ! two ! three four --- 1,4 ---- + zero one ! tree four """ prefix = dict(insert='+ ', delete='- ', replace='! 
', equal=' ') started = False for group in SequenceMatcher(None,a,b).get_grouped_opcodes(n): if not started: started = True fromdate = '\t{}'.format(fromfiledate) if fromfiledate else '' todate = '\t{}'.format(tofiledate) if tofiledate else '' yield '*** {}{}{}'.format(fromfile, fromdate, lineterm) yield '--- {}{}{}'.format(tofile, todate, lineterm) first, last = group[0], group[-1] yield '***************' + lineterm file1_range = _format_range_context(first[1], last[2]) yield '*** {} ****{}'.format(file1_range, lineterm) if any(tag in {'replace', 'delete'} for tag, _, _, _, _ in group): for tag, i1, i2, _, _ in group: if tag != 'insert': for line in a[i1:i2]: yield prefix[tag] + line file2_range = _format_range_context(first[3], last[4]) yield '--- {} ----{}'.format(file2_range, lineterm) if any(tag in {'replace', 'insert'} for tag, _, _, _, _ in group): for tag, _, _, j1, j2 in group: if tag != 'delete': for line in b[j1:j2]: yield prefix[tag] + line def ndiff(a, b, linejunk=None, charjunk=IS_CHARACTER_JUNK): r""" Compare `a` and `b` (lists of strings); return a `Differ`-style delta. Optional keyword parameters `linejunk` and `charjunk` are for filter functions (or None): - linejunk: A function that should accept a single string argument, and return true iff the string is junk. The default is None, and is recommended; as of Python 2.3, an adaptive notion of "noise" lines is used that does a good job on its own. - charjunk: A function that should accept a string of length 1. The default is module-level function IS_CHARACTER_JUNK, which filters out whitespace characters (a blank or tab; note: bad idea to include newline in this!). Tools/scripts/ndiff.py is a command-line front-end to this function. Example: >>> diff = ndiff('one\ntwo\nthree\n'.splitlines(keepends=True), ... 'ore\ntree\nemu\n'.splitlines(keepends=True)) >>> print(''.join(diff), end="") - one ? ^ + ore ? ^ - two - three ? 
- + tree + emu """ return Differ(linejunk, charjunk).compare(a, b) def _mdiff(fromlines, tolines, context=None, linejunk=None, charjunk=IS_CHARACTER_JUNK): r"""Returns generator yielding marked up from/to side by side differences. Arguments: fromlines -- list of text lines to compared to tolines tolines -- list of text lines to be compared to fromlines context -- number of context lines to display on each side of difference, if None, all from/to text lines will be generated. linejunk -- passed on to ndiff (see ndiff documentation) charjunk -- passed on to ndiff (see ndiff documentation) This function returns an iterator which returns a tuple: (from line tuple, to line tuple, boolean flag) from/to line tuple -- (line num, line text) line num -- integer or None (to indicate a context separation) line text -- original line text with following markers inserted: '\0+' -- marks start of added text '\0-' -- marks start of deleted text '\0^' -- marks start of changed text '\1' -- marks end of added/deleted/changed text boolean flag -- None indicates context separation, True indicates either "from" or "to" line contains a change, otherwise False. This function/iterator was originally developed to generate side by side file difference for making HTML pages (see HtmlDiff class for example usage). Note, this function utilizes the ndiff function to generate the side by side difference markup. Optional ndiff arguments may be passed to this function and they in turn will be passed to ndiff. """ import re # regular expression for finding intraline change indices change_re = re.compile('(\++|\-+|\^+)') # create the difference iterator to generate the differences diff_lines_iterator = ndiff(fromlines,tolines,linejunk,charjunk) def _make_line(lines, format_key, side, num_lines=[0,0]): """Returns line of text with user's change markup and line formatting. lines -- list of lines from the ndiff generator to produce a line of text from. 
When producing the line of text to return, the lines used are removed from this list. format_key -- '+' return first line in list with "add" markup around the entire line. '-' return first line in list with "delete" markup around the entire line. '?' return first line in list with add/delete/change intraline markup (indices obtained from second line) None return first line in list with no markup side -- indice into the num_lines list (0=from,1=to) num_lines -- from/to current line number. This is NOT intended to be a passed parameter. It is present as a keyword argument to maintain memory of the current line numbers between calls of this function. Note, this function is purposefully not defined at the module scope so that data it needs from its parent function (within whose context it is defined) does not need to be of module scope. """ num_lines[side] += 1 # Handle case where no user markup is to be added, just return line of # text with user's line format to allow for usage of the line number. if format_key is None: return (num_lines[side],lines.pop(0)[2:]) # Handle case of intraline changes if format_key == '?': text, markers = lines.pop(0), lines.pop(0) # find intraline changes (store change type and indices in tuples) sub_info = [] def record_sub_info(match_object,sub_info=sub_info): sub_info.append([match_object.group(1)[0],match_object.span()]) return match_object.group(1) change_re.sub(record_sub_info,markers) # process each tuple inserting our special marks that won't be # noticed by an xml/html escaper. for key,(begin,end) in sub_info[::-1]: text = text[0:begin]+'\0'+key+text[begin:end]+'\1'+text[end:] text = text[2:] # Handle case of add/delete entire line else: text = lines.pop(0)[2:] # if line of text is just a newline, insert a space so there is # something for the user to highlight and see. if not text: text = ' ' # insert marks that won't be noticed by an xml/html escaper. 
text = '\0' + format_key + text + '\1' # Return line of text, first allow user's line formatter to do its # thing (such as adding the line number) then replace the special # marks with what the user's change markup. return (num_lines[side],text) def _line_iterator(): """Yields from/to lines of text with a change indication. This function is an iterator. It itself pulls lines from a differencing iterator, processes them and yields them. When it can it yields both a "from" and a "to" line, otherwise it will yield one or the other. In addition to yielding the lines of from/to text, a boolean flag is yielded to indicate if the text line(s) have differences in them. Note, this function is purposefully not defined at the module scope so that data it needs from its parent function (within whose context it is defined) does not need to be of module scope. """ lines = [] num_blanks_pending, num_blanks_to_yield = 0, 0 while True: # Load up next 4 lines so we can look ahead, create strings which # are a concatenation of the first character of each of the 4 lines # so we can do some very readable comparisons. while len(lines) < 4: try: lines.append(next(diff_lines_iterator)) except StopIteration: lines.append('X') s = ''.join([line[0] for line in lines]) if s.startswith('X'): # When no more lines, pump out any remaining blank lines so the # corresponding add/delete lines get a matching blank line so # all line pairs get yielded at the next level. 
num_blanks_to_yield = num_blanks_pending elif s.startswith('-?+?'): # simple intraline change yield _make_line(lines,'?',0), _make_line(lines,'?',1), True continue elif s.startswith('--++'): # in delete block, add block coming: we do NOT want to get # caught up on blank lines yet, just process the delete line num_blanks_pending -= 1 yield _make_line(lines,'-',0), None, True continue elif s.startswith(('--?+', '--+', '- ')): # in delete block and see a intraline change or unchanged line # coming: yield the delete line and then blanks from_line,to_line = _make_line(lines,'-',0), None num_blanks_to_yield,num_blanks_pending = num_blanks_pending-1,0 elif s.startswith('-+?'): # intraline change yield _make_line(lines,None,0), _make_line(lines,'?',1), True continue elif s.startswith('-?+'): # intraline change yield _make_line(lines,'?',0), _make_line(lines,None,1), True continue elif s.startswith('-'): # delete FROM line num_blanks_pending -= 1 yield _make_line(lines,'-',0), None, True continue elif s.startswith('+--'): # in add block, delete block coming: we do NOT want to get # caught up on blank lines yet, just process the add line num_blanks_pending += 1 yield None, _make_line(lines,'+',1), True continue elif s.startswith(('+ ', '+-')): # will be leaving an add block: yield blanks then add line from_line, to_line = None, _make_line(lines,'+',1) num_blanks_to_yield,num_blanks_pending = num_blanks_pending+1,0 elif s.startswith('+'): # inside an add block, yield the add line num_blanks_pending += 1 yield None, _make_line(lines,'+',1), True continue elif s.startswith(' '): # unchanged text, yield it to both sides yield _make_line(lines[:],None,0),_make_line(lines,None,1),False continue # Catch up on the blank lines so when we yield the next from/to # pair, they are lined up. 
while(num_blanks_to_yield < 0): num_blanks_to_yield += 1 yield None,('','\n'),True while(num_blanks_to_yield > 0): num_blanks_to_yield -= 1 yield ('','\n'),None,True if s.startswith('X'): raise StopIteration else: yield from_line,to_line,True def _line_pair_iterator(): """Yields from/to lines of text with a change indication. This function is an iterator. It itself pulls lines from the line iterator. Its difference from that iterator is that this function always yields a pair of from/to text lines (with the change indication). If necessary it will collect single from/to lines until it has a matching pair from/to pair to yield. Note, this function is purposefully not defined at the module scope so that data it needs from its parent function (within whose context it is defined) does not need to be of module scope. """ line_iterator = _line_iterator() fromlines,tolines=[],[] while True: # Collecting lines of text until we have a from/to pair while (len(fromlines)==0 or len(tolines)==0): from_line, to_line, found_diff = next(line_iterator) if from_line is not None: fromlines.append((from_line,found_diff)) if to_line is not None: tolines.append((to_line,found_diff)) # Once we have a pair, remove them from the collection and yield it from_line, fromDiff = fromlines.pop(0) to_line, to_diff = tolines.pop(0) yield (from_line,to_line,fromDiff or to_diff) # Handle case where user does not want context differencing, just yield # them up without doing anything else with them. line_pair_iterator = _line_pair_iterator() if context is None: while True: yield next(line_pair_iterator) # Handle case where user wants context differencing. We must do some # storage of lines until we know for sure that they are to be yielded. else: context += 1 lines_to_write = 0 while True: # Store lines up until we find a difference, note use of a # circular queue because we only need to keep around what # we need for context. 
index, contextLines = 0, [None]*(context) found_diff = False while(found_diff is False): from_line, to_line, found_diff = next(line_pair_iterator) i = index % context contextLines[i] = (from_line, to_line, found_diff) index += 1 # Yield lines that we have collected so far, but first yield # the user's separator. if index > context: yield None, None, None lines_to_write = context else: lines_to_write = index index = 0 while(lines_to_write): i = index % context index += 1 yield contextLines[i] lines_to_write -= 1 # Now yield the context lines after the change lines_to_write = context-1 while(lines_to_write): from_line, to_line, found_diff = next(line_pair_iterator) # If another change within the context, extend the context if found_diff: lines_to_write = context-1 else: lines_to_write -= 1 yield from_line, to_line, found_diff _file_template = """ <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html> <head> <meta http-equiv="Content-Type" content="text/html; charset=ISO-8859-1" /> <title></title> <style type="text/css">%(styles)s </style> </head> <body> %(table)s%(legend)s </body> </html>""" _styles = """ table.diff {font-family:Courier; border:medium;} .diff_header {background-color:#e0e0e0} td.diff_header {text-align:right} .diff_next {background-color:#c0c0c0} .diff_add {background-color:#aaffaa} .diff_chg {background-color:#ffff77} .diff_sub {background-color:#ffaaaa}""" _table_template = """ <table class="diff" id="difflib_chg_%(prefix)s_top" cellspacing="0" cellpadding="0" rules="groups" > <colgroup></colgroup> <colgroup></colgroup> <colgroup></colgroup> <colgroup></colgroup> <colgroup></colgroup> <colgroup></colgroup> %(header_row)s <tbody> %(data_rows)s </tbody> </table>""" _legend = """ <table class="diff" summary="Legends"> <tr> <th colspan="2"> Legends </th> </tr> <tr> <td> <table border="" summary="Colors"> <tr><th> Colors </th> </tr> <tr><td 
class="diff_add">&nbsp;Added&nbsp;</td></tr>
                      <tr><td class="diff_chg">Changed</td> </tr>
                      <tr><td class="diff_sub">Deleted</td> </tr>
                  </table></td>
             <td> <table border="" summary="Links">
                      <tr><th colspan="2"> Links </th> </tr>
                      <tr><td>(f)irst change</td> </tr>
                      <tr><td>(n)ext change</td> </tr>
                      <tr><td>(t)op</td> </tr>
                  </table></td> </tr>
    </table>"""

class HtmlDiff(object):
    """For producing HTML side by side comparison with change highlights.

    This class can be used to create an HTML table (or a complete HTML file
    containing the table) showing a side by side, line by line comparison
    of text with inter-line and intra-line change highlights.  The table can
    be generated in either full or contextual difference mode.

    The following methods are provided for HTML generation:

    make_table -- generates HTML for a single side by side table
    make_file -- generates complete HTML file with a single side by side table

    See tools/scripts/diff.py for an example usage of this class.
    """

    # Module-level templates are exposed as class attributes so that
    # subclasses can override the generated markup.
    _file_template = _file_template
    _styles = _styles
    _table_template = _table_template
    _legend = _legend
    # Counter shared by all instances to generate unique anchor prefixes.
    _default_prefix = 0

    def __init__(self,tabsize=8,wrapcolumn=None,linejunk=None,
                 charjunk=IS_CHARACTER_JUNK):
        """HtmlDiff instance initializer

        Arguments:
        tabsize -- tab stop spacing, defaults to 8.
        wrapcolumn -- column number where lines are broken and wrapped,
            defaults to None where lines are not wrapped.
        linejunk,charjunk -- keyword arguments passed into ndiff() (used to by
            HtmlDiff() to generate the side by side HTML differences).  See
            ndiff() documentation for argument default values and descriptions.
        """
        self._tabsize = tabsize
        self._wrapcolumn = wrapcolumn
        self._linejunk = linejunk
        self._charjunk = charjunk

    def make_file(self,fromlines,tolines,fromdesc='',todesc='',context=False,
                  numlines=5):
        """Returns HTML file of side by side comparison with change highlights

        Arguments:
        fromlines -- list of "from" lines
        tolines -- list of "to" lines
        fromdesc -- "from" file column header string
        todesc -- "to" file column header string
        context -- set to True for contextual differences (defaults to False
            which shows full differences).
        numlines -- number of context lines.  When context is set True,
            controls number of lines displayed before and after the change.
            When context is False, controls the number of lines to place
            the "next" link anchors before the next change (so click of
            "next" link jumps to just before the change).
        """

        return self._file_template % dict(
            styles = self._styles,
            legend = self._legend,
            table = self.make_table(fromlines,tolines,fromdesc,todesc,
                                    context=context,numlines=numlines))

    def _tab_newline_replace(self,fromlines,tolines):
        """Returns from/to line lists with tabs expanded and newlines removed.

        Instead of tab characters being replaced by the number of spaces
        needed to fill in to the next tab stop, this function will fill
        the space with tab characters.  This is done so that the difference
        algorithms can identify changes in a file when tabs are replaced by
        spaces and vice versa.  At the end of the HTML generation, the tab
        characters will be replaced with a nonbreakable space.
        """
        def expand_tabs(line):
            # hide real spaces
            line = line.replace(' ','\0')
            # expand tabs into spaces
            line = line.expandtabs(self._tabsize)
            # replace spaces from expanded tabs back into tab characters
            # (we'll replace them with markup after we do differencing)
            line = line.replace(' ','\t')
            return line.replace('\0',' ').rstrip('\n')
        fromlines = [expand_tabs(line) for line in fromlines]
        tolines = [expand_tabs(line) for line in tolines]
        return fromlines,tolines

    def _split_line(self,data_list,line_num,text):
        """Builds list of text lines by splitting text lines at wrap point

        This function will determine if the input text line needs to be
        wrapped (split) into separate lines.  If so, the first wrap point
        will be determined and the first line appended to the output
        text line list.  This function is used recursively to handle
        the second part of the split line to further split it.
        """
        # if blank line or context separator, just add it to the output list
        if not line_num:
            data_list.append((line_num,text))
            return

        # if line text doesn't need wrapping, just add it to the output list
        size = len(text)
        max = self._wrapcolumn
        # NOTE: each '\0x'...'\1' marker pair adds 3 non-display characters,
        # hence the size adjustment below.
        if (size <= max) or ((size -(text.count('\0')*3)) <= max):
            data_list.append((line_num,text))
            return

        # scan text looking for the wrap point, keeping track if the wrap
        # point is inside markers
        i = 0
        n = 0
        mark = ''
        while n < max and i < size:
            if text[i] == '\0':
                i += 1
                mark = text[i]
                i += 1
            elif text[i] == '\1':
                i += 1
                mark = ''
            else:
                i += 1
                n += 1

        # wrap point is inside text, break it up into separate lines
        line1 = text[:i]
        line2 = text[i:]

        # if wrap point is inside markers, place end marker at end of first
        # line and start marker at beginning of second line because each
        # line will have its own table tag markup around it.
        if mark:
            line1 = line1 + '\1'
            line2 = '\0' + mark + line2

        # tack on first line onto the output list
        data_list.append((line_num,line1))

        # use this routine again to wrap the remaining text
        self._split_line(data_list,'>',line2)

    def _line_wrapper(self,diffs):
        """Returns iterator that splits (wraps) mdiff text lines"""

        # pull from/to data and flags from mdiff iterator
        for fromdata,todata,flag in diffs:
            # check for context separators and pass them through
            if flag is None:
                yield fromdata,todata,flag
                continue
            (fromline,fromtext),(toline,totext) = fromdata,todata
            # for each from/to line split it at the wrap column to form
            # list of text lines.
            fromlist,tolist = [],[]
            self._split_line(fromlist,fromline,fromtext)
            self._split_line(tolist,toline,totext)
            # yield from/to line in pairs inserting blank lines as
            # necessary when one side has more wrapped lines
            while fromlist or tolist:
                if fromlist:
                    fromdata = fromlist.pop(0)
                else:
                    fromdata = ('',' ')
                if tolist:
                    todata = tolist.pop(0)
                else:
                    todata = ('',' ')
                yield fromdata,todata,flag

    def _collect_lines(self,diffs):
        """Collects mdiff output into separate lists

        Before storing the mdiff from/to data into a list, it is converted
        into a single line of text with HTML markup.
        """

        fromlist,tolist,flaglist = [],[],[]
        # pull from/to data and flags from mdiff style iterator
        for fromdata,todata,flag in diffs:
            try:
                # store HTML markup of the lines into the lists
                fromlist.append(self._format_line(0,flag,*fromdata))
                tolist.append(self._format_line(1,flag,*todata))
            except TypeError:
                # exceptions occur for lines where context separators go
                fromlist.append(None)
                tolist.append(None)
            flaglist.append(flag)
        return fromlist,tolist,flaglist

    def _format_line(self,side,flag,linenum,text):
        """Returns HTML markup of "from" / "to" text lines

        side -- 0 or 1 indicating "from" or "to" text
        flag -- indicates if difference on line
        linenum -- line number (used for line number column)
        text -- line text to be marked up
        """
        try:
            linenum = '%d' % linenum
            id = ' id="%s%s"' % (self._prefix[side],linenum)
        except TypeError:
            # handle blank lines where linenum is '>' or ''
            id = ''
        # replace those things that would get confused with HTML symbols
        text=text.replace("&","&amp;").replace(">","&gt;").replace("<","&lt;")

        # make space non-breakable so they don't get compressed or line wrapped
        text = text.replace(' ','&nbsp;').rstrip()

        return '<td class="diff_header"%s>%s</td><td nowrap="nowrap">%s</td>' \
               % (id,linenum,text)

    def _make_prefix(self):
        """Create unique anchor prefixes"""

        # Generate a unique anchor prefix so multiple tables
        # can exist on the same HTML page without conflicts.
        fromprefix = "from%d_" % HtmlDiff._default_prefix
        toprefix = "to%d_" % HtmlDiff._default_prefix
        HtmlDiff._default_prefix += 1
        # store prefixes so line format method has access
        self._prefix = [fromprefix,toprefix]

    def _convert_flags(self,fromlist,tolist,flaglist,context,numlines):
        """Makes list of "next" links"""

        # all anchor names will be generated using the unique "to" prefix
        toprefix = self._prefix[1]

        # process change flags, generating middle column of next anchors/links
        next_id = ['']*len(flaglist)
        next_href = ['']*len(flaglist)
        num_chg, in_change = 0, False
        last = 0
        for i,flag in enumerate(flaglist):
            if flag:
                if not in_change:
                    in_change = True
                    last = i
                    # at the beginning of a change, drop an anchor a few lines
                    # (the context lines) before the change for the previous
                    # link
                    i = max([0,i-numlines])
                    next_id[i] = ' id="difflib_chg_%s_%d"' % (toprefix,num_chg)
                    # at the beginning of a change, drop a link to the next
                    # change
                    num_chg += 1
                    next_href[last] = '<a href="#difflib_chg_%s_%d">n</a>' % (
                         toprefix,num_chg)
            else:
                in_change = False
        # check for cases where there is no content to avoid exceptions
        if not flaglist:
            flaglist = [False]
            next_id = ['']
            next_href = ['']
            last = 0
            if context:
                fromlist = ['<td></td><td>&nbsp;No Differences Found&nbsp;</td>']
                tolist = fromlist
            else:
                fromlist = tolist = ['<td></td><td>&nbsp;Empty File&nbsp;</td>']
        # if not a change on first line, drop a link
        if not flaglist[0]:
            next_href[0] = '<a href="#difflib_chg_%s_0">f</a>' % toprefix
        # redo the last link to link to the top
        next_href[last] = '<a href="#difflib_chg_%s_top">t</a>' % (toprefix)

        return fromlist,tolist,flaglist,next_href,next_id

    def make_table(self,fromlines,tolines,fromdesc='',todesc='',context=False,
                   numlines=5):
        """Returns HTML table of side by side comparison with change highlights

        Arguments:
        fromlines -- list of "from" lines
        tolines -- list of "to" lines
        fromdesc -- "from" file column header string
        todesc -- "to" file column header string
        context -- set to True for 
contextual differences (defaults to False which shows full differences). numlines -- number of context lines. When context is set True, controls number of lines displayed before and after the change. When context is False, controls the number of lines to place the "next" link anchors before the next change (so click of "next" link jumps to just before the change). """ # make unique anchor prefixes so that multiple tables may exist # on the same page without conflict. self._make_prefix() # change tabs to spaces before it gets more difficult after we insert # markup fromlines,tolines = self._tab_newline_replace(fromlines,tolines) # create diffs iterator which generates side by side from/to data if context: context_lines = numlines else: context_lines = None diffs = _mdiff(fromlines,tolines,context_lines,linejunk=self._linejunk, charjunk=self._charjunk) # set up iterator to wrap lines that exceed desired width if self._wrapcolumn: diffs = self._line_wrapper(diffs) # collect up from/to lines and flags into lists (also format the lines) fromlist,tolist,flaglist = self._collect_lines(diffs) # process change flags, generating middle column of next anchors/links fromlist,tolist,flaglist,next_href,next_id = self._convert_flags( fromlist,tolist,flaglist,context,numlines) s = [] fmt = ' <tr><td class="diff_next"%s>%s</td>%s' + \ '<td class="diff_next">%s</td>%s</tr>\n' for i in range(len(flaglist)): if flaglist[i] is None: # mdiff yields None on separator lines skip the bogus ones # generated for the first line if i > 0: s.append(' </tbody> \n <tbody>\n') else: s.append( fmt % (next_id[i],next_href[i],fromlist[i], next_href[i],tolist[i])) if fromdesc or todesc: header_row = '<thead><tr>%s%s%s%s</tr></thead>' % ( '<th class="diff_next"><br /></th>', '<th colspan="2" class="diff_header">%s</th>' % fromdesc, '<th class="diff_next"><br /></th>', '<th colspan="2" class="diff_header">%s</th>' % todesc) else: header_row = '' table = self._table_template % dict( data_rows=''.join(s), 
header_row=header_row, prefix=self._prefix[1]) return table.replace('\0+','<span class="diff_add">'). \ replace('\0-','<span class="diff_sub">'). \ replace('\0^','<span class="diff_chg">'). \ replace('\1','</span>'). \ replace('\t','&nbsp;') del re def restore(delta, which): r""" Generate one of the two sequences that generated a delta. Given a `delta` produced by `Differ.compare()` or `ndiff()`, extract lines originating from file 1 or 2 (parameter `which`), stripping off line prefixes. Examples: >>> diff = ndiff('one\ntwo\nthree\n'.splitlines(keepends=True), ... 'ore\ntree\nemu\n'.splitlines(keepends=True)) >>> diff = list(diff) >>> print(''.join(restore(diff, 1)), end="") one two three >>> print(''.join(restore(diff, 2)), end="") ore tree emu """ try: tag = {1: "- ", 2: "+ "}[int(which)] except KeyError: raise ValueError('unknown delta choice (must be 1 or 2): %r' % which) prefixes = (" ", tag) for line in delta: if line[:2] in prefixes: yield line[2:] def _test(): import doctest, difflib return doctest.testmod(difflib) if __name__ == "__main__": _test()
gpl-3.0
tiagochiavericosta/edx-platform
common/djangoapps/util/testing.py
91
2951
import sys from mock import patch from django.conf import settings from django.core.urlresolvers import clear_url_caches, resolve class UrlResetMixin(object): """Mixin to reset urls.py before and after a test Django memoizes the function that reads the urls module (whatever module urlconf names). The module itself is also stored by python in sys.modules. To fully reload it, we need to reload the python module, and also clear django's cache of the parsed urls. However, the order in which we do this doesn't matter, because neither one will get reloaded until the next request Doing this is expensive, so it should only be added to tests that modify settings that affect the contents of urls.py """ def _reset_urls(self, urlconf_modules): """Reset `urls.py` for a set of Django apps.""" for urlconf in urlconf_modules: if urlconf in sys.modules: reload(sys.modules[urlconf]) clear_url_caches() # Resolve a URL so that the new urlconf gets loaded resolve('/') def setUp(self, *args, **kwargs): """Reset Django urls before tests and after tests If you need to reset `urls.py` from a particular Django app (or apps), specify these modules in *args. Examples: # Reload only the root urls.py super(MyTestCase, self).setUp() # Reload urls from my_app super(MyTestCase, self).setUp("my_app.urls") # Reload urls from my_app and another_app super(MyTestCase, self).setUp("my_app.urls", "another_app.urls") """ super(UrlResetMixin, self).setUp(**kwargs) urlconf_modules = [settings.ROOT_URLCONF] if args: urlconf_modules.extend(args) self._reset_urls(urlconf_modules) self.addCleanup(lambda: self._reset_urls(urlconf_modules)) class EventTestMixin(object): """ Generic mixin for verifying that events were emitted during a test. 
""" def setUp(self, tracker): super(EventTestMixin, self).setUp() self.tracker = tracker patcher = patch(self.tracker) self.mock_tracker = patcher.start() self.addCleanup(patcher.stop) def assert_no_events_were_emitted(self): """ Ensures no events were emitted since the last event related assertion. """ self.assertFalse(self.mock_tracker.emit.called) # pylint: disable=maybe-no-member def assert_event_emitted(self, event_name, **kwargs): """ Verify that an event was emitted with the given parameters. """ self.mock_tracker.emit.assert_any_call( # pylint: disable=maybe-no-member event_name, kwargs ) def reset_tracker(self): """ Reset the mock tracker in order to forget about old events. """ self.mock_tracker.reset_mock()
agpl-3.0
gangadhar-kadam/church-erpnext
selling/doctype/sales_order/sales_order.py
5
17285
# ERPNext - web based ERP (http://erpnext.com) # Copyright (C) 2012 Web Notes Technologies Pvt Ltd # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from __future__ import unicode_literals import webnotes import webnotes.utils import json from webnotes.utils import cstr, flt, getdate from webnotes.model.bean import getlist from webnotes.model.code import get_obj from webnotes import msgprint sql = webnotes.conn.sql from controllers.selling_controller import SellingController class DocType(SellingController): def __init__(self, doc, doclist=None): self.doc = doc if not doclist: doclist = [] self.doclist = doclist self.tname = 'Sales Order Item' self.fname = 'sales_order_details' self.person_tname = 'Target Detail' self.partner_tname = 'Partner Target Detail' self.territory_tname = 'Territory Target Detail' def pull_quotation_details(self): self.doclist = self.doc.clear_table(self.doclist, 'other_charges') self.doclist = self.doc.clear_table(self.doclist, 'sales_order_details') self.doclist = self.doc.clear_table(self.doclist, 'sales_team') self.doclist = self.doc.clear_table(self.doclist, 'tc_details') if self.doc.quotation_no: get_obj('DocType Mapper', 'Quotation-Sales Order').dt_map('Quotation', 'Sales Order', self.doc.quotation_no, self.doc, self.doclist, "[['Quotation', 'Sales Order'],['Quotation Item', 'Sales Order Item'],['Sales Taxes and Charges','Sales Taxes and Charges'],['Sales 
Team','Sales Team'],['TC Detail','TC Detail']]") else: msgprint("Please select Quotation whose details need to pull") return cstr(self.doc.quotation_no) def get_contact_details(self): get_obj('Sales Common').get_contact_details(self,0) def get_comm_rate(self, sales_partner): return get_obj('Sales Common').get_comm_rate(sales_partner, self) def get_item_details(self, args=None): import json args = args and json.loads(args) or {} if args.get('item_code'): return get_obj('Sales Common').get_item_details(args, self) else: obj = get_obj('Sales Common') for doc in self.doclist: if doc.fields.get('item_code'): arg = {'item_code':doc.fields.get('item_code'), 'income_account':doc.fields.get('income_account'), 'cost_center': doc.fields.get('cost_center'), 'warehouse': doc.fields.get('warehouse')}; ret = obj.get_item_defaults(arg) for r in ret: if not doc.fields.get(r): doc.fields[r] = ret[r] def get_adj_percent(self, arg=''): get_obj('Sales Common').get_adj_percent(self) def get_available_qty(self,args): return get_obj('Sales Common').get_available_qty(eval(args)) def get_rate(self,arg): return get_obj('Sales Common').get_rate(arg) def load_default_taxes(self): self.doclist = get_obj('Sales Common').load_default_taxes(self) def get_other_charges(self): self.doclist = get_obj('Sales Common').get_other_charges(self) def get_tc_details(self): return get_obj('Sales Common').get_tc_details(self) def check_maintenance_schedule(self): nm = sql("select t1.name from `tabMaintenance Schedule` t1, `tabMaintenance Schedule Item` t2 where t2.parent=t1.name and t2.prevdoc_docname=%s and t1.docstatus=1", self.doc.name) nm = nm and nm[0][0] or '' if not nm: return 'No' def check_maintenance_visit(self): nm = sql("select t1.name from `tabMaintenance Visit` t1, `tabMaintenance Visit Purpose` t2 where t2.parent=t1.name and t2.prevdoc_docname=%s and t1.docstatus=1 and t1.completion_status='Fully Completed'", self.doc.name) nm = nm and nm[0][0] or '' if not nm: return 'No' def 
validate_fiscal_year(self): get_obj('Sales Common').validate_fiscal_year(self.doc.fiscal_year,self.doc.transaction_date,'Sales Order Date') def validate_reference_value(self): get_obj('DocType Mapper', 'Quotation-Sales Order', with_children = 1).validate_reference_value(self, self.doc.name) def validate_mandatory(self): # validate transaction date v/s delivery date if self.doc.delivery_date: if getdate(self.doc.transaction_date) > getdate(self.doc.delivery_date): msgprint("Expected Delivery Date cannot be before Sales Order Date") raise Exception # amendment date is necessary if document is amended if self.doc.amended_from and not self.doc.amendment_date: msgprint("Please Enter Amendment Date") raise Exception def validate_po(self): # validate p.o date v/s delivery date if self.doc.po_date and self.doc.delivery_date and getdate(self.doc.po_date) > getdate(self.doc.delivery_date): msgprint("Expected Delivery Date cannot be before Purchase Order Date") raise Exception if self.doc.po_no and self.doc.customer: so = webnotes.conn.sql("select name from `tabSales Order` \ where ifnull(po_no, '') = %s and name != %s and docstatus < 2\ and customer = %s", (self.doc.po_no, self.doc.name, self.doc.customer)) if so and so[0][0]: msgprint("""Another Sales Order (%s) exists against same PO No and Customer. 
Please be sure, you are not making duplicate entry.""" % so[0][0]) def validate_for_items(self): check_list, flag = [], 0 chk_dupl_itm = [] # Sales Order Items Validations for d in getlist(self.doclist, 'sales_order_details'): if self.doc.quotation_no and cstr(self.doc.quotation_no) == cstr(d.prevdoc_docname): flag = 1 if d.prevdoc_docname: if self.doc.quotation_date and getdate(self.doc.quotation_date) > getdate(self.doc.transaction_date): msgprint("Sales Order Date cannot be before Quotation Date") raise Exception # validates whether quotation no in doctype and in table is same if not cstr(d.prevdoc_docname) == cstr(self.doc.quotation_no): msgprint("Items in table does not belong to the Quotation No mentioned.") raise Exception # validates whether item is not entered twice e = [d.item_code, d.description, d.reserved_warehouse, d.prevdoc_docname or ''] f = [d.item_code, d.description] #check item is stock item st_itm = sql("select is_stock_item from `tabItem` where name = %s", d.item_code) if st_itm and st_itm[0][0] == 'Yes': if not d.reserved_warehouse: msgprint("""Please enter Reserved Warehouse for item %s as it is stock Item""" % d.item_code, raise_exception=1) if e in check_list: msgprint("Item %s has been entered twice." % d.item_code) else: check_list.append(e) elif st_itm and st_itm[0][0]== 'No': if f in chk_dupl_itm: msgprint("Item %s has been entered twice." 
% d.item_code) else: chk_dupl_itm.append(f) # used for production plan d.transaction_date = self.doc.transaction_date tot_avail_qty = sql("select projected_qty from `tabBin` \ where item_code = '%s' and warehouse = '%s'" % (d.item_code,d.reserved_warehouse)) d.projected_qty = tot_avail_qty and flt(tot_avail_qty[0][0]) or 0 if getlist(self.doclist, 'sales_order_details') and self.doc.quotation_no and flag == 0: msgprint("There are no items of the quotation selected", raise_exception=1) def validate_sales_mntc_quotation(self): for d in getlist(self.doclist, 'sales_order_details'): if d.prevdoc_docname: res = sql("select name from `tabQuotation` where name=%s and order_type = %s", (d.prevdoc_docname, self.doc.order_type)) if not res: msgprint("""Order Type (%s) should be same in Quotation: %s \ and current Sales Order""" % (self.doc.order_type, d.prevdoc_docname)) def validate_order_type(self): #validate delivery date if self.doc.order_type == 'Sales' and not self.doc.delivery_date: msgprint("Please enter 'Expected Delivery Date'") raise Exception self.validate_sales_mntc_quotation() def validate_proj_cust(self): if self.doc.project_name and self.doc.customer_name: res = sql("select name from `tabProject` where name = '%s' and (customer = '%s' or ifnull(customer,'')='')"%(self.doc.project_name, self.doc.customer)) if not res: msgprint("Customer - %s does not belong to project - %s. 
\n\nIf you want to use project for multiple customers then please make customer details blank in project - %s."%(self.doc.customer,self.doc.project_name,self.doc.project_name)) raise Exception def validate(self): super(DocType, self).validate() self.validate_fiscal_year() self.validate_order_type() self.validate_mandatory() self.validate_proj_cust() self.validate_po() #self.validate_reference_value() if self.doc.docstatus == 1: self.validate_for_items() sales_com_obj = get_obj(dt = 'Sales Common') sales_com_obj.check_active_sales_items(self) sales_com_obj.check_conversion_rate(self) sales_com_obj.validate_max_discount(self,'sales_order_details') sales_com_obj.get_allocated_sum(self) self.doclist = sales_com_obj.make_packing_list(self,'sales_order_details') if not self.doc.status: self.doc.status = "Draft" import utilities utilities.validate_status(self.doc.status, ["Draft", "Submitted", "Stopped", "Cancelled"]) if not self.doc.billing_status: self.doc.billing_status = 'Not Billed' if not self.doc.delivery_status: self.doc.delivery_status = 'Not Delivered' def check_prev_docstatus(self): for d in getlist(self.doclist, 'sales_order_details'): cancel_quo = sql("select name from `tabQuotation` where docstatus = 2 and name = '%s'" % d.prevdoc_docname) if cancel_quo: msgprint("Quotation :" + cstr(cancel_quo[0][0]) + " is already cancelled !") raise Exception , "Validation Error. 
" def update_enquiry_status(self, prevdoc, flag): enq = sql("select t2.prevdoc_docname from `tabQuotation` t1, `tabQuotation Item` t2 where t2.parent = t1.name and t1.name=%s", prevdoc) if enq: sql("update `tabOpportunity` set status = %s where name=%s",(flag,enq[0][0])) def update_prevdoc_status(self, flag): for d in getlist(self.doclist, 'sales_order_details'): if d.prevdoc_docname: if flag=='submit': sql("update `tabQuotation` set status = 'Order Confirmed' where name=%s",d.prevdoc_docname) #update enquiry self.update_enquiry_status(d.prevdoc_docname, 'Order Confirmed') elif flag == 'cancel': chk = sql("select t1.name from `tabSales Order` t1, `tabSales Order Item` t2 where t2.parent = t1.name and t2.prevdoc_docname=%s and t1.name!=%s and t1.docstatus=1", (d.prevdoc_docname,self.doc.name)) if not chk: sql("update `tabQuotation` set status = 'Submitted' where name=%s",d.prevdoc_docname) #update enquiry self.update_enquiry_status(d.prevdoc_docname, 'Quotation Sent') def on_submit(self): self.check_prev_docstatus() self.update_stock_ledger(update_stock = 1) get_obj('Sales Common').check_credit(self,self.doc.grand_total) get_obj('Authorization Control').validate_approving_authority(self.doc.doctype, self.doc.grand_total, self) self.update_prevdoc_status('submit') webnotes.conn.set(self.doc, 'status', 'Submitted') def on_cancel(self): # Cannot cancel stopped SO if self.doc.status == 'Stopped': msgprint("Sales Order : '%s' cannot be cancelled as it is Stopped. 
Unstop it for any further transactions" %(self.doc.name)) raise Exception self.check_nextdoc_docstatus() self.update_stock_ledger(update_stock = -1) self.update_prevdoc_status('cancel') webnotes.conn.set(self.doc, 'status', 'Cancelled') def check_nextdoc_docstatus(self): # Checks Delivery Note submit_dn = sql("select t1.name from `tabDelivery Note` t1,`tabDelivery Note Item` t2 where t1.name = t2.parent and t2.prevdoc_docname = '%s' and t1.docstatus = 1" % (self.doc.name)) if submit_dn: msgprint("Delivery Note : " + cstr(submit_dn[0][0]) + " has been submitted against " + cstr(self.doc.doctype) + ". Please cancel Delivery Note : " + cstr(submit_dn[0][0]) + " first and then cancel "+ cstr(self.doc.doctype), raise_exception = 1) # Checks Sales Invoice submit_rv = sql("select t1.name from `tabSales Invoice` t1,`tabSales Invoice Item` t2 where t1.name = t2.parent and t2.sales_order = '%s' and t1.docstatus = 1" % (self.doc.name)) if submit_rv: msgprint("Sales Invoice : " + cstr(submit_rv[0][0]) + " has already been submitted against " +cstr(self.doc.doctype)+ ". Please cancel Sales Invoice : "+ cstr(submit_rv[0][0]) + " first and then cancel "+ cstr(self.doc.doctype), raise_exception = 1) #check maintenance schedule submit_ms = sql("select t1.name from `tabMaintenance Schedule` t1, `tabMaintenance Schedule Item` t2 where t2.parent=t1.name and t2.prevdoc_docname = %s and t1.docstatus = 1",self.doc.name) if submit_ms: msgprint("Maintenance Schedule : " + cstr(submit_ms[0][0]) + " has already been submitted against " +cstr(self.doc.doctype)+ ". 
Please cancel Maintenance Schedule : "+ cstr(submit_ms[0][0]) + " first and then cancel "+ cstr(self.doc.doctype), raise_exception = 1) # check maintenance visit submit_mv = sql("select t1.name from `tabMaintenance Visit` t1, `tabMaintenance Visit Purpose` t2 where t2.parent=t1.name and t2.prevdoc_docname = %s and t1.docstatus = 1",self.doc.name) if submit_mv: msgprint("Maintenance Visit : " + cstr(submit_mv[0][0]) + " has already been submitted against " +cstr(self.doc.doctype)+ ". Please cancel Maintenance Visit : " + cstr(submit_mv[0][0]) + " first and then cancel "+ cstr(self.doc.doctype), raise_exception = 1) # check production order pro_order = sql("""select name from `tabProduction Order` where sales_order = %s and docstatus = 1""", self.doc.name) if pro_order: msgprint("""Production Order: %s exists against this sales order. Please cancel production order first and then cancel this sales order""" % pro_order[0][0], raise_exception=1) def check_modified_date(self): mod_db = sql("select modified from `tabSales Order` where name = '%s'" % self.doc.name) date_diff = sql("select TIMEDIFF('%s', '%s')" % ( mod_db[0][0],cstr(self.doc.modified))) if date_diff and date_diff[0][0]: msgprint("%s: %s has been modified after you have opened. Please Refresh" % (self.doc.doctype, self.doc.name), raise_exception=1) def stop_sales_order(self): self.check_modified_date() self.update_stock_ledger(update_stock = -1,is_stopped = 1) webnotes.conn.set(self.doc, 'status', 'Stopped') msgprint("""%s: %s has been Stopped. 
To make transactions against this Sales Order you need to Unstop it.""" % (self.doc.doctype, self.doc.name)) def unstop_sales_order(self): self.check_modified_date() self.update_stock_ledger(update_stock = 1,is_stopped = 1) webnotes.conn.set(self.doc, 'status', 'Submitted') msgprint("%s: %s has been Unstopped" % (self.doc.doctype, self.doc.name)) def update_stock_ledger(self, update_stock, is_stopped = 0): for d in self.get_item_list(is_stopped): if webnotes.conn.get_value("Item", d['item_code'], "is_stock_item") == "Yes": args = { "item_code": d['item_code'], "reserved_qty": flt(update_stock) * flt(d['reserved_qty']), "posting_date": self.doc.transaction_date, "voucher_type": self.doc.doctype, "voucher_no": self.doc.name, "is_amended": self.doc.amended_from and 'Yes' or 'No' } get_obj('Warehouse', d['reserved_warehouse']).update_bin(args) def get_item_list(self, is_stopped): return get_obj('Sales Common').get_item_list( self, is_stopped) def on_update(self): pass @webnotes.whitelist() def get_orders(): # find customer id customer = webnotes.conn.get_value("Contact", {"email_id": webnotes.session.user}, "customer") if customer: orders = webnotes.conn.sql("""select name, creation, currency from `tabSales Order` where customer=%s and docstatus=1 order by creation desc limit 20 """, customer, as_dict=1) for order in orders: order.items = webnotes.conn.sql("""select item_name, qty, export_rate, export_amount, delivered_qty, stock_uom from `tabSales Order Item` where parent=%s order by idx""", order.name, as_dict=1) return orders else: return [] def get_website_args(): customer = webnotes.conn.get_value("Contact", {"email_id": webnotes.session.user}, "customer") bean = webnotes.bean("Sales Order", webnotes.form_dict.name) if bean.doc.customer != customer: return { "doc": {"name": "Not Allowed"} } else: return { "doc": bean.doc, "doclist": bean.doclist, "webnotes": webnotes, "utils": webnotes.utils } def get_currency_and_number_format(): return { "global_number_format": 
webnotes.conn.get_default("number_format") or "#,###.##", "currency": webnotes.conn.get_default("currency"), "currency_symbols": json.dumps(dict(webnotes.conn.sql("""select name, symbol from tabCurrency where ifnull(enabled,0)=1"""))) }
agpl-3.0
infobloxopen/infoblox-netmri
infoblox_netmri/api/broker/v2_10_0/spm_end_hosts_not_present_grid_broker.py
14
6629
from ..broker import Broker class SpmEndHostsNotPresentGridBroker(Broker): controller = "spm_end_hosts_not_present_grids" def index(self, **kwargs): """Lists the available spm end hosts not present grids. Any of the inputs listed may be be used to narrow the list; other inputs will be ignored. Of the various ways to query lists, using this method is most efficient. **Inputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` today :param starttime: The data returned will represent the spm end hosts not present grids with this date and time as lower boundary. If omitted, the result will indicate the most recently collected data. :type starttime: DateTime | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` tomorrow :param endtime: The data returned will represent the spm end hosts not present grids with this date and time as upper boundary. If omitted, the result will indicate the most recently collected data. :type endtime: DateTime | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` 0 :param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information. :type start: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` 1000 :param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000. :type limit: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` id :param sort: The data field(s) to use for sorting the output. Default is id. 
Valid values are id, VirtualNetworkID, NeighborDeviceID, NeighborType, NeighborIPDotted, NeighborIPNumeric, Network, NeighborName, NeighborMAC, NeighborIfIndex, OrgUniqueId, NetBIOSName, FirstSeen, LastSeen, DeviceID, DeviceType, DeviceName, DeviceIPDotted, DeviceIPNumeric, InterfaceID, Interface, VirtualNetworkMemberName, ifIndex, ifDescr, ifAlias, ifMAC, ifAdminStatus, ifOperStatus, ifSpeed, ifAdminDuplex, ifDuplex, VlanIndex, VlanName, VlanID, VTPDomain, TenantDn, BridgeDomainDn, EPGDn, ApName, ApIpAddress, ApSsid, Packets, Errors, ErrorPercentage. :type sort: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` asc :param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'. :type dir: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param select: The list of attributes to return for each SpmEndHostsNotPresentGrid. Valid values are id, VirtualNetworkID, NeighborDeviceID, NeighborType, NeighborIPDotted, NeighborIPNumeric, Network, NeighborName, NeighborMAC, NeighborIfIndex, OrgUniqueId, NetBIOSName, FirstSeen, LastSeen, DeviceID, DeviceType, DeviceName, DeviceIPDotted, DeviceIPNumeric, InterfaceID, Interface, VirtualNetworkMemberName, ifIndex, ifDescr, ifAlias, ifMAC, ifAdminStatus, ifOperStatus, ifSpeed, ifAdminDuplex, ifDuplex, VlanIndex, VlanName, VlanID, VTPDomain, TenantDn, BridgeDomainDn, EPGDn, ApName, ApIpAddress, ApSsid, Packets, Errors, ErrorPercentage. If empty or omitted, all attributes will be returned. :type select: Array | ``api version min:`` 2.8 | ``api version max:`` None | ``required:`` False | ``default:`` None :param goto_field: The field name for NIOS GOTO that is used for locating a row position of records. 
:type goto_field: String | ``api version min:`` 2.8 | ``api version max:`` None | ``required:`` False | ``default:`` None :param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records. :type goto_value: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` False :param refresh_ind: If true, the grid will be regenerated, rather than using any available cached grid data. :type refresh_ind: Boolean | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` False :param async_ind: If true and if grid data is not yet available, it will return immediately with 202 status. User should retry again later. :type async_ind: Boolean **Outputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return spm_end_hosts_not_present_grids: An array of the SpmEndHostsNotPresentGrid objects that match the specified input criteria. :rtype spm_end_hosts_not_present_grids: Array of SpmEndHostsNotPresentGrid | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return summary: A summary of calculation of selected columns, when applicable. :rtype summary: Hash """ return self.api_list_request(self._get_method_fullname("index"), kwargs) def data_partitions(self, **kwargs): """Returns data partitions with their statuses for specified grid. 0 - data not available for that date, 1 - data available but must be prepared, 2 - data prepared and immediately available **Inputs** **Outputs** """ return self.api_request(self._get_method_fullname("data_partitions"), kwargs)
apache-2.0
jinnykoo/christmas
tests/unit/shipping/method_tests.py
10
2641
from decimal import Decimal as D from django.test import TestCase from nose.plugins.attrib import attr import mock from oscar.apps.shipping import methods from oscar.apps.basket.models import Basket @attr('shipping') class TestFreeShipppingForEmptyBasket(TestCase): def setUp(self): self.method = methods.Free() self.basket = Basket() self.charge = self.method.calculate(self.basket) def test_is_free(self): self.assertEqual(D('0.00'), self.charge.incl_tax) self.assertEqual(D('0.00'), self.charge.excl_tax) def test_has_tax_known(self): self.assertTrue(self.charge.is_tax_known) def test_has_same_currency_as_basket(self): self.assertEqual(self.basket.currency, self.charge.currency) @attr('shipping') class TestFreeShipppingForNonEmptyBasket(TestCase): def setUp(self): self.method = methods.Free() self.basket = mock.Mock() self.basket.num_items = 1 self.charge = self.method.calculate(self.basket) def test_is_free(self): self.assertEqual(D('0.00'), self.charge.incl_tax) self.assertEqual(D('0.00'), self.charge.excl_tax) @attr('shipping') class TestNoShippingRequired(TestCase): def setUp(self): self.method = methods.NoShippingRequired() basket = Basket() self.charge = self.method.calculate(basket) def test_is_free_for_empty_basket(self): self.assertEqual(D('0.00'), self.charge.incl_tax) self.assertEqual(D('0.00'), self.charge.excl_tax) def test_has_a_different_code_to_free(self): self.assertTrue(methods.NoShippingRequired.code != methods.Free.code) @attr('shipping') class TestFixedPriceShippingWithoutTax(TestCase): def setUp(self): self.method = methods.FixedPrice(D('10.00')) basket = Basket() self.charge = self.method.calculate(basket) def test_has_correct_charge(self): self.assertEqual(D('10.00'), self.charge.excl_tax) def test_does_not_include_tax(self): self.assertFalse(self.charge.is_tax_known) @attr('shipping') class TestFixedPriceShippingWithTax(TestCase): def setUp(self): self.method = methods.FixedPrice( charge_excl_tax=D('10.00'), charge_incl_tax=D('12.00')) basket 
= Basket() self.charge = self.method.calculate(basket) def test_has_correct_charge(self): self.assertEqual(D('10.00'), self.charge.excl_tax) self.assertEqual(D('12.00'), self.charge.incl_tax) def test_does_include_tax(self): self.assertTrue(self.charge.is_tax_known)
bsd-3-clause
kaiyuanl/gem5
ext/ply/test/yacc_prec1.py
164
1556
# ----------------------------------------------------------------------------- # yacc_prec1.py # # Tests case where precedence specifier doesn't match up to terminals # ----------------------------------------------------------------------------- import sys if ".." not in sys.path: sys.path.insert(0,"..") import ply.yacc as yacc from calclex import tokens # Parsing rules precedence = ( ('left','+','-'), ('left','*','/'), ('right','UMINUS'), ) # dictionary of names names = { } def p_statement_assign(t): 'statement : NAME EQUALS expression' names[t[1]] = t[3] def p_statement_expr(t): 'statement : expression' print(t[1]) def p_expression_binop(t): '''expression : expression PLUS expression | expression MINUS expression | expression TIMES expression | expression DIVIDE expression''' if t[2] == '+' : t[0] = t[1] + t[3] elif t[2] == '-': t[0] = t[1] - t[3] elif t[2] == '*': t[0] = t[1] * t[3] elif t[2] == '/': t[0] = t[1] / t[3] def p_expression_uminus(t): 'expression : MINUS expression %prec UMINUS' t[0] = -t[2] def p_expression_group(t): 'expression : LPAREN expression RPAREN' t[0] = t[2] def p_expression_number(t): 'expression : NUMBER' t[0] = t[1] def p_expression_name(t): 'expression : NAME' try: t[0] = names[t[1]] except LookupError: print("Undefined name '%s'" % t[1]) t[0] = 0 def p_error(t): print("Syntax error at '%s'" % t.value) yacc.yacc()
bsd-3-clause
xsynergy510x/android_external_chromium_org
mojo/tools/generate_java_callback_interfaces.py
32
1702
"""Generate the org.chromium.mojo.bindings.Callbacks interface""" import argparse import sys CALLBACK_TEMPLATE = (""" /** * A generic %d-argument callback. * * %s */ interface Callback%d<%s> { /** * Call the callback. */ public void call(%s); } """) INTERFACE_TEMPLATE = ( """// Copyright 2014 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // This file was generated using // mojo/tools/generate_java_callback_interfaces.py package org.chromium.mojo.bindings; /** * Contains a generic interface for callbacks. */ public interface Callbacks { %s }""") def GenerateCallback(nb_args): params = '\n * '.join( ['@param <T%d> the type of argument %d.' % (i+1, i+1) for i in xrange(nb_args)]) template_parameters = ', '.join(['T%d' % (i+1) for i in xrange(nb_args)]) callback_parameters = ', '.join(['T%d arg%d' % ((i+1), (i+1)) for i in xrange(nb_args)]) return CALLBACK_TEMPLATE % (nb_args, params, nb_args, template_parameters, callback_parameters) def main(): parser = argparse.ArgumentParser( description="Generate org.chromium.mojo.bindings.Callbacks") parser.add_argument("max_args", nargs=1, type=int, help="maximal number of arguments to generate callbacks for") args = parser.parse_args() max_args = args.max_args[0] print INTERFACE_TEMPLATE % ''.join([GenerateCallback(i+1) for i in xrange(max_args)]) return 0 if __name__ == "__main__": sys.exit(main())
bsd-3-clause
jenalgit/django
django/apps/registry.py
111
15215
import sys import threading import warnings from collections import Counter, OrderedDict, defaultdict from functools import partial from django.core.exceptions import AppRegistryNotReady, ImproperlyConfigured from django.utils import lru_cache from .config import AppConfig class Apps(object): """ A registry that stores the configuration of installed applications. It also keeps track of models eg. to provide reverse-relations. """ def __init__(self, installed_apps=()): # installed_apps is set to None when creating the master registry # because it cannot be populated at that point. Other registries must # provide a list of installed apps and are populated immediately. if installed_apps is None and hasattr(sys.modules[__name__], 'apps'): raise RuntimeError("You must supply an installed_apps argument.") # Mapping of app labels => model names => model classes. Every time a # model is imported, ModelBase.__new__ calls apps.register_model which # creates an entry in all_models. All imported models are registered, # regardless of whether they're defined in an installed application # and whether the registry has been populated. Since it isn't possible # to reimport a module safely (it could reexecute initialization code) # all_models is never overridden or reset. self.all_models = defaultdict(OrderedDict) # Mapping of labels to AppConfig instances for installed apps. self.app_configs = OrderedDict() # Stack of app_configs. Used to store the current state in # set_available_apps and set_installed_apps. self.stored_app_configs = [] # Whether the registry is populated. self.apps_ready = self.models_ready = self.ready = False # Lock for thread-safe population. self._lock = threading.Lock() # Maps ("app_label", "modelname") tuples to lists of functions to be # called when the corresponding model is ready. Used by this class's # `lazy_model_operation()` and `do_pending_operations()` methods. 
self._pending_operations = defaultdict(list) # Populate apps and models, unless it's the master registry. if installed_apps is not None: self.populate(installed_apps) def populate(self, installed_apps=None): """ Loads application configurations and models. This method imports each application module and then each model module. It is thread safe and idempotent, but not reentrant. """ if self.ready: return # populate() might be called by two threads in parallel on servers # that create threads before initializing the WSGI callable. with self._lock: if self.ready: return # app_config should be pristine, otherwise the code below won't # guarantee that the order matches the order in INSTALLED_APPS. if self.app_configs: raise RuntimeError("populate() isn't reentrant") # Load app configs and app modules. for entry in installed_apps: if isinstance(entry, AppConfig): app_config = entry else: app_config = AppConfig.create(entry) if app_config.label in self.app_configs: raise ImproperlyConfigured( "Application labels aren't unique, " "duplicates: %s" % app_config.label) self.app_configs[app_config.label] = app_config # Check for duplicate app names. counts = Counter( app_config.name for app_config in self.app_configs.values()) duplicates = [ name for name, count in counts.most_common() if count > 1] if duplicates: raise ImproperlyConfigured( "Application names aren't unique, " "duplicates: %s" % ", ".join(duplicates)) self.apps_ready = True # Load models. for app_config in self.app_configs.values(): all_models = self.all_models[app_config.label] app_config.import_models(all_models) self.clear_cache() self.models_ready = True for app_config in self.get_app_configs(): app_config.ready() self.ready = True def check_apps_ready(self): """ Raises an exception if all apps haven't been imported yet. """ if not self.apps_ready: raise AppRegistryNotReady("Apps aren't loaded yet.") def check_models_ready(self): """ Raises an exception if all models haven't been imported yet. 
""" if not self.models_ready: raise AppRegistryNotReady("Models aren't loaded yet.") def get_app_configs(self): """ Imports applications and returns an iterable of app configs. """ self.check_apps_ready() return self.app_configs.values() def get_app_config(self, app_label): """ Imports applications and returns an app config for the given label. Raises LookupError if no application exists with this label. """ self.check_apps_ready() try: return self.app_configs[app_label] except KeyError: message = "No installed app with label '%s'." % app_label for app_config in self.get_app_configs(): if app_config.name == app_label: message += " Did you mean '%s'?" % app_config.label break raise LookupError(message) # This method is performance-critical at least for Django's test suite. @lru_cache.lru_cache(maxsize=None) def get_models(self, include_auto_created=False, include_deferred=False, include_swapped=False): """ Returns a list of all installed models. By default, the following models aren't included: - auto-created models for many-to-many relations without an explicit intermediate table, - models created to satisfy deferred attribute queries, - models that have been swapped out. Set the corresponding keyword argument to True to include such models. """ self.check_models_ready() result = [] for app_config in self.app_configs.values(): result.extend(list(app_config.get_models( include_auto_created, include_deferred, include_swapped))) return result def get_model(self, app_label, model_name=None): """ Returns the model matching the given app_label and model_name. As a shortcut, this function also accepts a single argument in the form <app_label>.<model_name>. model_name is case-insensitive. Raises LookupError if no application exists with this label, or no model exists with this name in the application. Raises ValueError if called with a single argument that doesn't contain exactly one dot. 
""" self.check_models_ready() if model_name is None: app_label, model_name = app_label.split('.') return self.get_app_config(app_label).get_model(model_name.lower()) def register_model(self, app_label, model): # Since this method is called when models are imported, it cannot # perform imports because of the risk of import loops. It mustn't # call get_app_config(). model_name = model._meta.model_name app_models = self.all_models[app_label] if model_name in app_models: if (model.__name__ == app_models[model_name].__name__ and model.__module__ == app_models[model_name].__module__): warnings.warn( "Model '%s.%s' was already registered. " "Reloading models is not advised as it can lead to inconsistencies, " "most notably with related models." % (app_label, model_name), RuntimeWarning, stacklevel=2) else: raise RuntimeError( "Conflicting '%s' models in application '%s': %s and %s." % (model_name, app_label, app_models[model_name], model)) app_models[model_name] = model self.do_pending_operations(model) self.clear_cache() def is_installed(self, app_name): """ Checks whether an application with this name exists in the registry. app_name is the full name of the app eg. 'django.contrib.admin'. """ self.check_apps_ready() return any(ac.name == app_name for ac in self.app_configs.values()) def get_containing_app_config(self, object_name): """ Look for an app config containing a given object. object_name is the dotted Python path to the object. Returns the app config for the inner application in case of nesting. Returns None if the object isn't in any registered app config. 
""" self.check_apps_ready() candidates = [] for app_config in self.app_configs.values(): if object_name.startswith(app_config.name): subpath = object_name[len(app_config.name):] if subpath == '' or subpath[0] == '.': candidates.append(app_config) if candidates: return sorted(candidates, key=lambda ac: -len(ac.name))[0] def get_registered_model(self, app_label, model_name): """ Similar to get_model(), but doesn't require that an app exists with the given app_label. It's safe to call this method at import time, even while the registry is being populated. """ model = self.all_models[app_label].get(model_name.lower()) if model is None: raise LookupError( "Model '%s.%s' not registered." % (app_label, model_name)) return model def set_available_apps(self, available): """ Restricts the set of installed apps used by get_app_config[s]. available must be an iterable of application names. set_available_apps() must be balanced with unset_available_apps(). Primarily used for performance optimization in TransactionTestCase. This method is safe is the sense that it doesn't trigger any imports. """ available = set(available) installed = set(app_config.name for app_config in self.get_app_configs()) if not available.issubset(installed): raise ValueError("Available apps isn't a subset of installed " "apps, extra apps: %s" % ", ".join(available - installed)) self.stored_app_configs.append(self.app_configs) self.app_configs = OrderedDict( (label, app_config) for label, app_config in self.app_configs.items() if app_config.name in available) self.clear_cache() def unset_available_apps(self): """ Cancels a previous call to set_available_apps(). """ self.app_configs = self.stored_app_configs.pop() self.clear_cache() def set_installed_apps(self, installed): """ Enables a different set of installed apps for get_app_config[s]. installed must be an iterable in the same format as INSTALLED_APPS. 
set_installed_apps() must be balanced with unset_installed_apps(), even if it exits with an exception. Primarily used as a receiver of the setting_changed signal in tests. This method may trigger new imports, which may add new models to the registry of all imported models. They will stay in the registry even after unset_installed_apps(). Since it isn't possible to replay imports safely (eg. that could lead to registering listeners twice), models are registered when they're imported and never removed. """ if not self.ready: raise AppRegistryNotReady("App registry isn't ready yet.") self.stored_app_configs.append(self.app_configs) self.app_configs = OrderedDict() self.apps_ready = self.models_ready = self.ready = False self.clear_cache() self.populate(installed) def unset_installed_apps(self): """ Cancels a previous call to set_installed_apps(). """ self.app_configs = self.stored_app_configs.pop() self.apps_ready = self.models_ready = self.ready = True self.clear_cache() def clear_cache(self): """ Clears all internal caches, for methods that alter the app registry. This is mostly used in tests. """ # Call expire cache on each model. This will purge # the relation tree and the fields cache. self.get_models.cache_clear() if self.ready: # Circumvent self.get_models() to prevent that the cache is refilled. # This particularly prevents that an empty value is cached while cloning. for app_config in self.app_configs.values(): for model in app_config.get_models(include_auto_created=True): model._meta._expire_cache() def lazy_model_operation(self, function, *model_keys): """ Take a function and a number of ("app_label", "modelname") tuples, and when all the corresponding models have been imported and registered, call the function with the model classes as its arguments. The function passed to this method must accept exactly n models as arguments, where n=len(model_keys). 
""" # If this function depends on more than one model, we recursively turn # it into a chain of functions that accept a single model argument and # pass each in turn to lazy_model_operation. model_key, more_models = model_keys[0], model_keys[1:] if more_models: supplied_fn = function def function(model): next_function = partial(supplied_fn, model) self.lazy_model_operation(next_function, *more_models) # If the model is already loaded, pass it to the function immediately. # Otherwise, delay execution until the class is prepared. try: model_class = self.get_registered_model(*model_key) except LookupError: self._pending_operations[model_key].append(function) else: function(model_class) def do_pending_operations(self, model): """ Take a newly-prepared model and pass it to each function waiting for it. This is called at the very end of `Apps.register_model()`. """ key = model._meta.app_label, model._meta.model_name for function in self._pending_operations.pop(key, []): function(model) apps = Apps(installed_apps=None)
bsd-3-clause
orgito/ansible
lib/ansible/modules/cloud/vmware/vmware_vm_vm_drs_rule.py
55
12538
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright: (c) 2018, Abhijeet Kasurde <[email protected]> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = { 'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community' } DOCUMENTATION = ''' --- module: vmware_vm_vm_drs_rule short_description: Configure VMware DRS Affinity rule for virtual machine in given cluster description: - This module can be used to configure VMware DRS Affinity rule for virtual machine in given cluster. version_added: 2.5 author: - Abhijeet Kasurde (@Akasurde) notes: - Tested on vSphere 6.5 requirements: - "python >= 2.6" - PyVmomi options: cluster_name: description: - Desired cluster name where virtual machines are present for the DRS rule. required: True vms: description: - List of virtual machines name for which DRS rule needs to be applied. - Required if C(state) is set to C(present). drs_rule_name: description: - The name of the DRS rule to manage. required: True enabled: description: - If set to C(True), the DRS rule will be enabled. - Effective only if C(state) is set to C(present). default: False type: bool mandatory: description: - If set to C(True), the DRS rule will be mandatory. - Effective only if C(state) is set to C(present). default: False type: bool affinity_rule: description: - If set to C(True), the DRS rule will be an Affinity rule. - If set to C(False), the DRS rule will be an Anti-Affinity rule. - Effective only if C(state) is set to C(present). default: True type: bool state: description: - If set to C(present), then the DRS rule is created if not present. - If set to C(present), then the DRS rule is deleted and created if present already. - If set to C(absent), then the DRS rule is deleted if present. 
required: False default: present choices: [ present, absent ] extends_documentation_fragment: vmware.documentation ''' EXAMPLES = r''' - name: Create DRS Affinity Rule for VM-VM vmware_vm_vm_drs_rule: hostname: "{{ esxi_server }}" username: "{{ esxi_username }}" password: "{{ esxi_password }}" cluster_name: "{{ cluster_name }}" validate_certs: no vms: - vm1 - vm2 drs_rule_name: vm1-vm2-affinity-rule-001 enabled: True mandatory: True affinity_rule: True delegate_to: localhost - name: Create DRS Anti-Affinity Rule for VM-VM vmware_vm_vm_drs_rule: hostname: "{{ esxi_server }}" username: "{{ esxi_username }}" password: "{{ esxi_password }}" cluster_name: "{{ cluster_name }}" validate_certs: no vms: - vm1 - vm2 drs_rule_name: vm1-vm2-affinity-rule-001 enabled: True mandatory: True affinity_rule: False delegate_to: localhost - name: Delete DRS Affinity Rule for VM-VM vmware_vm_vm_drs_rule: hostname: "{{ esxi_server }}" username: "{{ esxi_username }}" password: "{{ esxi_password }}" cluster_name: "{{ cluster_name }}" validate_certs: no drs_rule_name: vm1-vm2-affinity-rule-001 state: absent delegate_to: localhost ''' RETURN = r''' result: description: metadata about DRS VM and VM rule returned: when state is present type: dict sample: { "rule_enabled": false, "rule_key": 20, "rule_mandatory": true, "rule_name": "drs_rule_0014", "rule_uuid": "525f3bc0-253f-825a-418e-2ec93bffc9ae", "rule_vms": [ "VM_65", "VM_146" ] } ''' try: from pyVmomi import vim, vmodl except ImportError: pass from ansible.module_utils.basic import AnsibleModule from ansible.module_utils._text import to_native from ansible.module_utils.vmware import (PyVmomi, vmware_argument_spec, wait_for_task, find_vm_by_id, find_cluster_by_name) class VmwareDrs(PyVmomi): def __init__(self, module): super(VmwareDrs, self).__init__(module) self.vm_list = module.params['vms'] self.cluster_name = module.params['cluster_name'] self.rule_name = module.params['drs_rule_name'] self.enabled = module.params['enabled'] 
self.mandatory = module.params['mandatory'] self.affinity_rule = module.params['affinity_rule'] self.state = module.params['state'] # Sanity check for cluster self.cluster_obj = find_cluster_by_name(content=self.content, cluster_name=self.cluster_name) if self.cluster_obj is None: self.module.fail_json(msg="Failed to find the cluster %s" % self.cluster_name) # Sanity check for virtual machines self.vm_obj_list = [] if self.state == 'present': # Get list of VMs only if state is present self.vm_obj_list = self.get_all_vms_info() # Getter def get_all_vms_info(self, vms_list=None): """ Function to get all VM objects using name from given cluster Args: vms_list: List of VM names Returns: List of VM managed objects """ vm_obj_list = [] if vms_list is None: vms_list = self.vm_list for vm_name in vms_list: vm_obj = find_vm_by_id(content=self.content, vm_id=vm_name, vm_id_type='vm_name', cluster=self.cluster_obj) if vm_obj is None: self.module.fail_json(msg="Failed to find the virtual machine %s " "in given cluster %s" % (vm_name, self.cluster_name)) vm_obj_list.append(vm_obj) return vm_obj_list def get_rule_key_by_name(self, cluster_obj=None, rule_name=None): """ Function to get a specific DRS rule key by name Args: rule_name: Name of rule cluster_obj: Cluster managed object Returns: Rule Object if found or None """ if cluster_obj is None: cluster_obj = self.cluster_obj if rule_name: rules_list = [rule for rule in cluster_obj.configuration.rule if rule.name == rule_name] if rules_list: return rules_list[0] # No rule found return None @staticmethod def normalize_rule_spec(rule_obj=None): """ Function to return human readable rule spec Args: rule_obj: Rule managed object Returns: Dictionary with Rule info """ if rule_obj is None: return {} return dict(rule_key=rule_obj.key, rule_enabled=rule_obj.enabled, rule_name=rule_obj.name, rule_mandatory=rule_obj.mandatory, rule_uuid=rule_obj.ruleUuid, rule_vms=[vm.name for vm in rule_obj.vm], rule_affinity=True if isinstance(rule_obj, 
vim.cluster.AffinityRuleSpec) else False, ) # Create def create(self): """ Function to create a DRS rule if rule does not exist """ rule_obj = self.get_rule_key_by_name(rule_name=self.rule_name) if rule_obj is not None: # Rule already exists, remove and create again # Cluster does not allow editing existing rule existing_rule = self.normalize_rule_spec(rule_obj=rule_obj) if ((sorted(existing_rule['rule_vms']) == sorted(self.vm_list)) and (existing_rule['rule_enabled'] == self.enabled) and (existing_rule['rule_mandatory'] == self.mandatory) and (existing_rule['rule_affinity'] == self.affinity_rule)): # Rule is same as existing rule, evacuate self.module.exit_json(changed=False, result=existing_rule) # Delete existing rule as we cannot edit it changed, result = self.delete(rule_name=self.rule_name) if not changed: self.module.fail_json(msg="Failed to delete while updating rule %s due to %s" % (self.rule_name, result)) changed, result = self.create_rule_spec() return changed, result def create_rule_spec(self): """ Function to create DRS rule """ changed = False if self.affinity_rule: rule = vim.cluster.AffinityRuleSpec() else: rule = vim.cluster.AntiAffinityRuleSpec() rule.vm = self.vm_obj_list rule.enabled = self.enabled rule.mandatory = self.mandatory rule.name = self.rule_name rule_spec = vim.cluster.RuleSpec(info=rule, operation='add') config_spec = vim.cluster.ConfigSpecEx(rulesSpec=[rule_spec]) try: task = self.cluster_obj.ReconfigureEx(config_spec, modify=True) changed, result = wait_for_task(task) except vmodl.fault.InvalidRequest as e: result = to_native(e.msg) except Exception as e: result = to_native(e) if changed: rule_obj = self.get_rule_key_by_name(rule_name=self.rule_name) result = self.normalize_rule_spec(rule_obj) return changed, result # Delete def delete(self, rule_name=None): """ Function to delete DRS rule using name """ changed = False if rule_name is None: rule_name = self.rule_name rule = self.get_rule_key_by_name(rule_name=rule_name) if rule 
is not None: rule_key = int(rule.key) rule_spec = vim.cluster.RuleSpec(removeKey=rule_key, operation='remove') config_spec = vim.cluster.ConfigSpecEx(rulesSpec=[rule_spec]) try: task = self.cluster_obj.ReconfigureEx(config_spec, modify=True) changed, result = wait_for_task(task) except vmodl.fault.InvalidRequest as e: result = to_native(e.msg) except Exception as e: result = to_native(e) else: result = 'No rule named %s exists' % self.rule_name return changed, result def main(): argument_spec = vmware_argument_spec() argument_spec.update(dict( state=dict(type='str', default='present', choices=['absent', 'present']), vms=dict(type='list'), cluster_name=dict(type='str', required=True), drs_rule_name=dict(type='str', required=True), enabled=dict(type='bool', default=False), mandatory=dict(type='bool', default=False), affinity_rule=dict(type='bool', default=True), ) ) required_if = [ ['state', 'present', ['vms']], ] module = AnsibleModule(argument_spec=argument_spec, required_if=required_if, supports_check_mode=True) results = dict(failed=False, changed=False) state = module.params['state'] vm_drs = VmwareDrs(module) if state == 'present': # Add Rule if module.check_mode: results['changed'] = True module.exit_json(**results) changed, result = vm_drs.create() if changed: results['changed'] = changed else: results['failed'] = True results['msg'] = "Failed to create DRS rule %s" % vm_drs.rule_name results['result'] = result elif state == 'absent': # Delete Rule if module.check_mode: results['changed'] = True module.exit_json(**results) changed, result = vm_drs.delete() if changed: results['changed'] = changed results['msg'] = "DRS rule %s deleted successfully." 
% vm_drs.rule_name else: if "No rule named" in result: results['msg'] = result module.exit_json(**results) results['failed'] = True results['msg'] = "Failed to delete DRS rule %s" % vm_drs.rule_name results['result'] = result if results['changed']: module.exit_json(**results) if results['failed']: module.fail_json(**results) if __name__ == '__main__': main()
gpl-3.0
walterreade/scikit-learn
examples/linear_model/plot_lasso_coordinate_descent_path.py
42
2944
""" ===================== Lasso and Elastic Net ===================== Lasso and elastic net (L1 and L2 penalisation) implemented using a coordinate descent. The coefficients can be forced to be positive. """ print(__doc__) # Author: Alexandre Gramfort <[email protected]> # License: BSD 3 clause from itertools import cycle import numpy as np import matplotlib.pyplot as plt from sklearn.linear_model import lasso_path, enet_path from sklearn import datasets diabetes = datasets.load_diabetes() X = diabetes.data y = diabetes.target X /= X.std(axis=0) # Standardize data (easier to set the l1_ratio parameter) # Compute paths eps = 5e-3 # the smaller it is the longer is the path print("Computing regularization path using the lasso...") alphas_lasso, coefs_lasso, _ = lasso_path(X, y, eps, fit_intercept=False) print("Computing regularization path using the positive lasso...") alphas_positive_lasso, coefs_positive_lasso, _ = lasso_path( X, y, eps, positive=True, fit_intercept=False) print("Computing regularization path using the elastic net...") alphas_enet, coefs_enet, _ = enet_path( X, y, eps=eps, l1_ratio=0.8, fit_intercept=False) print("Computing regularization path using the positve elastic net...") alphas_positive_enet, coefs_positive_enet, _ = enet_path( X, y, eps=eps, l1_ratio=0.8, positive=True, fit_intercept=False) # Display results plt.figure(1) ax = plt.gca() colors = cycle(['b', 'r', 'g', 'c', 'k']) neg_log_alphas_lasso = -np.log10(alphas_lasso) neg_log_alphas_enet = -np.log10(alphas_enet) for coef_l, coef_e, c in zip(coefs_lasso, coefs_enet, colors): l1 = plt.plot(neg_log_alphas_lasso, coef_l, c=c) l2 = plt.plot(neg_log_alphas_enet, coef_e, linestyle='--', c=c) plt.xlabel('-Log(alpha)') plt.ylabel('coefficients') plt.title('Lasso and Elastic-Net Paths') plt.legend((l1[-1], l2[-1]), ('Lasso', 'Elastic-Net'), loc='lower left') plt.axis('tight') plt.figure(2) ax = plt.gca() neg_log_alphas_positive_lasso = -np.log10(alphas_positive_lasso) for coef_l, coef_pl, c in 
zip(coefs_lasso, coefs_positive_lasso, colors): l1 = plt.plot(neg_log_alphas_lasso, coef_l, c=c) l2 = plt.plot(neg_log_alphas_positive_lasso, coef_pl, linestyle='--', c=c) plt.xlabel('-Log(alpha)') plt.ylabel('coefficients') plt.title('Lasso and positive Lasso') plt.legend((l1[-1], l2[-1]), ('Lasso', 'positive Lasso'), loc='lower left') plt.axis('tight') plt.figure(3) ax = plt.gca() neg_log_alphas_positive_enet = -np.log10(alphas_positive_enet) for (coef_e, coef_pe, c) in zip(coefs_enet, coefs_positive_enet, colors): l1 = plt.plot(neg_log_alphas_enet, coef_e, c=c) l2 = plt.plot(neg_log_alphas_positive_enet, coef_pe, linestyle='--', c=c) plt.xlabel('-Log(alpha)') plt.ylabel('coefficients') plt.title('Elastic-Net and positive Elastic-Net') plt.legend((l1[-1], l2[-1]), ('Elastic-Net', 'positive Elastic-Net'), loc='lower left') plt.axis('tight') plt.show()
bsd-3-clause
jeremiah-c-leary/vhdl-style-guide
vsg/token/configuration_declaration.py
1
1744
from vsg import parser

# Token classes for the parts of a VHDL configuration declaration.  Each class
# only forwards construction to its vsg.parser base; the docstrings record the
# unique_id mapping used elsewhere in vsg and are left exactly as-is.


class configuration_keyword(parser.keyword):
    '''
    unique_id = configuration_body : configuration_keyword
    '''

    def __init__(self, sString):
        super().__init__(sString)


class identifier(parser.identifier):
    '''
    unique_id = configuration_body : identifier
    '''

    def __init__(self, sString):
        super().__init__(sString)


class of_keyword(parser.keyword):
    '''
    unique_id = configuration_body : of_keyword
    '''

    def __init__(self, sString):
        super().__init__(sString)


class entity_name(parser.name):
    '''
    unique_id = configuration_body : entity_name
    '''

    def __init__(self, sString):
        super().__init__(sString)


class is_keyword(parser.keyword):
    '''
    unique_id = configuration_body : is_keyword
    '''

    def __init__(self, sString):
        super().__init__(sString)


class end_keyword(parser.keyword):
    '''
    unique_id = configuration_body : end_keyword
    '''

    def __init__(self, sString):
        super().__init__(sString)


class end_configuration_keyword(parser.keyword):
    '''
    unique_id = configuration_body : end_configuration_keyword
    '''

    def __init__(self, sString):
        super().__init__(sString)


class configuration_simple_name(parser.simple_name):
    '''
    unique_id = configuration_body : configuration_simple_name
    '''

    def __init__(self, sString):
        super().__init__(sString)


class semicolon(parser.semicolon):
    '''
    unique_id = configuration_body : semicolon
    '''

    def __init__(self, sString=';'):
        # The base semicolon token takes no string argument; sString is
        # accepted (default ';') but not forwarded, as in the original.
        super().__init__()
gpl-3.0
reverie/jotleaf.com
jotleaf/marketing/views.py
1
2641
import json

import requests

from django.conf import settings
from django.contrib import messages

from common.views import json as json_view
from marketing.models import YwotTransfer

from django.contrib.auth import get_user_model
User = get_user_model()

# Endpoint on the YWOT host that verifies a (username, sig) pair and returns
# the account's credentials as JSON.
YWOT_CHECK_URL = "http://{}/connect/check/".format(settings.YWOT_HOST)

@json_view
def ywot_transfer_check(request):
    """
    Returns {
        transfer_status: true/false/null
    }

    where true means it's already been accepted,
    null means the user hasn't decided yet
    false means the user already rejected the idea.
    """
    # POST params: the YWOT username and a signature proving the request came
    # from the YWOT side (presumably HMAC-signed by YWOT; verified remotely
    # below, not here -- TODO confirm).
    username = request.POST['username']
    sig = request.POST['sig']
    if User.objects.filter(username__iexact=username).exists():
        # User with this username has already been created,
        # nothing we can do anyway
        # NOTE: deliberately returns None here, which json_view serializes
        # instead of a transfer_status payload.
        return
    try:
        # Reuse a cached transfer record if we've already checked this user.
        yt = YwotTransfer.objects.get(ywot_username__iexact=username)
    except YwotTransfer.DoesNotExist:
        # First time we see this user: ask the YWOT host to validate the
        # signature and hand back the account's credentials.
        r = requests.post(YWOT_CHECK_URL, data={
            'username': username,
            'sig': sig
        })
        assert r.status_code == 200
        result = json.loads(r.content)
        # Cache the verified credentials locally; transfer_status stays at its
        # model default (undecided) until the user responds.
        yt = YwotTransfer.objects.create(
            ywot_username = result['username'],
            ywot_password = result['password'],
            ywot_email = result['email'],
            valid_signature = sig
        )
    return {'transfer_status': yt.transfer_status}

@json_view
def ywot_transfer_response(request):
    # Records the user's yes/no answer to transferring their YWOT account,
    # creating and logging into the local account on "yes".
    # Local import to avoid a circular import with main.views -- TODO confirm.
    from main.views import _do_login_as
    username = request.POST['username']
    sig = request.POST['sig']
    response = request.POST['response']
    # The signature must match the one validated in ywot_transfer_check;
    # otherwise this lookup raises DoesNotExist.
    yt = YwotTransfer.objects.get(ywot_username__iexact=username, valid_signature=sig)
    if response == 'no':
        # User declined the transfer; remember that and return an empty body.
        yt.transfer_status = False
        yt.save()
        return {}
    assert response == 'yes'
    yt.transfer_status = True
    if yt.local_acct or User.objects.filter(username__iexact=username).exists():
        # Account already exists. Just pretend we did something, but don't
        # log them in. Otherwise someone could make a corresponding YWOT account
        # to hijack a Jotleaf account.
        yt.save()
        messages.success(request, "Success! You can now log in as '%s'." % username)
        return {
            'success': True
        }
    # No local account yet: create one with the credentials cached from YWOT.
    # NOTE(review): password is stored as received from YWOT -- presumably
    # already hashed in a compatible format; verify.
    u = User.objects.create(
        username = username,
        password = yt.ywot_password,
        email = yt.ywot_email
    )
    yt.local_acct = u
    yt.save()
    _do_login_as(request, username)
    messages.success(request, "Success! You're now logged in as '%s'." % username)
    return {
        'success': True
    }
mit
PocketEngi/nyyhkyytteek
nyyhkyytteek/nyyhkyytteek/nyy.py
1
2517
#! /usr/bin/env python3
import tkinter as tk
from tkinter import messagebox

import requests
import bs4


def main():
    """Run a tiny text-mode "browser": an address bar plus a Message widget."""

    def scrolldown(*args):
        """Emulate scrolling by deleting the first line of content."""
        content.set('\n'.join(content.get().split('\n')[1:]))

    def back(*args):
        """Retrieve the previously accessed URL and load it."""
        try:
            # The current page sits on top of `history`; park it on `future`
            # and reload the entry underneath it.
            future.append(history.pop())
            url.set(history.pop())
            go()
        except IndexError:
            # Nothing to go back to; ignore.
            pass

    def forward(*args):
        """Retrieve the page we went "back" from and load it."""
        try:
            url.set(future.pop())
            go()
        except IndexError:
            pass

    def go(*args):
        """Load the website written in the address bar."""
        if url.get() == 'disappear':
            # Easter egg: typing "disappear" closes the browser.
            messagebox.showinfo(
                "POOF!",
                "Salivili hipput tupput täppyt äppyt tipput hilijalleen!")
            root.quit()
            return
        # "Render" the requested page
        page = requests.get(url.get())
        # Fixed: name the parser explicitly -- BeautifulSoup() without a
        # parser argument warns and may pick different parsers per machine.
        soup = bs4.BeautifulSoup(page.text, 'html.parser')
        # Show each link's target inline after its anchor text.
        # Fixed: plain loop instead of a list comprehension executed purely
        # for its side effects.
        for e in soup.select('a[href]'):
            e.append('<{}>'.format(e['href']))
        content.set(soup.body.get_text())
        # save in history
        history.append(url.get())
        # update window title
        root.title('nyyhkyytteek. ' + soup.title.string)

    root = tk.Tk()
    root.title('nyyhkyytteek')
    # `history` holds visited URLs (current page last); `future` holds pages
    # we navigated "back" from.
    history = []
    future = []

    mainf = tk.Frame(root)
    mainf.grid(column=0, row=0)
    mainf.columnconfigure(0, weight=1)
    mainf.rowconfigure(0, weight=1)

    tk.Button(mainf, text='←', command=back).grid(column=0, row=1)
    tk.Button(mainf, text='→', command=forward).grid(column=1, row=1)
    url = tk.StringVar()
    url_entry = tk.Entry(mainf, width=100, textvariable=url)
    url_entry.grid(column=2, row=1)
    tk.Button(mainf, text='go!', command=go).grid(column=3, row=1)
    tk.Button(mainf, text='↓', command=scrolldown).grid(column=4, row=1)

    content = tk.StringVar(value='Enter a URL')
    content_page = tk.Message(mainf, width=800, justify='left', font=(None, 12),
                              textvariable=content)
    content_page.grid(column=2, row=2)

    url_entry.focus()
    root.bind('<Return>', go)
    root.bind('<Down>', scrolldown)
    root.mainloop()


if __name__ == '__main__':
    main()
mit
statgen/pheweb
pheweb/file_utils.py
1
19673
from .utils import PheWebError, get_phenolist, chrom_order from . import conf from . import parse_utils import io import os import csv from contextlib import contextmanager import json import gzip import datetime from boltons.fileutils import AtomicSaver, mkdir_p import pysam import itertools, random from pathlib import Path from typing import List, Callable, Dict, Union, Iterator, Optional, Any def get_generated_path(*path_parts:str) -> str: path = os.path.join(conf.get_data_dir(), 'generated-by-pheweb', *path_parts) make_basedir(path) return path dbsnp_version = '154' genes_version = '37' def get_filepath(kind:str, *, must_exist:bool = True) -> str: if kind not in _single_filepaths: raise Exception("Unknown kind of filepath: {}".format(repr(kind))) filepath: str = _single_filepaths[kind]() if must_exist and not os.path.exists(filepath): raise PheWebError("Filepath {} of kind {} was requested but doesn't exist".format(filepath, kind)) return filepath _single_filepaths: Dict[str,Callable[[],str]] = { # in data_dir: 'correlations-raw': (lambda: os.path.join(conf.get_data_dir(), 'pheno-correlations.txt')), 'phenolist': (lambda: os.path.join(conf.get_data_dir(), 'pheno-list.json')), # depend on hg_build_number, dbsnp_version, genes_version: 'rsids': (lambda: get_generated_path('resources/rsids-v{}-hg{}.tsv.gz'.format(dbsnp_version, conf.get_hg_build_number()))), 'rsids-hg19': (lambda: get_generated_path('resources/rsids-v{}-hg19.tsv.gz'.format(dbsnp_version))), 'rsids-hg38': (lambda: get_generated_path('resources/rsids-v{}-hg38.tsv.gz'.format(dbsnp_version))), 'genes': (lambda: get_generated_path('resources/genes-v{}-hg{}.bed'.format(genes_version, conf.get_hg_build_number()))), 'genes-hg19': (lambda: get_generated_path('resources/genes-v{}-hg19.bed'.format(genes_version))), 'genes-hg38': (lambda: get_generated_path('resources/genes-v{}-hg38.bed'.format(genes_version))), 'gene-aliases-sqlite3': (lambda: 
get_generated_path('resources/gene_aliases-v{}.sqlite3'.format(genes_version))), # simple: 'unanno': (lambda: get_generated_path('sites/sites-unannotated.tsv')), 'sites-rsids': (lambda: get_generated_path('sites/sites-rsids.tsv')), 'sites': (lambda: get_generated_path('sites/sites.tsv')), 'best-phenos-by-gene-sqlite3': (lambda: get_generated_path('best-phenos-by-gene.sqlite3')), 'best-phenos-by-gene-old-json': (lambda: get_generated_path('best-phenos-by-gene.json')), 'correlations': (lambda: get_generated_path('pheno-correlations.txt')), 'cpras-rsids-sqlite3': (lambda: get_generated_path('sites/cpras-rsids.sqlite3')), 'matrix': (lambda: get_generated_path('matrix.tsv.gz')), 'top-hits': (lambda: get_generated_path('top_hits.json')), 'top-hits-1k': (lambda: get_generated_path('top_hits_1k.json')), 'top-hits-tsv': (lambda: get_generated_path('top_hits.tsv')), 'top-loci': (lambda: get_generated_path('top_loci.json')), 'top-loci-tsv': (lambda: get_generated_path('top_loci.tsv')), 'phenotypes_summary': (lambda: get_generated_path('phenotypes.json')), 'phenotypes_summary_tsv': (lambda: get_generated_path('phenotypes.tsv')), # directories for pheno filepaths: 'parsed': (lambda: get_generated_path('parsed')), 'pheno_gz': (lambda: get_generated_path('pheno_gz')), 'best_of_pheno': (lambda: get_generated_path('best_of_pheno')), 'manhattan': (lambda: get_generated_path('manhattan')), 'qq': (lambda: get_generated_path('qq')), } def get_pheno_filepath(kind:str, phenocode:str, *, must_exist:bool = True) -> str: if kind not in _pheno_filepaths: raise Exception("Unknown kind of filepath: {}".format(repr(kind))) filepath:str = _pheno_filepaths[kind](phenocode) if must_exist and not os.path.exists(filepath): raise PheWebError("Pheno filepath {} of kind {} for phenocode {} was requested but doesn't exist".format(filepath, kind, phenocode)) return filepath _pheno_filepaths: Dict[str,Callable[[str],str]] = { 'parsed': (lambda phenocode: get_generated_path('parsed', phenocode)), 
'pheno_gz': (lambda phenocode: get_generated_path('pheno_gz', '{}.gz'.format(phenocode))), 'pheno_gz_tbi': (lambda phenocode: get_generated_path('pheno_gz', '{}.gz.tbi'.format(phenocode))), 'best_of_pheno': (lambda phenocode: get_generated_path('best_of_pheno', phenocode)), 'manhattan': (lambda phenocode: get_generated_path('manhattan', '{}.json'.format(phenocode))), 'qq': (lambda phenocode: get_generated_path('qq', '{}.json'.format(phenocode))), } def make_basedir(path:Union[str,Path]) -> None: mkdir_p(os.path.dirname(path)) def get_tmp_path(arg:Union[Path,str]) -> str: if isinstance(arg, Path): arg = str(arg) if arg.startswith(get_generated_path()): mkdir_p(get_generated_path('tmp')) tmp_basename = arg[len(get_generated_path()):].lstrip(os.path.sep).replace(os.path.sep, '-') ret = get_generated_path('tmp', tmp_basename) elif arg.startswith(os.path.sep): ret = arg + '.tmp' else: mkdir_p(get_generated_path('tmp')) ret = get_generated_path('tmp', arg) assert ret != arg, (ret, arg) while os.path.exists(ret): ret = '{}/{}-{}'.format(os.path.dirname(ret), random.choice('123456789'), os.path.basename(ret)) return ret def get_dated_tmp_path(prefix:str) -> str: assert '/' not in prefix, prefix time_str = datetime.datetime.isoformat(datetime.datetime.now()).replace(':', '-') return get_tmp_path(prefix + '-' + time_str) csv.register_dialect( 'pheweb-internal-dialect', delimiter='\t', doublequote=False, escapechar='\\', lineterminator='\n', quotechar='"', skipinitialspace=False, strict=True, ) ## Readers @contextmanager def VariantFileReader(filepath:Union[str,Path], only_per_variant_fields:bool = False): ''' Reads variants (as dictionaries) from an internal file. Iterable. Exposes `.fields`. 
with VariantFileReader('a.tsv') as reader: print(reader.fields) for variant in reader: print(variant) ''' with read_maybe_gzip(filepath) as f: reader:Iterator[List[str]] = csv.reader(f, dialect='pheweb-internal-dialect') try: fields = next(reader) except StopIteration: raise PheWebError("It looks like the file {} is empty".format(filepath)) if fields[0].startswith('#'): # This won't happen in normal use but it's convenient for temporary internal re-routing fields[0] = fields[0][1:] for field in fields: assert field in parse_utils.per_variant_fields or field in parse_utils.per_assoc_fields, field if only_per_variant_fields: yield _vfr_only_per_variant_fields(fields, reader) else: yield _vfr(fields, reader) class _vfr: def __init__(self, fields:List[str], reader:Iterator[List[str]]): self.fields = fields self._reader = reader def __iter__(self) -> Iterator[Dict[str,Any]]: return self._get_variants() def _get_variants(self) -> Iterator[Dict[str,Any]]: parsers: List[Callable[[str],Any]] = [parse_utils.reader_for_field[field] for field in self.fields] for unparsed_variant in self._reader: assert len(unparsed_variant) == len(self.fields), (unparsed_variant, self.fields) variant = {field: parser(value) for parser,field,value in zip(parsers, self.fields, unparsed_variant)} yield variant class _vfr_only_per_variant_fields: def __init__(self, fields:List[str], reader:Iterator[List[str]]): self._all_fields = fields self._extractors = [(parse_utils.reader_for_field[field], field, colidx) for colidx,field in enumerate(fields) if field in parse_utils.per_variant_fields] self.fields = [e[1] for e in self._extractors] self._reader = reader def __iter__(self) -> Iterator[Dict[str,Any]]: return self._get_variants() def _get_variants(self) -> Iterator[Dict[str,Any]]: for unparsed_variant in self._reader: assert len(unparsed_variant) == len(self._all_fields), (unparsed_variant, self._all_fields) variant = {field: parser(unparsed_variant[colidx]) for parser,field,colidx in 
self._extractors} yield variant @contextmanager def IndexedVariantFileReader(phenocode:str): filepath = get_pheno_filepath('pheno_gz', phenocode) with read_gzip(filepath) as f: reader:Iterator[List[str]] = csv.reader(f, dialect='pheweb-internal-dialect') fields = next(reader) if fields[0].startswith('#'): # previous version of PheWeb commented the header line fields[0] = fields[0][1:] for field in fields: assert field in parse_utils.per_variant_fields or field in parse_utils.per_assoc_fields, field colidxs = {field: idx for idx, field in enumerate(fields)} with pysam.TabixFile(filepath, parser=None) as tabix_file: yield _ivfr(tabix_file, colidxs) class _ivfr: def __init__(self, _tabix_file:pysam.TabixFile, _colidxs:Dict[str,int]): self._tabix_file=_tabix_file self._colidxs=_colidxs def _parse_variant_row(self, variant_row:List[str]) -> Dict[str,Any]: variant = {} for field in self._colidxs: val = variant_row[self._colidxs[field]] parser = parse_utils.reader_for_field[field] try: variant[field] = parser(val) except Exception as exc: raise PheWebError('ERROR: Failed to parse the value {!r} for field {!r} in file {!r}'.format(val, field, self._tabix_file.filename)) from exc return variant def get_region(self, chrom:str, start:int, end:int) -> Iterator[Dict[str,Any]]: ''' includes `start`, does not include `end` return is like [{ 'chrom': 'X', 'pos': 43254, ..., }, ...] ''' if start < 1: start = 1 if start >= end: return [] if chrom not in self._tabix_file.contigs: return [] # I do not understand why I need to use `pos-1`. # The pysam docs talk about being zero-based or one-based. Is this what they're referring to? # Doesn't make much sense to me. There must be a reason that I don't understand. 
try: tabix_iter = self._tabix_file.fetch(chrom, start-1, end-1, parser=None) except Exception as exc: raise PheWebError('ERROR when fetching {}-{}-{} from {}'.format(chrom, start-1, end-1, self._tabix_file.filename)) from exc reader:Iterator[List[str]] = csv.reader(tabix_iter, dialect='pheweb-internal-dialect') for variant_row in reader: yield self._parse_variant_row(variant_row) def get_variant(self, chrom:str, pos:int, ref:str, alt:int) -> Optional[Dict[str,Any]]: x = self.get_region(chrom, pos, pos+1) for variant in x: if variant['pos'] != pos: # print('WARNING: while looking for variant {}-{}-{}-{}, saw {!r}'.format(chrom, pos, ref, alt, variant)) continue if variant['ref'] == ref and variant['alt'] == alt and variant: return variant return None class MatrixReader: def __init__(self): self._filepath = get_generated_path('matrix.tsv.gz') phenos:List[Dict[str,Any]] = get_phenolist() phenocodes:List[str] = [pheno['phenocode'] for pheno in phenos] self._info_for_pheno = { pheno['phenocode']: {k: v for k,v in pheno.items() if k != 'assoc_files'} for pheno in phenos } with read_gzip(self._filepath) as f: reader = csv.reader(f, dialect='pheweb-internal-dialect') colnames = next(reader) assert colnames[0].startswith('#'), colnames colnames[0] = colnames[0][1:] self._colidxs:Dict[str,int] = {} # maps field -> column_index self._colidxs_for_pheno:Dict[str,Dict[str,int]] = {} # maps phenocode -> field -> column_index for colnum, colname in enumerate(colnames): if '@' in colname: x = colname.split('@') assert len(x) == 2, x field, phenocode = x assert field in parse_utils.fields, field assert phenocode in phenocodes, phenocode self._colidxs_for_pheno.setdefault(phenocode, {})[field] = colnum else: field = colname assert field in parse_utils.fields, (field) self._colidxs[field] = colnum def get_phenocodes(self) -> List[str]: return list(self._colidxs_for_pheno) @contextmanager def context(self): with pysam.TabixFile(self._filepath, parser=None) as tabix_file: yield 
_mr(tabix_file, self._colidxs, self._colidxs_for_pheno, self._info_for_pheno) class _mr(_ivfr): def __init__(self, _tabix_file:pysam.TabixFile, _colidxs:Dict[str,int], _colidxs_for_pheno:Dict[str,Dict[str,int]], _info_for_pheno:Dict[str,Dict[str,Any]]): self._tabix_file=_tabix_file self._colidxs=_colidxs self._colidxs_for_pheno=_colidxs_for_pheno self._info_for_pheno=_info_for_pheno def _parse_field(self, variant_row:List[str], field:str, phenocode:Optional[str] = None) -> Any: colidx = self._colidxs[field] if phenocode is None else self._colidxs_for_pheno[phenocode][field] val = variant_row[colidx] parser = parse_utils.reader_for_field[field] try: return parser(val) # type: ignore except Exception as exc: error_message = 'ERROR: Failed to parse the value {!r} for field {!r}'.format(val, field) if phenocode is not None: error_message += ' and phenocode {!r}'.format(phenocode) raise PheWebError(error_message) from exc def _parse_variant_row(self, variant_row:List[str]) -> Dict[str,Any]: variant:Dict[str,Any] = {'phenos': {}} for field in self._colidxs: variant[field] = self._parse_field(variant_row, field) for phenocode, fields in self._colidxs_for_pheno.items(): if any(variant_row[self._colidxs_for_pheno[phenocode][field]] != '' for field in fields): p = {} for field in fields: p[field] = self._parse_field(variant_row, field, phenocode) p.update(self._info_for_pheno[phenocode]) variant['phenos'][phenocode] = p return variant def with_chrom_idx(variants:Iterator[Dict[str,Any]]) -> Iterator[Dict[str,Any]]: for v in variants: v['chrom_idx'] = chrom_order[v['chrom']] yield v @contextmanager def read_gzip(filepath): # mypy doesn't like it # hopefully faster than `gzip.open(filepath, 'rt')` -- TODO: find out whether it is with gzip.GzipFile(filepath, 'rb') as f: # leave in binary mode (default), let TextIOWrapper decode with io.BufferedReader(f, buffer_size=2**18) as g: # 256KB buffer with io.TextIOWrapper(g) as h: # bytes -> unicode yield h @contextmanager def 
read_maybe_gzip(filepath:Union[str,Path]): if isinstance(filepath, Path): filepath = str(filepath) is_gzip = False with open(filepath, 'rb', buffering=0) as raw_f: # no need for buffers if raw_f.read(3) == b'\x1f\x8b\x08': is_gzip = True if is_gzip: with read_gzip(filepath) as f: yield f else: with open(filepath, 'rt', buffering=2**18) as f: # 256KB buffer yield f ## Writers @contextmanager def VariantFileWriter(filepath:str, allow_extra_fields:bool = False, use_gzip:bool = True): ''' Writes variants (represented by dictionaries) to an internal file. with VariantFileWriter('a.tsv') as writer: writer.write({'chrom': '2', 'pos': 47, ...}) Each variant/association/hit/loci written must have a subset of the keys of the first one. ''' part_file = get_tmp_path(filepath) make_basedir(filepath) if use_gzip: with AtomicSaver(filepath, text_mode=False, part_file=part_file, overwrite_part=True, rm_part_on_exc=False) as f: with gzip.open(f, 'wt', compresslevel=2) as f_gzip: yield _vfw(f_gzip, allow_extra_fields, filepath) else: with AtomicSaver(filepath, text_mode=True, part_file=part_file, overwrite_part=True, rm_part_on_exc=False) as f: yield _vfw(f, allow_extra_fields, filepath) class _vfw: def __init__(self, f, allow_extra_fields:bool, filepath:str): self._f = f self._allow_extra_fields = allow_extra_fields self._filepath = filepath def write(self, variant:Dict[str,Any]) -> None: if not hasattr(self, '_writer'): fields:List[str] = [] for field in parse_utils.fields: if field in variant: fields.append(field) extra_fields = list(set(variant.keys()) - set(fields)) if extra_fields: if not self._allow_extra_fields: raise PheWebError("ERROR: found unexpected fields {!r} among the expected fields {!r} while writing {!r}.".format( extra_fields, fields, self._filepath)) fields += extra_fields self._writer = csv.DictWriter(self._f, fieldnames=fields, dialect='pheweb-internal-dialect') self._writer.writeheader() self._writer.writerow(variant) def write_all(self, 
variants:Iterator[Dict[str,Any]]) -> None: for v in variants: self.write(v) def write_heterogenous_variantfile(filepath:str, assocs:List[Dict[str,Any]], use_gzip:bool = True) -> None: '''inject all necessary keys into the first association so that the writer will be made correctly''' if len(assocs) == 0: raise PheWebError("ERROR: tried to write file {!r} but didn't supply any variants") assocs[0] = {field:assocs[0].get(field,'') for field in set(itertools.chain.from_iterable(assocs))} with VariantFileWriter(filepath, allow_extra_fields=True, use_gzip=use_gzip) as vfw: vfw.write_all(assocs) def convert_VariantFile_to_IndexedVariantFile(vf_path:str, ivf_path:str) -> None: make_basedir(ivf_path) tmp_path = get_tmp_path(ivf_path) tmp_path = '{}/cvt-{}'.format(os.path.dirname(tmp_path), os.path.basename(tmp_path)) # Avoid using the same tmp path as augment-phenos pysam.tabix_compress(vf_path, tmp_path, force=True) os.rename(tmp_path, ivf_path) pysam.tabix_index( filename=ivf_path, force=True, seq_col=0, start_col=1, end_col=1, # note: `pysam.tabix_index` calls the first column `0`, but cmdline `tabix` calls it `1`. line_skip=1, # skip header ) def write_json(*, filepath:Optional[str] = None, data=None, indent:Optional[int] = None, sort_keys:bool = False) -> None: # Don't allow positional args, because I can never remember the order anyways assert filepath is not None and data is not None, filepath part_file = get_tmp_path(filepath) make_basedir(filepath) with AtomicSaver(filepath, text_mode=True, part_file=part_file, overwrite_part=True, rm_part_on_exc=False) as f: json.dump(data, f, indent=indent, sort_keys=sort_keys, default=_json_writer_default) def _json_writer_default(obj:Any) -> Any: import numpy as np if isinstance(obj, np.float32): return float(obj) raise TypeError('Object {!r} of type {} is not JSON serializable!'.format(obj, obj.__class__.__name__))
agpl-3.0
karban/field
resources/python/logilab/common/decorators.py
2
6416
# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:[email protected] # # This file is part of logilab-common. # # logilab-common is free software: you can redistribute it and/or modify it under # the terms of the GNU Lesser General Public License as published by the Free # Software Foundation, either version 2.1 of the License, or (at your option) any # later version. # # logilab-common is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # details. # # You should have received a copy of the GNU Lesser General Public License along # with logilab-common. If not, see <http://www.gnu.org/licenses/>. """ A few useful function/method decorators. """ __docformat__ = "restructuredtext en" import types from time import clock, time import sys, re # XXX rewrite so we can use the decorator syntax when keyarg has to be specified def _is_generator_function(callableobj): return callableobj.func_code.co_flags & 0x20 def cached(callableobj, keyarg=None): """Simple decorator to cache result of method call.""" assert not _is_generator_function(callableobj), 'cannot cache generator function: %s' % callableobj if callableobj.func_code.co_argcount == 1 or keyarg == 0: def cache_wrapper1(self, *args): cache = '_%s_cache_' % callableobj.__name__ #print 'cache1?', cache try: return self.__dict__[cache] except KeyError: #print 'miss' value = callableobj(self, *args) setattr(self, cache, value) return value try: cache_wrapper1.__doc__ = callableobj.__doc__ cache_wrapper1.func_name = callableobj.func_name except: pass return cache_wrapper1 elif keyarg: def cache_wrapper2(self, *args, **kwargs): cache = '_%s_cache_' % callableobj.__name__ key = args[keyarg-1] #print 'cache2?', cache, self, key try: _cache = self.__dict__[cache] except KeyError: #print 'init' 
_cache = {} setattr(self, cache, _cache) try: return _cache[key] except KeyError: #print 'miss', self, cache, key _cache[key] = callableobj(self, *args, **kwargs) return _cache[key] try: cache_wrapper2.__doc__ = callableobj.__doc__ cache_wrapper2.func_name = callableobj.func_name except: pass return cache_wrapper2 def cache_wrapper3(self, *args): cache = '_%s_cache_' % callableobj.__name__ #print 'cache3?', cache, self, args try: _cache = self.__dict__[cache] except KeyError: #print 'init' _cache = {} setattr(self, cache, _cache) try: return _cache[args] except KeyError: #print 'miss' _cache[args] = callableobj(self, *args) return _cache[args] try: cache_wrapper3.__doc__ = callableobj.__doc__ cache_wrapper3.func_name = callableobj.func_name except: pass return cache_wrapper3 def clear_cache(obj, funcname): """Function to clear a cache handled by the cached decorator.""" try: del obj.__dict__['_%s_cache_' % funcname] except KeyError: pass def copy_cache(obj, funcname, cacheobj): """Copy cache for <funcname> from cacheobj to obj.""" cache = '_%s_cache_' % funcname try: setattr(obj, cache, cacheobj.__dict__[cache]) except KeyError: pass class wproperty(object): """Simple descriptor expecting to take a modifier function as first argument and looking for a _<function name> to retrieve the attribute. """ def __init__(self, setfunc): self.setfunc = setfunc self.attrname = '_%s' % setfunc.__name__ def __set__(self, obj, value): self.setfunc(obj, value) def __get__(self, obj, cls): assert obj is not None return getattr(obj, self.attrname) class classproperty(object): """this is a simple property-like class but for class attributes. """ def __init__(self, get): self.get = get def __get__(self, inst, cls): return self.get(cls) class iclassmethod(object): '''Descriptor for method which should be available as class method if called on the class or instance method if called on an instance. 
''' def __init__(self, func): self.func = func def __get__(self, instance, objtype): if instance is None: return types.MethodType(self.func, objtype, objtype.__class__) return types.MethodType(self.func, instance, objtype) def __set__(self, instance, value): raise AttributeError("can't set attribute") def timed(f): def wrap(*args, **kwargs): t = time() c = clock() res = f(*args, **kwargs) print '%s clock: %.9f / time: %.9f' % (f.__name__, clock() - c, time() - t) return res return wrap def locked(acquire, release): """Decorator taking two methods to acquire/release a lock as argument, returning a decorator function which will call the inner method after having called acquire(self) et will call release(self) afterwards. """ def decorator(f): def wrapper(self, *args, **kwargs): acquire(self) try: return f(self, *args, **kwargs) finally: release(self) return wrapper return decorator def monkeypatch(klass, methodname=None): """Decorator extending class with the decorated function >>> class A: ... pass >>> @monkeypatch(A) ... def meth(self): ... return 12 ... >>> a = A() >>> a.meth() 12 >>> @monkeypatch(A, 'foo') ... def meth(self): ... return 12 ... >>> a.foo() 12 """ def decorator(func): setattr(klass, methodname or func.__name__, func) return func return decorator
gpl-2.0
rhndg/openedx
common/lib/xmodule/xmodule/services.py
163
2772
""" Module contains various XModule/XBlock services """ from django.conf import settings class SettingsService(object): """ Allows server-wide configuration of XBlocks on a per-type basis XBlock settings are read from XBLOCK_SETTINGS settings key. Each XBlock is allowed access to single settings bucket. Bucket is determined by this service using the following rules: * Value of SettingsService.xblock_settings_bucket_selector is examined. If XBlock have attribute/property with the name of that value this attribute/property is read to get the bucket key (e.g. if XBlock have `block_settings_key = 'my_block_settings'`, bucket key would be 'my_block_settings'). * Otherwise, XBlock class name is used Service is content-agnostic: it just returns whatever happen to be in the settings bucket (technically, it returns the bucket itself). If `default` argument is specified it is returned if: * There are no XBLOCK_SETTINGS setting * XBLOCK_SETTINGS is empty * XBLOCK_SETTINGS does not contain settings bucket If `default` is not specified or None, empty dictionary is used for default. Example: "XBLOCK_SETTINGS": { "my_block": { "setting1": 1, "setting2": [] }, "my_other_block": [1, 2, 3], "MyThirdBlock": "QWERTY" } class MyBlock: block_settings_key='my_block' class MyOtherBlock: block_settings_key='my_other_block' class MyThirdBlock: pass class MissingBlock: pass service = SettingsService() service.get_settings_bucket(MyBlock()) # { "setting1": 1, "setting2": [] } service.get_settings_bucket(MyOtherBlock()) # [1, 2, 3] service.get_settings_bucket(MyThirdBlock()) # "QWERTY" service.get_settings_bucket(MissingBlock()) # {} service.get_settings_bucket(MissingBlock(), "default") # "default" service.get_settings_bucket(MissingBlock(), None) # {} """ xblock_settings_bucket_selector = 'block_settings_key' def get_settings_bucket(self, block, default=None): """ Gets xblock settings dictionary from settings. 
""" if not block: raise ValueError("Expected XBlock instance, got {0} of type {1}".format(block, type(block))) actual_default = default if default is not None else {} xblock_settings_bucket = getattr(block, self.xblock_settings_bucket_selector, block.unmixed_class.__name__) xblock_settings = settings.XBLOCK_SETTINGS if hasattr(settings, "XBLOCK_SETTINGS") else {} return xblock_settings.get(xblock_settings_bucket, actual_default)
agpl-3.0
log2timeline/plaso
tests/parsers/sqlite_plugins/chrome_history.py
3
9090
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """Tests for the Google Chrome History database plugin.""" import unittest from plaso.lib import definitions from plaso.parsers.sqlite_plugins import chrome_history from tests.parsers.sqlite_plugins import test_lib class GoogleChrome8HistoryPluginTest(test_lib.SQLitePluginTestCase): """Tests for the Google Chrome 8 history SQLite database plugin.""" def testProcess(self): """Tests the Process function on a Chrome History database file.""" plugin = chrome_history.GoogleChrome8HistoryPlugin() storage_writer = self._ParseDatabaseFileWithPlugin( ['History'], plugin) # The History file contains 71 events (69 page visits, 1 file downloads). self.assertEqual(storage_writer.number_of_events, 71) self.assertEqual(storage_writer.number_of_extraction_warnings, 0) self.assertEqual(storage_writer.number_of_recovery_warnings, 0) events = list(storage_writer.GetEvents()) # Check the first page visited entry. expected_event_values = { 'data_type': 'chrome:history:page_visited', 'date_time': '2011-04-07 12:03:11.000000', 'page_transition_type': 0, 'timestamp_desc': definitions.TIME_DESCRIPTION_LAST_VISITED, 'title': 'Ubuntu Start Page', 'typed_count': 0, 'url': 'http://start.ubuntu.com/10.04/Google/', 'visit_source': 3} self.CheckEventValues(storage_writer, events[0], expected_event_values) # Check the first file downloaded entry. 
expected_event_values = { 'data_type': 'chrome:history:file_downloaded', 'date_time': '2011-05-23 08:35:30', 'full_path': '/home/john/Downloads/funcats_scr.exe', 'received_bytes': 1132155, 'timestamp_desc': definitions.TIME_DESCRIPTION_FILE_DOWNLOADED, 'total_bytes': 1132155, 'url': 'http://fatloss4idiotsx.com/download/funcats/funcats_scr.exe'} self.CheckEventValues(storage_writer, events[69], expected_event_values) class GoogleChrome27HistoryPluginTest(test_lib.SQLitePluginTestCase): """Tests for the Google Chrome 27 history SQLite database plugin.""" def testProcess57(self): """Tests the Process function on a Google Chrome 57 History database.""" plugin = chrome_history.GoogleChrome27HistoryPlugin() storage_writer = self._ParseDatabaseFileWithPlugin( ['History-57.0.2987.133'], plugin) # The History file contains 3 events (1 page visit, 2 file downloads). self.assertEqual(storage_writer.number_of_events, 3) self.assertEqual(storage_writer.number_of_extraction_warnings, 0) self.assertEqual(storage_writer.number_of_recovery_warnings, 0) events = list(storage_writer.GetEvents()) # Check the page visit event. expected_url = ( 'https://raw.githubusercontent.com/dfirlabs/chrome-specimens/master/' 'generate-specimens.sh') expected_event_values = { 'data_type': 'chrome:history:page_visited', 'date_time': '2018-01-21 14:09:53.885478', 'timestamp_desc': definitions.TIME_DESCRIPTION_LAST_VISITED, 'title': '', 'typed_count': 0, 'url': expected_url} self.CheckEventValues(storage_writer, events[0], expected_event_values) # Check the file downloaded event. 
expected_event_values = { 'data_type': 'chrome:history:file_downloaded', 'date_time': '2018-01-21 14:09:53.900399', 'full_path': '/home/ubuntu/Downloads/plaso-20171231.1.win32.msi', 'received_bytes': 3080192, 'timestamp_desc': definitions.TIME_DESCRIPTION_START, 'total_bytes': 3080192, 'url': ( 'https://raw.githubusercontent.com/log2timeline/l2tbinaries/master/' 'win32/plaso-20171231.1.win32.msi')} self.CheckEventValues(storage_writer, events[1], expected_event_values) def testProcess58(self): """Tests the Process function on a Google Chrome 58 History database.""" plugin = chrome_history.GoogleChrome27HistoryPlugin() storage_writer = self._ParseDatabaseFileWithPlugin( ['History-58.0.3029.96'], plugin) # The History file contains 3 events (1 page visit, 2 file downloads). self.assertEqual(storage_writer.number_of_events, 3) self.assertEqual(storage_writer.number_of_extraction_warnings, 0) self.assertEqual(storage_writer.number_of_recovery_warnings, 0) events = list(storage_writer.GetEvents()) # Check the page visit event. expected_url = ( 'https://raw.githubusercontent.com/dfirlabs/chrome-specimens/master/' 'generate-specimens.sh') expected_event_values = { 'data_type': 'chrome:history:page_visited', 'date_time': '2018-01-21 14:09:27.315765', 'timestamp_desc': definitions.TIME_DESCRIPTION_LAST_VISITED, 'title': '', 'typed_count': 0, 'url': expected_url} self.CheckEventValues(storage_writer, events[0], expected_event_values) # Check the file downloaded event. 
expected_event_values = { 'data_type': 'chrome:history:file_downloaded', 'date_time': '2018-01-21 14:09:27.200398', 'full_path': '/home/ubuntu/Downloads/plaso-20171231.1.win32.msi', 'received_bytes': 3080192, 'timestamp_desc': definitions.TIME_DESCRIPTION_START, 'total_bytes': 3080192, 'url': ( 'https://raw.githubusercontent.com/log2timeline/l2tbinaries/master/' 'win32/plaso-20171231.1.win32.msi')} self.CheckEventValues(storage_writer, events[1], expected_event_values) def testProcess59(self): """Tests the Process function on a Google Chrome 59 History database.""" plugin = chrome_history.GoogleChrome27HistoryPlugin() storage_writer = self._ParseDatabaseFileWithPlugin( ['History-59.0.3071.86'], plugin) # The History file contains 3 events (1 page visit, 2 file downloads). self.assertEqual(storage_writer.number_of_events, 3) self.assertEqual(storage_writer.number_of_extraction_warnings, 0) self.assertEqual(storage_writer.number_of_recovery_warnings, 0) events = list(storage_writer.GetEvents()) # Check the page visit event. expected_url = ( 'https://raw.githubusercontent.com/dfirlabs/chrome-specimens/master/' 'generate-specimens.sh') expected_event_values = { 'data_type': 'chrome:history:page_visited', 'date_time': '2018-01-21 14:08:52.037692', 'timestamp_desc': definitions.TIME_DESCRIPTION_LAST_VISITED, 'title': '', 'typed_count': 0, 'url': expected_url} self.CheckEventValues(storage_writer, events[0], expected_event_values) # Check the file downloaded event. 
expected_event_values = { 'data_type': 'chrome:history:file_downloaded', 'date_time': '2018-01-21 14:08:51.811123', 'full_path': '/home/ubuntu/Downloads/plaso-20171231.1.win32.msi', 'received_bytes': 3080192, 'timestamp_desc': definitions.TIME_DESCRIPTION_START, 'total_bytes': 3080192, 'url': ( 'https://raw.githubusercontent.com/log2timeline/l2tbinaries/master/' 'win32/plaso-20171231.1.win32.msi')} self.CheckEventValues(storage_writer, events[1], expected_event_values) def testProcess59ExtraColumn(self): """Tests the Process function on a Google Chrome 59 History database, manually modified to have an unexpected column. """ plugin = chrome_history.GoogleChrome27HistoryPlugin() storage_writer = self._ParseDatabaseFileWithPlugin( ['History-59_added-fake-column'], plugin) # The History file contains 3 events (1 page visit, 2 file downloads). self.assertEqual(storage_writer.number_of_events, 3) self.assertEqual(storage_writer.number_of_extraction_warnings, 0) self.assertEqual(storage_writer.number_of_recovery_warnings, 0) events = list(storage_writer.GetEvents()) # Check the page visit event. expected_url = ( 'https://raw.githubusercontent.com/dfirlabs/chrome-specimens/master/' 'generate-specimens.sh') expected_event_values = { 'data_type': 'chrome:history:page_visited', 'date_time': '2018-01-21 14:08:52.037692', 'timestamp_desc': definitions.TIME_DESCRIPTION_LAST_VISITED, 'title': '', 'typed_count': 0, 'url': expected_url} self.CheckEventValues(storage_writer, events[0], expected_event_values) # Check the file downloaded event. 
expected_event_values = { 'data_type': 'chrome:history:file_downloaded', 'date_time': '2018-01-21 14:08:51.811123', 'full_path': '/home/ubuntu/Downloads/plaso-20171231.1.win32.msi', 'received_bytes': 3080192, 'timestamp_desc': definitions.TIME_DESCRIPTION_START, 'total_bytes': 3080192, 'url': ( 'https://raw.githubusercontent.com/log2timeline/l2tbinaries/master/' 'win32/plaso-20171231.1.win32.msi')} self.CheckEventValues(storage_writer, events[1], expected_event_values) if __name__ == '__main__': unittest.main()
apache-2.0
yuhcaesar/emacsrc
.emacs.d/.python-environments/default/Lib/site.py
306
27543
"""Append module search paths for third-party packages to sys.path. **************************************************************** * This module is automatically imported during initialization. * **************************************************************** In earlier versions of Python (up to 1.5a3), scripts or modules that needed to use site-specific modules would place ``import site'' somewhere near the top of their code. Because of the automatic import, this is no longer necessary (but code that does it still works). This will append site-specific paths to the module search path. On Unix, it starts with sys.prefix and sys.exec_prefix (if different) and appends lib/python<version>/site-packages as well as lib/site-python. It also supports the Debian convention of lib/python<version>/dist-packages. On other platforms (mainly Mac and Windows), it uses just sys.prefix (and sys.exec_prefix, if different, but this is unlikely). The resulting directories, if they exist, are appended to sys.path, and also inspected for path configuration files. FOR DEBIAN, this sys.path is augmented with directories in /usr/local. Local addons go into /usr/local/lib/python<version>/site-packages (resp. /usr/local/lib/site-python), Debian addons install into /usr/{lib,share}/python<version>/dist-packages. A path configuration file is a file whose name has the form <package>.pth; its contents are additional directories (one per line) to be added to sys.path. Non-existing directories (or non-directories) are never added to sys.path; no directory is added to sys.path more than once. Blank lines and lines beginning with '#' are skipped. Lines starting with 'import' are executed. For example, suppose sys.prefix and sys.exec_prefix are set to /usr/local and there is a directory /usr/local/lib/python2.X/site-packages with three subdirectories, foo, bar and spam, and two path configuration files, foo.pth and bar.pth. 
Assume foo.pth contains the following: # foo package configuration foo bar bletch and bar.pth contains: # bar package configuration bar Then the following directories are added to sys.path, in this order: /usr/local/lib/python2.X/site-packages/bar /usr/local/lib/python2.X/site-packages/foo Note that bletch is omitted because it doesn't exist; bar precedes foo because bar.pth comes alphabetically before foo.pth; and spam is omitted because it is not mentioned in either path configuration file. After these path manipulations, an attempt is made to import a module named sitecustomize, which can perform arbitrary additional site-specific customizations. If this import fails with an ImportError exception, it is silently ignored. """ import sys import os try: import __builtin__ as builtins except ImportError: import builtins try: set except NameError: from sets import Set as set # Prefixes for site-packages; add additional prefixes like /usr/local here PREFIXES = [sys.prefix, sys.exec_prefix] # Enable per user site-packages directory # set it to False to disable the feature or True to force the feature ENABLE_USER_SITE = None # for distutils.commands.install USER_SITE = None USER_BASE = None _is_64bit = (getattr(sys, 'maxsize', None) or getattr(sys, 'maxint')) > 2**32 _is_pypy = hasattr(sys, 'pypy_version_info') _is_jython = sys.platform[:4] == 'java' if _is_jython: ModuleType = type(os) def makepath(*paths): dir = os.path.join(*paths) if _is_jython and (dir == '__classpath__' or dir.startswith('__pyclasspath__')): return dir, dir dir = os.path.abspath(dir) return dir, os.path.normcase(dir) def abs__file__(): """Set all module' __file__ attribute to an absolute path""" for m in sys.modules.values(): if ((_is_jython and not isinstance(m, ModuleType)) or hasattr(m, '__loader__')): # only modules need the abspath in Jython. 
and don't mess # with a PEP 302-supplied __file__ continue f = getattr(m, '__file__', None) if f is None: continue m.__file__ = os.path.abspath(f) def removeduppaths(): """ Remove duplicate entries from sys.path along with making them absolute""" # This ensures that the initial path provided by the interpreter contains # only absolute pathnames, even if we're running from the build directory. L = [] known_paths = set() for dir in sys.path: # Filter out duplicate paths (on case-insensitive file systems also # if they only differ in case); turn relative paths into absolute # paths. dir, dircase = makepath(dir) if not dircase in known_paths: L.append(dir) known_paths.add(dircase) sys.path[:] = L return known_paths # XXX This should not be part of site.py, since it is needed even when # using the -S option for Python. See http://www.python.org/sf/586680 def addbuilddir(): """Append ./build/lib.<platform> in case we're running in the build dir (especially for Guido :-)""" from distutils.util import get_platform s = "build/lib.%s-%.3s" % (get_platform(), sys.version) if hasattr(sys, 'gettotalrefcount'): s += '-pydebug' s = os.path.join(os.path.dirname(sys.path[-1]), s) sys.path.append(s) def _init_pathinfo(): """Return a set containing all existing directory entries from sys.path""" d = set() for dir in sys.path: try: if os.path.isdir(dir): dir, dircase = makepath(dir) d.add(dircase) except TypeError: continue return d def addpackage(sitedir, name, known_paths): """Add a new path to known_paths by combining sitedir and 'name' or execute sitedir if it starts with 'import'""" if known_paths is None: _init_pathinfo() reset = 1 else: reset = 0 fullname = os.path.join(sitedir, name) try: f = open(fullname, "rU") except IOError: return try: for line in f: if line.startswith("#"): continue if line.startswith("import"): exec(line) continue line = line.rstrip() dir, dircase = makepath(sitedir, line) if not dircase in known_paths and os.path.exists(dir): sys.path.append(dir) 
known_paths.add(dircase) finally: f.close() if reset: known_paths = None return known_paths def addsitedir(sitedir, known_paths=None): """Add 'sitedir' argument to sys.path if missing and handle .pth files in 'sitedir'""" if known_paths is None: known_paths = _init_pathinfo() reset = 1 else: reset = 0 sitedir, sitedircase = makepath(sitedir) if not sitedircase in known_paths: sys.path.append(sitedir) # Add path component try: names = os.listdir(sitedir) except os.error: return names.sort() for name in names: if name.endswith(os.extsep + "pth"): addpackage(sitedir, name, known_paths) if reset: known_paths = None return known_paths def addsitepackages(known_paths, sys_prefix=sys.prefix, exec_prefix=sys.exec_prefix): """Add site-packages (and possibly site-python) to sys.path""" prefixes = [os.path.join(sys_prefix, "local"), sys_prefix] if exec_prefix != sys_prefix: prefixes.append(os.path.join(exec_prefix, "local")) for prefix in prefixes: if prefix: if sys.platform in ('os2emx', 'riscos') or _is_jython: sitedirs = [os.path.join(prefix, "Lib", "site-packages")] elif _is_pypy: sitedirs = [os.path.join(prefix, 'site-packages')] elif sys.platform == 'darwin' and prefix == sys_prefix: if prefix.startswith("/System/Library/Frameworks/"): # Apple's Python sitedirs = [os.path.join("/Library/Python", sys.version[:3], "site-packages"), os.path.join(prefix, "Extras", "lib", "python")] else: # any other Python distros on OSX work this way sitedirs = [os.path.join(prefix, "lib", "python" + sys.version[:3], "site-packages")] elif os.sep == '/': sitedirs = [os.path.join(prefix, "lib", "python" + sys.version[:3], "site-packages"), os.path.join(prefix, "lib", "site-python"), os.path.join(prefix, "python" + sys.version[:3], "lib-dynload")] lib64_dir = os.path.join(prefix, "lib64", "python" + sys.version[:3], "site-packages") if (os.path.exists(lib64_dir) and os.path.realpath(lib64_dir) not in [os.path.realpath(p) for p in sitedirs]): if _is_64bit: sitedirs.insert(0, lib64_dir) else: 
sitedirs.append(lib64_dir) try: # sys.getobjects only available in --with-pydebug build sys.getobjects sitedirs.insert(0, os.path.join(sitedirs[0], 'debug')) except AttributeError: pass # Debian-specific dist-packages directories: if sys.version[0] == '2': sitedirs.append(os.path.join(prefix, "lib", "python" + sys.version[:3], "dist-packages")) else: sitedirs.append(os.path.join(prefix, "lib", "python" + sys.version[0], "dist-packages")) sitedirs.append(os.path.join(prefix, "local/lib", "python" + sys.version[:3], "dist-packages")) sitedirs.append(os.path.join(prefix, "lib", "dist-python")) else: sitedirs = [prefix, os.path.join(prefix, "lib", "site-packages")] if sys.platform == 'darwin': # for framework builds *only* we add the standard Apple # locations. Currently only per-user, but /Library and # /Network/Library could be added too if 'Python.framework' in prefix: home = os.environ.get('HOME') if home: sitedirs.append( os.path.join(home, 'Library', 'Python', sys.version[:3], 'site-packages')) for sitedir in sitedirs: if os.path.isdir(sitedir): addsitedir(sitedir, known_paths) return None def check_enableusersite(): """Check if user site directory is safe for inclusion The function tests for the command line flag (including environment var), process uid/gid equal to effective uid/gid. None: Disabled for security reasons False: Disabled by user (command line option) True: Safe and enabled """ if hasattr(sys, 'flags') and getattr(sys.flags, 'no_user_site', False): return False if hasattr(os, "getuid") and hasattr(os, "geteuid"): # check process uid == effective uid if os.geteuid() != os.getuid(): return None if hasattr(os, "getgid") and hasattr(os, "getegid"): # check process gid == effective gid if os.getegid() != os.getgid(): return None return True def addusersitepackages(known_paths): """Add a per user site-package to sys.path Each user has its own python directory with site-packages in the home directory. 
USER_BASE is the root directory for all Python versions USER_SITE is the user specific site-packages directory USER_SITE/.. can be used for data. """ global USER_BASE, USER_SITE, ENABLE_USER_SITE env_base = os.environ.get("PYTHONUSERBASE", None) def joinuser(*args): return os.path.expanduser(os.path.join(*args)) #if sys.platform in ('os2emx', 'riscos'): # # Don't know what to put here # USER_BASE = '' # USER_SITE = '' if os.name == "nt": base = os.environ.get("APPDATA") or "~" if env_base: USER_BASE = env_base else: USER_BASE = joinuser(base, "Python") USER_SITE = os.path.join(USER_BASE, "Python" + sys.version[0] + sys.version[2], "site-packages") else: if env_base: USER_BASE = env_base else: USER_BASE = joinuser("~", ".local") USER_SITE = os.path.join(USER_BASE, "lib", "python" + sys.version[:3], "site-packages") if ENABLE_USER_SITE and os.path.isdir(USER_SITE): addsitedir(USER_SITE, known_paths) if ENABLE_USER_SITE: for dist_libdir in ("lib", "local/lib"): user_site = os.path.join(USER_BASE, dist_libdir, "python" + sys.version[:3], "dist-packages") if os.path.isdir(user_site): addsitedir(user_site, known_paths) return known_paths def setBEGINLIBPATH(): """The OS/2 EMX port has optional extension modules that do double duty as DLLs (and must use the .DLL file extension) for other extensions. The library search path needs to be amended so these will be found during module import. Use BEGINLIBPATH so that these are at the start of the library search path. """ dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload") libpath = os.environ['BEGINLIBPATH'].split(';') if libpath[-1]: libpath.append(dllpath) else: libpath[-1] = dllpath os.environ['BEGINLIBPATH'] = ';'.join(libpath) def setquit(): """Define new built-ins 'quit' and 'exit'. These are simply strings that display a hint on how to exit. """ if os.sep == ':': eof = 'Cmd-Q' elif os.sep == '\\': eof = 'Ctrl-Z plus Return' else: eof = 'Ctrl-D (i.e. 
EOF)' class Quitter(object): def __init__(self, name): self.name = name def __repr__(self): return 'Use %s() or %s to exit' % (self.name, eof) def __call__(self, code=None): # Shells like IDLE catch the SystemExit, but listen when their # stdin wrapper is closed. try: sys.stdin.close() except: pass raise SystemExit(code) builtins.quit = Quitter('quit') builtins.exit = Quitter('exit') class _Printer(object): """interactive prompt objects for printing the license text, a list of contributors and the copyright notice.""" MAXLINES = 23 def __init__(self, name, data, files=(), dirs=()): self.__name = name self.__data = data self.__files = files self.__dirs = dirs self.__lines = None def __setup(self): if self.__lines: return data = None for dir in self.__dirs: for filename in self.__files: filename = os.path.join(dir, filename) try: fp = open(filename, "rU") data = fp.read() fp.close() break except IOError: pass if data: break if not data: data = self.__data self.__lines = data.split('\n') self.__linecnt = len(self.__lines) def __repr__(self): self.__setup() if len(self.__lines) <= self.MAXLINES: return "\n".join(self.__lines) else: return "Type %s() to see the full %s text" % ((self.__name,)*2) def __call__(self): self.__setup() prompt = 'Hit Return for more, or q (and Return) to quit: ' lineno = 0 while 1: try: for i in range(lineno, lineno + self.MAXLINES): print(self.__lines[i]) except IndexError: break else: lineno += self.MAXLINES key = None while key is None: try: key = raw_input(prompt) except NameError: key = input(prompt) if key not in ('', 'q'): key = None if key == 'q': break def setcopyright(): """Set 'copyright' and 'credits' in __builtin__""" builtins.copyright = _Printer("copyright", sys.copyright) if _is_jython: builtins.credits = _Printer( "credits", "Jython is maintained by the Jython developers (www.jython.org).") elif _is_pypy: builtins.credits = _Printer( "credits", "PyPy is maintained by the PyPy developers: http://pypy.org/") else: 
builtins.credits = _Printer("credits", """\ Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands for supporting Python development. See www.python.org for more information.""") here = os.path.dirname(os.__file__) builtins.license = _Printer( "license", "See http://www.python.org/%.3s/license.html" % sys.version, ["LICENSE.txt", "LICENSE"], [os.path.join(here, os.pardir), here, os.curdir]) class _Helper(object): """Define the built-in 'help'. This is a wrapper around pydoc.help (with a twist). """ def __repr__(self): return "Type help() for interactive help, " \ "or help(object) for help about object." def __call__(self, *args, **kwds): import pydoc return pydoc.help(*args, **kwds) def sethelper(): builtins.help = _Helper() def aliasmbcs(): """On Windows, some default encodings are not provided by Python, while they are always available as "mbcs" in each locale. Make them usable by aliasing to "mbcs" in such a case.""" if sys.platform == 'win32': import locale, codecs enc = locale.getdefaultlocale()[1] if enc.startswith('cp'): # "cp***" ? try: codecs.lookup(enc) except LookupError: import encodings encodings._cache[enc] = encodings._unknown encodings.aliases.aliases[enc] = 'mbcs' def setencoding(): """Set the string encoding used by the Unicode implementation. The default is 'ascii', but if you're willing to experiment, you can change this.""" encoding = "ascii" # Default value set by _PyUnicode_Init() if 0: # Enable to support locale aware default string encodings. import locale loc = locale.getdefaultlocale() if loc[1]: encoding = loc[1] if 0: # Enable to switch off string to Unicode coercion and implicit # Unicode to string conversion. encoding = "undefined" if encoding != "ascii": # On Non-Unicode builds this will raise an AttributeError... sys.setdefaultencoding(encoding) # Needs Python Unicode build ! 
def execsitecustomize():
    """Run custom site specific code, if available."""
    try:
        import sitecustomize
    except ImportError:
        pass


def virtual_install_main_packages():
    """Extend sys.path with the base (non-virtualenv) interpreter's stdlib.

    Reads the real interpreter prefix from 'orig-prefix.txt' (written next
    to this module when the virtualenv was created), stores it as
    sys.real_prefix, and appends the platform-appropriate standard-library
    directories of that base installation to sys.path.
    """
    f = open(os.path.join(os.path.dirname(__file__), 'orig-prefix.txt'))
    sys.real_prefix = f.read().strip()
    f.close()
    # NOTE: the original computed a `pos` offset here that was never used
    # (dead code left over from an older insertion strategy); removed.
    hardcoded_relative_dirs = []
    if _is_jython:
        paths = [os.path.join(sys.real_prefix, 'Lib')]
    elif _is_pypy:
        if sys.version_info > (3, 2):
            cpyver = '%d' % sys.version_info[0]
        elif sys.pypy_version_info >= (1, 5):
            cpyver = '%d.%d' % sys.version_info[:2]
        else:
            cpyver = '%d.%d.%d' % sys.version_info[:3]
        paths = [os.path.join(sys.real_prefix, 'lib_pypy'),
                 os.path.join(sys.real_prefix, 'lib-python', cpyver)]
        if sys.pypy_version_info < (1, 9):
            paths.insert(1, os.path.join(sys.real_prefix,
                                         'lib-python', 'modified-%s' % cpyver))
        hardcoded_relative_dirs = paths[:]  # for the special 'darwin' case below
        # This is hardcoded in the Python executable, but relative to sys.prefix:
        for path in paths[:]:
            plat_path = os.path.join(path, 'plat-%s' % sys.platform)
            if os.path.exists(plat_path):
                paths.append(plat_path)
    elif sys.platform == 'win32':
        paths = [os.path.join(sys.real_prefix, 'Lib'),
                 os.path.join(sys.real_prefix, 'DLLs')]
    else:
        paths = [os.path.join(sys.real_prefix, 'lib',
                              'python' + sys.version[:3])]
        hardcoded_relative_dirs = paths[:]  # for the special 'darwin' case below
        lib64_path = os.path.join(sys.real_prefix, 'lib64',
                                  'python' + sys.version[:3])
        if os.path.exists(lib64_path):
            if _is_64bit:
                paths.insert(0, lib64_path)
            else:
                paths.append(lib64_path)
        # This is hardcoded in the Python executable, but relative to
        # sys.prefix.  Debian change: we need to add the multiarch triplet
        # here, which is where the real stuff lives.  As per PEP 421, in
        # Python 3.3+, this lives in sys.implementation, while in Python 2.7
        # it lives in sys.
        try:
            arch = getattr(sys, 'implementation', sys)._multiarch
        except AttributeError:
            # This is a non-multiarch aware Python.  Fallback to the old way.
            arch = sys.platform
        plat_path = os.path.join(sys.real_prefix, 'lib',
                                 'python' + sys.version[:3], 'plat-%s' % arch)
        if os.path.exists(plat_path):
            paths.append(plat_path)
    # This is hardcoded in the Python executable, but
    # relative to sys.prefix, so we have to fix up:
    for path in list(paths):
        tk_dir = os.path.join(path, 'lib-tk')
        if os.path.exists(tk_dir):
            paths.append(tk_dir)
    # These are hardcoded in the Apple's Python executable,
    # but relative to sys.prefix, so we have to fix them up:
    if sys.platform == 'darwin':
        hardcoded_paths = [os.path.join(relative_dir, module)
                           for relative_dir in hardcoded_relative_dirs
                           for module in ('plat-darwin', 'plat-mac',
                                          'plat-mac/lib-scriptpackages')]
        for path in hardcoded_paths:
            if os.path.exists(path):
                paths.append(path)
    sys.path.extend(paths)


def force_global_eggs_after_local_site_packages():
    """
    Force easy_installed eggs in the global environment to get placed
    in sys.path after all packages inside the virtualenv.  This maintains
    the "least surprise" result that packages in the virtualenv always
    mask global packages, never the other way around.
    """
    egginsert = getattr(sys, '__egginsert', 0)
    for i, path in enumerate(sys.path):
        if i > egginsert and path.startswith(sys.prefix):
            egginsert = i
    sys.__egginsert = egginsert + 1


def virtual_addsitepackages(known_paths):
    """Add the real prefix's site-packages, after the virtualenv's own."""
    force_global_eggs_after_local_site_packages()
    return addsitepackages(known_paths, sys_prefix=sys.real_prefix)


def fixclasspath():
    """Adjust the special classpath sys.path entries for Jython.

    These entries should follow the base virtualenv lib directories.
    """
    paths = []
    classpaths = []
    for path in sys.path:
        if path == '__classpath__' or path.startswith('__pyclasspath__'):
            classpaths.append(path)
        else:
            paths.append(path)
    sys.path = paths
    sys.path.extend(classpaths)


def execusercustomize():
    """Run custom user specific code, if available."""
    try:
        import usercustomize
    except ImportError:
        pass


def main():
    """Perform all site-path setup; executed automatically on import."""
    global ENABLE_USER_SITE
    virtual_install_main_packages()
    abs__file__()
    paths_in_sys = removeduppaths()
    if (os.name == "posix" and sys.path and
            os.path.basename(sys.path[-1]) == "Modules"):
        addbuilddir()
    if _is_jython:
        fixclasspath()
    # A virtualenv created with --no-site-packages drops a marker file next
    # to this module; its absence means global site-packages are visible.
    GLOBAL_SITE_PACKAGES = not os.path.exists(
        os.path.join(os.path.dirname(__file__),
                     'no-global-site-packages.txt'))
    if not GLOBAL_SITE_PACKAGES:
        ENABLE_USER_SITE = False
    if ENABLE_USER_SITE is None:
        ENABLE_USER_SITE = check_enableusersite()
    paths_in_sys = addsitepackages(paths_in_sys)
    paths_in_sys = addusersitepackages(paths_in_sys)
    if GLOBAL_SITE_PACKAGES:
        paths_in_sys = virtual_addsitepackages(paths_in_sys)
    if sys.platform == 'os2emx':
        setBEGINLIBPATH()
    setquit()
    setcopyright()
    sethelper()
    aliasmbcs()
    setencoding()
    execsitecustomize()
    if ENABLE_USER_SITE:
        execusercustomize()
    # Remove sys.setdefaultencoding() so that users cannot change the
    # encoding after initialization.  The test for presence is needed when
    # this module is run as a script, because this code is executed twice.
    if hasattr(sys, "setdefaultencoding"):
        del sys.setdefaultencoding

main()

def _script():
    # Renamed from `help` to avoid shadowing the builtin.
    usage = """\
    %s [--user-base] [--user-site]

    Without arguments print some useful information
    With arguments print the value of USER_BASE and/or USER_SITE separated
    by '%s'.

    Exit codes with --user-base or --user-site:
      0 - user site directory is enabled
      1 - user site directory is disabled by user
      2 - uses site directory is disabled by super user
          or for security reasons
     >2 - unknown error
    """
    args = sys.argv[1:]
    if not args:
        print("sys.path = [")
        for dir in sys.path:
            print("    %r," % (dir,))
        print("]")
        def exists(path):
            if os.path.isdir(path):
                return "exists"
            else:
                return "doesn't exist"
        print("USER_BASE: %r (%s)" % (USER_BASE, exists(USER_BASE)))
        # BUG FIX: previously probed exists(USER_BASE) on this line, so the
        # USER_SITE report showed whether USER_BASE existed instead.
        print("USER_SITE: %r (%s)" % (USER_SITE, exists(USER_SITE)))
        print("ENABLE_USER_SITE: %r" % ENABLE_USER_SITE)
        sys.exit(0)

    buffer = []
    if '--user-base' in args:
        buffer.append(USER_BASE)
    if '--user-site' in args:
        buffer.append(USER_SITE)

    if buffer:
        print(os.pathsep.join(buffer))
        if ENABLE_USER_SITE:
            sys.exit(0)
        elif ENABLE_USER_SITE is False:
            sys.exit(1)
        elif ENABLE_USER_SITE is None:
            sys.exit(2)
        else:
            sys.exit(3)
    else:
        import textwrap
        print(textwrap.dedent(usage % (sys.argv[0], os.pathsep)))
        sys.exit(10)

if __name__ == '__main__':
    _script()
gpl-2.0
fangxingli/hue
desktop/core/ext-py/kazoo-2.0/kazoo/protocol/paths.py
36
1344
def normpath(path, trailing=False): """Normalize path, eliminating double slashes, etc.""" comps = path.split('/') new_comps = [] for comp in comps: if comp == '': continue if comp in ('.', '..'): raise ValueError('relative paths not allowed') new_comps.append(comp) new_path = '/'.join(new_comps) if trailing is True and path.endswith('/'): new_path += '/' if path.startswith('/'): return '/' + new_path return new_path def join(a, *p): """Join two or more pathname components, inserting '/' as needed. If any component is an absolute path, all previous path components will be discarded. """ path = a for b in p: if b.startswith('/'): path = b elif path == '' or path.endswith('/'): path += b else: path += '/' + b return path def isabs(s): """Test whether a path is absolute""" return s.startswith('/') def basename(p): """Returns the final component of a pathname""" i = p.rfind('/') + 1 return p[i:] def _prefix_root(root, path, trailing=False): """Prepend a root to a path. """ return normpath(join(_norm_root(root), path.lstrip('/')), trailing=trailing) def _norm_root(root): return normpath(join('/', root))
apache-2.0
fevangelista/psi4
psi4/driver/procrouting/dft/dh_functionals.py
7
18989
# # @BEGIN LICENSE # # Psi4: an open-source quantum chemistry software package # # Copyright (c) 2007-2021 The Psi4 Developers. # # The copyrights for code used from other parties are included in # the corresponding files. # # This file is part of Psi4. # # Psi4 is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, version 3. # # Psi4 is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License along # with Psi4; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # @END LICENSE # """ List of double-hybrid functionals """ funcs = [] funcs.append({ "name": "MP2MP2", "x_hf": { "alpha": 1.0 }, "c_functionals": {}, "c_mp2": { "alpha": 1.0 }, }) funcs.append({ "name": "MP2D", "alias": ["MP2-D"], "x_hf": { "alpha": 1.0 }, "c_functionals": {}, "c_mp2": { "alpha": 1.0 }, "dispersion": { "type": "dmp2", "params": { "s8": 1.187, "a1": 0.944, "a2": 0.480, "rcut": 0.72, "w": 0.20, }, "citation": " Rezac, J.; Greenwell, C.; Beran, G. (2018), J. Chem. Theory Comput., 14: 4711-4721\n", }, }) funcs.append({ "name": "B2PLYP", "x_functionals": { "GGA_X_B88": { "alpha": 0.47 } }, "x_hf": { "alpha": 0.53 }, "c_functionals": { "GGA_C_LYP": { "alpha": 0.73 } }, "c_mp2": { "alpha": 0.27 }, "citation": ' S. Grimme, J. Chem. Phys., 124, 034108, 2006\n', "description": ' B2PLYP Double Hybrid Exchange-Correlation Functional\n', }) funcs.append({ "name": "DSD-BLYP", "x_functionals": { "GGA_X_B88": { "alpha": 0.25 } }, "x_hf": { "alpha": 0.75 }, "c_functionals": { "GGA_C_LYP": { "alpha": 0.53 } }, "c_mp2": { "os": 0.46, "ss": 0.60 }, "citation": ' S. Kozuch, J.M.L. Martin, J. 
Comp. Chem., 34, 2327-2344, 2013\n', "description": ' DSD-BLYP SCS Double Hybrid XC Functional (not dispersion corrected)\n', }) funcs.append({ "name": "DSD-BLYP-D2", "x_functionals": { "GGA_X_B88": { "alpha": 0.29 } }, "x_hf": { "alpha": 0.71 }, "c_functionals": { "GGA_C_LYP": { "alpha": 0.55 } }, "c_mp2": { "os": 0.46, "ss": 0.43 }, "citation": ' S. Kozuch, J.M.L. Martin, J. Comp. Chem., 34, 2327-2344, 2013\n', "description": ' DSD-BLYP-D2 Dispersion-corrected SCS Double Hybrid XC Functional\n', "dispersion": { "type": "d2", "params": { "s6": 0.35, "alpha6": 20.0, "sr6": 1.1 }, "citation": ' S. Kozuch, J.M.L. Martin, J. Comp. Chem., 34, 2327-2344, 2013\n' }, }) funcs.append({ "name": "DSD-BLYP-D3BJ", "x_functionals": { "GGA_X_B88": { "alpha": 0.29 } }, "x_hf": { "alpha": 0.71 }, "c_functionals": { "GGA_C_LYP": { "alpha": 0.54 } }, "c_mp2": { "os": 0.47, "ss": 0.40, }, "dispersion": { "type": "d3bj", "params": { "s6": 0.57, "a2": 5.4, "a1": 0.0, "s8": 0.0 }, "citation": " S. Kozuch, J.M.L. Martin, J. Comp. Chem., 34, 2327-2344, 2013\n" }, "citation": ' S. Kozuch, J.M.L. Martin, J. Comp. Chem., 34, 2327-2344, 2013\n', "description": ' DSD-BLYP-D3BJ Dispersion-corrected SCS Double Hybrid XC Functional\n', }) funcs.append({ # note by H.Kruse: Uses the full-core parameters in the Yu paper. But my and L. Georigk's experience shows that it hardly matters. # Use FC is recommended by S. Grimme. # May this madness never end. "name": "DSD-BLYP-NL", "x_functionals": { "GGA_X_B88": { "alpha": 0.29 } }, "x_hf": { "alpha": 0.71 }, "c_functionals": { "GGA_C_LYP": { "alpha": 0.54 } }, "c_mp2": { "alpha": 1.0, "os": 0.47, "ss": 0.40, }, "dispersion": { "type": "nl", "params": { "b": 12.00, "c": 0.0093, }, "citation": " F. Yu J. Chem. Theory Comput. 10, 4400-4407, 2014\n" }, "citation": ' S. Kozuch, J.M.L. Martin, J. Comp. 
Chem., 34, 2327-2344, 2013\n', "description": ' DSD-BLYP-NL (D3BJ,FC parameters) VV10 SCS Double Hybrid XC Functional\n', }) funcs.append({ "name": "CORE-DSD-BLYP", "x_functionals": { "GGA_X_B88": { "alpha": 0.31 } }, "x_hf": { "alpha": 0.69 }, "c_functionals": { "GGA_C_LYP": { "alpha": 0.54 } }, "c_mp2": { "os": 0.46, "ss": 0.37 }, "citation": ' S. Kozuch, Phys. Chem. Chem. Phys., 13, 20104, 2011\n', "description": ' DSD-BLYP SCS Double Hybrid XC Functional (full-core param.)\n' }) funcs.append({ "name": "PBE0-2", "alias": ["PBE02"], "x_functionals": { "GGA_X_PBE": { "alpha": 0.206299 } }, "x_hf": { "alpha": 0.793701 }, "c_functionals": { "GGA_C_PBE": { "alpha": 0.5 } }, "c_mp2": { "alpha": 0.5 }, "citation": ' J. Chai, Chem. Phys. Lett., 538, 121-125, 2012\n', "description": ' PBE0-2 Double Hybrid Exchange-Correlation Functional\n', }) funcs.append({ "name": "PBE0-DH", "alias": ["PBE0DH"], "x_functionals": { "GGA_X_PBE": { "alpha": 0.5 } }, "x_hf": { "alpha": 0.5 }, "c_functionals": { "GGA_C_PBE": { "alpha": 0.875 } }, "c_mp2": { "alpha": 0.125 }, "citation": ' E. Bremond, C. Adamo, J. Chem. Phys., 135, 024106, 2011\n', "description": ' PBE0-DH Double Hybrid Exchange-Correlation Functional\n', }) funcs.append({ "name": "DSD-PBEP86", "alias": ["DSDPBEP86"], "x_functionals": { "GGA_X_PBE": { "alpha": 0.28 } }, "x_hf": { "alpha": 0.72 }, "c_functionals": { "GGA_C_P86": { "alpha": 0.44 } }, "c_mp2": { "os": 0.51, "ss": 0.36 }, "citation": ' S. Kozuch, J.M.L. Martin, J. Comp. Chem., 34, 2327-2344, 2013\n', "description": ' DSD-PBEP86 SCS Double Hybrid XC Functional (not dispersion corrected)\n', }) funcs.append({ "name": "DSD-PBEP86-D3BJ", "x_functionals": { "GGA_X_PBE": { "alpha": 0.31 } }, "x_hf": { "alpha": 0.69 }, "c_functionals": { "GGA_C_P86": { "alpha": 0.44 } }, "c_mp2": { "os": 0.52, "ss": 0.22 }, "dispersion": { "type": "d3bj", "params": { "s6": 0.48, "a2": 5.6, "a1": 0.0, "s8": 0.0 }, "citation": ' S. Kozuch, J.M.L. Martin, J. Comp. 
Chem., 34, 2327-2344, 2013\n' }, "citation": ' S. Kozuch, J.M.L. Martin, J. Comp. Chem., 34, 2327-2344, 2013\n', "description": ' DSD-PBEP86-D3BJ Dispersion-corrected SCS Double Hybrid XC Functional\n', }) funcs.append({ # note: Using the D3BJ form for NL, which is sensible but not explicitly mentioned in the paper "name": "DSD-PBEP86-NL", "x_functionals": { "GGA_X_PBE": { "alpha": 0.31 } }, "x_hf": { "alpha": 0.69 }, "c_functionals": { "GGA_C_P86": { "alpha": 0.44 } }, "c_mp2": { "os": 0.52, "ss": 0.22 }, "dispersion": { "type": "nl", "params": { "b": 12.8, "c": 0.0093, }, "citation": ' M. K. Kesharwani, A. Karton, J.M. L. Martin, J. Chem. Theory Comput. 12, 444-454, 2016 \n' }, "citation": ' S. Kozuch, J.M.L. Martin, J. Comp. Chem., 34, 2327-2344, 2013\n', "description": ' DSD-PBEP86-NL (D3BJ parameters) VV10 SCS Double Hybrid XC Functional\n', }) funcs.append({ "name": "DSD-PBEP86-D2", "x_functionals": { "GGA_X_PBE": { "alpha": 0.32 } }, "x_hf": { "alpha": 0.68 }, "c_functionals": { "GGA_C_P86": { "alpha": 0.45 } }, "c_mp2": { "os": 0.51, "ss": 0.23 }, "citation": ' S. Kozuch, J.M.L. Martin, J. Comp. Chem., 34, 2327-2344, 2013\n', "description": ' DSD-PBEP86-D2 Dispersion-corrected SCS Double Hybrid XC Functional\n', "dispersion": { "type": "d2", "params": { "s6": 0.29, "alpha6": 20.0, "sr6": 1.1 }, "citation": ' S. Kozuch, J.M.L. Martin, J. Comp. Chem., 34, 2327-2344, 2013\n' }, }) funcs.append({ "name": "DSD-PBEPBE", "alias": ["DSDPBEPBE"], "x_functionals": { "GGA_X_PBE": { "alpha": 0.28 } }, "x_hf": { "alpha": 0.72 }, "c_functionals": { "GGA_C_PBE": { "alpha": 0.48 } }, "c_mp2": { "os": 0.54, "ss": 0.31 }, "citation": ' S. Kozuch, J.M.L. Martin, J. Comp. 
Chem., 34, 2327-2344, 2013\n', "description": ' DSD-PBEPBE SCS Double Hybrid XC Functional (not dispersion corrected)\n', }) funcs.append({ "name": "DSD-PBEPBE-D2", "x_functionals": { "GGA_X_PBE": { "alpha": 0.34 } }, "x_hf": { "alpha": 0.66 }, "c_functionals": { "GGA_C_PBE": { "alpha": 0.51 } }, "c_mp2": { "os": 0.53, "ss": 0.12 }, "citation": ' S. Kozuch, J.M.L. Martin, J. Comp. Chem., 34, 2327-2344, 2013\n', "description": ' DSD-PBEPBE-D2 Dispersion-corrected SCS Double Hybrid XC Functional\n', "dispersion": { "type": "d2", "params": { "s6": 0.42, "alpha6": 20.0, "sr6": 1.1 }, "citation": ' S. Kozuch, J.M.L. Martin, J. Comp. Chem., 34, 2327-2344, 2013\n' }, }) funcs.append({ "name": "DSD-PBEPBE-D3BJ", "x_functionals": { "GGA_X_PBE": { "alpha": 0.32 } }, "x_hf": { "alpha": 0.68 }, "c_functionals": { "GGA_C_PBE": { "alpha": 0.49 } }, "c_mp2": { "os": 0.55, "ss": 0.13 }, "citation": ' S. Kozuch, J.M.L. Martin, J. Comp. Chem., 34, 2327-2344, 2013\n', "description": ' DSD-PBEPBE-D3BJ Dispersion-corrected SCS Double Hybrid XC Functional\n', "dispersion": { "type": "d3bj", "params": { "s6": 0.78, "a2": 6.1, "a1": 0.0, "s8": 0.0 }, "citation": ' S. Kozuch, J.M.L. Martin, J. Comp. Chem., 34, 2327-2344, 2013\n' }, }) funcs.append({ "name": "DSD-PBEPBE-NL", "x_functionals": { "GGA_X_PBE": { "alpha": 0.32 } }, "x_hf": { "alpha": 0.68 }, "c_functionals": { "GGA_C_PBE": { "alpha": 0.49 } }, "c_mp2": { "alpha": 1.0, "os": 0.55, "ss": 0.13 }, "citation": ' S. Kozuch, J.M.L. Martin, J. Comp. Chem., 34, 2327-2344, 2013\n', "description": ' DSD-PBEPBE-NL (D3BJ parameters) VV10 SCS Double Hybrid XC Functional\n', "dispersion": { "type": "nl", "params": { "b": 9.6, "c": 0.0093, }, "citation": ' M. K. Kesharwani, A. Karton, J.M. L. Martin, J. Chem. Theory Comput. 
12, 444-454, 2016\n' }, }) funcs.append({ "name": "DSD-BP86-D2", "x_functionals": { "GGA_X_B88": { "alpha": 0.33 } }, "x_hf": { "alpha": 0.67 }, "c_functionals": { "GGA_C_P86": { "alpha": 0.49 } }, "c_mp2": { "os": 0.49, "ss": 0.24 }, "citation": ' S. Kozuch, J.M.L. Martin, J. Comp. Chem., 34, 2327-2344, 2013\n', "description": ' DSD-BP86-D2 Dispersion-corrected SCS Double Hybrid XC Functional\n', "dispersion": { "type": "d2", "params": { "s6": 0.41, "alpha6": 20.0, "sr6": 1.1 }, "citation": ' S. Kozuch, J.M.L. Martin, J. Comp. Chem., 34, 2327-2344, 2013\n' }, }) funcs.append({ "name": "DSD-SVWN-D2", "x_functionals": { "LDA_X": { "alpha": 0.29 } }, "x_hf": { "alpha": 0.71 }, "c_functionals": { "LDA_C_VWN": { "alpha": 0.34 } }, "c_mp2": { "os": 0.58, "ss": 0.11 }, "citation": ' S. Kozuch, J.M.L. Martin, J. Comp. Chem., 34, 2327-2344, 2013\n', "description": ' DSD-SVWN5-D2 Dispersion-corrected SCS Double Hybrid XC Functional\n', "dispersion": { "type": "d2", "params": { "s6": 0.28, "alpha6": 20.0, "sr6": 1.1 }, "citation": ' S. Kozuch, J.M.L. Martin, J. Comp. Chem., 34, 2327-2344, 2013\n', }, }) funcs.append({ "name": "B2GPPLYP", "x_functionals": { "GGA_X_B88": { "alpha": 0.35 } }, "x_hf": { "alpha": 0.65 }, "c_functionals": { "GGA_C_LYP": { "alpha": 0.64 } }, "c_mp2": { "alpha": 0.36 }, "citation": ' A. Karton, et al., J.Phys. Chem. 
A, 112, 12868-12886, 2008\n', "description": ' B2GPPLYP Double Hybrid Exchange-Correlation Functional\n', }) def get_pwpb95_tweaks(): X2S = 0.1282782438530421943003109254455883701296 bt = 0.004440 # paper values c_pw = 0.32620 # paper values expo_pw6 = 3.7868 # paper values alpha_pw6 = c_pw / X2S / X2S return {"_bt": bt, "_alpha": alpha_pw6, "_expo": expo_pw6} funcs.append({ "name": "PWPB95", "x_functionals": { "GGA_X_MPW91": { # only mpw91, not pw91, is tweakable "tweak": get_pwpb95_tweaks(), "alpha": 0.50 } }, "x_hf": { "alpha": 0.50 }, "c_functionals": { "MGGA_C_BC95": { "tweak": { "_css": 0.03241, "_copp": 0.00250, }, "alpha": 0.731 } }, "c_mp2": { "ss": 0.0, "os": 0.269 }, "citation": ' L. Goerigk, S.Grimme, J.Chem. Theory Compt. 7, 291-309, 2011 \n', "description": ' PWPB95 SOS Double Hybrid XC Functional\n', }) funcs.append({ "name": "PTPSS", "x_functionals": { "MGGA_X_TPSS": { "tweak": { "_b": 0.15, "_c": 0.88491, "_e": 0.047, "_kappa": 0.872, "_mu": 0.16952, }, "alpha": 0.50 } }, "x_hf": { "alpha": 0.50 }, "c_functionals": { "MGGA_C_TPSS": { "tweak": { "_beta": 0.06080, "_d": 6.3, }, "alpha": 0.625 } }, "c_mp2": { "ss": 0.0, "os": 0.375 }, "citation": ' L. Goerigk, S.Grimme, J. Chem. Theory Comput., 7, 291-309, 2011 \n', "description": ' PTPSS SOS Double Hybrid XC Functional\n', }) funcs.append({ "name": "DSD-PBEB95", "x_functionals": { "GGA_X_PBE": { "alpha": 0.31 } }, "x_hf": { "alpha": 0.69 }, "c_functionals": { "MGGA_C_BC95": { "alpha": 0.54 } }, "c_mp2": { "os": 0.48, "ss": 0.22 }, "citation": ' S. Kozuch, J.M.L. Martin, J. Comp. Chem., 34, 2327-2344, 2013\n', "description": ' DSD-PBEB95 SCS Double Hybrid Meta-GGA XC Functional (not dispersion corrected)\n', }) funcs.append({ "name": "DSD-PBEB95-D2", "x_functionals": { "GGA_X_PBE": { "alpha": 0.35 } }, "x_hf": { "alpha": 0.65 }, "c_functionals": { "MGGA_C_BC95": { "alpha": 0.55 } }, "c_mp2": { "os": 0.46, "ss": 0.08 }, "citation": ' S. Kozuch, J.M.L. Martin, J. Comp. 
Chem., 34, 2327-2344, 2013\n', "description": ' DSD-PBEB95-D2 Dispersion-corrected SCS Double Hybrid Meta-GGA XC Functional\n', "dispersion": { "type": "d2", "params": { "s6": 0.32, "alpha6": 20.0, "sr6": 1.1 }, "citation": ' S. Kozuch, J.M.L. Martin, J. Comp. Chem., 34, 2327-2344, 2013\n', }, }) funcs.append({ "name": "DSD-PBEB95-D3BJ", "x_functionals": { "GGA_X_PBE": { "alpha": 0.34 } }, "x_hf": { "alpha": 0.66 }, "c_functionals": { "MGGA_C_BC95": { "alpha": 0.55 } }, "c_mp2": { "os": 0.46, "ss": 0.09 }, "citation": ' S. Kozuch, J.M.L. Martin, J. Comp. Chem., 34, 2327-2344, 2013\n', "description": ' DSD-PBEB95-D3BJ Dispersion-corrected SCS Double Hybrid Meta-GGA XC Functional\n', "dispersion": { "type": "d3bj", "params": { "s6": 0.61, "a2": 6.2, "a1": 0.0, "s8": 0.0 }, "citation": ' S. Kozuch, J.M.L. Martin, J. Comp. Chem., 34, 2327-2344, 2013\n' }, }) funcs.append({ "name": "DSD-PBEB95-NL", "x_functionals": { "GGA_X_PBE": { "alpha": 0.34 } }, "x_hf": { "alpha": 0.66 }, "c_functionals": { "MGGA_C_BC95": { "alpha": 0.55 } }, "c_mp2": { "os": 0.46, "ss": 0.09 }, "citation": ' S. Kozuch, J.M.L. Martin, J. Comp. Chem., 34, 2327-2344, 2013\n', "description": ' DSD-PBEB95-NL (D3BJ parameters) VV10 SCS Double Hybrid Meta-GGA XC Functional\n', "dispersion": { "type": "nl", "params": { "b": 12.50, "c": 0.0093, }, "citation": ' M. K. Kesharwani, A. Karton, J.M. L. Martin, J. Chem. Theory Comput. 12, 444-454, 2016\n' }, }) functional_list = {} for functional in funcs: functional_list[functional["name"].lower()] = functional
lgpl-3.0
laszlocsomor/tensorflow
tensorflow/contrib/lite/toco/python/toco_from_protos_test.py
2
3849
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests the `toco_from_protos` conversion binary.

Builds small TensorFlow graphs, serializes the graphdef plus the toco
model/flags protos to temp files, and shells out to the binary to check
which conversions to TFLite succeed.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os
import tempfile
import tensorflow as tf
from tensorflow.contrib.lite.toco import model_flags_pb2
from tensorflow.contrib.lite.toco import toco_flags_pb2
from tensorflow.contrib.lite.toco import types_pb2
from tensorflow.python.platform import googletest
from tensorflow.python.platform import resource_loader


def TensorName(x):
  """Get the canonical (non foo:0) name of a tensor."""
  return x.name.split(":")[0]


class TocoFromProtosTest(googletest.TestCase):

  def _run(self, sess, in_tensor, out_tensor, should_succeed):
    """Use toco binary to check conversion from graphdef to tflite.

    Args:
      sess: Active TensorFlow session containing graph.
      in_tensor: TensorFlow tensor to use as input.
      out_tensor: TensorFlow tensor to use as output.
      should_succeed: Whether this is a valid conversion.
    """
    # Build all protos and extract graphdef
    graph_def = sess.graph_def
    toco_flags = toco_flags_pb2.TocoFlags()
    toco_flags.input_format = toco_flags_pb2.TENSORFLOW_GRAPHDEF
    toco_flags.output_format = toco_flags_pb2.TFLITE
    toco_flags.inference_input_type = types_pb2.FLOAT
    toco_flags.inference_type = types_pb2.FLOAT
    model_flags = model_flags_pb2.ModelFlags()
    input_array = model_flags.input_arrays.add()
    input_array.name = TensorName(in_tensor)
    input_array.shape.extend(map(int, in_tensor.get_shape()))
    model_flags.output_arrays.append(TensorName(out_tensor))
    # Shell out to run toco (in case it crashes).  The four temp files carry
    # model flags, toco flags, the input graphdef, and the produced flatbuffer.
    with tempfile.NamedTemporaryFile() as fp_toco, \
           tempfile.NamedTemporaryFile() as fp_model, \
           tempfile.NamedTemporaryFile() as fp_input, \
           tempfile.NamedTemporaryFile() as fp_output:
      fp_model.write(model_flags.SerializeToString())
      fp_toco.write(toco_flags.SerializeToString())
      fp_input.write(graph_def.SerializeToString())
      fp_model.flush()
      fp_toco.flush()
      fp_input.flush()
      tflite_bin = resource_loader.get_path_to_datafile("toco_from_protos")
      cmdline = " ".join([
          tflite_bin, fp_model.name, fp_toco.name, fp_input.name,
          fp_output.name
      ])
      exitcode = os.system(cmdline)
      if exitcode == 0:
        stuff = fp_output.read()
        # NOTE(review): file.read() never returns None, so `stuff is not None`
        # is always True — this only verifies exit code 0 implies
        # should_succeed, not that output was actually produced.  Confirm
        # whether a non-empty check was intended.
        self.assertEqual(stuff is not None, should_succeed)
      else:
        self.assertFalse(should_succeed)

  def test_toco(self):
    """Run a couple of TensorFlow graphs against TOCO through the python bin."""
    with tf.Session() as sess:
      img = tf.placeholder(name="img", dtype=tf.float32, shape=(1, 64, 64, 3))
      val = img + tf.constant([1., 2., 3.]) + tf.constant([1., 4., 4.])
      out = tf.identity(val, name="out")
      out2 = tf.sin(val, name="out2")
      # This is a valid model
      self._run(sess, img, out, True)
      # This uses an invalid function.
      # TODO(aselle): Check to make sure a warning is included.
      self._run(sess, img, out2, True)
      # This is an identity graph, which doesn't work
      self._run(sess, img, img, False)


if __name__ == "__main__":
  googletest.main()
apache-2.0
eendroroy/spikes
spikes/spike.py
1
3009
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys

# Pick a unicode-codepoint constructor that exists on the running interpreter.
if sys.version_info[0] == 3:
    uchr = chr
elif sys.version_info[0] == 2:
    uchr = unichr


class Spike(object):
    """Render numeric data as a sparkline built from unicode block glyphs."""

    # Each character cell has eight fill levels.
    __BAR_INDEX = 8

    # Glyphs from empty (index 0) to full block (index 8): U+2581 .. U+2588.
    __BARS = [' '] + [uchr(code) for code in range(9601, 9609)]

    __USAGE = 'Usage: spike [-l <number_of_lines>] list_of_numbers]'

    @staticmethod
    def usage():
        """Return the command-line usage string."""
        return Spike.__USAGE

    @staticmethod
    def __normalize(data, rows=1):
        """Scale every value into the integer level range [0, rows * 8]."""
        ceiling = rows * Spike.__BAR_INDEX
        scaled = list()
        peak = float(max(data))
        if peak == float(0):
            Spike.__normalize_zero_list(scaled, data, ceiling)
        else:
            Spike.__normalize_positive_list(scaled, data, peak, ceiling)
        return scaled

    @staticmethod
    def __normalize_zero_list(n_list, data, upper_limit):
        # Degenerate input whose maximum is zero: pass values through, capped.
        n_list.extend(
            int(value) if value < upper_limit else upper_limit
            for value in data
        )

    @staticmethod
    def __normalize_positive_list(n_list, data, max_item, upper_limit):
        # Proportional scaling; every non-zero value renders at least level 1.
        for value in data:
            level = int(round(float(value) / max_item * upper_limit, 0))
            if value == float(0):
                n_list.append(int(value))
            else:
                n_list.append(level if level > 0 else 1)

    @staticmethod
    def __spike_column(spiked, column, rows):
        # Split one column's level into full cells plus one fractional cell.
        whole = int(column / Spike.__BAR_INDEX)
        fraction = column - whole * Spike.__BAR_INDEX
        for filled in range(whole):
            spiked[filled].append(Spike.__BAR_INDEX)
        if whole < rows:
            spiked[whole].append(fraction)
        for rest in range(whole + 1, rows):
            spiked[rest].append(0)

    @staticmethod
    def __spike_data(data, rows):
        # Build one list of cell levels per output row.
        grid = [list() for _ in range(rows)]
        for column in data:
            Spike.__spike_column(grid, column, rows)
        return grid

    @staticmethod
    def __print_spike(row):
        # Translate one row of fill levels into its glyph string.
        return ''.join(Spike.__BARS[level] for level in row)

    @staticmethod
    def make_spike(spiked_data):
        """Join pre-computed rows of levels into the final multi-line string."""
        return '\n'.join(Spike.__print_spike(row) for row in spiked_data)

    @staticmethod
    def get_spike(data, rows=1):
        """Convert *data* (float-convertible values) into a sparkline string.

        Writes the usage message to stderr and exits with status 65 when a
        value cannot be parsed as a float.
        """
        try:
            values = [float(item) for item in data]
        except ValueError:
            sys.stderr.write(Spike.__USAGE)
            sys.exit(65)
        grid = Spike.__spike_data(Spike.__normalize(values, rows), rows)
        # Rows are built bottom-up; render them top-down.
        return Spike.make_spike(grid[::-1])


def spike(data, rows=1):
    """Convenience wrapper around Spike.get_spike."""
    return Spike.get_spike(data, rows)
mit
MrCubanfrog/NorDB
nordb/database/instrument2sql.py
1
2379
""" This module contains all functions and classes for reading a instrument file in `CSS3.0 format`_ and pushing it into the database .. _CSS3.0 format: ftp://ftp.pmel.noaa.gov/newport/lau/tphase/data/css_wfdisc.pdf Functions and Classes --------------------- """ import unidecode from nordb.nordic.instrument import Instrument from nordb.core import usernameUtilities from nordb.core.utils import stringToDate INSTRUMENT_INSERT = ( "INSERT INTO instrument " "( css_id, instrument_name, instrument_type, " " band, digital, samprate, ncalib, " " ncalper, dir, dfile, rsptype, " " lddate, response_id) " "VALUES " "(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) " ) FIND_RESPONSE = ( "SELECT " " response.id " "FROM " " response " "WHERE " " response.file_name = %s " ) def getResponseId(response_file_name): """ Function for finding the correct response for the instrument :param String response_file_name: filename of the response :returns: id of the response in the database """ conn = usernameUtilities.log2nordb() cur = conn.cursor() response_id = -1 try: cur.execute(FIND_RESPONSE, (response_file_name,)) response_id = cur.fetchone()[0] except Exception as e: conn.close() raise e return response_id def insertInstrument2Database(instrument): """ Function for inserting the instrument array to the database :param Instrument instrument: instrument that will be inserted to the database """ conn = usernameUtilities.log2nordb() cur = conn.cursor() if instrument.css_id == -1: cur.execute("SELECT MAX(css_id) FROM instrument") ans = cur.fetchone() if ans[0] is None: instrument.css_id = 1 else: instrument.css_id = ans[0] + 1 try: instrument.response_id = getResponseId(instrument.dfile) cur.execute(INSTRUMENT_INSERT, instrument.getAsList()) except Exception as e: conn.close() raise e conn.commit() conn.close()
mit
anarcheuz/CTF
hacklu-2014/exploit/oreo/pwn.py
1
2399
# Exploitation helper: wraps a caller-supplied arbitrary-read primitive
# (`leak`) to walk a remote process's ELF structures and resolve symbols.
import socket
from binascii import *
import struct
import time
import telnetlib

# Module-level exploit state: remote socket, leak primitive, architecture.
s = None
leak = None
arch = None


def init(host, port, archi='x86'):
    """Open the TCP connection to the target and record the architecture."""
    global s
    global arch
    arch = archi
    s = socket.create_connection((host, port))
    return s


def init_leak(l):
    """Register the leak primitive: a callable mapping address -> bytes read."""
    global leak
    leak = l


def q(a):
    """Pack an integer as 32-bit little-endian."""
    return struct.pack("<I", a)


def r(a):
    """Unpack a little-endian byte string into an integer."""
    from binascii import hexlify
    return int('0x'+hexlify(a[::-1]), 16)


def read_until(string=""):
    """Receive from the socket until *string* appears (or the peer closes)."""
    global s
    text = ""
    while text.find(string) == -1:
        res = s.recv(1024)
        if res == "":
            # Peer closed the connection before the marker arrived.
            return text
        text += res
    return text


def interact():
    """Hand the raw socket over to an interactive telnet-style session."""
    global s
    t = telnetlib.Telnet()
    t.sock = s
    t.interact()


def get_elf(addr):
    """Scan backwards page by page from *addr* for the ELF magic.

    Returns the load base of the module containing *addr*.
    NOTE(review): the mask only clears the low 12 bits; for x64 addresses
    this assumes the base fits the same 32-bit page pattern — confirm.
    """
    global s
    start = addr & 0xfffff000
    pages = 0
    while leak(start-pages)[0:4] != '\x7fELF':
        pages += 0x1000
    return start-pages


def get_prog_headers(base):
    """Read e_phoff from the ELF header (offset 28 on ELF32, 32 on ELF64)."""
    global s
    global arch
    if arch == 'x86':
        return r(leak(base+28))
    elif arch == 'x64':
        return r(leak(base+32))


def get_dynamic(prog_headers):
    """Walk program headers for the PT_DYNAMIC entry (p_type == 2).

    Returns the word stored `field_off` bytes after p_type of the matching
    entry.  Entry stride is sizeof(Elf32_Phdr)=32 / sizeof(Elf64_Phdr)=56.
    """
    global s
    global arch
    i = 0
    if arch == 'x86':
        add = 32        # sizeof(Elf32_Phdr)
        field_off = 4   # field read from the matching entry (p_offset on ELF32)
    elif arch == 'x64':
        add = 56        # sizeof(Elf64_Phdr)
        field_off = 8
    while 1:
        section_type = r(leak(prog_headers+i))
        if section_type == 2:   # PT_DYNAMIC
            return r(leak(prog_headers+i+field_off))
        i += add


def get_str_symtab(dynamic):
    """Scan the dynamic section for DT_STRTAB (5) and DT_SYMTAB (6).

    Returns the two table pointers as a (strtab, symtab) tuple.
    Entry stride is sizeof(Elf32_Dyn)=8 / sizeof(Elf64_Dyn)=16, with the
    d_un value 4/8 bytes after the tag.
    """
    global s
    global arch
    i = strtab = symtab = 0
    if arch == 'x86':
        add = 8         # sizeof(Elf32_Dyn)
        field_off = 4   # offset of d_un within the entry
    elif arch == 'x64':
        add = 16        # sizeof(Elf64_Dyn)
        field_off = 8
    while strtab == 0 or symtab == 0:
        typeDyn = r(leak(dynamic+i))
        if typeDyn == 5:    # DT_STRTAB
            strtab = r(leak(dynamic+i+field_off)[:4])
        if typeDyn == 6:    # DT_SYMTAB
            symtab = r(leak(dynamic+i+field_off)[:4])
        i += add
    return (strtab, symtab)


def get_symbol(symbol, strtab, symtab):
    """Linear-scan the symbol table for *symbol* and return its st_value.

    Symbol stride is sizeof(Elf32_Sym)=16 / sizeof(Elf64_Sym)=24; st_value
    sits 4/8 bytes into the entry.  Each candidate's name is leaked from
    strtab at the entry's st_name offset and compared to *symbol*.
    """
    global s
    global arch
    # NOTE(review): scanning starts 1400 entries in — presumably a speed
    # hack tuned to this target's symbol table; confirm before reuse.
    i = 16*1400
    if arch == 'x86':
        add = 16        # sizeof(Elf32_Sym)
        field_off = 4   # offset of st_value (ELF32)
    elif arch == 'x64':
        add = 24        # sizeof(Elf64_Sym)
        field_off = 8
    while 1:
        l = leak(symtab+i)
        if l != '':
            offset = r(l)
            l = leak(strtab+offset)
            #print l + ' : ' + str(i/16)
            # Throttle so the remote leak primitive is not hammered.
            time.sleep(0.01)
            if l == symbol:
                return r(leak(symtab+i+field_off))
        i += add
mit
menegon/geonode
geonode/people/enumerations.py
35
2025
# -*- coding: utf-8 -*- ######################################################################### # # Copyright (C) 2012 OpenPlans # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ######################################################################### from django.utils.translation import ugettext as _ CONTACT_FIELDS = [ "name", "organization", "position", "voice", "facsimile", "delivery_point", "city", "administrative_area", "postal_code", "country", "email", "role" ] ROLE_VALUES = ( ('author', _('party who authored the resource')), ('processor', _('party who has processed the data in a manner such that the resource has been modified')), ('publisher', _('party who published the resource')), ('custodian', _('party that accepts accountability and responsibility for the data and ensures \ appropriate care and maintenance of the resource')), ('pointOfContact', _('party who can be contacted for acquiring knowledge about or acquisition of the resource')), ('distributor', _('party who distributes the resource')), ('user', _('party who uses the resource')), ('resourceProvider', _('party that supplies the resource')), ('originator', _('party who created the resource')), ('owner', _('party that owns the resource')), ('principalInvestigator', _('key party responsible for gathering information and conducting research')), )
gpl-3.0
CameronLonsdale/sec-tools
python2/lib/python2.7/site-packages/pip/vcs/subversion.py
343
9350
"""Subversion support for pip's VCS machinery.

Wraps the ``svn`` command line to export/checkout/update repositories and
to recover URL + revision information from working copies.
"""
from __future__ import absolute_import

import logging
import os
import re

from pip._vendor.six.moves.urllib import parse as urllib_parse

from pip.index import Link
from pip.utils import rmtree, display_path
from pip.utils.logging import indent_log
from pip.vcs import vcs, VersionControl

# Patterns for the pre-1.7 XML-flavoured '.svn/entries' file.
_svn_xml_url_re = re.compile('url="([^"]+)"')
_svn_rev_re = re.compile('committed-rev="(\d+)"')
# Patterns for plain 'svn info' output.
_svn_url_re = re.compile(r'URL: (.+)')
_svn_revision_re = re.compile(r'Revision: (.+)')
# Patterns for 'svn info --xml' output (subversion >= 1.7).
_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')

logger = logging.getLogger(__name__)


class Subversion(VersionControl):
    name = 'svn'
    dirname = '.svn'
    repo_name = 'checkout'
    schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')

    def get_info(self, location):
        """Returns (url, revision), where both are strings.

        Either element degrades to None when 'svn info' output cannot be
        parsed; LANG=C pins the output language so the regexes match.
        """
        assert not location.rstrip('/').endswith(self.dirname), \
            'Bad directory: %s' % location
        output = self.run_command(
            ['info', location],
            show_stdout=False,
            extra_environ={'LANG': 'C'},
        )
        match = _svn_url_re.search(output)
        if not match:
            logger.warning(
                'Cannot determine URL of svn checkout %s',
                display_path(location),
            )
            logger.debug('Output that cannot be parsed: \n%s', output)
            return None, None
        url = match.group(1).strip()
        match = _svn_revision_re.search(output)
        if not match:
            logger.warning(
                'Cannot determine revision of svn checkout %s',
                display_path(location),
            )
            logger.debug('Output that cannot be parsed: \n%s', output)
            return url, None
        return url, match.group(1)

    def export(self, location):
        """Export the svn repository at the url to the destination location"""
        url, rev = self.get_url_rev()
        rev_options = get_rev_options(url, rev)
        # Credentials travel via --username/--password flags, not the URL.
        url = self.remove_auth_from_url(url)
        logger.info('Exporting svn repository %s to %s', url, location)
        with indent_log():
            if os.path.exists(location):
                # Subversion doesn't like to check out over an existing
                # directory --force fixes this, but was only added in svn 1.5
                rmtree(location)
            self.run_command(
                ['export'] + rev_options + [url, location],
                show_stdout=False)

    def switch(self, dest, url, rev_options):
        """Point the checkout at *dest* to a different repository URL."""
        self.run_command(['switch'] + rev_options + [url, dest])

    def update(self, dest, rev_options):
        """Update the checkout at *dest* to the requested revision."""
        self.run_command(['update'] + rev_options + [dest])

    def obtain(self, dest):
        """Check the repository out into *dest* (unless it is already there)."""
        url, rev = self.get_url_rev()
        rev_options = get_rev_options(url, rev)
        url = self.remove_auth_from_url(url)
        if rev:
            rev_display = ' (to revision %s)' % rev
        else:
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.info(
                'Checking out %s%s to %s',
                url,
                rev_display,
                display_path(dest),
            )
            self.run_command(['checkout', '-q'] + rev_options + [url, dest])

    def get_location(self, dist, dependency_links):
        """Return the dependency link whose #egg= fragment matches *dist*."""
        for url in dependency_links:
            egg_fragment = Link(url).egg_fragment
            if not egg_fragment:
                continue
            if '-' in egg_fragment:
                # FIXME: will this work when a package has - in the name?
                key = '-'.join(egg_fragment.split('-')[:-1]).lower()
            else:
                key = egg_fragment
            if key == dist.key:
                return url.split('#', 1)[0]
        return None

    def get_revision(self, location):
        """
        Return the maximum revision for all files under a given location
        """
        # Note: taken from setuptools.command.egg_info
        revision = 0
        for base, dirs, files in os.walk(location):
            if self.dirname not in dirs:
                dirs[:] = []
                continue    # no sense walking uncontrolled subdirs
            dirs.remove(self.dirname)
            entries_fn = os.path.join(base, self.dirname, 'entries')
            if not os.path.exists(entries_fn):
                # FIXME: should we warn?
                continue
            dirurl, localrev = self._get_svn_url_rev(base)
            if base == location:
                base_url = dirurl + '/'   # save the root url
            elif not dirurl or not dirurl.startswith(base_url):
                dirs[:] = []
                continue    # not part of the same svn tree, skip it
            revision = max(revision, localrev)
        return revision

    def get_url_rev(self):
        """Split out the URL and revision, preserving the svn+ssh scheme."""
        # hotfix the URL scheme after removing svn+ from svn+ssh:// readd it
        url, rev = super(Subversion, self).get_url_rev()
        if url.startswith('ssh://'):
            url = 'svn+' + url
        return url, rev

    def get_url(self, location):
        """Return the repository URL for the checkout containing *location*."""
        # In cases where the source is in a subdirectory, not alongside
        # setup.py we have to look up in the location until we find a real
        # setup.py
        orig_location = location
        while not os.path.exists(os.path.join(location, 'setup.py')):
            last_location = location
            location = os.path.dirname(location)
            if location == last_location:
                # We've traversed up to the root of the filesystem without
                # finding setup.py
                logger.warning(
                    "Could not find setup.py for directory %s (tried all "
                    "parent directories)",
                    orig_location,
                )
                return None
        return self._get_svn_url_rev(location)[0]

    def _get_svn_url_rev(self, location):
        """Return (url, max revision) for the working copy at *location*.

        Handles three on-disk formats: the plain-text entries file of
        svn 1.4/1.5/1.6 (first line '8', '9' or '10'), the older XML
        entries file, and svn >= 1.7 where 'svn info --xml' must be run.
        """
        from pip.exceptions import InstallationError

        entries_path = os.path.join(location, self.dirname, 'entries')
        if os.path.exists(entries_path):
            with open(entries_path) as f:
                data = f.read()
        else:
            # subversion >= 1.7 does not have the 'entries' file
            data = ''
        if (data.startswith('8') or
                data.startswith('9') or
                data.startswith('10')):
            # Records are separated by '\n\x0c\n'; fields by newlines.
            data = list(map(str.splitlines, data.split('\n\x0c\n')))
            del data[0][0]  # get rid of the '8'
            url = data[0][3]
            revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0]
        elif data.startswith('<?xml'):
            match = _svn_xml_url_re.search(data)
            if not match:
                raise ValueError('Badly formatted data: %r' % data)
            url = match.group(1)    # get repository URL
            revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
        else:
            try:
                # subversion >= 1.7
                xml = self.run_command(
                    ['info', '--xml', location],
                    show_stdout=False,
                )
                url = _svn_info_xml_url_re.search(xml).group(1)
                revs = [
                    int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)
                ]
            except InstallationError:
                url, revs = None, []
        if revs:
            rev = max(revs)
        else:
            rev = 0
        return url, rev

    def get_src_requirement(self, dist, location):
        """Build an 'svn+URL@rev#egg=name' requirement string for *dist*."""
        repo = self.get_url(location)
        if repo is None:
            return None
        # FIXME: why not project name?
        egg_project_name = dist.egg_name().split('-', 1)[0]
        rev = self.get_revision(location)
        return 'svn+%s@%s#egg=%s' % (repo, rev, egg_project_name)

    def check_version(self, dest, rev_options):
        """Always assume the versions don't match"""
        return False

    @staticmethod
    def remove_auth_from_url(url):
        # Return a copy of url with 'username:password@' removed.
        # username/pass params are passed to subversion through flags
        # and are not recognized in the url.

        # parsed url
        purl = urllib_parse.urlsplit(url)
        stripped_netloc = \
            purl.netloc.split('@')[-1]

        # stripped url
        url_pieces = (
            purl.scheme, stripped_netloc, purl.path, purl.query, purl.fragment
        )
        surl = urllib_parse.urlunsplit(url_pieces)
        return surl


def get_rev_options(url, rev):
    """Build the svn command-line options for *rev* plus any credentials
    embedded in *url* (exposed as --username/--password flags)."""
    if rev:
        rev_options = ['-r', rev]
    else:
        rev_options = []

    r = urllib_parse.urlsplit(url)
    if hasattr(r, 'username'):
        # >= Python-2.5
        username, password = r.username, r.password
    else:
        # Older urlsplit results: pull credentials out of the netloc by hand.
        netloc = r[1]
        if '@' in netloc:
            auth = netloc.split('@')[0]
            if ':' in auth:
                username, password = auth.split(':', 1)
            else:
                username, password = auth, None
        else:
            username, password = None, None

    if username:
        rev_options += ['--username', username]
    if password:
        rev_options += ['--password', password]
    return rev_options


vcs.register(Subversion)
mit
codingforentrepreneurs/ecommerce-2-api
src/carts/views.py
1
14126
import ast import base64 import braintree from django.conf import settings from django.contrib import messages from django.contrib.auth.forms import AuthenticationForm from django.core.urlresolvers import reverse from django.http import HttpResponseRedirect, Http404, JsonResponse from django.shortcuts import render, get_object_or_404, redirect from django.views.generic.base import View from django.views.generic.detail import SingleObjectMixin, DetailView from django.views.generic.edit import FormMixin from rest_framework import filters from rest_framework import generics from rest_framework import status from rest_framework.authentication import BasicAuthentication, SessionAuthentication from rest_framework.permissions import IsAuthenticated, IsAuthenticatedOrReadOnly from rest_framework.response import Response from rest_framework.reverse import reverse as api_reverse from rest_framework.views import APIView from orders.forms import GuestCheckoutForm from orders.mixins import CartOrderMixin from orders.models import UserCheckout, Order, UserAddress from orders.serializers import OrderSerializer, FinalizedOrderSerializer from products.models import Variation from .mixins import TokenMixin, CartUpdateAPIMixin, CartTokenMixin from .models import Cart, CartItem from .serializers import CartItemSerializer, CheckoutSerializer # # abc123 """ { "order_token": "eydvcmRlcl9pZCc6IDU1LCAndXNlcl9jaGVja291dF9pZCc6IDExfQ==", "payment_method_nonce": "2bd23ca6-ae17-4bed-85f6-4d00aabcc3b0" } Run Python server: python -m SimpleHTTPServer 8080 """ class CheckoutFinalizeAPIView(TokenMixin, APIView): def get(self, request, format=None): response = {} order_token = request.GET.get('order_token') if order_token: checkout_id = self.parse_token(order_token).get("user_checkout_id") if checkout_id: checkout = UserCheckout.objects.get(id=checkout_id) client_token = checkout.get_client_token() response["client_token"] = client_token return Response(response) else: response["message"] = "This 
method is not allowed" return Response(response, status=status.HTTP_405_METHOD_NOT_ALLOWED) def post(self, request, format=None): data = request.data response = {} serializer = FinalizedOrderSerializer(data=data) if serializer.is_valid(raise_exception=True): request_data = serializer.data order_id = request_data.get("order_id") order = Order.objects.get(id=order_id) if not order.is_complete: order_total = order.order_total nonce = request_data.get("payment_method_nonce") if nonce: result = braintree.Transaction.sale({ "amount": order_total, "payment_method_nonce": nonce, "billing": { "postal_code": "%s" %(order.billing_address.zipcode), }, "options": { "submit_for_settlement": True } }) success = result.is_success if success: #result.transaction.id to order order.mark_completed(order_id=result.transaction.id) #order.mark_completed(order_id="abc12344423") order.cart.is_complete() response["message"] = "Your order has been completed." response["final_order_id"] = order.order_id response["success"] = True else: #messages.success(request, "There was a problem with your order.") error_message = result.message #error_message = "Error" response["message"] = error_message response["success"] = False else: response["message"] = "Ordered has already been completed." 
response["success"] = False return Response(response) class CheckoutAPIView(TokenMixin, APIView): def post(self, request, format=None): data = request.data serializer = CheckoutSerializer(data=data) if serializer.is_valid(raise_exception=True): #print "valid data!!@!@" data = serializer.data user_checkout_id = data.get("user_checkout_id") cart_id = data.get("cart_id") billing_address = data.get("billing_address") shipping_address = data.get("shipping_address") user_checkout = UserCheckout.objects.get(id=user_checkout_id) cart_obj = Cart.objects.get(id=cart_id) s_a = UserAddress.objects.get(id=shipping_address) b_a = UserAddress.objects.get(id=billing_address) order, created = Order.objects.get_or_create(cart=cart_obj, user=user_checkout) if not order.is_complete: order.shipping_address = s_a order.billing_address = b_a order.save() order_data = { "order_id": order.id, "user_checkout_id": user_checkout_id } order_token = self.create_token(order_data) response = { "order_token": order_token } return Response(response) # def get(self, request, format=None): # data, cart_obj, response_status = self.get_cart_from_token() # user_checkout_token = self.request.GET.get("checkout_token") # user_checkout_data = self.parse_token(user_checkout_token) # user_checkout_id = user_checkout_data.get("user_checkout_id") # billing_address = self.request.GET.get("billing") # shipping_address = self.request.GET.get("shipping") # billing_obj, shipping_obj = None, None # try: # user_checkout = UserCheckout.objects.get(id = int(user_checkout_id)) # except: # user_checkout = None # if user_checkout == None: # data = { # "message": "A user or guest user is required to continue." 
# } # response_status = status.HTTP_400_BAD_REQUEST # return Response(data, status=response_status) # if billing_address: # try: # billing_obj = UserAddress.objects.get(user=user_checkout, id=int(billing_address)) # except: # pass # if shipping_address: # try: # shipping_obj = UserAddress.objects.get(user=user_checkout, id=int(shipping_address)) # except: # pass # if not billing_obj or not shipping_obj: # data = { # "message": "A valid billing or shipping is needed." # } # response_status = status.HTTP_400_BAD_REQUEST # return Response(data, status=response_status) # if cart_obj: # if cart_obj.items.count() == 0: # data = { # "message": "Your cart is Empty." # } # response_status = status.HTTP_400_BAD_REQUEST # else: # order, created = Order.objects.get_or_create(cart=cart_obj) # if not order.user: # order.user = user_checkout # if order.is_complete: # order.cart.is_complete() # data = { # "message": "This order has been completed." # } # return Response(data) # order.billing_address = billing_obj # order.shipping_address = shipping_obj # order.save() # data = OrderSerializer(order).data # return Response(data, status=response_status) class CartAPIView(CartTokenMixin, CartUpdateAPIMixin, APIView): # authentication_classes = [SessionAuthentication] # permission_classes = [IsAuthenticated] token_param = "token" cart = None def get_cart(self): data, cart_obj, response_status = self.get_cart_from_token() if cart_obj == None or not cart_obj.active: cart = Cart() cart.tax_percentage = 0.075 if self.request.user.is_authenticated(): cart.user = self.request.user cart.save() data = { "cart_id": str(cart.id) } self.create_token(data) cart_obj = cart return cart_obj def get(self, request, format=None): cart = self.get_cart() self.cart = cart self.update_cart() #token = self.create_token(cart.id) items = CartItemSerializer(cart.cartitem_set.all(), many=True) print cart.items.all() data = { "token": self.token, "cart" : cart.id, "total": cart.total, "subtotal": cart.subtotal, 
"tax_total": cart.tax_total, "count": cart.items.count(), "items": items.data, } return Response(data) if settings.DEBUG: braintree.Configuration.configure(braintree.Environment.Sandbox, merchant_id=settings.BRAINTREE_MERCHANT_ID, public_key=settings.BRAINTREE_PUBLIC, private_key=settings.BRAINTREE_PRIVATE) class ItemCountView(View): def get(self, request, *args, **kwargs): if request.is_ajax(): cart_id = self.request.session.get("cart_id") if cart_id == None: count = 0 else: cart = Cart.objects.get(id=cart_id) count = cart.items.count() request.session["cart_item_count"] = count return JsonResponse({"count": count}) else: raise Http404 class CartView(SingleObjectMixin, View): model = Cart template_name = "carts/view.html" def get_object(self, *args, **kwargs): self.request.session.set_expiry(0) #5 minutes cart_id = self.request.session.get("cart_id") if cart_id == None: cart = Cart() cart.tax_percentage = 0.075 cart.save() cart_id = cart.id self.request.session["cart_id"] = cart_id cart = Cart.objects.get(id=cart_id) if self.request.user.is_authenticated(): cart.user = self.request.user cart.save() return cart def get(self, request, *args, **kwargs): cart = self.get_object() item_id = request.GET.get("item") delete_item = request.GET.get("delete", False) flash_message = "" item_added = False if item_id: item_instance = get_object_or_404(Variation, id=item_id) qty = request.GET.get("qty", 1) try: if int(qty) < 1: delete_item = True except: raise Http404 cart_item, created = CartItem.objects.get_or_create(cart=cart, item=item_instance) if created: flash_message = "Successfully added to the cart" item_added = True if delete_item: flash_message = "Item removed successfully." cart_item.delete() else: if not created: flash_message = "Quantity has been updated successfully." 
cart_item.quantity = qty cart_item.save() if not request.is_ajax(): return HttpResponseRedirect(reverse("cart")) #return cart_item.cart.get_absolute_url() if request.is_ajax(): try: total = cart_item.line_item_total except: total = None try: subtotal = cart_item.cart.subtotal except: subtotal = None try: cart_total = cart_item.cart.total except: cart_total = None try: tax_total = cart_item.cart.tax_total except: tax_total = None try: total_items = cart_item.cart.items.count() except: total_items = 0 data = { "deleted": delete_item, "item_added": item_added, "line_total": total, "subtotal": subtotal, "cart_total": cart_total, "tax_total": tax_total, "flash_message": flash_message, "total_items": total_items } return JsonResponse(data) context = { "object": self.get_object() } template = self.template_name return render(request, template, context) class CheckoutView(CartOrderMixin, FormMixin, DetailView): model = Cart template_name = "carts/checkout_view.html" form_class = GuestCheckoutForm def get_object(self, *args, **kwargs): cart = self.get_cart() if cart == None: return None return cart def get_context_data(self, *args, **kwargs): context = super(CheckoutView, self).get_context_data(*args, **kwargs) user_can_continue = False user_check_id = self.request.session.get("user_checkout_id") if self.request.user.is_authenticated(): user_can_continue = True user_checkout, created = UserCheckout.objects.get_or_create(email=self.request.user.email) user_checkout.user = self.request.user user_checkout.save() context["client_token"] = user_checkout.get_client_token() self.request.session["user_checkout_id"] = user_checkout.id elif not self.request.user.is_authenticated() and user_check_id == None: context["login_form"] = AuthenticationForm() context["next_url"] = self.request.build_absolute_uri() else: pass if user_check_id != None: user_can_continue = True if not self.request.user.is_authenticated(): #GUEST USER user_checkout_2 = UserCheckout.objects.get(id=user_check_id) 
context["client_token"] = user_checkout_2.get_client_token() #if self.get_cart() is not None: context["order"] = self.get_order() context["user_can_continue"] = user_can_continue context["form"] = self.get_form() return context def post(self, request, *args, **kwargs): self.object = self.get_object() form = self.get_form() if form.is_valid(): email = form.cleaned_data.get("email") user_checkout, created = UserCheckout.objects.get_or_create(email=email) request.session["user_checkout_id"] = user_checkout.id return self.form_valid(form) else: return self.form_invalid(form) def get_success_url(self): return reverse("checkout") def get(self, request, *args, **kwargs): get_data = super(CheckoutView, self).get(request, *args, **kwargs) cart = self.get_object() if cart == None: return redirect("cart") new_order = self.get_order() user_checkout_id = request.session.get("user_checkout_id") if user_checkout_id != None: user_checkout = UserCheckout.objects.get(id=user_checkout_id) if new_order.billing_address == None or new_order.shipping_address == None: return redirect("order_address") new_order.user = user_checkout new_order.save() return get_data class CheckoutFinalView(CartOrderMixin, View): def post(self, request, *args, **kwargs): order = self.get_order() order_total = order.order_total nonce = request.POST.get("payment_method_nonce") if nonce: result = braintree.Transaction.sale({ "amount": order_total, "payment_method_nonce": nonce, "billing": { "postal_code": "%s" %(order.billing_address.zipcode), }, "options": { "submit_for_settlement": True } }) if result.is_success: #result.transaction.id to order order.mark_completed(order_id=result.transaction.id) messages.success(request, "Thank you for your order.") del request.session["cart_id"] del request.session["order_id"] else: #messages.success(request, "There was a problem with your order.") messages.success(request, "%s" %(result.message)) return redirect("checkout") return redirect("order_detail", pk=order.pk) def 
get(self, request, *args, **kwargs): return redirect("checkout")
mit
8l/beri
cheritest/trunk/tests/branch/test_raw_jalr.py
2
1991
#- # Copyright (c) 2011 Steven J. Murdoch # All rights reserved. # # This software was developed by SRI International and the University of # Cambridge Computer Laboratory under DARPA/AFRL contract FA8750-10-C-0237 # ("CTSRD"), as part of the DARPA CRASH research programme. # # @BERI_LICENSE_HEADER_START@ # # Licensed to BERI Open Systems C.I.C. (BERI) under one or more contributor # license agreements. See the NOTICE file distributed with this work for # additional information regarding copyright ownership. BERI licenses this # file to you under the BERI Hardware-Software License, Version 1.0 (the # "License"); you may not use this file except in compliance with the # License. You may obtain a copy of the License at: # # http://www.beri-open-systems.org/legal/license-1-0.txt # # Unless required by applicable law or agreed to in writing, Work distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. # # @BERI_LICENSE_HEADER_END@ # from beritest_tools import BaseBERITestCase class test_raw_jalr(BaseBERITestCase): def test_before_jalr(self): self.assertRegisterEqual(self.MIPS.a0, 1, "instruction before jalr missed") def test_jalr_branch_delay(self): self.assertRegisterEqual(self.MIPS.a3, self.MIPS.a1, "instruction in branch-delay slot missed or register forwarding didn't work") def test_jalr_skipped(self): self.assertRegisterNotEqual(self.MIPS.a4, 4, "jump didn't happen") def test_jalr_target(self): self.assertRegisterEqual(self.MIPS.a5, 5, "instruction at jump target didn't run") def test_jalr_ra(self): self.assertRegisterEqual(self.MIPS.ra, 0, "ra improperly set after jalr") def test_jalr_reg(self): self.assertRegisterEqual(self.MIPS.a1, self.MIPS.a2, "a2 not set to return address by jalr")
apache-2.0
ticosax/django
tests/admin_changelist/tests.py
12
34783
from __future__ import unicode_literals import datetime from django.contrib import admin from django.contrib.admin.models import LogEntry from django.contrib.admin.options import IncorrectLookupParameters from django.contrib.admin.templatetags.admin_list import pagination from django.contrib.admin.tests import AdminSeleniumWebDriverTestCase from django.contrib.admin.views.main import ALL_VAR, SEARCH_VAR, ChangeList from django.contrib.auth.models import User from django.contrib.contenttypes.models import ContentType from django.core.urlresolvers import reverse from django.template import Context, Template from django.test import TestCase, override_settings from django.test.client import RequestFactory from django.utils import formats, six from .admin import ( BandAdmin, ChildAdmin, ChordsBandAdmin, CustomPaginationAdmin, CustomPaginator, DynamicListDisplayChildAdmin, DynamicListDisplayLinksChildAdmin, DynamicListFilterChildAdmin, DynamicSearchFieldsChildAdmin, FilteredChildAdmin, GroupAdmin, InvitationAdmin, NoListDisplayLinksParentAdmin, ParentAdmin, QuartetAdmin, SwallowAdmin, site as custom_site, ) from .models import ( Band, Child, ChordsBand, ChordsMusician, CustomIdUser, Event, Genre, Group, Invitation, Membership, Musician, OrderedObject, Parent, Quartet, Swallow, UnorderedObject, ) @override_settings(ROOT_URLCONF="admin_changelist.urls") class ChangeListTests(TestCase): def setUp(self): self.factory = RequestFactory() def _create_superuser(self, username): return User.objects.create(username=username, is_superuser=True) def _mocked_authenticated_request(self, url, user): request = self.factory.get(url) request.user = user return request def test_select_related_preserved(self): """ Regression test for #10348: ChangeList.get_queryset() shouldn't overwrite a custom select_related provided by ModelAdmin.get_queryset(). 
""" m = ChildAdmin(Child, admin.site) request = self.factory.get('/child/') list_select_related = m.get_list_select_related(request) cl = ChangeList(request, Child, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) self.assertEqual(cl.queryset.query.select_related, { 'parent': {'name': {}} }) def test_select_related_as_tuple(self): ia = InvitationAdmin(Invitation, admin.site) request = self.factory.get('/invitation/') list_select_related = ia.get_list_select_related(request) cl = ChangeList(request, Child, ia.list_display, ia.list_display_links, ia.list_filter, ia.date_hierarchy, ia.search_fields, list_select_related, ia.list_per_page, ia.list_max_show_all, ia.list_editable, ia) self.assertEqual(cl.queryset.query.select_related, {'player': {}}) def test_select_related_as_empty_tuple(self): ia = InvitationAdmin(Invitation, admin.site) ia.list_select_related = () request = self.factory.get('/invitation/') list_select_related = ia.get_list_select_related(request) cl = ChangeList(request, Child, ia.list_display, ia.list_display_links, ia.list_filter, ia.date_hierarchy, ia.search_fields, list_select_related, ia.list_per_page, ia.list_max_show_all, ia.list_editable, ia) self.assertEqual(cl.queryset.query.select_related, False) def test_get_select_related_custom_method(self): class GetListSelectRelatedAdmin(admin.ModelAdmin): list_display = ('band', 'player') def get_list_select_related(self, request): return ('band', 'player') ia = GetListSelectRelatedAdmin(Invitation, admin.site) request = self.factory.get('/invitation/') list_select_related = ia.get_list_select_related(request) cl = ChangeList(request, Child, ia.list_display, ia.list_display_links, ia.list_filter, ia.date_hierarchy, ia.search_fields, list_select_related, ia.list_per_page, ia.list_max_show_all, ia.list_editable, ia) self.assertEqual(cl.queryset.query.select_related, {'player': {}, 'band': 
{}}) def test_result_list_empty_changelist_value(self): """ Regression test for #14982: EMPTY_CHANGELIST_VALUE should be honored for relationship fields """ new_child = Child.objects.create(name='name', parent=None) request = self.factory.get('/child/') m = ChildAdmin(Child, admin.site) list_display = m.get_list_display(request) list_display_links = m.get_list_display_links(request, list_display) cl = ChangeList(request, Child, list_display, list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) cl.formset = None template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}') context = Context({'cl': cl}) table_output = template.render(context) link = reverse('admin:admin_changelist_child_change', args=(new_child.id,)) row_html = '<tbody><tr class="row1"><th class="field-name"><a href="%s">name</a></th><td class="field-parent nowrap">-</td></tr></tbody>' % link self.assertNotEqual(table_output.find(row_html), -1, 'Failed to find expected row element: %s' % table_output) def test_result_list_html(self): """ Verifies that inclusion tag result_list generates a table when with default ModelAdmin settings. 
""" new_parent = Parent.objects.create(name='parent') new_child = Child.objects.create(name='name', parent=new_parent) request = self.factory.get('/child/') m = ChildAdmin(Child, admin.site) list_display = m.get_list_display(request) list_display_links = m.get_list_display_links(request, list_display) cl = ChangeList(request, Child, list_display, list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) cl.formset = None template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}') context = Context({'cl': cl}) table_output = template.render(context) link = reverse('admin:admin_changelist_child_change', args=(new_child.id,)) row_html = '<tbody><tr class="row1"><th class="field-name"><a href="%s">name</a></th><td class="field-parent nowrap">Parent object</td></tr></tbody>' % link self.assertNotEqual(table_output.find(row_html), -1, 'Failed to find expected row element: %s' % table_output) def test_result_list_editable_html(self): """ Regression tests for #11791: Inclusion tag result_list generates a table and this checks that the items are nested within the table element tags. Also a regression test for #13599, verifies that hidden fields when list_editable is enabled are rendered in a div outside the table. 
""" new_parent = Parent.objects.create(name='parent') new_child = Child.objects.create(name='name', parent=new_parent) request = self.factory.get('/child/') m = ChildAdmin(Child, admin.site) # Test with list_editable fields m.list_display = ['id', 'name', 'parent'] m.list_display_links = ['id'] m.list_editable = ['name'] cl = ChangeList(request, Child, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) FormSet = m.get_changelist_formset(request) cl.formset = FormSet(queryset=cl.result_list) template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}') context = Context({'cl': cl}) table_output = template.render(context) # make sure that hidden fields are in the correct place hiddenfields_div = '<div class="hiddenfields"><input type="hidden" name="form-0-id" value="%d" id="id_form-0-id" /></div>' % new_child.id self.assertInHTML(hiddenfields_div, table_output, msg_prefix='Failed to find hidden fields') # make sure that list editable fields are rendered in divs correctly editable_name_field = '<input name="form-0-name" value="name" class="vTextField" maxlength="30" type="text" id="id_form-0-name" />' self.assertInHTML('<td class="field-name">%s</td>' % editable_name_field, table_output, msg_prefix='Failed to find "name" list_editable field') def test_result_list_editable(self): """ Regression test for #14312: list_editable with pagination """ new_parent = Parent.objects.create(name='parent') for i in range(200): Child.objects.create(name='name %s' % i, parent=new_parent) request = self.factory.get('/child/', data={'p': -1}) # Anything outside range m = ChildAdmin(Child, admin.site) # Test with list_editable fields m.list_display = ['id', 'name', 'parent'] m.list_display_links = ['id'] m.list_editable = ['name'] self.assertRaises(IncorrectLookupParameters, lambda: ChangeList(request, Child, m.list_display, 
m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m)) def test_custom_paginator(self): new_parent = Parent.objects.create(name='parent') for i in range(200): Child.objects.create(name='name %s' % i, parent=new_parent) request = self.factory.get('/child/') m = CustomPaginationAdmin(Child, admin.site) cl = ChangeList(request, Child, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) cl.get_results(request) self.assertIsInstance(cl.paginator, CustomPaginator) def test_distinct_for_m2m_in_list_filter(self): """ Regression test for #13902: When using a ManyToMany in list_filter, results shouldn't appear more than once. Basic ManyToMany. """ blues = Genre.objects.create(name='Blues') band = Band.objects.create(name='B.B. King Review', nr_of_members=11) band.genres.add(blues) band.genres.add(blues) m = BandAdmin(Band, admin.site) request = self.factory.get('/band/', data={'genres': blues.pk}) cl = ChangeList(request, Band, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) cl.get_results(request) # There's only one Group instance self.assertEqual(cl.result_count, 1) def test_distinct_for_through_m2m_in_list_filter(self): """ Regression test for #13902: When using a ManyToMany in list_filter, results shouldn't appear more than once. With an intermediate model. 
""" lead = Musician.objects.create(name='Vox') band = Group.objects.create(name='The Hype') Membership.objects.create(group=band, music=lead, role='lead voice') Membership.objects.create(group=band, music=lead, role='bass player') m = GroupAdmin(Group, admin.site) request = self.factory.get('/group/', data={'members': lead.pk}) cl = ChangeList(request, Group, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) cl.get_results(request) # There's only one Group instance self.assertEqual(cl.result_count, 1) def test_distinct_for_inherited_m2m_in_list_filter(self): """ Regression test for #13902: When using a ManyToMany in list_filter, results shouldn't appear more than once. Model managed in the admin inherits from the one that defins the relationship. """ lead = Musician.objects.create(name='John') four = Quartet.objects.create(name='The Beatles') Membership.objects.create(group=four, music=lead, role='lead voice') Membership.objects.create(group=four, music=lead, role='guitar player') m = QuartetAdmin(Quartet, admin.site) request = self.factory.get('/quartet/', data={'members': lead.pk}) cl = ChangeList(request, Quartet, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) cl.get_results(request) # There's only one Quartet instance self.assertEqual(cl.result_count, 1) def test_distinct_for_m2m_to_inherited_in_list_filter(self): """ Regression test for #13902: When using a ManyToMany in list_filter, results shouldn't appear more than once. Target of the relationship inherits from another. 
""" lead = ChordsMusician.objects.create(name='Player A') three = ChordsBand.objects.create(name='The Chords Trio') Invitation.objects.create(band=three, player=lead, instrument='guitar') Invitation.objects.create(band=three, player=lead, instrument='bass') m = ChordsBandAdmin(ChordsBand, admin.site) request = self.factory.get('/chordsband/', data={'members': lead.pk}) cl = ChangeList(request, ChordsBand, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) cl.get_results(request) # There's only one ChordsBand instance self.assertEqual(cl.result_count, 1) def test_distinct_for_non_unique_related_object_in_list_filter(self): """ Regressions tests for #15819: If a field listed in list_filters is a non-unique related object, distinct() must be called. """ parent = Parent.objects.create(name='Mary') # Two children with the same name Child.objects.create(parent=parent, name='Daniel') Child.objects.create(parent=parent, name='Daniel') m = ParentAdmin(Parent, admin.site) request = self.factory.get('/parent/', data={'child__name': 'Daniel'}) cl = ChangeList(request, Parent, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) # Make sure distinct() was called self.assertEqual(cl.queryset.count(), 1) def test_distinct_for_non_unique_related_object_in_search_fields(self): """ Regressions tests for #15819: If a field listed in search_fields is a non-unique related object, distinct() must be called. 
""" parent = Parent.objects.create(name='Mary') Child.objects.create(parent=parent, name='Danielle') Child.objects.create(parent=parent, name='Daniel') m = ParentAdmin(Parent, admin.site) request = self.factory.get('/parent/', data={SEARCH_VAR: 'daniel'}) cl = ChangeList(request, Parent, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) # Make sure distinct() was called self.assertEqual(cl.queryset.count(), 1) def test_pagination(self): """ Regression tests for #12893: Pagination in admins changelist doesn't use queryset set by modeladmin. """ parent = Parent.objects.create(name='anything') for i in range(30): Child.objects.create(name='name %s' % i, parent=parent) Child.objects.create(name='filtered %s' % i, parent=parent) request = self.factory.get('/child/') # Test default queryset m = ChildAdmin(Child, admin.site) cl = ChangeList(request, Child, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) self.assertEqual(cl.queryset.count(), 60) self.assertEqual(cl.paginator.count, 60) self.assertEqual(list(cl.paginator.page_range), [1, 2, 3, 4, 5, 6]) # Test custom queryset m = FilteredChildAdmin(Child, admin.site) cl = ChangeList(request, Child, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) self.assertEqual(cl.queryset.count(), 30) self.assertEqual(cl.paginator.count, 30) self.assertEqual(list(cl.paginator.page_range), [1, 2, 3]) def test_computed_list_display_localization(self): """ Regression test for #13196: output of functions should be localized in the changelist. 
""" User.objects.create_superuser( username='super', email='super@localhost', password='secret') self.client.login(username='super', password='secret') event = Event.objects.create(date=datetime.date.today()) response = self.client.get(reverse('admin:admin_changelist_event_changelist')) self.assertContains(response, formats.localize(event.date)) self.assertNotContains(response, six.text_type(event.date)) def test_dynamic_list_display(self): """ Regression tests for #14206: dynamic list_display support. """ parent = Parent.objects.create(name='parent') for i in range(10): Child.objects.create(name='child %s' % i, parent=parent) user_noparents = self._create_superuser('noparents') user_parents = self._create_superuser('parents') # Test with user 'noparents' m = custom_site._registry[Child] request = self._mocked_authenticated_request('/child/', user_noparents) response = m.changelist_view(request) self.assertNotContains(response, 'Parent object') list_display = m.get_list_display(request) list_display_links = m.get_list_display_links(request, list_display) self.assertEqual(list_display, ['name', 'age']) self.assertEqual(list_display_links, ['name']) # Test with user 'parents' m = DynamicListDisplayChildAdmin(Child, admin.site) request = self._mocked_authenticated_request('/child/', user_parents) response = m.changelist_view(request) self.assertContains(response, 'Parent object') custom_site.unregister(Child) list_display = m.get_list_display(request) list_display_links = m.get_list_display_links(request, list_display) self.assertEqual(list_display, ('parent', 'name', 'age')) self.assertEqual(list_display_links, ['parent']) # Test default implementation custom_site.register(Child, ChildAdmin) m = custom_site._registry[Child] request = self._mocked_authenticated_request('/child/', user_noparents) response = m.changelist_view(request) self.assertContains(response, 'Parent object') def test_show_all(self): parent = Parent.objects.create(name='anything') for i in 
range(30): Child.objects.create(name='name %s' % i, parent=parent) Child.objects.create(name='filtered %s' % i, parent=parent) # Add "show all" parameter to request request = self.factory.get('/child/', data={ALL_VAR: ''}) # Test valid "show all" request (number of total objects is under max) m = ChildAdmin(Child, admin.site) # 200 is the max we'll pass to ChangeList cl = ChangeList(request, Child, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, 200, m.list_editable, m) cl.get_results(request) self.assertEqual(len(cl.result_list), 60) # Test invalid "show all" request (number of total objects over max) # falls back to paginated pages m = ChildAdmin(Child, admin.site) # 30 is the max we'll pass to ChangeList for this test cl = ChangeList(request, Child, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, 30, m.list_editable, m) cl.get_results(request) self.assertEqual(len(cl.result_list), 10) def test_dynamic_list_display_links(self): """ Regression tests for #16257: dynamic list_display_links support. 
""" parent = Parent.objects.create(name='parent') for i in range(1, 10): Child.objects.create(id=i, name='child %s' % i, parent=parent, age=i) m = DynamicListDisplayLinksChildAdmin(Child, admin.site) superuser = self._create_superuser('superuser') request = self._mocked_authenticated_request('/child/', superuser) response = m.changelist_view(request) for i in range(1, 10): link = reverse('admin:admin_changelist_child_change', args=(i,)) self.assertContains(response, '<a href="%s">%s</a>' % (link, i)) list_display = m.get_list_display(request) list_display_links = m.get_list_display_links(request, list_display) self.assertEqual(list_display, ('parent', 'name', 'age')) self.assertEqual(list_display_links, ['age']) def test_no_list_display_links(self): """#15185 -- Allow no links from the 'change list' view grid.""" p = Parent.objects.create(name='parent') m = NoListDisplayLinksParentAdmin(Parent, admin.site) superuser = self._create_superuser('superuser') request = self._mocked_authenticated_request('/parent/', superuser) response = m.changelist_view(request) link = reverse('admin:admin_changelist_parent_change', args=(p.pk,)) self.assertNotContains(response, '<a href="%s">' % link) def test_tuple_list_display(self): """ Regression test for #17128 (ChangeList failing under Python 2.5 after r16319) """ swallow = Swallow.objects.create( origin='Africa', load='12.34', speed='22.2') model_admin = SwallowAdmin(Swallow, admin.site) superuser = self._create_superuser('superuser') request = self._mocked_authenticated_request('/swallow/', superuser) response = model_admin.changelist_view(request) # just want to ensure it doesn't blow up during rendering self.assertContains(response, six.text_type(swallow.origin)) self.assertContains(response, six.text_type(swallow.load)) self.assertContains(response, six.text_type(swallow.speed)) def test_deterministic_order_for_unordered_model(self): """ Ensure that the primary key is systematically used in the ordering of the changelist's 
results to guarantee a deterministic order, even when the Model doesn't have any default ordering defined. Refs #17198. """ superuser = self._create_superuser('superuser') for counter in range(1, 51): UnorderedObject.objects.create(id=counter, bool=True) class UnorderedObjectAdmin(admin.ModelAdmin): list_per_page = 10 def check_results_order(ascending=False): admin.site.register(UnorderedObject, UnorderedObjectAdmin) model_admin = UnorderedObjectAdmin(UnorderedObject, admin.site) counter = 0 if ascending else 51 for page in range(0, 5): request = self._mocked_authenticated_request('/unorderedobject/?p=%s' % page, superuser) response = model_admin.changelist_view(request) for result in response.context_data['cl'].result_list: counter += 1 if ascending else -1 self.assertEqual(result.id, counter) admin.site.unregister(UnorderedObject) # When no order is defined at all, everything is ordered by '-pk'. check_results_order() # When an order field is defined but multiple records have the same # value for that field, make sure everything gets ordered by -pk as well. UnorderedObjectAdmin.ordering = ['bool'] check_results_order() # When order fields are defined, including the pk itself, use them. UnorderedObjectAdmin.ordering = ['bool', '-pk'] check_results_order() UnorderedObjectAdmin.ordering = ['bool', 'pk'] check_results_order(ascending=True) UnorderedObjectAdmin.ordering = ['-id', 'bool'] check_results_order() UnorderedObjectAdmin.ordering = ['id', 'bool'] check_results_order(ascending=True) def test_deterministic_order_for_model_ordered_by_its_manager(self): """ Ensure that the primary key is systematically used in the ordering of the changelist's results to guarantee a deterministic order, even when the Model has a manager that defines a default ordering. Refs #17198. 
""" superuser = self._create_superuser('superuser') for counter in range(1, 51): OrderedObject.objects.create(id=counter, bool=True, number=counter) class OrderedObjectAdmin(admin.ModelAdmin): list_per_page = 10 def check_results_order(ascending=False): admin.site.register(OrderedObject, OrderedObjectAdmin) model_admin = OrderedObjectAdmin(OrderedObject, admin.site) counter = 0 if ascending else 51 for page in range(0, 5): request = self._mocked_authenticated_request('/orderedobject/?p=%s' % page, superuser) response = model_admin.changelist_view(request) for result in response.context_data['cl'].result_list: counter += 1 if ascending else -1 self.assertEqual(result.id, counter) admin.site.unregister(OrderedObject) # When no order is defined at all, use the model's default ordering (i.e. 'number') check_results_order(ascending=True) # When an order field is defined but multiple records have the same # value for that field, make sure everything gets ordered by -pk as well. OrderedObjectAdmin.ordering = ['bool'] check_results_order() # When order fields are defined, including the pk itself, use them. OrderedObjectAdmin.ordering = ['bool', '-pk'] check_results_order() OrderedObjectAdmin.ordering = ['bool', 'pk'] check_results_order(ascending=True) OrderedObjectAdmin.ordering = ['-id', 'bool'] check_results_order() OrderedObjectAdmin.ordering = ['id', 'bool'] check_results_order(ascending=True) def test_dynamic_list_filter(self): """ Regression tests for ticket #17646: dynamic list_filter support. 
""" parent = Parent.objects.create(name='parent') for i in range(10): Child.objects.create(name='child %s' % i, parent=parent) user_noparents = self._create_superuser('noparents') user_parents = self._create_superuser('parents') # Test with user 'noparents' m = DynamicListFilterChildAdmin(Child, admin.site) request = self._mocked_authenticated_request('/child/', user_noparents) response = m.changelist_view(request) self.assertEqual(response.context_data['cl'].list_filter, ['name', 'age']) # Test with user 'parents' m = DynamicListFilterChildAdmin(Child, admin.site) request = self._mocked_authenticated_request('/child/', user_parents) response = m.changelist_view(request) self.assertEqual(response.context_data['cl'].list_filter, ('parent', 'name', 'age')) def test_dynamic_search_fields(self): child = self._create_superuser('child') m = DynamicSearchFieldsChildAdmin(Child, admin.site) request = self._mocked_authenticated_request('/child/', child) response = m.changelist_view(request) self.assertEqual(response.context_data['cl'].search_fields, ('name', 'age')) def test_pagination_page_range(self): """ Regression tests for ticket #15653: ensure the number of pages generated for changelist views are correct. 
""" # instantiating and setting up ChangeList object m = GroupAdmin(Group, admin.site) request = self.factory.get('/group/') cl = ChangeList(request, Group, m.list_display, m.list_display_links, m.list_filter, m.date_hierarchy, m.search_fields, m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m) per_page = cl.list_per_page = 10 for page_num, objects_count, expected_page_range in [ (0, per_page, []), (0, per_page * 2, list(range(2))), (5, per_page * 11, list(range(11))), (5, per_page * 12, [0, 1, 2, 3, 4, 5, 6, 7, 8, '.', 10, 11]), (6, per_page * 12, [0, 1, '.', 3, 4, 5, 6, 7, 8, 9, 10, 11]), (6, per_page * 13, [0, 1, '.', 3, 4, 5, 6, 7, 8, 9, '.', 11, 12]), ]: # assuming we have exactly `objects_count` objects Group.objects.all().delete() for i in range(objects_count): Group.objects.create(name='test band') # setting page number and calculating page range cl.page_num = page_num cl.get_results(request) real_page_range = pagination(cl)['page_range'] self.assertListEqual( expected_page_range, list(real_page_range), ) class AdminLogNodeTestCase(TestCase): def test_get_admin_log_templatetag_custom_user(self): """ Regression test for ticket #20088: admin log depends on User model having id field as primary key. The old implementation raised an AttributeError when trying to use the id field. """ context = Context({'user': CustomIdUser()}) template_string = '{% load log %}{% get_admin_log 10 as admin_log for_user user %}' template = Template(template_string) # Rendering should be u'' since this templatetag just logs, # it doesn't render any string. self.assertEqual(template.render(context), '') def test_get_admin_log_templatetag_no_user(self): """ The {% get_admin_log %} tag should work without specifying a user. 
""" user = User(username='jondoe', password='secret', email='[email protected]') user.save() ct = ContentType.objects.get_for_model(User) LogEntry.objects.log_action(user.pk, ct.pk, user.pk, repr(user), 1) t = Template( '{% load log %}' '{% get_admin_log 100 as admin_log %}' '{% for entry in admin_log %}' '{{ entry|safe }}' '{% endfor %}' ) self.assertEqual(t.render(Context({})), 'Added "<User: jondoe>".') @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'], ROOT_URLCONF="admin_changelist.urls") class SeleniumFirefoxTests(AdminSeleniumWebDriverTestCase): available_apps = ['admin_changelist'] + AdminSeleniumWebDriverTestCase.available_apps webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver' def setUp(self): # password = "secret" User.objects.create( pk=100, username='super', first_name='Super', last_name='User', email='[email protected]', password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', is_active=True, is_superuser=True, is_staff=True, last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10) ) def test_add_row_selection(self): """ Ensure that the status line for selected rows gets updated correcly (#22038) """ self.admin_login(username='super', password='secret') self.selenium.get('%s%s' % (self.live_server_url, reverse('admin:auth_user_changelist'))) form_id = '#changelist-form' # Test amount of rows in the Changelist rows = self.selenium.find_elements_by_css_selector( '%s #result_list tbody tr' % form_id) self.assertEqual(len(rows), 1) # Test current selection selection_indicator = self.selenium.find_element_by_css_selector( '%s .action-counter' % form_id) self.assertEqual(selection_indicator.text, "0 of 1 selected") # Select a row and check again row_selector = self.selenium.find_element_by_css_selector( '%s #result_list tbody tr:first-child .action-select' % form_id) row_selector.click() self.assertEqual(selection_indicator.text, "1 of 1 
selected") class SeleniumChromeTests(SeleniumFirefoxTests): webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver' class SeleniumIETests(SeleniumFirefoxTests): webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
bsd-3-clause
sv-dev1/odoo
addons/purchase/wizard/purchase_order_group.py
376
3379
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import time from openerp.osv import fields, osv from openerp.tools.translate import _ class purchase_order_group(osv.osv_memory): _name = "purchase.order.group" _description = "Purchase Order Merge" def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False): """ Changes the view dynamically @param self: The object pointer. @param cr: A database cursor @param uid: ID of the user currently logged in @param context: A standard dictionary @return: New arch of view. """ if context is None: context={} res = super(purchase_order_group, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar,submenu=False) if context.get('active_model','') == 'purchase.order' and len(context['active_ids']) < 2: raise osv.except_osv(_('Warning!'), _('Please select multiple order to merge in the list view.')) return res def merge_orders(self, cr, uid, ids, context=None): """ To merge similar type of purchase orders. @param self: The object pointer. 
@param cr: A database cursor @param uid: ID of the user currently logged in @param ids: the ID or list of IDs @param context: A standard dictionary @return: purchase order view """ order_obj = self.pool.get('purchase.order') proc_obj = self.pool.get('procurement.order') mod_obj =self.pool.get('ir.model.data') if context is None: context = {} result = mod_obj._get_id(cr, uid, 'purchase', 'view_purchase_order_filter') id = mod_obj.read(cr, uid, result, ['res_id']) allorders = order_obj.do_merge(cr, uid, context.get('active_ids',[]), context) return { 'domain': "[('id','in', [" + ','.join(map(str, allorders.keys())) + "])]", 'name': _('Purchase Orders'), 'view_type': 'form', 'view_mode': 'tree,form', 'res_model': 'purchase.order', 'view_id': False, 'type': 'ir.actions.act_window', 'search_view_id': id['res_id'] } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
elventear/ansible
lib/ansible/modules/cloud/google/gce.py
16
26956
#!/usr/bin/python # Copyright 2013 Google Inc. # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. ANSIBLE_METADATA = {'status': ['preview'], 'supported_by': 'community', 'version': '1.0'} DOCUMENTATION = ''' --- module: gce version_added: "1.4" short_description: create or terminate GCE instances description: - Creates or terminates Google Compute Engine (GCE) instances. See U(https://cloud.google.com/compute) for an overview. Full install/configuration instructions for the gce* modules can be found in the comments of ansible/test/gce_tests.py. 
options: image: description: - image string to use for the instance (default will follow latest stable debian image) required: false default: "debian-8" instance_names: description: - a comma-separated list of instance names to create or destroy required: false default: null machine_type: description: - machine type to use for the instance, use 'n1-standard-1' by default required: false default: "n1-standard-1" metadata: description: - a hash/dictionary of custom data for the instance; '{"key":"value", ...}' required: false default: null service_account_email: version_added: "1.5.1" description: - service account email required: false default: null service_account_permissions: version_added: "2.0" description: - service account permissions (see U(https://cloud.google.com/sdk/gcloud/reference/compute/instances/create), --scopes section for detailed information) required: false default: null choices: [ "bigquery", "cloud-platform", "compute-ro", "compute-rw", "useraccounts-ro", "useraccounts-rw", "datastore", "logging-write", "monitoring", "sql-admin", "storage-full", "storage-ro", "storage-rw", "taskqueue", "userinfo-email" ] pem_file: version_added: "1.5.1" description: - path to the pem file associated with the service account email This option is deprecated. Use 'credentials_file'. 
required: false default: null credentials_file: version_added: "2.1.0" description: - path to the JSON file associated with the service account email default: null required: false project_id: version_added: "1.5.1" description: - your GCE project ID required: false default: null name: description: - either a name of a single instance or when used with 'num_instances', the base name of a cluster of nodes required: false aliases: ['base_name'] num_instances: description: - can be used with 'name', specifies the number of nodes to provision using 'name' as a base name required: false version_added: "2.3" network: description: - name of the network, 'default' will be used if not specified required: false default: "default" subnetwork: description: - name of the subnetwork in which the instance should be created required: false default: null version_added: "2.2" persistent_boot_disk: description: - if set, create the instance with a persistent boot disk required: false default: "false" disks: description: - a list of persistent disks to attach to the instance; a string value gives the name of the disk; alternatively, a dictionary value can define 'name' and 'mode' ('READ_ONLY' or 'READ_WRITE'). The first entry will be the boot disk (which must be READ_WRITE). required: false default: null version_added: "1.7" state: description: - desired state of the resource required: false default: "present" choices: ["active", "present", "absent", "deleted", "started", "stopped", "terminated"] tags: description: - a comma-separated list of tags to associate with the instance required: false default: null zone: description: - the GCE zone to use required: true default: "us-central1-a" ip_forward: version_added: "1.9" description: - set to true if the instance can forward ip packets (useful for gateways) required: false default: "false" external_ip: version_added: "1.9" description: - type of external ip, ephemeral by default; alternatively, a fixed gce ip or ip name can be given. 
Specify 'none' if no external ip is desired. required: false default: "ephemeral" disk_auto_delete: version_added: "1.9" description: - if set boot disk will be removed after instance destruction required: false default: "true" preemptible: version_added: "2.1" description: - if set to true, instances will be preemptible and time-limited. (requires libcloud >= 0.20.0) required: false default: "false" disk_size: description: - The size of the boot disk created for this instance (in GB) required: false default: 10 version_added: "2.3" requirements: - "python >= 2.6" - "apache-libcloud >= 0.13.3, >= 0.17.0 if using JSON credentials, >= 0.20.0 if using preemptible option" notes: - Either I(instance_names) or I(name) is required. - JSON credentials strongly preferred. author: "Eric Johnson (@erjohnso) <[email protected]>, Tom Melendez (@supertom) <[email protected]>" ''' EXAMPLES = ''' # Basic provisioning example. Create a single Debian 8 instance in the # us-central1-a Zone of the n1-standard-1 machine type. # Create multiple instances by specifying multiple names, seperated by # commas in the instance_names field # (e.g. my-test-instance1,my-test-instance2) gce: instance_names: my-test-instance1 zone: us-central1-a machine_type: n1-standard-1 image: debian-8 state: present service_account_email: "[email protected]" credentials_file: "/path/to/your-key.json" project_id: "your-project-name" disk_size: 32 # Create a single Debian 8 instance in the us-central1-a Zone # Use existing disks, custom network/subnetwork, set service account permissions # add tags and metadata. 
gce: instance_names: my-test-instance zone: us-central1-a machine_type: n1-standard-1 state: present metadata: '{"db":"postgres", "group":"qa", "id":500}' tags: - http-server - my-other-tag disks: - name: disk-2 mode: READ_WRITE - name: disk-3 mode: READ_ONLY disk_auto_delete: false network: foobar-network subnetwork: foobar-subnetwork-1 preemptible: true ip_forward: true service_account_permissions: - storage-full - taskqueue - bigquery service_account_email: "[email protected]" credentials_file: "/path/to/your-key.json" project_id: "your-project-name" --- # Example Playbook - name: Compute Engine Instance Examples hosts: localhost vars: service_account_email: "[email protected]" credentials_file: "/path/to/your-key.json" project_id: "your-project-name" tasks: - name: create multiple instances # Basic provisioning example. Create multiple Debian 8 instances in the # us-central1-a Zone of n1-standard-1 machine type. gce: instance_names: test1,test2,test3 zone: us-central1-a machine_type: n1-standard-1 image: debian-8 state: present service_account_email: "{{ service_account_email }}" credentials_file: "{{ credentials_file }}" project_id: "{{ project_id }}" metadata : '{ "startup-script" : "apt-get update" }' register: gce - name: Save host data add_host: hostname: "{{ item.public_ip }}" groupname: gce_instances_ips with_items: "{{ gce.instance_data }}" - name: Wait for SSH for instances wait_for: delay: 1 host: "{{ item.public_ip }}" port: 22 state: started timeout: 30 with_items: "{{ gce.instance_data }}" - name: Configure Hosts hosts: gce_instances_ips become: yes become_method: sudo roles: - my-role-one - my-role-two tags: - config - name: delete test-instances # Basic termination of instance. 
gce: service_account_email: "{{ service_account_email }}" credentials_file: "{{ credentials_file }}" project_id: "{{ project_id }}" instance_names: "{{ gce.instance_names }}" zone: us-central1-a state: absent tags: - delete ''' import socket try: import libcloud from libcloud.compute.types import Provider from libcloud.compute.providers import get_driver from libcloud.common.google import GoogleBaseError, QuotaExceededError, \ ResourceExistsError, ResourceInUseError, ResourceNotFoundError from libcloud.compute.drivers.gce import GCEAddress _ = Provider.GCE HAS_LIBCLOUD = True except ImportError: HAS_LIBCLOUD = False try: from ast import literal_eval HAS_PYTHON26 = True except ImportError: HAS_PYTHON26 = False def get_instance_info(inst): """Retrieves instance information from an instance object and returns it as a dictionary. """ metadata = {} if 'metadata' in inst.extra and 'items' in inst.extra['metadata']: for md in inst.extra['metadata']['items']: metadata[md['key']] = md['value'] try: netname = inst.extra['networkInterfaces'][0]['network'].split('/')[-1] except: netname = None try: subnetname = inst.extra['networkInterfaces'][0]['subnetwork'].split('/')[-1] except: subnetname = None if 'disks' in inst.extra: disk_names = [disk_info['source'].split('/')[-1] for disk_info in sorted(inst.extra['disks'], key=lambda disk_info: disk_info['index'])] else: disk_names = [] if len(inst.public_ips) == 0: public_ip = None else: public_ip = inst.public_ips[0] return({ 'image': inst.image is not None and inst.image.split('/')[-1] or None, 'disks': disk_names, 'machine_type': inst.size, 'metadata': metadata, 'name': inst.name, 'network': netname, 'subnetwork': subnetname, 'private_ip': inst.private_ips[0], 'public_ip': public_ip, 'status': ('status' in inst.extra) and inst.extra['status'] or None, 'tags': ('tags' in inst.extra) and inst.extra['tags'] or [], 'zone': ('zone' in inst.extra) and inst.extra['zone'].name or None, }) def create_instances(module, gce, 
instance_names, number): """Creates new instances. Attributes other than instance_names are picked up from 'module' module : AnsibleModule object gce: authenticated GCE libcloud driver instance_names: python list of instance names to create Returns: A list of dictionaries with instance information about the instances that were launched. """ image = module.params.get('image') machine_type = module.params.get('machine_type') metadata = module.params.get('metadata') network = module.params.get('network') subnetwork = module.params.get('subnetwork') persistent_boot_disk = module.params.get('persistent_boot_disk') disks = module.params.get('disks') state = module.params.get('state') tags = module.params.get('tags') zone = module.params.get('zone') ip_forward = module.params.get('ip_forward') external_ip = module.params.get('external_ip') disk_auto_delete = module.params.get('disk_auto_delete') preemptible = module.params.get('preemptible') disk_size = module.params.get('disk_size') service_account_permissions = module.params.get('service_account_permissions') service_account_email = module.params.get('service_account_email') if external_ip == "none": instance_external_ip = None elif external_ip != "ephemeral": instance_external_ip = external_ip try: # check if instance_external_ip is an ip or a name try: socket.inet_aton(instance_external_ip) instance_external_ip = GCEAddress(id='unknown', name='unknown', address=instance_external_ip, region='unknown', driver=gce) except socket.error: instance_external_ip = gce.ex_get_address(instance_external_ip) except GoogleBaseError as e: module.fail_json(msg='Unexpected error attempting to get a static ip %s, error: %s' % (external_ip, e.value)) else: instance_external_ip = external_ip new_instances = [] changed = False lc_disks = [] disk_modes = [] for i, disk in enumerate(disks or []): if isinstance(disk, dict): lc_disks.append(gce.ex_get_volume(disk['name'])) disk_modes.append(disk['mode']) else: 
lc_disks.append(gce.ex_get_volume(disk)) # boot disk is implicitly READ_WRITE disk_modes.append('READ_ONLY' if i > 0 else 'READ_WRITE') lc_network = gce.ex_get_network(network) lc_machine_type = gce.ex_get_size(machine_type) lc_zone = gce.ex_get_zone(zone) # Try to convert the user's metadata value into the format expected # by GCE. First try to ensure user has proper quoting of a # dictionary-like syntax using 'literal_eval', then convert the python # dict into a python list of 'key' / 'value' dicts. Should end up # with: # [ {'key': key1, 'value': value1}, {'key': key2, 'value': value2}, ...] if metadata: if isinstance(metadata, dict): md = metadata else: try: md = literal_eval(str(metadata)) if not isinstance(md, dict): raise ValueError('metadata must be a dict') except ValueError as e: module.fail_json(msg='bad metadata: %s' % str(e)) except SyntaxError as e: module.fail_json(msg='bad metadata syntax') if hasattr(libcloud, '__version__') and libcloud.__version__ < '0.15': items = [] for k, v in md.items(): items.append({"key": k, "value": v}) metadata = {'items': items} else: metadata = md lc_image = LazyDiskImage(module, gce, image, lc_disks) ex_sa_perms = [] bad_perms = [] if service_account_permissions: for perm in service_account_permissions: if perm not in gce.SA_SCOPES_MAP: bad_perms.append(perm) if len(bad_perms) > 0: module.fail_json(msg='bad permissions: %s' % str(bad_perms)) ex_sa_perms.append({'email': "default"}) ex_sa_perms[0]['scopes'] = service_account_permissions # These variables all have default values but check just in case if not lc_network or not lc_machine_type or not lc_zone: module.fail_json(msg='Missing required create instance variable', changed=False) gce_args = dict( location=lc_zone, ex_network=network, ex_tags=tags, ex_metadata=metadata, ex_can_ip_forward=ip_forward, external_ip=instance_external_ip, ex_disk_auto_delete=disk_auto_delete, ex_service_accounts=ex_sa_perms ) if preemptible is not None: gce_args['ex_preemptible'] = 
preemptible if subnetwork is not None: gce_args['ex_subnetwork'] = subnetwork if isinstance(instance_names, str) and not number: instance_names = [instance_names] if isinstance(instance_names, str) and number: instance_responses = gce.ex_create_multiple_nodes(instance_names, lc_machine_type, lc_image(), number, **gce_args) for resp in instance_responses: n = resp if isinstance(resp, libcloud.compute.drivers.gce.GCEFailedNode): try: n = gce.ex_get_node(n.name, lc_zone) except ResourceNotFoundError: pass else: # Assure that at least one node has been created to set changed=True changed = True new_instances.append(n) else: for instance in instance_names: pd = None if lc_disks: pd = lc_disks[0] elif persistent_boot_disk: try: pd = gce.ex_get_volume("%s" % instance, lc_zone) except ResourceNotFoundError: pd = gce.create_volume(disk_size, "%s" % instance, image=lc_image()) gce_args['ex_boot_disk'] = pd inst = None try: inst = gce.ex_get_node(instance, lc_zone) except ResourceNotFoundError: inst = gce.create_node( instance, lc_machine_type, lc_image(), **gce_args ) changed = True except GoogleBaseError as e: module.fail_json(msg='Unexpected error attempting to create ' + 'instance %s, error: %s' % (instance, e.value)) if inst: new_instances.append(inst) for inst in new_instances: for i, lc_disk in enumerate(lc_disks): # Check whether the disk is already attached if (len(inst.extra['disks']) > i): attached_disk = inst.extra['disks'][i] if attached_disk['source'] != lc_disk.extra['selfLink']: module.fail_json( msg=("Disk at index %d does not match: requested=%s found=%s" % ( i, lc_disk.extra['selfLink'], attached_disk['source']))) elif attached_disk['mode'] != disk_modes[i]: module.fail_json( msg=("Disk at index %d is in the wrong mode: requested=%s found=%s" % ( i, disk_modes[i], attached_disk['mode']))) else: continue gce.attach_volume(inst, lc_disk, ex_mode=disk_modes[i]) # Work around libcloud bug: attached volumes don't get added # to the instance metadata. 
get_instance_info() only cares about # source and index. if len(inst.extra['disks']) != i+1: inst.extra['disks'].append( {'source': lc_disk.extra['selfLink'], 'index': i}) instance_names = [] instance_json_data = [] for inst in new_instances: d = get_instance_info(inst) instance_names.append(d['name']) instance_json_data.append(d) return (changed, instance_json_data, instance_names) def change_instance_state(module, gce, instance_names, number, zone_name, state): """Changes the state of a list of instances. For example, change from started to stopped, or started to absent. module: Ansible module object gce: authenticated GCE connection object instance_names: a list of instance names to terminate zone_name: the zone where the instances reside prior to termination state: 'state' parameter passed into module as argument Returns a dictionary of instance names that were changed. """ changed = False nodes = [] state_instance_names = [] if isinstance(instance_names, str) and number: node_names = ['%s-%03d' % (instance_names, i) for i in range(number)] elif isinstance(instance_names, str) and not number: node_names = [instance_names] else: node_names = instance_names for name in node_names: inst = None try: inst = gce.ex_get_node(name, zone_name) except ResourceNotFoundError: state_instance_names.append(name) except Exception as e: module.fail_json(msg=unexpected_error_msg(e), changed=False) else: nodes.append(inst) state_instance_names.append(name) if state in ['absent', 'deleted'] and number: changed_nodes = gce.ex_destroy_multiple_nodes(nodes) or [False] changed = reduce(lambda x, y: x or y, changed_nodes) else: for node in nodes: if state in ['absent', 'deleted']: gce.destroy_node(node) changed = True elif state == 'started' and \ node.state == libcloud.compute.types.NodeState.STOPPED: gce.ex_start_node(node) changed = True elif state in ['stopped', 'terminated'] and \ node.state == libcloud.compute.types.NodeState.RUNNING: gce.ex_stop_node(node) changed = True return 
(changed, state_instance_names) def main(): module = AnsibleModule( argument_spec = dict( image = dict(default='debian-8'), instance_names = dict(), machine_type = dict(default='n1-standard-1'), metadata = dict(), name = dict(aliases=['base_name']), num_instances = dict(type='int'), network = dict(default='default'), subnetwork = dict(), persistent_boot_disk = dict(type='bool', default=False), disks = dict(type='list'), state = dict(choices=['active', 'present', 'absent', 'deleted', 'started', 'stopped', 'terminated'], default='present'), tags = dict(type='list'), zone = dict(default='us-central1-a'), service_account_email = dict(), service_account_permissions = dict(type='list'), pem_file = dict(type='path'), credentials_file = dict(type='path'), project_id = dict(), ip_forward = dict(type='bool', default=False), external_ip=dict(default='ephemeral'), disk_auto_delete = dict(type='bool', default=True), disk_size = dict(type='int', default=10), preemptible = dict(type='bool', default=None), ), mutually_exclusive=[('instance_names', 'name')] ) if not HAS_PYTHON26: module.fail_json(msg="GCE module requires python's 'ast' module, python v2.6+") if not HAS_LIBCLOUD: module.fail_json(msg='libcloud with GCE support (0.17.0+) required for this module') gce = gce_connect(module) image = module.params.get('image') instance_names = module.params.get('instance_names') machine_type = module.params.get('machine_type') metadata = module.params.get('metadata') name = module.params.get('name') number = module.params.get('num_instances') network = module.params.get('network') subnetwork = module.params.get('subnetwork') persistent_boot_disk = module.params.get('persistent_boot_disk') state = module.params.get('state') tags = module.params.get('tags') zone = module.params.get('zone') ip_forward = module.params.get('ip_forward') preemptible = module.params.get('preemptible') changed = False inames = None if isinstance(instance_names, list): inames = instance_names elif 
isinstance(instance_names, str): inames = instance_names.split(',') if name: inames = name if not inames: module.fail_json(msg='Must specify a "name" or "instance_names"', changed=False) if not zone: module.fail_json(msg='Must specify a "zone"', changed=False) if preemptible is not None and hasattr(libcloud, '__version__') and libcloud.__version__ < '0.20': module.fail_json(msg="Apache Libcloud 0.20.0+ is required to use 'preemptible' option", changed=False) if subnetwork is not None and not hasattr(gce, 'ex_get_subnetwork'): module.fail_json(msg="Apache Libcloud 1.0.0+ is required to use 'subnetwork' option", changed=False) json_output = {'zone': zone} if state in ['absent', 'deleted', 'started', 'stopped', 'terminated']: json_output['state'] = state (changed, state_instance_names) = change_instance_state( module, gce, inames, number, zone, state) # based on what user specified, return the same variable, although # value could be different if an instance could not be destroyed if instance_names or name and number: json_output['instance_names'] = state_instance_names elif name: json_output['name'] = name elif state in ['active', 'present']: json_output['state'] = 'present' (changed, instance_data, instance_name_list) = create_instances( module, gce, inames, number) json_output['instance_data'] = instance_data if instance_names: json_output['instance_names'] = instance_name_list elif name: json_output['name'] = name json_output['changed'] = changed module.exit_json(**json_output) class LazyDiskImage: """ Object for lazy instantiation of disk image gce.ex_get_image is a very expensive call, so we want to avoid calling it as much as possible. 
""" def __init__(self, module, gce, name, has_pd): self.image = None self.was_called = False self.gce = gce self.name = name self.has_pd = has_pd self.module = module def __call__(self): if not self.was_called: self.was_called = True if not self.has_pd: self.image = self.gce.ex_get_image(self.name) if not self.image: self.module.fail_json(msg='image or disks missing for create instance', changed=False) return self.image # import module snippets from ansible.module_utils.basic import * from ansible.module_utils.gce import * if __name__ == '__main__': main()
gpl-3.0
tangentlabs/django-oscar-parachute
parachute/oscommerce/models.py
1
33421
# This is an auto-generated Django model module. # You'll have to do the following manually to clean this up: # * Rearrange models' order # * Make sure each model has one field with primary_key=True # Feel free to rename the models, but don't rename db_table values or field names. # # Also note: You'll have to insert the output of 'django-admin.py sqlcustom [appname]' # into your database. from django.db import models class AddressBook(models.Model): id = models.IntegerField(db_column='address_book_id', primary_key=True) customer = models.ForeignKey('Customer', db_column="customers_id", related_name='addresses') gender = models.CharField(db_column="entry_gender", max_length=3) company = models.CharField(db_column="entry_company", max_length=96, blank=True) firstname = models.CharField(db_column="entry_firstname", max_length=96) lastname = models.CharField(db_column="entry_lastname", max_length=96) street_address = models.CharField(db_column="entry_street_address", max_length=192) suburb = models.CharField(db_column="entry_suburb", max_length=96, blank=True) postcode = models.CharField(db_column="entry_postcode", max_length=30) city = models.CharField(db_column="entry_city", max_length=96) state = models.CharField(db_column="entry_state", max_length=96, blank=True) country = models.ForeignKey('Country', db_column="entry_country_id") zone = models.ForeignKey('Zone', db_column="entry_zone_id") def __unicode__(self): return "%s %s, %s, %s %s %s, %s" % ( self.firstname, self.lastname, self.street_address, self.city, self.state, self.postcode, self.country ) class Meta: managed=False db_table = u'address_book' class Zone(models.Model): """ States in different countries """ id = models.IntegerField(db_column="zone_id", primary_key=True) country = models.ForeignKey('Country', db_column="zone_country_id") code = models.CharField(db_column="zone_code", max_length=96) name = models.CharField(db_column="zone_name", max_length=96) class Meta: managed=False db_table = u'zones' 
class Customer(models.Model): id = models.IntegerField(db_column="customers_id", primary_key=True) gender = models.CharField(db_column="customers_gender", max_length=3) firstname = models.CharField(db_column="customers_firstname", max_length=96) lastname = models.CharField(db_column="customers_lastname", max_length=96) dob = models.DateTimeField(db_column="customers_dob", ) email = models.CharField(db_column="customers_email_address", max_length=288) telephone = models.CharField(db_column="customers_telephone", max_length=96) fax = models.CharField(db_column="customers_fax", max_length=96, blank=True) password = models.CharField(db_column="customers_password", max_length=120) newsletter = models.CharField(db_column="customers_newsletter", max_length=3, blank=True) view_category = models.IntegerField(db_column="customers_view_category", ) default_address_id = models.IntegerField( db_column="customers_default_address_id", null=True, blank=True, ) def __unicode__(self): return "%s, %s" % (self.lastname, self.firstname) class Meta: managed=False db_table = u'customers' class AddressFormat(models.Model): id = models.IntegerField(db_column="address_format_id", primary_key=True) format = models.CharField(db_column="address_format", max_length=384) summary = models.CharField(db_column="address_summary", max_length=144) def __unicode__(self): return self.format class Meta: managed=False db_table = u'address_format' class Country(models.Model): id = models.IntegerField(db_column="countries_id", primary_key=True) name = models.CharField(db_column="countries_name", max_length=192) iso_code_2 = models.CharField(db_column="countries_iso_code_2", max_length=6) iso_code_3 = models.CharField(db_column="countries_iso_code_3", max_length=9) address_format = models.ForeignKey('AddressFormat', db_column="address_format_id") def __unicode__(self): return "%s (%s)" % (self.name, self.iso_code_3) class Meta: managed=False db_table = u'countries' class CustomerInfo(models.Model): customer 
= models.OneToOneField( 'Customer', db_column="customers_info_id", primary_key=True, related_name='info' ) date_of_last_logon = models.DateTimeField( db_column="customers_info_date_of_last_logon", null=True, blank=True ) number_of_logons = models.IntegerField( db_column="customers_info_number_of_logons", null=True, blank=True ) date_account_created = models.DateTimeField( db_column="customers_info_date_account_created", null=True, blank=True ) date_account_last_modified = models.DateTimeField( db_column="customers_info_date_account_last_modified", null=True, blank=True ) global_product_notifications = models.IntegerField( db_column="global_product_notifications", null=True, blank=True ) def __unicode__(self): return "Info for %s, %s" % ( self.customer.lastname, self.customer.firstname ) class Meta: managed=False db_table = u'customers_info' class Order(models.Model): id = models.IntegerField(db_column="orders_id", primary_key=True) customer = models.ForeignKey('Customer', db_column="customers_id") customers_name = models.CharField(db_column="customers_name", max_length=192) customers_company = models.CharField(db_column="customers_company", max_length=96, blank=True) customers_street_address = models.CharField(max_length=192) customers_suburb = models.CharField(max_length=96, blank=True) customers_city = models.CharField(max_length=96) customers_postcode = models.CharField(max_length=30) customers_state = models.CharField(max_length=96, blank=True) customers_country = models.CharField(max_length=96) customers_telephone = models.CharField(max_length=96) customers_email_address = models.CharField(max_length=288) customers_address_format_id = models.IntegerField() delivery_name = models.CharField(max_length=192) delivery_company = models.CharField(max_length=96, blank=True) delivery_street_address = models.CharField(max_length=192) delivery_suburb = models.CharField(max_length=96, blank=True) delivery_city = models.CharField(max_length=96) delivery_postcode = 
models.CharField(max_length=30) delivery_state = models.CharField(max_length=96, blank=True) delivery_country = models.CharField(max_length=96) delivery_address_format_id = models.IntegerField() billing_name = models.CharField(max_length=192) billing_company = models.CharField(max_length=96, blank=True) billing_street_address = models.CharField(max_length=192) billing_suburb = models.CharField(max_length=96, blank=True) billing_city = models.CharField(max_length=96) billing_postcode = models.CharField(max_length=30) billing_state = models.CharField(max_length=96, blank=True) billing_country = models.CharField(max_length=96) billing_address_format_id = models.IntegerField() payment_method = models.CharField(max_length=96) cc_type = models.CharField(max_length=60, blank=True) cc_owner = models.CharField(max_length=192, blank=True) cc_number = models.CharField(max_length=96, blank=True) cc_expires = models.CharField(max_length=12, blank=True) last_modified = models.DateTimeField(null=True, blank=True) date_purchased = models.DateTimeField(null=True, blank=True) status = models.ForeignKey('OrderStatus', db_column="orders_status") date_finished = models.DateTimeField( db_column="orders_date_finished", null=True, blank=True ) currency = models.CharField(max_length=9, blank=True) currency_value = models.DecimalField(null=True, max_digits=16, decimal_places=6, blank=True) def __unicode__(self): return 'Order #%d' % self.id class Meta: managed=False db_table = u'orders' class Language(models.Model): id = models.IntegerField(db_column="languages_id", primary_key=True) name = models.CharField(max_length=96) code = models.CharField(max_length=6) image = models.CharField(max_length=192, blank=True) directory = models.CharField(max_length=96, blank=True) sort_order = models.IntegerField(null=True, blank=True) class Meta: managed=False db_table = u'languages' class OrderStatus(models.Model): id = models.IntegerField(db_column='orders_status_id', primary_key=True) language = 
models.ForeignKey('Language', db_column="language_id") name = models.CharField(db_column="orders_status_name", max_length=96) def __unicode__(self): return self.name class Meta: managed=False db_table = u'orders_status' ################################################################################ # Unprocessed models following ################################################################################ class CustomersBasket(models.Model): customers_basket_id = models.IntegerField(primary_key=True) customers_id = models.IntegerField() products_id = models.TextField() customers_basket_quantity = models.IntegerField() final_price = models.DecimalField(null=True, max_digits=17, decimal_places=4, blank=True) customers_basket_date_added = models.CharField(max_length=24, blank=True) class Meta: managed=False db_table = u'customers_basket' class CustomersBasketAttributes(models.Model): customers_basket_attributes_id = models.IntegerField(primary_key=True) customers_id = models.IntegerField() products_id = models.TextField() products_options_id = models.IntegerField() products_options_value_id = models.IntegerField() class Meta: managed=False db_table = u'customers_basket_attributes' class CategoryDescription(models.Model): category = models.OneToOneField( 'Category', primary_key=True, db_column='categories_id', related_name='description' ) language_id = models.IntegerField(primary_key=True) name = models.CharField(db_column="categories_name", max_length=96) heading_title = models.CharField( db_column="categories_heading_title", max_length=192, blank=True ) description = models.TextField(db_column="categories_description", blank=True) class Meta: managed=False db_table = u'categories_description' class Category(models.Model): id = models.IntegerField(db_column="categories_id", primary_key=True) image = models.CharField(db_column="categories_image", max_length=192, blank=True) parent = models.ForeignKey('self', db_column="parent_id", related_name='children') 
sort_order = models.IntegerField(null=True, blank=True) date_added = models.DateTimeField(null=True, blank=True) last_modified = models.DateTimeField(null=True, blank=True) hide = models.IntegerField(db_column="categories_hide", ) status = models.IntegerField(db_column="categories_status", ) @property def is_leaf(self): return bool(self.children.count() == 0) @property def is_toplevel(self): return bool(self.parent_id == 0) @property def tree_path(self): try: path = self.parent.tree_path except Category.DoesNotExist: path = [] return path + [self.id] def __unicode__(self): try: return "%s #%d" % (self.description.name, self.id) except CategoryDescription.DoesNotExist: return 'Category #%d' % self.id class Meta: managed=False db_table = u'categories' #class CbCorporateProducts(models.Model): # corpid = models.IntegerField(primary_key=True) # status = models.IntegerField() # name = models.CharField(max_length=765) # image = models.CharField(max_length=765) # sortorder = models.IntegerField() # setupcost = models.IntegerField() # unitbaseprice = models.FloatField() # unitname = models.CharField(max_length=96) # unitweight = models.FloatField() # itemname = models.CharField(max_length=96) # itemsperunit = models.IntegerField() # minimumunits = models.IntegerField() # unitincrements = models.IntegerField() # sizeoptions = models.TextField() # flavouroptions = models.TextField() # packagingoptions = models.TextField() # customisationoptions = models.TextField() # otheroptions = models.TextField() # description = models.TextField() # ingredients = models.TextField() # information = models.TextField() # class Meta: # managed=False # db_table = u'cb_corporate_products' # # #class Configuration(models.Model): # configuration_id = models.IntegerField(primary_key=True) # configuration_title = models.CharField(max_length=192) # configuration_key = models.CharField(max_length=192) # configuration_value = models.CharField(max_length=765) # configuration_description = 
models.CharField(max_length=765) # configuration_group_id = models.IntegerField() # sort_order = models.IntegerField(null=True, blank=True) # last_modified = models.DateTimeField(null=True, blank=True) # date_added = models.DateTimeField() # use_function = models.CharField(max_length=765, blank=True) # set_function = models.CharField(max_length=765, blank=True) # class Meta: # managed=False # db_table = u'configuration' # # #class ConfigurationGroup(models.Model): # configuration_group_id = models.IntegerField(primary_key=True) # configuration_group_title = models.CharField(max_length=192) # configuration_group_description = models.CharField(max_length=765) # sort_order = models.IntegerField(null=True, blank=True) # visible = models.IntegerField(null=True, blank=True) # class Meta: # managed=False # db_table = u'configuration_group' # # #class Counter(models.Model): # startdate = models.CharField(max_length=24, blank=True) # counter = models.IntegerField(null=True, blank=True) # class Meta: # managed=False # db_table = u'counter' # # #class CounterHistory(models.Model): # month = models.CharField(max_length=24, blank=True) # counter = models.IntegerField(null=True, blank=True) # class Meta: # managed=False # db_table = u'counter_history' # # #class Currencies(models.Model): # currencies_id = models.IntegerField(primary_key=True) # title = models.CharField(max_length=96) # code = models.CharField(max_length=9) # symbol_left = models.CharField(max_length=36, blank=True) # symbol_right = models.CharField(max_length=36, blank=True) # decimal_point = models.CharField(max_length=3, blank=True) # thousands_point = models.CharField(max_length=3, blank=True) # decimal_places = models.CharField(max_length=3, blank=True) # value = models.FloatField(null=True, blank=True) # last_updated = models.DateTimeField(null=True, blank=True) # class Meta: # managed=False # db_table = u'currencies' # # #class DiscountCoupons(models.Model): # coupons_id = models.CharField(max_length=96, 
primary_key=True) # coupons_description = models.CharField(max_length=192) # coupons_discount_amount = models.DecimalField(max_digits=17, decimal_places=12) # coupons_discount_type = models.CharField(max_length=24) # coupons_date_start = models.DateTimeField(null=True, blank=True) # coupons_date_end = models.DateTimeField(null=True, blank=True) # coupons_max_use = models.IntegerField() # coupons_min_order = models.DecimalField(max_digits=17, decimal_places=4) # coupons_min_order_type = models.CharField(max_length=24, blank=True) # coupons_number_available = models.IntegerField() # class Meta: # managed=False # db_table = u'discount_coupons' # # #class DiscountCouponsToCategories(models.Model): # coupons_id = models.CharField(max_length=96, primary_key=True) # categories_id = models.IntegerField(primary_key=True) # class Meta: # managed=False # db_table = u'discount_coupons_to_categories' # # #class DiscountCouponsToCustomers(models.Model): # coupons_id = models.CharField(max_length=96, primary_key=True) # customers_id = models.IntegerField(primary_key=True) # class Meta: # managed=False # db_table = u'discount_coupons_to_customers' # # #class DiscountCouponsToManufacturers(models.Model): # coupons_id = models.CharField(max_length=96, primary_key=True) # manufacturers_id = models.IntegerField(primary_key=True) # class Meta: # managed=False # db_table = u'discount_coupons_to_manufacturers' # # #class DiscountCouponsToOrders(models.Model): # coupons_id = models.CharField(max_length=96, primary_key=True) # orders_id = models.IntegerField(primary_key=True) # class Meta: # managed=False # db_table = u'discount_coupons_to_orders' # # #class DiscountCouponsToProducts(models.Model): # coupons_id = models.CharField(max_length=96, primary_key=True) # products_id = models.IntegerField(primary_key=True) # class Meta: # managed=False # db_table = u'discount_coupons_to_products' # # #class DiscountCouponsToZones(models.Model): # coupons_id = models.CharField(max_length=96, 
primary_key=True) # geo_zone_id = models.IntegerField(primary_key=True) # class Meta: # managed=False # db_table = u'discount_coupons_to_zones' class Manufacturer(models.Model): id = models.IntegerField(db_column="manufacturers_id", primary_key=True) name = models.CharField(db_column="manufacturers_name", max_length=96) image = models.CharField(db_column="manufacturers_image", max_length=192, blank=True) date_added = models.DateTimeField(null=True, blank=True) last_modified = models.DateTimeField(null=True, blank=True) class Meta: managed=False db_table = u'manufacturers' class ManufacturersInfo(models.Model): manufacturer = models.ForeignKey( "Manufacturer", db_column="manufacturers_id", primary_key=True ) language = models.ForeignKey('Language', db_column="languages_id") url = models.CharField(db_column="manufacturers_url", max_length=765) url_clicked = models.IntegerField() date_last_click = models.DateTimeField(null=True, blank=True) class Meta: managed=False unique_together = ('id', 'language') db_table = u'manufacturers_info' class Newsletters(models.Model): newsletters_id = models.IntegerField(primary_key=True) title = models.CharField(max_length=765) content = models.TextField() module = models.CharField(max_length=765) date_added = models.DateTimeField() date_sent = models.DateTimeField(null=True, blank=True) status = models.IntegerField(null=True, blank=True) locked = models.IntegerField(null=True, blank=True) class Meta: managed=False db_table = u'newsletters' class OrderProduct(models.Model): id = models.IntegerField(db_column="orders_products_id", primary_key=True) order = models.ForeignKey('Order', db_column="orders_id", related_name='products') product = models.ForeignKey('Product', db_column="products_id", related_name='order_products') model = models.CharField(db_column="products_model", max_length=36, blank=True) name = models.CharField(db_column="products_name", max_length=192) price = models.DecimalField(db_column="products_price", 
max_digits=17, decimal_places=4) final_price = models.DecimalField(max_digits=17, decimal_places=4) tax = models.DecimalField(db_column="products_tax", max_digits=9, decimal_places=4) quantity = models.IntegerField(db_column="products_quantity", ) class Meta: managed=False db_table = u'orders_products' class OrdersProductsAttributes(models.Model): orders_products_attributes_id = models.IntegerField(primary_key=True) orders_id = models.IntegerField() orders_products_id = models.IntegerField() products_options = models.CharField(max_length=96) products_options_values = models.CharField(max_length=96) options_values_price = models.DecimalField(max_digits=17, decimal_places=4) price_prefix = models.CharField(max_length=3) class Meta: managed=False db_table = u'orders_products_attributes' class OrdersProductsDownload(models.Model): orders_products_download_id = models.IntegerField(primary_key=True) orders_id = models.IntegerField() orders_products_id = models.IntegerField() orders_products_filename = models.CharField(max_length=765) download_maxdays = models.IntegerField() download_count = models.IntegerField() class Meta: managed=False db_table = u'orders_products_download' class OrdersStatusHistory(models.Model): orders_status_history_id = models.IntegerField(primary_key=True) orders_id = models.IntegerField() orders_status_id = models.IntegerField() date_added = models.DateTimeField() customer_notified = models.IntegerField(null=True, blank=True) comments = models.TextField(blank=True) class Meta: managed=False db_table = u'orders_status_history' class OrderTotal(models.Model): id = models.IntegerField(db_column="orders_total_id", primary_key=True) order = models.ForeignKey('Order', db_column="orders_id", related_name='totals') title = models.CharField(max_length=765) text = models.CharField(max_length=765) value = models.DecimalField(max_digits=17, decimal_places=4) class_field = models.CharField(max_length=96, db_column='class') sort_order = models.IntegerField() 
class Meta: managed=False db_table = u'orders_total' class Product(models.Model): id = models.IntegerField(db_column="products_id", primary_key=True) model = models.CharField( db_column="products_model", max_length=36, blank=True ) quantity = models.IntegerField(db_column="products_quantity") image = models.CharField( db_column="products_image", max_length=192, blank=True ) price = models.DecimalField( db_column="products_price", max_digits=17, decimal_places=4 ) date_added = models.DateTimeField(db_column="products_date_added") last_modified = models.DateTimeField( db_column="products_last_modified", null=True, blank=True ) date_available = models.DateTimeField( db_column="products_date_available", null=True, blank=True ) weight = models.DecimalField( db_column="products_weight", max_digits=7, decimal_places=2 ) status = models.IntegerField(db_column="products_status") tax_class_id = models.IntegerField(db_column="products_tax_class_id") manufacturer = models.ForeignKey( 'Manufacturer', db_column="manufacturers_id", null=True, blank=True ) ordered = models.IntegerField(db_column="products_ordered") sort_order = models.IntegerField(db_column="products_sort_order") categories = models.ManyToManyField( 'Category', through='ProductsToCategories', related_name='products' ) def __unicode__(self): try: return '%s #%d' % (self.description.name, self.id) except ProductDescription.DoesNotExist: return 'Product #%d' % self.id class Meta: managed=False db_table = u'products' class ProductDescription(models.Model): product = models.OneToOneField( "Product", db_column="products_id", primary_key=True, related_name='description' ) language = models.ForeignKey( "Language", db_column="language_id", related_name='languages' ) name = models.CharField(db_column="products_name", max_length=192) description = models.TextField(db_column="products_description", blank=True) url = models.CharField(db_column="products_url", max_length=765, blank=True) viewed = 
models.IntegerField(db_column="products_viewed", null=True, blank=True) class Meta: unique_together = ('product', 'language') managed=False db_table = u'products_description' class ProductAttribute(models.Model): id = models.IntegerField(db_column="products_attributes_id", primary_key=True) product = models.ForeignKey( "Product", db_column="products_id", related_name='attributes' ) product_option = models.ForeignKey( "ProductOption", db_column="options_id" ) product_option_value = models.ForeignKey( "ProductsOptionValues", db_column="options_values_id" ) options_values_price = models.DecimalField( db_column="options_values_price", max_digits=17, decimal_places=4 ) price_prefix = models.CharField(max_length=3) class Meta: managed=False db_table = u'products_attributes' class ProductsAttributesDownload(models.Model): id = models.IntegerField( db_column="products_attributes_id", primary_key=True ) filename = models.CharField( db_column="products_attributes_filename", max_length=765 ) maxdays = models.IntegerField( db_column="products_attributes_maxdays", null=True, blank=True ) maxcount = models.IntegerField( db_column="products_attributes_maxcount", null=True, blank=True ) class Meta: managed=False db_table = u'products_attributes_download' class ProductNotification(models.Model): product = models.IntegerField( 'Product', db_column="products_id", primary_key=True ) customer = models.IntegerField('Customer', db_column="customers_id") date_added = models.DateTimeField() class Meta: unique_together = ('product', 'customer') managed=False db_table = u'products_notifications' class ProductOption(models.Model): products_options_id = models.IntegerField(primary_key=True) language_id = models.IntegerField(primary_key=True) products_options_name = models.CharField(max_length=96) class Meta: managed=False db_table = u'products_options' class ProductOptionValue(models.Model): products_options_values_id = models.IntegerField(primary_key=True) language_id = 
models.IntegerField(primary_key=True) products_options_values_name = models.CharField(max_length=192) class Meta: managed=False db_table = u'products_options_values' class ProductsOptionsValuesToProductsOptions(models.Model): products_options_values_to_products_options_id = models.IntegerField(primary_key=True) products_options_id = models.IntegerField() products_options_values_id = models.IntegerField() class Meta: managed=False db_table = u'products_options_values_to_products_options' class ProductsToCategories(models.Model): product = models.ForeignKey("Product", db_column="products_id") category = models.ForeignKey("Category", db_column="categories_id") class Meta: unique_together = ('product', 'category') managed=False db_table = u'products_to_categories' class Reviews(models.Model): reviews_id = models.IntegerField(primary_key=True) products_id = models.IntegerField() customers_id = models.IntegerField(null=True, blank=True) customers_name = models.CharField(max_length=192) reviews_rating = models.IntegerField(null=True, blank=True) date_added = models.DateTimeField(null=True, blank=True) last_modified = models.DateTimeField(null=True, blank=True) reviews_read = models.IntegerField() class Meta: managed=False db_table = u'reviews' class ReviewsDescription(models.Model): reviews_id = models.IntegerField(primary_key=True) language = models.ForeignKey('Language', db_column="languages_id", primary_key=True) reviews_text = models.TextField() class Meta: managed=False db_table = u'reviews_description' class Sessions(models.Model): sesskey = models.CharField(max_length=96, primary_key=True) expiry = models.IntegerField() value = models.TextField() class Meta: managed=False db_table = u'sessions' class Specials(models.Model): specials_id = models.IntegerField(primary_key=True) products_id = models.IntegerField() specials_new_products_price = models.DecimalField(max_digits=17, decimal_places=4) specials_date_added = models.DateTimeField(null=True, blank=True) 
specials_last_modified = models.DateTimeField(null=True, blank=True) expires_date = models.DateTimeField(null=True, blank=True) date_status_change = models.DateTimeField(null=True, blank=True) status = models.IntegerField() class Meta: managed=False db_table = u'specials' class Storelocations(models.Model): shopid = models.IntegerField(primary_key=True) name = models.CharField(max_length=765) sortorder = models.IntegerField() jobsavailable = models.IntegerField() phone = models.CharField(max_length=48) email = models.CharField(max_length=765) address = models.CharField(max_length=765) manager = models.CharField(max_length=765) class Meta: managed=False db_table = u'storelocations' class TaxClass(models.Model): id = models.IntegerField(db_column="tax_class_id", primary_key=True) title = models.CharField(db_column="tax_class_title", max_length=96) description = models.CharField(db_column="tax_class_description", max_length=765) last_modified = models.DateTimeField(null=True, blank=True) date_added = models.DateTimeField() class Meta: managed=False db_table = u'tax_class' class TaxRates(models.Model): id = models.IntegerField(db_column="tax_rates_id", primary_key=True) zone = models.ForeignKey('Zone', db_column="tax_zone_id", ) tax_class = models.ForeignKey('TaxClass', db_column="tax_class_id", ) priority = models.IntegerField(db_column="tax_priority", null=True, blank=True) rate = models.DecimalField(db_column="tax_rate", max_digits=9, decimal_places=4) description = models.CharField(db_column="tax_description", max_length=765) last_modified = models.DateTimeField(null=True, blank=True) date_added = models.DateTimeField() class Meta: managed=False db_table = u'tax_rates' class WhosOnline(models.Model): customer_id = models.IntegerField(null=True, blank=True) full_name = models.CharField(max_length=192) session_id = models.CharField(max_length=384) ip_address = models.CharField(max_length=45) time_entry = models.CharField(max_length=42) time_last_click = 
models.CharField(max_length=42) last_page_url = models.CharField(max_length=765) class Meta: managed=False db_table = u'whos_online' class ZonesToGeoZones(models.Model): association_id = models.IntegerField(primary_key=True) zone_country_id = models.IntegerField() zone_id = models.IntegerField(null=True, blank=True) geo_zone_id = models.IntegerField(null=True, blank=True) last_modified = models.DateTimeField(null=True, blank=True) date_added = models.DateTimeField() class Meta: managed=False db_table = u'zones_to_geo_zones' # # #class Banners(models.Model): # banners_id = models.IntegerField(primary_key=True) # banners_title = models.CharField(max_length=192) # banners_url = models.CharField(max_length=765) # banners_image = models.CharField(max_length=192) # banners_group = models.CharField(max_length=30) # banners_html_text = models.TextField(blank=True) # expires_impressions = models.IntegerField(null=True, blank=True) # expires_date = models.DateTimeField(null=True, blank=True) # date_scheduled = models.DateTimeField(null=True, blank=True) # date_added = models.DateTimeField() # date_status_change = models.DateTimeField(null=True, blank=True) # status = models.IntegerField() # class Meta: # managed=False # db_table = u'banners' # # #class BannersHistory(models.Model): # banners_history_id = models.IntegerField(primary_key=True) # banners_id = models.IntegerField() # banners_shown = models.IntegerField() # banners_clicked = models.IntegerField() # banners_history_date = models.DateTimeField() # class Meta: # managed=False # db_table = u'banners_history' # # #class GeoZones(models.Model): # id = models.IntegerField(primary_key=True) # name = models.CharField(max_length=96) # description = models.CharField(max_length=765) # last_modified = models.DateTimeField(null=True, blank=True) # date_added = models.DateTimeField() # # class Meta: # managed=False # db_table = u'geo_zones'
# License: BSD-3-Clause