Dataset schema:

column            dtype    observed sizes
repo              string   7 - 55 chars
path              string   4 - 127 chars
func_name         string   1 - 88 chars
original_string   string   75 - 19.8k chars
language          string   1 distinct value
code              string   75 - 19.8k chars
code_tokens       list
docstring         string   3 - 17.3k chars
docstring_tokens  list
sha               string   40 chars (fixed)
url               string   87 - 242 chars
partition         string   1 distinct value
Azure/msrest-for-python
msrest/serialization.py
Model._create_xml_node
def _create_xml_node(cls):
    """Create XML node from "_xml_map".
    """
    try:
        xml_map = cls._xml_map
    except AttributeError:
        raise ValueError("This model has no XML definition")

    return _create_xml_node(
        xml_map.get('name', cls.__name__),
        xml_map.get("prefix", None),
        xml_map.get("ns", None)
    )
python
[ "def", "_create_xml_node", "(", "cls", ")", ":", "try", ":", "xml_map", "=", "cls", ".", "_xml_map", "except", "AttributeError", ":", "raise", "ValueError", "(", "\"This model has no XML definition\"", ")", "return", "_create_xml_node", "(", "xml_map", ".", "get", "(", "'name'", ",", "cls", ".", "__name__", ")", ",", "xml_map", ".", "get", "(", "\"prefix\"", ",", "None", ")", ",", "xml_map", ".", "get", "(", "\"ns\"", ",", "None", ")", ")" ]
Create XML node from "_xml_map".
[ "Create", "XML", "node", "from", "_xml_map", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L191-L203
train
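For illustration, a minimal sketch of how a model might declare the "_xml_map" this method reads; the Cat class and its map values are hypothetical, not from the source, and assume msrest is installed.

from msrest.serialization import Model

class Cat(Model):
    # Hypothetical model: '_xml_map' supplies the node name, prefix and namespace
    _attribute_map = {}
    _xml_map = {'name': 'Cat', 'prefix': 'p', 'ns': 'http://example.com/cats'}

node = Cat._create_xml_node()
# 'node' should be, roughly, an ElementTree element for a 'Cat' node in the
# declared namespace; a class with no '_xml_map' raises
# ValueError("This model has no XML definition")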
Azure/msrest-for-python
msrest/serialization.py
Model.validate
def validate(self):
    """Validate this model recursively and return a list of ValidationError.

    :returns: A list of validation errors
    :rtype: list
    """
    validation_result = []
    for attr_name, value in [(attr, getattr(self, attr)) for attr in self._attribute_map]:
        attr_desc = self._attribute_map[attr_name]
        if attr_name == "additional_properties" and attr_desc["key"] == '':
            # Do NOT validate additional_properties
            continue
        attr_type = attr_desc['type']

        try:
            debug_name = "{}.{}".format(self.__class__.__name__, attr_name)
            Serializer.validate(value, debug_name, **self._validation.get(attr_name, {}))
        except ValidationError as validation_error:
            validation_result.append(validation_error)

        validation_result += _recursive_validate(attr_name, attr_type, value)
    return validation_result
python
[ "def", "validate", "(", "self", ")", ":", "validation_result", "=", "[", "]", "for", "attr_name", ",", "value", "in", "[", "(", "attr", ",", "getattr", "(", "self", ",", "attr", ")", ")", "for", "attr", "in", "self", ".", "_attribute_map", "]", ":", "attr_desc", "=", "self", ".", "_attribute_map", "[", "attr_name", "]", "if", "attr_name", "==", "\"additional_properties\"", "and", "attr_desc", "[", "\"key\"", "]", "==", "''", ":", "# Do NOT validate additional_properties", "continue", "attr_type", "=", "attr_desc", "[", "'type'", "]", "try", ":", "debug_name", "=", "\"{}.{}\"", ".", "format", "(", "self", ".", "__class__", ".", "__name__", ",", "attr_name", ")", "Serializer", ".", "validate", "(", "value", ",", "debug_name", ",", "*", "*", "self", ".", "_validation", ".", "get", "(", "attr_name", ",", "{", "}", ")", ")", "except", "ValidationError", "as", "validation_error", ":", "validation_result", ".", "append", "(", "validation_error", ")", "validation_result", "+=", "_recursive_validate", "(", "attr_name", ",", "attr_type", ",", "value", ")", "return", "validation_result" ]
Validate this model recursively and return a list of ValidationError.

:returns: A list of validation errors
:rtype: list
[ "Validate", "this", "model", "recursively", "and", "return", "a", "list", "of", "ValidationError", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L205-L226
train
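As a rough usage sketch (the Pet model and its constraint values are invented for illustration), a model declares per-attribute rules in _validation, and validate() collects every violation rather than raising on the first one:

from msrest.serialization import Model

class Pet(Model):
    # Hypothetical model with one constrained attribute
    _validation = {'name': {'required': True, 'max_length': 8}}
    _attribute_map = {'name': {'key': 'name', 'type': 'str'}}

    def __init__(self, name=None):
        super(Pet, self).__init__()
        self.name = name

errors = Pet(name='far too long a name').validate()
# 'errors' is a list of ValidationError; an empty list means the model is valid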
Azure/msrest-for-python
msrest/serialization.py
Model.serialize
def serialize(self, keep_readonly=False):
    """Return the JSON that would be sent to Azure from this model.

    This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`.

    :param bool keep_readonly: If you want to serialize the readonly attributes
    :returns: A dict JSON-compatible object
    :rtype: dict
    """
    serializer = Serializer(self._infer_class_models())
    return serializer._serialize(self, keep_readonly=keep_readonly)
python
[ "def", "serialize", "(", "self", ",", "keep_readonly", "=", "False", ")", ":", "serializer", "=", "Serializer", "(", "self", ".", "_infer_class_models", "(", ")", ")", "return", "serializer", ".", "_serialize", "(", "self", ",", "keep_readonly", "=", "keep_readonly", ")" ]
Return the JSON that would be sent to Azure from this model.

This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`.

:param bool keep_readonly: If you want to serialize the readonly attributes
:returns: A dict JSON-compatible object
:rtype: dict
[ "Return", "the", "JSON", "that", "would", "be", "sent", "to", "azure", "from", "this", "model", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L228-L238
train
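A minimal usage sketch, assuming a hand-written model in the style Autorest generates (the Pet class and its wire key are made up):

from msrest.serialization import Model

class Pet(Model):
    # 'key' is the RestAPI wire name, 'type' the msrest type string
    _attribute_map = {'name': {'key': 'petName', 'type': 'str'}}

    def __init__(self, name=None):
        super(Pet, self).__init__()
        self.name = name

Pet(name='Rex').serialize()   # expected: {'petName': 'Rex'}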
Azure/msrest-for-python
msrest/serialization.py
Model.as_dict
def as_dict(self, keep_readonly=True, key_transformer=attribute_transformer):
    """Return a dict that can be JSONified using json.dump.

    Advanced usage might optionally use a callback as parameter:

    .. code:: python

        def my_key_transformer(key, attr_desc, value):
            return key

    Key is the attribute name used in Python. Attr_desc
    is a dict of metadata. Currently contains 'type' with the
    msrest type and 'key' with the RestAPI encoded key.
    Value is the current value in this object.

    The string returned will be used to serialize the key.
    If the return type is a list, this is considered hierarchical
    result dict.

    See the three examples in this file:

    - attribute_transformer
    - full_restapi_key_transformer
    - last_restapi_key_transformer

    :param function key_transformer: A key transformer function.
    :returns: A dict JSON-compatible object
    :rtype: dict
    """
    serializer = Serializer(self._infer_class_models())
    return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly)
python
[ "def", "as_dict", "(", "self", ",", "keep_readonly", "=", "True", ",", "key_transformer", "=", "attribute_transformer", ")", ":", "serializer", "=", "Serializer", "(", "self", ".", "_infer_class_models", "(", ")", ")", "return", "serializer", ".", "_serialize", "(", "self", ",", "key_transformer", "=", "key_transformer", ",", "keep_readonly", "=", "keep_readonly", ")" ]
Return a dict that can be JSONified using json.dump.

Advanced usage might optionally use a callback as parameter:

.. code:: python

    def my_key_transformer(key, attr_desc, value):
        return key

Key is the attribute name used in Python. Attr_desc is a dict of metadata.
Currently contains 'type' with the msrest type and 'key' with the RestAPI
encoded key. Value is the current value in this object.

The string returned will be used to serialize the key.
If the return type is a list, this is considered hierarchical result dict.

See the three examples in this file:

- attribute_transformer
- full_restapi_key_transformer
- last_restapi_key_transformer

:param function key_transformer: A key transformer function.
:returns: A dict JSON-compatible object
:rtype: dict
[ "Return", "a", "dict", "that", "can", "be", "JSONify", "using", "json", ".", "dump", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L240-L270
train
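A sketch of the callback contract the docstring describes; upper_key_transformer is a hypothetical transformer, and Pet is the same invented model as before:

from msrest.serialization import Model

class Pet(Model):
    _attribute_map = {'name': {'key': 'petName', 'type': 'str'}}

    def __init__(self, name=None):
        super(Pet, self).__init__()
        self.name = name

def upper_key_transformer(key, attr_desc, value):
    # 'key' is the Python attribute name; the returned string becomes the dict key
    return key.upper()

Pet(name='Rex').as_dict(key_transformer=upper_key_transformer)   # expected: {'NAME': 'Rex'}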
Azure/msrest-for-python
msrest/serialization.py
Model.deserialize
def deserialize(cls, data, content_type=None):
    """Parse a str using the RestAPI syntax and return a model.

    :param str data: A str using RestAPI structure. JSON by default.
    :param str content_type: JSON by default, set application/xml if XML.
    :returns: An instance of this model
    :raises: DeserializationError if something went wrong
    """
    deserializer = Deserializer(cls._infer_class_models())
    return deserializer(cls.__name__, data, content_type=content_type)
python
[ "def", "deserialize", "(", "cls", ",", "data", ",", "content_type", "=", "None", ")", ":", "deserializer", "=", "Deserializer", "(", "cls", ".", "_infer_class_models", "(", ")", ")", "return", "deserializer", "(", "cls", ".", "__name__", ",", "data", ",", "content_type", "=", "content_type", ")" ]
Parse a str using the RestAPI syntax and return a model.

:param str data: A str using RestAPI structure. JSON by default.
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
[ "Parse", "a", "str", "using", "the", "RestAPI", "syntax", "and", "return", "a", "model", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L286-L295
train
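A usage sketch with the invented Pet model; content_type defaults to JSON, so a JSON string parses directly:

from msrest.serialization import Model

class Pet(Model):
    _attribute_map = {'name': {'key': 'petName', 'type': 'str'}}

    def __init__(self, name=None):
        super(Pet, self).__init__()
        self.name = name

pet = Pet.deserialize('{"petName": "Rex"}')
assert pet.name == 'Rex'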
Azure/msrest-for-python
msrest/serialization.py
Model.from_dict
def from_dict(cls, data, key_extractors=None, content_type=None):
    """Parse a dict using the given key extractors and return a model.

    By default, the key extractors rest_key_case_insensitive_extractor,
    attribute_key_case_insensitive_extractor and
    last_rest_key_case_insensitive_extractor are used.

    :param dict data: A dict using RestAPI structure
    :param str content_type: JSON by default, set application/xml if XML.
    :returns: An instance of this model
    :raises: DeserializationError if something went wrong
    """
    deserializer = Deserializer(cls._infer_class_models())
    deserializer.key_extractors = [
        rest_key_case_insensitive_extractor,
        attribute_key_case_insensitive_extractor,
        last_rest_key_case_insensitive_extractor
    ] if key_extractors is None else key_extractors
    return deserializer(cls.__name__, data, content_type=content_type)
python
[ "def", "from_dict", "(", "cls", ",", "data", ",", "key_extractors", "=", "None", ",", "content_type", "=", "None", ")", ":", "deserializer", "=", "Deserializer", "(", "cls", ".", "_infer_class_models", "(", ")", ")", "deserializer", ".", "key_extractors", "=", "[", "rest_key_case_insensitive_extractor", ",", "attribute_key_case_insensitive_extractor", ",", "last_rest_key_case_insensitive_extractor", "]", "if", "key_extractors", "is", "None", "else", "key_extractors", "return", "deserializer", "(", "cls", ".", "__name__", ",", "data", ",", "content_type", "=", "content_type", ")" ]
Parse a dict using the given key extractors and return a model.

By default, the key extractors rest_key_case_insensitive_extractor,
attribute_key_case_insensitive_extractor and
last_rest_key_case_insensitive_extractor are used.

:param dict data: A dict using RestAPI structure
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
[ "Parse", "a", "dict", "using", "given", "key", "extractor", "return", "a", "model", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L298-L316
train
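Because the default extractors are case-insensitive, a dict keyed by a differently-cased wire name should still resolve; a sketch with the same invented Pet model:

from msrest.serialization import Model

class Pet(Model):
    _attribute_map = {'name': {'key': 'petName', 'type': 'str'}}

    def __init__(self, name=None):
        super(Pet, self).__init__()
        self.name = name

pet = Pet.from_dict({'PETNAME': 'Rex'})   # matched case-insensitively against 'petName'
assert pet.name == 'Rex'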
Azure/msrest-for-python
msrest/serialization.py
Model._classify
def _classify(cls, response, objects):
    """Check the class _subtype_map for any child classes.
    We want to ignore any inherited _subtype_maps.
    Remove the polymorphic key from the initial data.
    """
    for subtype_key in cls.__dict__.get('_subtype_map', {}).keys():
        subtype_value = None

        rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1]
        subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None)
        if subtype_value:
            # Try to match base class. Can be class name only
            # (bug to fix in Autorest to support x-ms-discriminator-name)
            if cls.__name__ == subtype_value:
                return cls
            flatten_mapping_type = cls._flatten_subtype(subtype_key, objects)
            try:
                return objects[flatten_mapping_type[subtype_value]]
            except KeyError:
                _LOGGER.warning(
                    "Subtype value %s has no mapping, use base class %s.",
                    subtype_value,
                    cls.__name__,
                )
                break
        else:
            _LOGGER.warning(
                "Discriminator %s is absent or null, use base class %s.",
                subtype_key,
                cls.__name__
            )
            break
    return cls
python
[ "def", "_classify", "(", "cls", ",", "response", ",", "objects", ")", ":", "for", "subtype_key", "in", "cls", ".", "__dict__", ".", "get", "(", "'_subtype_map'", ",", "{", "}", ")", ".", "keys", "(", ")", ":", "subtype_value", "=", "None", "rest_api_response_key", "=", "cls", ".", "_get_rest_key_parts", "(", "subtype_key", ")", "[", "-", "1", "]", "subtype_value", "=", "response", ".", "pop", "(", "rest_api_response_key", ",", "None", ")", "or", "response", ".", "pop", "(", "subtype_key", ",", "None", ")", "if", "subtype_value", ":", "# Try to match base class. Can be class name only", "# (bug to fix in Autorest to support x-ms-discriminator-name)", "if", "cls", ".", "__name__", "==", "subtype_value", ":", "return", "cls", "flatten_mapping_type", "=", "cls", ".", "_flatten_subtype", "(", "subtype_key", ",", "objects", ")", "try", ":", "return", "objects", "[", "flatten_mapping_type", "[", "subtype_value", "]", "]", "except", "KeyError", ":", "_LOGGER", ".", "warning", "(", "\"Subtype value %s has no mapping, use base class %s.\"", ",", "subtype_value", ",", "cls", ".", "__name__", ",", ")", "break", "else", ":", "_LOGGER", ".", "warning", "(", "\"Discriminator %s is absent or null, use base class %s.\"", ",", "subtype_key", ",", "cls", ".", "__name__", ")", "break", "return", "cls" ]
Check the class _subtype_map for any child classes. We want to ignore any inherited _subtype_maps. Remove the polymorphic key from the initial data.
[ "Check", "the", "class", "_subtype_map", "for", "any", "child", "classes", ".", "We", "want", "to", "ignore", "any", "inherited", "_subtype_maps", ".", "Remove", "the", "polymorphic", "key", "from", "the", "initial", "data", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L328-L360
train
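A hedged sketch of the discriminator lookup: Animal, Dog and both maps are invented, assuming the msrest convention that _subtype_map maps a discriminator attribute to {discriminator value: class name}.

from msrest.serialization import Model

class Animal(Model):
    _attribute_map = {'animal_type': {'key': 'animalType', 'type': 'str'}}
    _subtype_map = {'animal_type': {'Dog': 'Dog'}}

class Dog(Animal):
    _attribute_map = {'animal_type': {'key': 'animalType', 'type': 'str'}}

objects = {'Animal': Animal, 'Dog': Dog}
klass = Animal._classify({'animalType': 'Dog'}, objects)
# expected: Dog; the 'animalType' key is popped from the response dict, and an
# unknown or missing discriminator falls back to Animal with a logged warning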
Azure/msrest-for-python
msrest/serialization.py
Serializer.body
def body(self, data, data_type, **kwargs):
    """Serialize data intended for a request body.

    :param data: The data to be serialized.
    :param str data_type: The type to be serialized from.
    :rtype: dict
    :raises: SerializationError if serialization fails.
    :raises: ValueError if data is None
    """
    if data is None:
        raise ValidationError("required", "body", True)

    # Just in case this is a dict
    internal_data_type = data_type.strip('[]{}')
    internal_data_type = self.dependencies.get(internal_data_type, None)
    if internal_data_type and not isinstance(internal_data_type, Enum):
        try:
            deserializer = Deserializer(self.dependencies)
            # Since this is used during serialization, it's almost certain
            # the format is not REST JSON.
            # We're not able to deal with additional properties for now.
            deserializer.additional_properties_detection = False
            if issubclass(internal_data_type, Model) and internal_data_type.is_xml_model():
                deserializer.key_extractors = [
                    attribute_key_case_insensitive_extractor,
                ]
            else:
                deserializer.key_extractors = [
                    rest_key_case_insensitive_extractor,
                    attribute_key_case_insensitive_extractor,
                    last_rest_key_case_insensitive_extractor
                ]
            data = deserializer._deserialize(data_type, data)
        except DeserializationError as err:
            raise_with_traceback(
                SerializationError, "Unable to build a model: " + str(err), err)

    if self.client_side_validation:
        errors = _recursive_validate(data_type, data_type, data)
        if errors:
            raise errors[0]
    return self._serialize(data, data_type, **kwargs)
python
[ "def", "body", "(", "self", ",", "data", ",", "data_type", ",", "*", "*", "kwargs", ")", ":", "if", "data", "is", "None", ":", "raise", "ValidationError", "(", "\"required\"", ",", "\"body\"", ",", "True", ")", "# Just in case this is a dict", "internal_data_type", "=", "data_type", ".", "strip", "(", "'[]{}'", ")", "internal_data_type", "=", "self", ".", "dependencies", ".", "get", "(", "internal_data_type", ",", "None", ")", "if", "internal_data_type", "and", "not", "isinstance", "(", "internal_data_type", ",", "Enum", ")", ":", "try", ":", "deserializer", "=", "Deserializer", "(", "self", ".", "dependencies", ")", "# Since it's on serialization, it's almost sure that format is not JSON REST", "# We're not able to deal with additional properties for now.", "deserializer", ".", "additional_properties_detection", "=", "False", "if", "issubclass", "(", "internal_data_type", ",", "Model", ")", "and", "internal_data_type", ".", "is_xml_model", "(", ")", ":", "deserializer", ".", "key_extractors", "=", "[", "attribute_key_case_insensitive_extractor", ",", "]", "else", ":", "deserializer", ".", "key_extractors", "=", "[", "rest_key_case_insensitive_extractor", ",", "attribute_key_case_insensitive_extractor", ",", "last_rest_key_case_insensitive_extractor", "]", "data", "=", "deserializer", ".", "_deserialize", "(", "data_type", ",", "data", ")", "except", "DeserializationError", "as", "err", ":", "raise_with_traceback", "(", "SerializationError", ",", "\"Unable to build a model: \"", "+", "str", "(", "err", ")", ",", "err", ")", "if", "self", ".", "client_side_validation", ":", "errors", "=", "_recursive_validate", "(", "data_type", ",", "data_type", ",", "data", ")", "if", "errors", ":", "raise", "errors", "[", "0", "]", "return", "self", ".", "_serialize", "(", "data", ",", "data_type", ",", "*", "*", "kwargs", ")" ]
Serialize data intended for a request body.

:param data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: dict
:raises: SerializationError if serialization fails.
:raises: ValueError if data is None
[ "Serialize", "data", "intended", "for", "a", "request", "body", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L540-L580
train
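A usage sketch with the invented Pet model: body() accepts either a model instance or a plain dict, and a dict is first rebuilt into a model (hence the Deserializer round-trip above), validated if client_side_validation is on, then serialized with wire keys.

from msrest.serialization import Model, Serializer

class Pet(Model):
    _attribute_map = {'name': {'key': 'petName', 'type': 'str'}}

    def __init__(self, name=None):
        super(Pet, self).__init__()
        self.name = name

serializer = Serializer({'Pet': Pet})
serializer.body({'name': 'Rex'}, 'Pet')   # expected: {'petName': 'Rex'}
serializer.body(None, 'Pet')              # raises: the body is required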
Azure/msrest-for-python
msrest/serialization.py
Serializer.url
def url(self, name, data, data_type, **kwargs):
    """Serialize data intended for a URL path.

    :param data: The data to be serialized.
    :param str data_type: The type to be serialized from.
    :rtype: str
    :raises: TypeError if serialization fails.
    :raises: ValueError if data is None
    """
    if self.client_side_validation:
        data = self.validate(data, name, required=True, **kwargs)
    try:
        output = self.serialize_data(data, data_type, **kwargs)
        if data_type == 'bool':
            output = json.dumps(output)

        if kwargs.get('skip_quote') is True:
            output = str(output)
        else:
            output = quote(str(output), safe='')
    except SerializationError:
        raise TypeError("{} must be type {}.".format(name, data_type))
    else:
        return output
python
[ "def", "url", "(", "self", ",", "name", ",", "data", ",", "data_type", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "client_side_validation", ":", "data", "=", "self", ".", "validate", "(", "data", ",", "name", ",", "required", "=", "True", ",", "*", "*", "kwargs", ")", "try", ":", "output", "=", "self", ".", "serialize_data", "(", "data", ",", "data_type", ",", "*", "*", "kwargs", ")", "if", "data_type", "==", "'bool'", ":", "output", "=", "json", ".", "dumps", "(", "output", ")", "if", "kwargs", ".", "get", "(", "'skip_quote'", ")", "is", "True", ":", "output", "=", "str", "(", "output", ")", "else", ":", "output", "=", "quote", "(", "str", "(", "output", ")", ",", "safe", "=", "''", ")", "except", "SerializationError", ":", "raise", "TypeError", "(", "\"{} must be type {}.\"", ".", "format", "(", "name", ",", "data_type", ")", ")", "else", ":", "return", "output" ]
Serialize data intended for a URL path.

:param data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
[ "Serialize", "data", "intended", "for", "a", "URL", "path", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L582-L605
train
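A small demonstration of the quoting behavior, assuming a default Serializer(); expected outputs follow from the code above:

from msrest.serialization import Serializer

serializer = Serializer()
serializer.url('item_id', 'a b/c', 'str')                    # expected: 'a%20b%2Fc'
serializer.url('item_id', 'a b/c', 'str', skip_quote=True)   # expected: 'a b/c'
serializer.url('flag', True, 'bool')                         # expected: 'true' (via json.dumps)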
Azure/msrest-for-python
msrest/serialization.py
Serializer.header
def header(self, name, data, data_type, **kwargs):
    """Serialize data intended for a request header.

    :param data: The data to be serialized.
    :param str data_type: The type to be serialized from.
    :rtype: str
    :raises: TypeError if serialization fails.
    :raises: ValueError if data is None
    """
    if self.client_side_validation:
        data = self.validate(data, name, required=True, **kwargs)
    try:
        if data_type in ['[str]']:
            data = ["" if d is None else d for d in data]

        output = self.serialize_data(data, data_type, **kwargs)
        if data_type == 'bool':
            output = json.dumps(output)
    except SerializationError:
        raise TypeError("{} must be type {}.".format(name, data_type))
    else:
        return str(output)
python
[ "def", "header", "(", "self", ",", "name", ",", "data", ",", "data_type", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "client_side_validation", ":", "data", "=", "self", ".", "validate", "(", "data", ",", "name", ",", "required", "=", "True", ",", "*", "*", "kwargs", ")", "try", ":", "if", "data_type", "in", "[", "'[str]'", "]", ":", "data", "=", "[", "\"\"", "if", "d", "is", "None", "else", "d", "for", "d", "in", "data", "]", "output", "=", "self", ".", "serialize_data", "(", "data", ",", "data_type", ",", "*", "*", "kwargs", ")", "if", "data_type", "==", "'bool'", ":", "output", "=", "json", ".", "dumps", "(", "output", ")", "except", "SerializationError", ":", "raise", "TypeError", "(", "\"{} must be type {}.\"", ".", "format", "(", "name", ",", "data_type", ")", ")", "else", ":", "return", "str", "(", "output", ")" ]
Serialize data intended for a request header.

:param data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
[ "Serialize", "data", "intended", "for", "a", "request", "header", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L634-L655
train
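Note how '[str]' lists get None entries replaced by "" before serialization; a div separator, if passed, is forwarded through serialize_data to the list serializer. A sketch, assuming a default Serializer():

from msrest.serialization import Serializer

serializer = Serializer()
serializer.header('x-flag', True, 'bool')                            # expected: 'true'
serializer.header('x-tags', ['a', None, 'b'], '[str]', div=',')      # expected: 'a,,b'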
Azure/msrest-for-python
msrest/serialization.py
Serializer.validate
def validate(cls, data, name, **kwargs):
    """Validate that a piece of data meets certain conditions"""
    required = kwargs.get('required', False)
    if required and data is None:
        raise ValidationError("required", name, True)
    elif data is None:
        return
    elif kwargs.get('readonly'):
        return

    try:
        for key, value in kwargs.items():
            validator = cls.validation.get(key, lambda x, y: False)
            if validator(data, value):
                raise ValidationError(key, name, value)
    except TypeError:
        raise ValidationError("unknown", name, "unknown")
    else:
        return data
python
[ "def", "validate", "(", "cls", ",", "data", ",", "name", ",", "*", "*", "kwargs", ")", ":", "required", "=", "kwargs", ".", "get", "(", "'required'", ",", "False", ")", "if", "required", "and", "data", "is", "None", ":", "raise", "ValidationError", "(", "\"required\"", ",", "name", ",", "True", ")", "elif", "data", "is", "None", ":", "return", "elif", "kwargs", ".", "get", "(", "'readonly'", ")", ":", "return", "try", ":", "for", "key", ",", "value", "in", "kwargs", ".", "items", "(", ")", ":", "validator", "=", "cls", ".", "validation", ".", "get", "(", "key", ",", "lambda", "x", ",", "y", ":", "False", ")", "if", "validator", "(", "data", ",", "value", ")", ":", "raise", "ValidationError", "(", "key", ",", "name", ",", "value", ")", "except", "TypeError", ":", "raise", "ValidationError", "(", "\"unknown\"", ",", "name", ",", "\"unknown\"", ")", "else", ":", "return", "data" ]
Validate that a piece of data meets certain conditions
[ "Validate", "that", "a", "piece", "of", "data", "meets", "certain", "conditions" ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L658-L676
train
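The kwargs name rules looked up in Serializer.validation (for example max_length or pattern); unknown keys fall back to a validator that never fires. A quick sketch:

from msrest.serialization import Serializer
from msrest.exceptions import ValidationError

Serializer.validate('abc', 'name', max_length=5)   # passes, returns 'abc'
try:
    Serializer.validate('abcdefgh', 'name', max_length=5)
except ValidationError as err:
    print(err)   # reports that rule 'max_length' failed for parameter 'name'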
Azure/msrest-for-python
msrest/serialization.py
Serializer.serialize_data
def serialize_data(self, data, data_type, **kwargs):
    """Serialize generic data according to supplied data type.

    :param data: The data to be serialized.
    :param str data_type: The type to be serialized from.
    :param bool required: Whether it's essential that the data not be empty or None
    :raises: AttributeError if required data is None.
    :raises: ValueError if data is None
    :raises: SerializationError if serialization fails.
    """
    if data is None:
        raise ValueError("No value for given attribute")

    try:
        if data_type in self.basic_types.values():
            return self.serialize_basic(data, data_type, **kwargs)

        elif data_type in self.serialize_type:
            return self.serialize_type[data_type](data, **kwargs)

        # If dependencies is empty, try with current data class
        # It has to be a subclass of Enum anyway
        enum_type = self.dependencies.get(data_type, data.__class__)
        if issubclass(enum_type, Enum):
            return Serializer.serialize_enum(data, enum_obj=enum_type)

        iter_type = data_type[0] + data_type[-1]
        if iter_type in self.serialize_type:
            return self.serialize_type[iter_type](
                data, data_type[1:-1], **kwargs)

    except (ValueError, TypeError) as err:
        msg = "Unable to serialize value: {!r} as type: {!r}."
        raise_with_traceback(
            SerializationError, msg.format(data, data_type), err)
    else:
        return self._serialize(data, **kwargs)
python
[ "def", "serialize_data", "(", "self", ",", "data", ",", "data_type", ",", "*", "*", "kwargs", ")", ":", "if", "data", "is", "None", ":", "raise", "ValueError", "(", "\"No value for given attribute\"", ")", "try", ":", "if", "data_type", "in", "self", ".", "basic_types", ".", "values", "(", ")", ":", "return", "self", ".", "serialize_basic", "(", "data", ",", "data_type", ",", "*", "*", "kwargs", ")", "elif", "data_type", "in", "self", ".", "serialize_type", ":", "return", "self", ".", "serialize_type", "[", "data_type", "]", "(", "data", ",", "*", "*", "kwargs", ")", "# If dependencies is empty, try with current data class", "# It has to be a subclass of Enum anyway", "enum_type", "=", "self", ".", "dependencies", ".", "get", "(", "data_type", ",", "data", ".", "__class__", ")", "if", "issubclass", "(", "enum_type", ",", "Enum", ")", ":", "return", "Serializer", ".", "serialize_enum", "(", "data", ",", "enum_obj", "=", "enum_type", ")", "iter_type", "=", "data_type", "[", "0", "]", "+", "data_type", "[", "-", "1", "]", "if", "iter_type", "in", "self", ".", "serialize_type", ":", "return", "self", ".", "serialize_type", "[", "iter_type", "]", "(", "data", ",", "data_type", "[", "1", ":", "-", "1", "]", ",", "*", "*", "kwargs", ")", "except", "(", "ValueError", ",", "TypeError", ")", "as", "err", ":", "msg", "=", "\"Unable to serialize value: {!r} as type: {!r}.\"", "raise_with_traceback", "(", "SerializationError", ",", "msg", ".", "format", "(", "data", ",", "data_type", ")", ",", "err", ")", "else", ":", "return", "self", ".", "_serialize", "(", "data", ",", "*", "*", "kwargs", ")" ]
Serialize generic data according to supplied data type.

:param data: The data to be serialized.
:param str data_type: The type to be serialized from.
:param bool required: Whether it's essential that the data not be empty or None
:raises: AttributeError if required data is None.
:raises: ValueError if data is None
:raises: SerializationError if serialization fails.
[ "Serialize", "generic", "data", "according", "to", "supplied", "data", "type", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L678-L715
train
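The dispatch order is worth spelling out: basic types first, then named serializers, then enums, then the first-plus-last-character trick that turns '[int]' into the '[]' list serializer and '{int}' into the '{}' dict serializer. A quick sketch:

from msrest.serialization import Serializer

serializer = Serializer()
serializer.serialize_data(1, 'int')            # basic type
serializer.serialize_data([1, 2], '[int]')     # '[' + ']' selects the iterable serializer
serializer.serialize_data({'a': 1}, '{int}')   # '{' + '}' selects the dict serializer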
Azure/msrest-for-python
msrest/serialization.py
Serializer.serialize_basic
def serialize_basic(self, data, data_type, **kwargs):
    """Serialize a basic built-in data type.
    Serializes objects to str, int, float or bool.

    Possible kwargs:
    - is_xml bool : If set, adapt basic serializers without the need
      for basic_types_serializers
    - basic_types_serializers dict[str, callable] : If set, use the callable as serializer

    :param data: Object to be serialized.
    :param str data_type: Type of object in the iterable.
    """
    custom_serializer = self._get_custom_serializers(data_type, **kwargs)
    if custom_serializer:
        return custom_serializer(data)
    if data_type == 'str':
        return self.serialize_unicode(data)
    return eval(data_type)(data)
python
[ "def", "serialize_basic", "(", "self", ",", "data", ",", "data_type", ",", "*", "*", "kwargs", ")", ":", "custom_serializer", "=", "self", ".", "_get_custom_serializers", "(", "data_type", ",", "*", "*", "kwargs", ")", "if", "custom_serializer", ":", "return", "custom_serializer", "(", "data", ")", "if", "data_type", "==", "'str'", ":", "return", "self", ".", "serialize_unicode", "(", "data", ")", "return", "eval", "(", "data_type", ")", "(", "data", ")" ]
Serialize a basic built-in data type.
Serializes objects to str, int, float or bool.

Possible kwargs:
- is_xml bool : If set, adapt basic serializers without the need for basic_types_serializers
- basic_types_serializers dict[str, callable] : If set, use the callable as serializer

:param data: Object to be serialized.
:param str data_type: Type of object in the iterable.
[ "Serialize", "basic", "builting", "data", "type", ".", "Serializes", "objects", "to", "str", "int", "float", "or", "bool", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L724-L740
train
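Note that for non-str basics the cast is literally eval(data_type)(data), so data_type is expected to be one of the trusted built-in names ('int', 'float', 'bool'). For example:

from msrest.serialization import Serializer

serializer = Serializer()
serializer.serialize_basic(3, 'float')   # expected: 3.0, via eval('float')(3)
serializer.serialize_basic('x', 'str')   # routed through serialize_unicode instead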
Azure/msrest-for-python
msrest/serialization.py
Serializer.serialize_unicode
def serialize_unicode(self, data):
    """Special handling for serializing unicode strings in Py2.
    Encode to UTF-8 if unicode, otherwise handle as a str.

    :param data: Object to be serialized.
    :rtype: str
    """
    try:
        # If the object exposes a 'value' attribute (e.g. an enum member), use it
        return data.value
    except AttributeError:
        pass
    try:
        if isinstance(data, unicode):
            return data.encode(encoding='utf-8')
    except NameError:
        # Python 3: 'unicode' does not exist, fall back to str
        return str(data)
    else:
        return str(data)
python
[ "def", "serialize_unicode", "(", "self", ",", "data", ")", ":", "try", ":", "return", "data", ".", "value", "except", "AttributeError", ":", "pass", "try", ":", "if", "isinstance", "(", "data", ",", "unicode", ")", ":", "return", "data", ".", "encode", "(", "encoding", "=", "'utf-8'", ")", "except", "NameError", ":", "return", "str", "(", "data", ")", "else", ":", "return", "str", "(", "data", ")" ]
Special handling for serializing unicode strings in Py2.
Encode to UTF-8 if unicode, otherwise handle as a str.

:param data: Object to be serialized.
:rtype: str
[ "Special", "handling", "for", "serializing", "unicode", "strings", "in", "Py2", ".", "Encode", "to", "UTF", "-", "8", "if", "unicode", "otherwise", "handle", "as", "a", "str", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L742-L759
train
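On Python 3 the isinstance(data, unicode) check raises NameError, so everything falls through to str(); the initial data.value probe also means enum members serialize as their value. A sketch:

import enum
from msrest.serialization import Serializer

class Color(enum.Enum):
    RED = 'red'

serializer = Serializer()
serializer.serialize_unicode(Color.RED)   # expected: 'red' (the data.value branch)
serializer.serialize_unicode('plain')     # expected: 'plain' (NameError branch on Py3)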
Azure/msrest-for-python
msrest/serialization.py
Serializer.serialize_iter
def serialize_iter(self, data, iter_type, div=None, **kwargs):
    """Serialize iterable.

    Supported kwargs:
    - serialization_ctxt dict : The current entry of _attribute_map, or same format.
      serialization_ctxt['type'] should be same as data_type.

    :param list attr: Object to be serialized.
    :param str iter_type: Type of object in the iterable.
    :param bool required: Whether the objects in the iterable must not be None or empty.
    :param str div: If set, this str will be used to combine the elements
     in the iterable into a combined string. Default is 'None'.
    :rtype: list, str
    """
    if isinstance(data, str):
        raise SerializationError("Refuse str type as a valid iter type.")

    serialization_ctxt = kwargs.get("serialization_ctxt", {})

    serialized = []
    for d in data:
        try:
            serialized.append(self.serialize_data(d, iter_type, **kwargs))
        except ValueError:
            serialized.append(None)

    if div:
        serialized = ['' if s is None else str(s) for s in serialized]
        serialized = div.join(serialized)

    if 'xml' in serialization_ctxt:
        # XML serialization is more complicated
        xml_desc = serialization_ctxt['xml']
        xml_name = xml_desc['name']

        # Create a wrap node if necessary (use the fact that Element and list have "append")
        is_wrapped = "wrapped" in xml_desc and xml_desc["wrapped"]
        node_name = xml_desc.get("itemsName", xml_name)
        if is_wrapped:
            final_result = _create_xml_node(
                xml_name,
                xml_desc.get('prefix', None),
                xml_desc.get('ns', None)
            )
        else:
            final_result = []
        # All list elements to "local_node"
        for el in serialized:
            if isinstance(el, ET.Element):
                el_node = el
            else:
                el_node = _create_xml_node(
                    node_name,
                    xml_desc.get('prefix', None),
                    xml_desc.get('ns', None)
                )
                if el is not None:  # Otherwise it writes "None" :-p
                    el_node.text = str(el)
            final_result.append(el_node)
        return final_result
    return serialized
python
[ "def", "serialize_iter", "(", "self", ",", "data", ",", "iter_type", ",", "div", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "data", ",", "str", ")", ":", "raise", "SerializationError", "(", "\"Refuse str type as a valid iter type.\"", ")", "serialization_ctxt", "=", "kwargs", ".", "get", "(", "\"serialization_ctxt\"", ",", "{", "}", ")", "serialized", "=", "[", "]", "for", "d", "in", "data", ":", "try", ":", "serialized", ".", "append", "(", "self", ".", "serialize_data", "(", "d", ",", "iter_type", ",", "*", "*", "kwargs", ")", ")", "except", "ValueError", ":", "serialized", ".", "append", "(", "None", ")", "if", "div", ":", "serialized", "=", "[", "''", "if", "s", "is", "None", "else", "str", "(", "s", ")", "for", "s", "in", "serialized", "]", "serialized", "=", "div", ".", "join", "(", "serialized", ")", "if", "'xml'", "in", "serialization_ctxt", ":", "# XML serialization is more complicated", "xml_desc", "=", "serialization_ctxt", "[", "'xml'", "]", "xml_name", "=", "xml_desc", "[", "'name'", "]", "# Create a wrap node if necessary (use the fact that Element and list have \"append\")", "is_wrapped", "=", "\"wrapped\"", "in", "xml_desc", "and", "xml_desc", "[", "\"wrapped\"", "]", "node_name", "=", "xml_desc", ".", "get", "(", "\"itemsName\"", ",", "xml_name", ")", "if", "is_wrapped", ":", "final_result", "=", "_create_xml_node", "(", "xml_name", ",", "xml_desc", ".", "get", "(", "'prefix'", ",", "None", ")", ",", "xml_desc", ".", "get", "(", "'ns'", ",", "None", ")", ")", "else", ":", "final_result", "=", "[", "]", "# All list elements to \"local_node\"", "for", "el", "in", "serialized", ":", "if", "isinstance", "(", "el", ",", "ET", ".", "Element", ")", ":", "el_node", "=", "el", "else", ":", "el_node", "=", "_create_xml_node", "(", "node_name", ",", "xml_desc", ".", "get", "(", "'prefix'", ",", "None", ")", ",", "xml_desc", ".", "get", "(", "'ns'", ",", "None", ")", ")", "if", "el", "is", "not", "None", ":", "# Otherwise it writes \"None\" :-p", "el_node", ".", "text", "=", "str", "(", "el", ")", "final_result", ".", "append", "(", "el_node", ")", "return", "final_result", "return", "serialized" ]
Serialize iterable.

Supported kwargs:
- serialization_ctxt dict : The current entry of _attribute_map, or same format.
  serialization_ctxt['type'] should be same as data_type.

:param list attr: Object to be serialized.
:param str iter_type: Type of object in the iterable.
:param bool required: Whether the objects in the iterable must not be None or empty.
:param str div: If set, this str will be used to combine the elements
 in the iterable into a combined string. Default is 'None'.
:rtype: list, str
[ "Serialize", "iterable", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L761-L821
train
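Two common shapes, leaving the XML branch aside (serialization_ctxt unset); expected outputs follow from the code above:

from msrest.serialization import Serializer

serializer = Serializer()
serializer.serialize_iter(['a', 'b'], 'str')                  # expected: ['a', 'b']
serializer.serialize_iter(['a', None, 'b'], 'str', div=',')   # expected: 'a,,b'
# passing a bare str as 'data' raises SerializationError by design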
Azure/msrest-for-python
msrest/serialization.py
Serializer.serialize_dict
def serialize_dict(self, attr, dict_type, **kwargs):
    """Serialize a dictionary of objects.

    :param dict attr: Object to be serialized.
    :param str dict_type: Type of object in the dictionary.
    :param bool required: Whether the objects in the dictionary must not be None or empty.
    :rtype: dict
    """
    serialization_ctxt = kwargs.get("serialization_ctxt", {})
    serialized = {}
    for key, value in attr.items():
        try:
            serialized[self.serialize_unicode(key)] = self.serialize_data(
                value, dict_type, **kwargs)
        except ValueError:
            serialized[self.serialize_unicode(key)] = None

    if 'xml' in serialization_ctxt:
        # XML serialization is more complicated
        xml_desc = serialization_ctxt['xml']
        xml_name = xml_desc['name']

        final_result = _create_xml_node(
            xml_name,
            xml_desc.get('prefix', None),
            xml_desc.get('ns', None)
        )
        for key, value in serialized.items():
            ET.SubElement(final_result, key).text = value
        return final_result

    return serialized
python
[ "def", "serialize_dict", "(", "self", ",", "attr", ",", "dict_type", ",", "*", "*", "kwargs", ")", ":", "serialization_ctxt", "=", "kwargs", ".", "get", "(", "\"serialization_ctxt\"", ",", "{", "}", ")", "serialized", "=", "{", "}", "for", "key", ",", "value", "in", "attr", ".", "items", "(", ")", ":", "try", ":", "serialized", "[", "self", ".", "serialize_unicode", "(", "key", ")", "]", "=", "self", ".", "serialize_data", "(", "value", ",", "dict_type", ",", "*", "*", "kwargs", ")", "except", "ValueError", ":", "serialized", "[", "self", ".", "serialize_unicode", "(", "key", ")", "]", "=", "None", "if", "'xml'", "in", "serialization_ctxt", ":", "# XML serialization is more complicated", "xml_desc", "=", "serialization_ctxt", "[", "'xml'", "]", "xml_name", "=", "xml_desc", "[", "'name'", "]", "final_result", "=", "_create_xml_node", "(", "xml_name", ",", "xml_desc", ".", "get", "(", "'prefix'", ",", "None", ")", ",", "xml_desc", ".", "get", "(", "'ns'", ",", "None", ")", ")", "for", "key", ",", "value", "in", "serialized", ".", "items", "(", ")", ":", "ET", ".", "SubElement", "(", "final_result", ",", "key", ")", ".", "text", "=", "value", "return", "final_result", "return", "serialized" ]
Serialize a dictionary of objects.

:param dict attr: Object to be serialized.
:param str dict_type: Type of object in the dictionary.
:param bool required: Whether the objects in the dictionary must not be None or empty.
:rtype: dict
[ "Serialize", "a", "dictionary", "of", "objects", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L823-L855
train
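A quick sketch of the non-XML path; values that fail to serialize are kept as None rather than raising:

from msrest.serialization import Serializer

serializer = Serializer()
serializer.serialize_dict({'a': 1, 'b': None}, 'int')   # expected: {'a': 1, 'b': None}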
Azure/msrest-for-python
msrest/serialization.py
Serializer.serialize_base64
def serialize_base64(attr, **kwargs):
    """Serialize str into base-64 string.

    :param attr: Object to be serialized.
    :rtype: str
    """
    encoded = b64encode(attr).decode('ascii')
    return encoded.strip('=').replace('+', '-').replace('/', '_')
python
[ "def", "serialize_base64", "(", "attr", ",", "*", "*", "kwargs", ")", ":", "encoded", "=", "b64encode", "(", "attr", ")", ".", "decode", "(", "'ascii'", ")", "return", "encoded", ".", "strip", "(", "'='", ")", ".", "replace", "(", "'+'", ",", "'-'", ")", ".", "replace", "(", "'/'", ",", "'_'", ")" ]
Serialize str into base-64 string.

:param attr: Object to be serialized.
:rtype: str
[ "Serialize", "str", "into", "base", "-", "64", "string", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L927-L934
train
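The result is base64url without padding, that is, the standard alphabet with '+' mapped to '-', '/' mapped to '_', and trailing '=' removed; note b64encode expects bytes even though the docstring says str. For example:

from msrest.serialization import Serializer

Serializer.serialize_base64(b'hi')   # expected: 'aGk' (b64encode gives 'aGk=', padding stripped)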
Azure/msrest-for-python
msrest/serialization.py
Serializer.serialize_date
def serialize_date(attr, **kwargs):
    """Serialize Date object into ISO-8601 formatted string.

    :param Date attr: Object to be serialized.
    :rtype: str
    """
    if isinstance(attr, str):
        attr = isodate.parse_date(attr)
    t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day)
    return t
python
[ "def", "serialize_date", "(", "attr", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "attr", ",", "str", ")", ":", "attr", "=", "isodate", ".", "parse_date", "(", "attr", ")", "t", "=", "\"{:04}-{:02}-{:02}\"", ".", "format", "(", "attr", ".", "year", ",", "attr", ".", "month", ",", "attr", ".", "day", ")", "return", "t" ]
Serialize Date object into ISO-8601 formatted string.

:param Date attr: Object to be serialized.
:rtype: str
[ "Serialize", "Date", "object", "into", "ISO", "-", "8601", "formatted", "string", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L955-L964
train
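A minimal sketch; a str input is first parsed with isodate, so both forms below should yield the same result:

import datetime
from msrest.serialization import Serializer

Serializer.serialize_date(datetime.date(2020, 1, 2))   # expected: '2020-01-02'
Serializer.serialize_date('2020-01-02')                # parsed with isodate, then reformatted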
Azure/msrest-for-python
msrest/serialization.py
Serializer.serialize_duration
def serialize_duration(attr, **kwargs):
    """Serialize TimeDelta object into ISO-8601 formatted string.

    :param TimeDelta attr: Object to be serialized.
    :rtype: str
    """
    if isinstance(attr, str):
        attr = isodate.parse_duration(attr)
    return isodate.duration_isoformat(attr)
python
[ "def", "serialize_duration", "(", "attr", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "attr", ",", "str", ")", ":", "attr", "=", "isodate", ".", "parse_duration", "(", "attr", ")", "return", "isodate", ".", "duration_isoformat", "(", "attr", ")" ]
Serialize TimeDelta object into ISO-8601 formatted string.

:param TimeDelta attr: Object to be serialized.
:rtype: str
[ "Serialize", "TimeDelta", "object", "into", "ISO", "-", "8601", "formatted", "string", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L967-L975
train
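A minimal sketch, assuming isodate's standard ISO-8601 duration formatting:

import datetime
from msrest.serialization import Serializer

Serializer.serialize_duration(datetime.timedelta(days=1, hours=2))   # expected: 'P1DT2H'
Serializer.serialize_duration('P1DT2H')   # str input is parsed with isodate first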
Azure/msrest-for-python
msrest/serialization.py
Serializer.serialize_rfc
def serialize_rfc(attr, **kwargs):
    """Serialize Datetime object into RFC-1123 formatted string.

    :param Datetime attr: Object to be serialized.
    :rtype: str
    :raises: TypeError if format invalid.
    """
    try:
        if not attr.tzinfo:
            _LOGGER.warning(
                "Datetime with no tzinfo will be considered UTC.")
        utc = attr.utctimetuple()
    except AttributeError:
        raise TypeError("RFC1123 object must be valid Datetime object.")

    return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
        Serializer.days[utc.tm_wday], utc.tm_mday,
        Serializer.months[utc.tm_mon], utc.tm_year,
        utc.tm_hour, utc.tm_min, utc.tm_sec)
python
[ "def", "serialize_rfc", "(", "attr", ",", "*", "*", "kwargs", ")", ":", "try", ":", "if", "not", "attr", ".", "tzinfo", ":", "_LOGGER", ".", "warning", "(", "\"Datetime with no tzinfo will be considered UTC.\"", ")", "utc", "=", "attr", ".", "utctimetuple", "(", ")", "except", "AttributeError", ":", "raise", "TypeError", "(", "\"RFC1123 object must be valid Datetime object.\"", ")", "return", "\"{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT\"", ".", "format", "(", "Serializer", ".", "days", "[", "utc", ".", "tm_wday", "]", ",", "utc", ".", "tm_mday", ",", "Serializer", ".", "months", "[", "utc", ".", "tm_mon", "]", ",", "utc", ".", "tm_year", ",", "utc", ".", "tm_hour", ",", "utc", ".", "tm_min", ",", "utc", ".", "tm_sec", ")" ]
Serialize Datetime object into RFC-1123 formatted string. :param Datetime attr: Object to be serialized. :rtype: str :raises: TypeError if format invalid.
[ "Serialize", "Datetime", "object", "into", "RFC", "-", "1123", "formatted", "string", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L978-L996
train
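A hedged sketch of serialize_rfc; a timezone-aware datetime avoids the "no tzinfo" warning, and the weekday/month abbreviations come from the Serializer.days/Serializer.months tables referenced in the code (not shown in this record):

    import datetime
    from msrest.serialization import Serializer

    dt = datetime.datetime(2018, 11, 2, 9, 30, 0, tzinfo=datetime.timezone.utc)
    Serializer.serialize_rfc(dt)            # expected: 'Fri, 02 Nov 2018 09:30:00 GMT'
    Serializer.serialize_rfc("not a date")  # raises TypeError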
Azure/msrest-for-python
msrest/serialization.py
Serializer.serialize_iso
def serialize_iso(attr, **kwargs): """Serialize Datetime object into ISO-8601 formatted string. :param Datetime attr: Object to be serialized. :rtype: str :raises: SerializationError if format invalid. """ if isinstance(attr, str): attr = isodate.parse_datetime(attr) try: if not attr.tzinfo: _LOGGER.warning( "Datetime with no tzinfo will be considered UTC.") utc = attr.utctimetuple() if utc.tm_year > 9999 or utc.tm_year < 1: raise OverflowError("Hit max or min date") microseconds = str(attr.microsecond).rjust(6,'0').rstrip('0').ljust(3, '0') if microseconds: microseconds = '.'+microseconds date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec) return date + microseconds + 'Z' except (ValueError, OverflowError) as err: msg = "Unable to serialize datetime object." raise_with_traceback(SerializationError, msg, err) except AttributeError as err: msg = "ISO-8601 object must be valid Datetime object." raise_with_traceback(TypeError, msg, err)
python
def serialize_iso(attr, **kwargs): """Serialize Datetime object into ISO-8601 formatted string. :param Datetime attr: Object to be serialized. :rtype: str :raises: SerializationError if format invalid. """ if isinstance(attr, str): attr = isodate.parse_datetime(attr) try: if not attr.tzinfo: _LOGGER.warning( "Datetime with no tzinfo will be considered UTC.") utc = attr.utctimetuple() if utc.tm_year > 9999 or utc.tm_year < 1: raise OverflowError("Hit max or min date") microseconds = str(attr.microsecond).rjust(6,'0').rstrip('0').ljust(3, '0') if microseconds: microseconds = '.'+microseconds date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec) return date + microseconds + 'Z' except (ValueError, OverflowError) as err: msg = "Unable to serialize datetime object." raise_with_traceback(SerializationError, msg, err) except AttributeError as err: msg = "ISO-8601 object must be valid Datetime object." raise_with_traceback(TypeError, msg, err)
[ "def", "serialize_iso", "(", "attr", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "attr", ",", "str", ")", ":", "attr", "=", "isodate", ".", "parse_datetime", "(", "attr", ")", "try", ":", "if", "not", "attr", ".", "tzinfo", ":", "_LOGGER", ".", "warning", "(", "\"Datetime with no tzinfo will be considered UTC.\"", ")", "utc", "=", "attr", ".", "utctimetuple", "(", ")", "if", "utc", ".", "tm_year", ">", "9999", "or", "utc", ".", "tm_year", "<", "1", ":", "raise", "OverflowError", "(", "\"Hit max or min date\"", ")", "microseconds", "=", "str", "(", "attr", ".", "microsecond", ")", ".", "rjust", "(", "6", ",", "'0'", ")", ".", "rstrip", "(", "'0'", ")", ".", "ljust", "(", "3", ",", "'0'", ")", "if", "microseconds", ":", "microseconds", "=", "'.'", "+", "microseconds", "date", "=", "\"{:04}-{:02}-{:02}T{:02}:{:02}:{:02}\"", ".", "format", "(", "utc", ".", "tm_year", ",", "utc", ".", "tm_mon", ",", "utc", ".", "tm_mday", ",", "utc", ".", "tm_hour", ",", "utc", ".", "tm_min", ",", "utc", ".", "tm_sec", ")", "return", "date", "+", "microseconds", "+", "'Z'", "except", "(", "ValueError", ",", "OverflowError", ")", "as", "err", ":", "msg", "=", "\"Unable to serialize datetime object.\"", "raise_with_traceback", "(", "SerializationError", ",", "msg", ",", "err", ")", "except", "AttributeError", "as", "err", ":", "msg", "=", "\"ISO-8601 object must be valid Datetime object.\"", "raise_with_traceback", "(", "TypeError", ",", "msg", ",", "err", ")" ]
Serialize Datetime object into ISO-8601 formatted string. :param Datetime attr: Object to be serialized. :rtype: str :raises: SerializationError if format invalid.
[ "Serialize", "Datetime", "object", "into", "ISO", "-", "8601", "formatted", "string", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L999-L1028
train
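A sketch of serialize_iso following the microsecond-trimming logic above (pad to six digits, strip trailing zeros, pad back to at least three); outputs are computed from the code shown in this record:

    import datetime
    from msrest.serialization import Serializer

    dt = datetime.datetime(2018, 11, 2, 9, 30, 0, 500000, tzinfo=datetime.timezone.utc)
    Serializer.serialize_iso(dt)                         # expected: '2018-11-02T09:30:00.500Z'
    Serializer.serialize_iso(dt.replace(microsecond=0))  # expected: '2018-11-02T09:30:00.000Z'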
Azure/msrest-for-python
msrest/serialization.py
Deserializer._deserialize
def _deserialize(self, target_obj, data): """Call the deserializer on a model. Data needs to be already deserialized as JSON or XML ElementTree :param str target_obj: Target data type to deserialize to. :param object data: Object to deserialize. :raises: DeserializationError if deserialization fails. :return: Deserialized object. """ # This is already a model, go recursive just in case if hasattr(data, "_attribute_map"): constants = [name for name, config in getattr(data, '_validation', {}).items() if config.get('constant')] try: for attr, mapconfig in data._attribute_map.items(): if attr in constants: continue value = getattr(data, attr) if value is None: continue local_type = mapconfig['type'] internal_data_type = local_type.strip('[]{}') if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): continue setattr( data, attr, self._deserialize(local_type, value) ) return data except AttributeError: return response, class_name = self._classify_target(target_obj, data) if isinstance(response, basestring): return self.deserialize_data(data, response) elif isinstance(response, type) and issubclass(response, Enum): return self.deserialize_enum(data, response) if data is None: return data try: attributes = response._attribute_map d_attrs = {} for attr, attr_desc in attributes.items(): # Check empty string. If it's not empty, someone has a real "additionalProperties"... if attr == "additional_properties" and attr_desc["key"] == '': continue raw_value = None # Enhance attr_desc with some dynamic data attr_desc = attr_desc.copy() # Do a copy, do not change the real one internal_data_type = attr_desc["type"].strip('[]{}') if internal_data_type in self.dependencies: attr_desc["internalType"] = self.dependencies[internal_data_type] for key_extractor in self.key_extractors: found_value = key_extractor(attr, attr_desc, data) if found_value is not None: if raw_value is not None and raw_value != found_value: raise KeyError('Use twice the key: "{}"'.format(attr)) raw_value = found_value value = self.deserialize_data(raw_value, attr_desc['type']) d_attrs[attr] = value except (AttributeError, TypeError, KeyError) as err: msg = "Unable to deserialize to object: " + class_name raise_with_traceback(DeserializationError, msg, err) else: additional_properties = self._build_additional_properties(attributes, data) return self._instantiate_model(response, d_attrs, additional_properties)
python
def _deserialize(self, target_obj, data): """Call the deserializer on a model. Data needs to be already deserialized as JSON or XML ElementTree :param str target_obj: Target data type to deserialize to. :param object data: Object to deserialize. :raises: DeserializationError if deserialization fails. :return: Deserialized object. """ # This is already a model, go recursive just in case if hasattr(data, "_attribute_map"): constants = [name for name, config in getattr(data, '_validation', {}).items() if config.get('constant')] try: for attr, mapconfig in data._attribute_map.items(): if attr in constants: continue value = getattr(data, attr) if value is None: continue local_type = mapconfig['type'] internal_data_type = local_type.strip('[]{}') if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): continue setattr( data, attr, self._deserialize(local_type, value) ) return data except AttributeError: return response, class_name = self._classify_target(target_obj, data) if isinstance(response, basestring): return self.deserialize_data(data, response) elif isinstance(response, type) and issubclass(response, Enum): return self.deserialize_enum(data, response) if data is None: return data try: attributes = response._attribute_map d_attrs = {} for attr, attr_desc in attributes.items(): # Check empty string. If it's not empty, someone has a real "additionalProperties"... if attr == "additional_properties" and attr_desc["key"] == '': continue raw_value = None # Enhance attr_desc with some dynamic data attr_desc = attr_desc.copy() # Do a copy, do not change the real one internal_data_type = attr_desc["type"].strip('[]{}') if internal_data_type in self.dependencies: attr_desc["internalType"] = self.dependencies[internal_data_type] for key_extractor in self.key_extractors: found_value = key_extractor(attr, attr_desc, data) if found_value is not None: if raw_value is not None and raw_value != found_value: raise KeyError('Use twice the key: "{}"'.format(attr)) raw_value = found_value value = self.deserialize_data(raw_value, attr_desc['type']) d_attrs[attr] = value except (AttributeError, TypeError, KeyError) as err: msg = "Unable to deserialize to object: " + class_name raise_with_traceback(DeserializationError, msg, err) else: additional_properties = self._build_additional_properties(attributes, data) return self._instantiate_model(response, d_attrs, additional_properties)
[ "def", "_deserialize", "(", "self", ",", "target_obj", ",", "data", ")", ":", "# This is already a model, go recursive just in case", "if", "hasattr", "(", "data", ",", "\"_attribute_map\"", ")", ":", "constants", "=", "[", "name", "for", "name", ",", "config", "in", "getattr", "(", "data", ",", "'_validation'", ",", "{", "}", ")", ".", "items", "(", ")", "if", "config", ".", "get", "(", "'constant'", ")", "]", "try", ":", "for", "attr", ",", "mapconfig", "in", "data", ".", "_attribute_map", ".", "items", "(", ")", ":", "if", "attr", "in", "constants", ":", "continue", "value", "=", "getattr", "(", "data", ",", "attr", ")", "if", "value", "is", "None", ":", "continue", "local_type", "=", "mapconfig", "[", "'type'", "]", "internal_data_type", "=", "local_type", ".", "strip", "(", "'[]{}'", ")", "if", "internal_data_type", "not", "in", "self", ".", "dependencies", "or", "isinstance", "(", "internal_data_type", ",", "Enum", ")", ":", "continue", "setattr", "(", "data", ",", "attr", ",", "self", ".", "_deserialize", "(", "local_type", ",", "value", ")", ")", "return", "data", "except", "AttributeError", ":", "return", "response", ",", "class_name", "=", "self", ".", "_classify_target", "(", "target_obj", ",", "data", ")", "if", "isinstance", "(", "response", ",", "basestring", ")", ":", "return", "self", ".", "deserialize_data", "(", "data", ",", "response", ")", "elif", "isinstance", "(", "response", ",", "type", ")", "and", "issubclass", "(", "response", ",", "Enum", ")", ":", "return", "self", ".", "deserialize_enum", "(", "data", ",", "response", ")", "if", "data", "is", "None", ":", "return", "data", "try", ":", "attributes", "=", "response", ".", "_attribute_map", "d_attrs", "=", "{", "}", "for", "attr", ",", "attr_desc", "in", "attributes", ".", "items", "(", ")", ":", "# Check empty string. If it's not empty, someone has a real \"additionalProperties\"...", "if", "attr", "==", "\"additional_properties\"", "and", "attr_desc", "[", "\"key\"", "]", "==", "''", ":", "continue", "raw_value", "=", "None", "# Enhance attr_desc with some dynamic data", "attr_desc", "=", "attr_desc", ".", "copy", "(", ")", "# Do a copy, do not change the real one", "internal_data_type", "=", "attr_desc", "[", "\"type\"", "]", ".", "strip", "(", "'[]{}'", ")", "if", "internal_data_type", "in", "self", ".", "dependencies", ":", "attr_desc", "[", "\"internalType\"", "]", "=", "self", ".", "dependencies", "[", "internal_data_type", "]", "for", "key_extractor", "in", "self", ".", "key_extractors", ":", "found_value", "=", "key_extractor", "(", "attr", ",", "attr_desc", ",", "data", ")", "if", "found_value", "is", "not", "None", ":", "if", "raw_value", "is", "not", "None", "and", "raw_value", "!=", "found_value", ":", "raise", "KeyError", "(", "'Use twice the key: \"{}\"'", ".", "format", "(", "attr", ")", ")", "raw_value", "=", "found_value", "value", "=", "self", ".", "deserialize_data", "(", "raw_value", ",", "attr_desc", "[", "'type'", "]", ")", "d_attrs", "[", "attr", "]", "=", "value", "except", "(", "AttributeError", ",", "TypeError", ",", "KeyError", ")", "as", "err", ":", "msg", "=", "\"Unable to deserialize to object: \"", "+", "class_name", "raise_with_traceback", "(", "DeserializationError", ",", "msg", ",", "err", ")", "else", ":", "additional_properties", "=", "self", ".", "_build_additional_properties", "(", "attributes", ",", "data", ")", "return", "self", ".", "_instantiate_model", "(", "response", ",", "d_attrs", ",", "additional_properties", ")" ]
Call the deserializer on a model. Data needs to be already deserialized as JSON or XML ElementTree :param str target_obj: Target data type to deserialize to. :param object data: Object to deserialize. :raises: DeserializationError if deserialization fails. :return: Deserialized object.
[ "Call", "the", "deserializer", "on", "a", "model", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L1230-L1301
train
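_deserialize is internal; a hedged end-to-end sketch through the public Deserializer entry point (which routes here once the payload is JSON-decoded) uses a hypothetical Pet model that is not part of msrest:

    from msrest.serialization import Model, Deserializer

    class Pet(Model):  # hypothetical model, for illustration only
        _attribute_map = {
            'name': {'key': 'name', 'type': 'str'},
            'age': {'key': 'age', 'type': 'int'},
        }
        def __init__(self, **kwargs):
            super(Pet, self).__init__(**kwargs)
            self.name = kwargs.get('name')
            self.age = kwargs.get('age')

    d = Deserializer({'Pet': Pet})
    pet = d('Pet', {'name': 'Rex', 'age': '3'})  # already-decoded JSON dict
    # expected: pet.name == 'Rex', pet.age == 3 (the '3' is coerced by the 'int' mapping)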
Azure/msrest-for-python
msrest/serialization.py
Deserializer._classify_target
def _classify_target(self, target, data): """Check to see whether the deserialization target object can be classified into a subclass. Once classification has been determined, initialize object. :param str target: The target object type to deserialize to. :param str/dict data: The response data to deserialize. """ if target is None: return None, None if isinstance(target, basestring): try: target = self.dependencies[target] except KeyError: return target, target try: target = target._classify(data, self.dependencies) except AttributeError: pass # Target is not a Model, no classify return target, target.__class__.__name__
python
def _classify_target(self, target, data): """Check to see whether the deserialization target object can be classified into a subclass. Once classification has been determined, initialize object. :param str target: The target object type to deserialize to. :param str/dict data: The response data to deserialize. """ if target is None: return None, None if isinstance(target, basestring): try: target = self.dependencies[target] except KeyError: return target, target try: target = target._classify(data, self.dependencies) except AttributeError: pass # Target is not a Model, no classify return target, target.__class__.__name__
[ "def", "_classify_target", "(", "self", ",", "target", ",", "data", ")", ":", "if", "target", "is", "None", ":", "return", "None", ",", "None", "if", "isinstance", "(", "target", ",", "basestring", ")", ":", "try", ":", "target", "=", "self", ".", "dependencies", "[", "target", "]", "except", "KeyError", ":", "return", "target", ",", "target", "try", ":", "target", "=", "target", ".", "_classify", "(", "data", ",", "self", ".", "dependencies", ")", "except", "AttributeError", ":", "pass", "# Target is not a Model, no classify", "return", "target", ",", "target", ".", "__class__", ".", "__name__" ]
Check to see whether the deserialization target object can be classified into a subclass. Once classification has been determined, initialize object. :param str target: The target object type to deserialize to. :param str/dict data: The response data to deserialize.
[ "Check", "to", "see", "whether", "the", "deserialization", "target", "object", "can", "be", "classified", "into", "a", "subclass", ".", "Once", "classification", "has", "been", "determined", "initialize", "object", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L1315-L1336
train
Azure/msrest-for-python
msrest/serialization.py
Deserializer._unpack_content
def _unpack_content(raw_data, content_type=None): """Extract the correct structure for deserialization. If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. If we can't, raise. Your Pipeline should have a RawDeserializer. If not a pipeline response and raw_data is bytes or string, use content-type to decode it. If no content-type, try JSON. If raw_data is something else, bypass all logic and return it directly. :param raw_data: Data to be processed. :param content_type: How to parse if raw_data is a string/bytes. :raises JSONDecodeError: If JSON is requested and parsing is impossible. :raises UnicodeDecodeError: If bytes is not UTF8 """ # This avoids a circular dependency. We might want to consider that RawDeserializer is more generic # than the pipeline concept, and put it in a toolbox, used both here and in pipeline. TBD. from .pipeline.universal import RawDeserializer # Assume this is enough to detect a Pipeline Response without importing it context = getattr(raw_data, "context", {}) if context: if RawDeserializer.CONTEXT_NAME in context: return context[RawDeserializer.CONTEXT_NAME] raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") # Assume this is enough to recognize universal_http.ClientResponse without importing it if hasattr(raw_data, "body"): return RawDeserializer.deserialize_from_http_generics( raw_data.text(), raw_data.headers ) # Assume this is enough to recognize requests.Response without importing it. if hasattr(raw_data, '_content_consumed'): return RawDeserializer.deserialize_from_http_generics( raw_data.text, raw_data.headers ) if isinstance(raw_data, (basestring, bytes)) or hasattr(raw_data, 'read'): return RawDeserializer.deserialize_from_text(raw_data, content_type) return raw_data
python
def _unpack_content(raw_data, content_type=None): """Extract the correct structure for deserialization. If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. If we can't, raise. Your Pipeline should have a RawDeserializer. If not a pipeline response and raw_data is bytes or string, use content-type to decode it. If no content-type, try JSON. If raw_data is something else, bypass all logic and return it directly. :param raw_data: Data to be processed. :param content_type: How to parse if raw_data is a string/bytes. :raises JSONDecodeError: If JSON is requested and parsing is impossible. :raises UnicodeDecodeError: If bytes is not UTF8 """ # This avoids a circular dependency. We might want to consider that RawDeserializer is more generic # than the pipeline concept, and put it in a toolbox, used both here and in pipeline. TBD. from .pipeline.universal import RawDeserializer # Assume this is enough to detect a Pipeline Response without importing it context = getattr(raw_data, "context", {}) if context: if RawDeserializer.CONTEXT_NAME in context: return context[RawDeserializer.CONTEXT_NAME] raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") # Assume this is enough to recognize universal_http.ClientResponse without importing it if hasattr(raw_data, "body"): return RawDeserializer.deserialize_from_http_generics( raw_data.text(), raw_data.headers ) # Assume this is enough to recognize requests.Response without importing it. if hasattr(raw_data, '_content_consumed'): return RawDeserializer.deserialize_from_http_generics( raw_data.text, raw_data.headers ) if isinstance(raw_data, (basestring, bytes)) or hasattr(raw_data, 'read'): return RawDeserializer.deserialize_from_text(raw_data, content_type) return raw_data
[ "def", "_unpack_content", "(", "raw_data", ",", "content_type", "=", "None", ")", ":", "# This avoids a circular dependency. We might want to consider RawDesializer is more generic", "# than the pipeline concept, and put it in a toolbox, used both here and in pipeline. TBD.", "from", ".", "pipeline", ".", "universal", "import", "RawDeserializer", "# Assume this is enough to detect a Pipeline Response without importing it", "context", "=", "getattr", "(", "raw_data", ",", "\"context\"", ",", "{", "}", ")", "if", "context", ":", "if", "RawDeserializer", ".", "CONTEXT_NAME", "in", "context", ":", "return", "context", "[", "RawDeserializer", ".", "CONTEXT_NAME", "]", "raise", "ValueError", "(", "\"This pipeline didn't have the RawDeserializer policy; can't deserialize\"", ")", "#Assume this is enough to recognize universal_http.ClientResponse without importing it", "if", "hasattr", "(", "raw_data", ",", "\"body\"", ")", ":", "return", "RawDeserializer", ".", "deserialize_from_http_generics", "(", "raw_data", ".", "text", "(", ")", ",", "raw_data", ".", "headers", ")", "# Assume this enough to recognize requests.Response without importing it.", "if", "hasattr", "(", "raw_data", ",", "'_content_consumed'", ")", ":", "return", "RawDeserializer", ".", "deserialize_from_http_generics", "(", "raw_data", ".", "text", ",", "raw_data", ".", "headers", ")", "if", "isinstance", "(", "raw_data", ",", "(", "basestring", ",", "bytes", ")", ")", "or", "hasattr", "(", "raw_data", ",", "'read'", ")", ":", "return", "RawDeserializer", ".", "deserialize_from_text", "(", "raw_data", ",", "content_type", ")", "return", "raw_data" ]
Extract the correct structure for deserialization. If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. If we can't, raise. Your Pipeline should have a RawDeserializer. If not a pipeline response and raw_data is bytes or string, use content-type to decode it. If no content-type, try JSON. If raw_data is something else, bypass all logic and return it directly. :param raw_data: Data to be processed. :param content_type: How to parse if raw_data is a string/bytes. :raises JSONDecodeError: If JSON is requested and parsing is impossible. :raises UnicodeDecodeError: If bytes is not UTF8
[ "Extract", "the", "correct", "structure", "for", "deserialization", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L1339-L1382
train
Azure/msrest-for-python
msrest/serialization.py
Deserializer._instantiate_model
def _instantiate_model(self, response, attrs, additional_properties=None): """Instantiate a response model passing in deserialized args. :param response: The response model class. :param attrs: The deserialized response attributes. """ if callable(response): subtype = getattr(response, '_subtype_map', {}) try: readonly = [k for k, v in response._validation.items() if v.get('readonly')] const = [k for k, v in response._validation.items() if v.get('constant')] kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} response_obj = response(**kwargs) for attr in readonly: setattr(response_obj, attr, attrs.get(attr)) if additional_properties: response_obj.additional_properties = additional_properties return response_obj except TypeError as err: msg = "Unable to deserialize {} into model {}. ".format( kwargs, response) raise DeserializationError(msg + str(err)) else: try: for attr, value in attrs.items(): setattr(response, attr, value) return response except Exception as exp: msg = "Unable to populate response model. " msg += "Type: {}, Error: {}".format(type(response), exp) raise DeserializationError(msg)
python
def _instantiate_model(self, response, attrs, additional_properties=None): """Instantiate a response model passing in deserialized args. :param response: The response model class. :param attrs: The deserialized response attributes. """ if callable(response): subtype = getattr(response, '_subtype_map', {}) try: readonly = [k for k, v in response._validation.items() if v.get('readonly')] const = [k for k, v in response._validation.items() if v.get('constant')] kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} response_obj = response(**kwargs) for attr in readonly: setattr(response_obj, attr, attrs.get(attr)) if additional_properties: response_obj.additional_properties = additional_properties return response_obj except TypeError as err: msg = "Unable to deserialize {} into model {}. ".format( kwargs, response) raise DeserializationError(msg + str(err)) else: try: for attr, value in attrs.items(): setattr(response, attr, value) return response except Exception as exp: msg = "Unable to populate response model. " msg += "Type: {}, Error: {}".format(type(response), exp) raise DeserializationError(msg)
[ "def", "_instantiate_model", "(", "self", ",", "response", ",", "attrs", ",", "additional_properties", "=", "None", ")", ":", "if", "callable", "(", "response", ")", ":", "subtype", "=", "getattr", "(", "response", ",", "'_subtype_map'", ",", "{", "}", ")", "try", ":", "readonly", "=", "[", "k", "for", "k", ",", "v", "in", "response", ".", "_validation", ".", "items", "(", ")", "if", "v", ".", "get", "(", "'readonly'", ")", "]", "const", "=", "[", "k", "for", "k", ",", "v", "in", "response", ".", "_validation", ".", "items", "(", ")", "if", "v", ".", "get", "(", "'constant'", ")", "]", "kwargs", "=", "{", "k", ":", "v", "for", "k", ",", "v", "in", "attrs", ".", "items", "(", ")", "if", "k", "not", "in", "subtype", "and", "k", "not", "in", "readonly", "+", "const", "}", "response_obj", "=", "response", "(", "*", "*", "kwargs", ")", "for", "attr", "in", "readonly", ":", "setattr", "(", "response_obj", ",", "attr", ",", "attrs", ".", "get", "(", "attr", ")", ")", "if", "additional_properties", ":", "response_obj", ".", "additional_properties", "=", "additional_properties", "return", "response_obj", "except", "TypeError", "as", "err", ":", "msg", "=", "\"Unable to deserialize {} into model {}. \"", ".", "format", "(", "kwargs", ",", "response", ")", "raise", "DeserializationError", "(", "msg", "+", "str", "(", "err", ")", ")", "else", ":", "try", ":", "for", "attr", ",", "value", "in", "attrs", ".", "items", "(", ")", ":", "setattr", "(", "response", ",", "attr", ",", "value", ")", "return", "response", "except", "Exception", "as", "exp", ":", "msg", "=", "\"Unable to populate response model. \"", "msg", "+=", "\"Type: {}, Error: {}\"", ".", "format", "(", "type", "(", "response", ")", ",", "exp", ")", "raise", "DeserializationError", "(", "msg", ")" ]
Instantiate a response model passing in deserialized args. :param response: The response model class. :param attrs: The deserialized response attributes.
[ "Instantiate", "a", "response", "model", "passing", "in", "deserialized", "args", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L1384-L1417
train
Azure/msrest-for-python
msrest/serialization.py
Deserializer.deserialize_data
def deserialize_data(self, data, data_type): """Process data for deserialization according to data type. :param str data: The response string to be deserialized. :param str data_type: The type to deserialize to. :raises: DeserializationError if deserialization fails. :return: Deserialized object. """ if data is None: return data try: if not data_type: return data if data_type in self.basic_types.values(): return self.deserialize_basic(data, data_type) if data_type in self.deserialize_type: if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): return data is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"] if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: return None data_val = self.deserialize_type[data_type](data) return data_val iter_type = data_type[0] + data_type[-1] if iter_type in self.deserialize_type: return self.deserialize_type[iter_type](data, data_type[1:-1]) obj_type = self.dependencies[data_type] if issubclass(obj_type, Enum): if isinstance(data, ET.Element): data = data.text return self.deserialize_enum(data, obj_type) except (ValueError, TypeError, AttributeError) as err: msg = "Unable to deserialize response data." msg += " Data: {}, {}".format(data, data_type) raise_with_traceback(DeserializationError, msg, err) else: return self._deserialize(obj_type, data)
python
def deserialize_data(self, data, data_type): """Process data for deserialization according to data type. :param str data: The response string to be deserialized. :param str data_type: The type to deserialize to. :raises: DeserializationError if deserialization fails. :return: Deserialized object. """ if data is None: return data try: if not data_type: return data if data_type in self.basic_types.values(): return self.deserialize_basic(data, data_type) if data_type in self.deserialize_type: if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): return data is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"] if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: return None data_val = self.deserialize_type[data_type](data) return data_val iter_type = data_type[0] + data_type[-1] if iter_type in self.deserialize_type: return self.deserialize_type[iter_type](data, data_type[1:-1]) obj_type = self.dependencies[data_type] if issubclass(obj_type, Enum): if isinstance(data, ET.Element): data = data.text return self.deserialize_enum(data, obj_type) except (ValueError, TypeError, AttributeError) as err: msg = "Unable to deserialize response data." msg += " Data: {}, {}".format(data, data_type) raise_with_traceback(DeserializationError, msg, err) else: return self._deserialize(obj_type, data)
[ "def", "deserialize_data", "(", "self", ",", "data", ",", "data_type", ")", ":", "if", "data", "is", "None", ":", "return", "data", "try", ":", "if", "not", "data_type", ":", "return", "data", "if", "data_type", "in", "self", ".", "basic_types", ".", "values", "(", ")", ":", "return", "self", ".", "deserialize_basic", "(", "data", ",", "data_type", ")", "if", "data_type", "in", "self", ".", "deserialize_type", ":", "if", "isinstance", "(", "data", ",", "self", ".", "deserialize_expected_types", ".", "get", "(", "data_type", ",", "tuple", "(", ")", ")", ")", ":", "return", "data", "is_a_text_parsing_type", "=", "lambda", "x", ":", "x", "not", "in", "[", "\"object\"", ",", "\"[]\"", ",", "r\"{}\"", "]", "if", "isinstance", "(", "data", ",", "ET", ".", "Element", ")", "and", "is_a_text_parsing_type", "(", "data_type", ")", "and", "not", "data", ".", "text", ":", "return", "None", "data_val", "=", "self", ".", "deserialize_type", "[", "data_type", "]", "(", "data", ")", "return", "data_val", "iter_type", "=", "data_type", "[", "0", "]", "+", "data_type", "[", "-", "1", "]", "if", "iter_type", "in", "self", ".", "deserialize_type", ":", "return", "self", ".", "deserialize_type", "[", "iter_type", "]", "(", "data", ",", "data_type", "[", "1", ":", "-", "1", "]", ")", "obj_type", "=", "self", ".", "dependencies", "[", "data_type", "]", "if", "issubclass", "(", "obj_type", ",", "Enum", ")", ":", "if", "isinstance", "(", "data", ",", "ET", ".", "Element", ")", ":", "data", "=", "data", ".", "text", "return", "self", ".", "deserialize_enum", "(", "data", ",", "obj_type", ")", "except", "(", "ValueError", ",", "TypeError", ",", "AttributeError", ")", "as", "err", ":", "msg", "=", "\"Unable to deserialize response data.\"", "msg", "+=", "\" Data: {}, {}\"", ".", "format", "(", "data", ",", "data_type", ")", "raise_with_traceback", "(", "DeserializationError", ",", "msg", ",", "err", ")", "else", ":", "return", "self", ".", "_deserialize", "(", "obj_type", ",", "data", ")" ]
Process data for deserialization according to data type. :param str data: The response string to be deserialized. :param str data_type: The type to deserialize to. :raises: DeserializationError if deserialization fails. :return: Deserialized object.
[ "Process", "data", "for", "deserialization", "according", "to", "data", "type", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L1419-L1460
train
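deserialize_data dispatches on the type string; a small sketch (a bare Deserializer needs no model classes for builtin and iterable type strings):

    from msrest.serialization import Deserializer

    d = Deserializer()
    d.deserialize_data('2018-11-02', 'date')  # expected: datetime.date(2018, 11, 2)
    d.deserialize_data(['1', '2'], '[int]')   # expected: [1, 2], via the '[]' iter handler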
Azure/msrest-for-python
msrest/serialization.py
Deserializer.deserialize_iter
def deserialize_iter(self, attr, iter_type): """Deserialize an iterable. :param list attr: Iterable to be deserialized. :param str iter_type: The type of object in the iterable. :rtype: list """ if attr is None: return None if isinstance(attr, ET.Element): # If I receive an element here, get the children attr = list(attr) if not isinstance(attr, (list, set)): raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format( iter_type, type(attr) )) return [self.deserialize_data(a, iter_type) for a in attr]
python
def deserialize_iter(self, attr, iter_type): """Deserialize an iterable. :param list attr: Iterable to be deserialized. :param str iter_type: The type of object in the iterable. :rtype: list """ if attr is None: return None if isinstance(attr, ET.Element): # If I receive an element here, get the children attr = list(attr) if not isinstance(attr, (list, set)): raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format( iter_type, type(attr) )) return [self.deserialize_data(a, iter_type) for a in attr]
[ "def", "deserialize_iter", "(", "self", ",", "attr", ",", "iter_type", ")", ":", "if", "attr", "is", "None", ":", "return", "None", "if", "isinstance", "(", "attr", ",", "ET", ".", "Element", ")", ":", "# If I receive an element here, get the children", "attr", "=", "list", "(", "attr", ")", "if", "not", "isinstance", "(", "attr", ",", "(", "list", ",", "set", ")", ")", ":", "raise", "DeserializationError", "(", "\"Cannot deserialize as [{}] an object of type {}\"", ".", "format", "(", "iter_type", ",", "type", "(", "attr", ")", ")", ")", "return", "[", "self", ".", "deserialize_data", "(", "a", ",", "iter_type", ")", "for", "a", "in", "attr", "]" ]
Deserialize an iterable. :param list attr: Iterable to be deserialized. :param str iter_type: The type of object in the iterable. :rtype: list
[ "Deserialize", "an", "iterable", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L1462-L1478
train
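A sketch of the list handler above; None passes through, and a non-list input raises DeserializationError:

    from msrest.serialization import Deserializer

    d = Deserializer()
    d.deserialize_iter(['1', '2', '3'], 'int')  # expected: [1, 2, 3]
    d.deserialize_iter(None, 'int')             # expected: None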
Azure/msrest-for-python
msrest/serialization.py
Deserializer.deserialize_dict
def deserialize_dict(self, attr, dict_type): """Deserialize a dictionary. :param dict/list attr: Dictionary to be deserialized. Also accepts a list of key, value pairs. :param str dict_type: The object type of the items in the dictionary. :rtype: dict """ if isinstance(attr, list): return {x['key']: self.deserialize_data(x['value'], dict_type) for x in attr} if isinstance(attr, ET.Element): # Transform <Key>value</Key> into {"Key": "value"} attr = {el.tag: el.text for el in attr} return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
python
def deserialize_dict(self, attr, dict_type): """Deserialize a dictionary. :param dict/list attr: Dictionary to be deserialized. Also accepts a list of key, value pairs. :param str dict_type: The object type of the items in the dictionary. :rtype: dict """ if isinstance(attr, list): return {x['key']: self.deserialize_data(x['value'], dict_type) for x in attr} if isinstance(attr, ET.Element): # Transform <Key>value</Key> into {"Key": "value"} attr = {el.tag: el.text for el in attr} return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
[ "def", "deserialize_dict", "(", "self", ",", "attr", ",", "dict_type", ")", ":", "if", "isinstance", "(", "attr", ",", "list", ")", ":", "return", "{", "x", "[", "'key'", "]", ":", "self", ".", "deserialize_data", "(", "x", "[", "'value'", "]", ",", "dict_type", ")", "for", "x", "in", "attr", "}", "if", "isinstance", "(", "attr", ",", "ET", ".", "Element", ")", ":", "# Transform <Key>value</Key> into {\"Key\": \"value\"}", "attr", "=", "{", "el", ".", "tag", ":", "el", ".", "text", "for", "el", "in", "attr", "}", "return", "{", "k", ":", "self", ".", "deserialize_data", "(", "v", ",", "dict_type", ")", "for", "k", ",", "v", "in", "attr", ".", "items", "(", ")", "}" ]
Deserialize a dictionary. :param dict/list attr: Dictionary to be deserialized. Also accepts a list of key, value pairs. :param str dict_type: The object type of the items in the dictionary. :rtype: dict
[ "Deserialize", "a", "dictionary", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L1480-L1494
train
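A sketch of the two accepted dict shapes (a plain mapping, or a list of key/value pairs):

    from msrest.serialization import Deserializer

    d = Deserializer()
    d.deserialize_dict({'a': '1', 'b': '2'}, 'int')           # expected: {'a': 1, 'b': 2}
    d.deserialize_dict([{'key': 'a', 'value': '1'}], 'int')   # expected: {'a': 1}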
Azure/msrest-for-python
msrest/serialization.py
Deserializer.deserialize_object
def deserialize_object(self, attr, **kwargs): """Deserialize a generic object. This will be handled as a dictionary. :param dict attr: Dictionary to be deserialized. :rtype: dict :raises: TypeError if non-builtin datatype encountered. """ if attr is None: return None if isinstance(attr, ET.Element): # Do not recurse on XML, just return the tree as-is return attr if isinstance(attr, basestring): return self.deserialize_basic(attr, 'str') obj_type = type(attr) if obj_type in self.basic_types: return self.deserialize_basic(attr, self.basic_types[obj_type]) if obj_type is _long_type: return self.deserialize_long(attr) if obj_type == dict: deserialized = {} for key, value in attr.items(): try: deserialized[key] = self.deserialize_object( value, **kwargs) except ValueError: deserialized[key] = None return deserialized if obj_type == list: deserialized = [] for obj in attr: try: deserialized.append(self.deserialize_object( obj, **kwargs)) except ValueError: pass return deserialized else: error = "Cannot deserialize generic object with type: " raise TypeError(error + str(obj_type))
python
def deserialize_object(self, attr, **kwargs): """Deserialize a generic object. This will be handled as a dictionary. :param dict attr: Dictionary to be deserialized. :rtype: dict :raises: TypeError if non-builtin datatype encountered. """ if attr is None: return None if isinstance(attr, ET.Element): # Do not recurse on XML, just return the tree as-is return attr if isinstance(attr, basestring): return self.deserialize_basic(attr, 'str') obj_type = type(attr) if obj_type in self.basic_types: return self.deserialize_basic(attr, self.basic_types[obj_type]) if obj_type is _long_type: return self.deserialize_long(attr) if obj_type == dict: deserialized = {} for key, value in attr.items(): try: deserialized[key] = self.deserialize_object( value, **kwargs) except ValueError: deserialized[key] = None return deserialized if obj_type == list: deserialized = [] for obj in attr: try: deserialized.append(self.deserialize_object( obj, **kwargs)) except ValueError: pass return deserialized else: error = "Cannot deserialize generic object with type: " raise TypeError(error + str(obj_type))
[ "def", "deserialize_object", "(", "self", ",", "attr", ",", "*", "*", "kwargs", ")", ":", "if", "attr", "is", "None", ":", "return", "None", "if", "isinstance", "(", "attr", ",", "ET", ".", "Element", ")", ":", "# Do no recurse on XML, just return the tree as-is", "return", "attr", "if", "isinstance", "(", "attr", ",", "basestring", ")", ":", "return", "self", ".", "deserialize_basic", "(", "attr", ",", "'str'", ")", "obj_type", "=", "type", "(", "attr", ")", "if", "obj_type", "in", "self", ".", "basic_types", ":", "return", "self", ".", "deserialize_basic", "(", "attr", ",", "self", ".", "basic_types", "[", "obj_type", "]", ")", "if", "obj_type", "is", "_long_type", ":", "return", "self", ".", "deserialize_long", "(", "attr", ")", "if", "obj_type", "==", "dict", ":", "deserialized", "=", "{", "}", "for", "key", ",", "value", "in", "attr", ".", "items", "(", ")", ":", "try", ":", "deserialized", "[", "key", "]", "=", "self", ".", "deserialize_object", "(", "value", ",", "*", "*", "kwargs", ")", "except", "ValueError", ":", "deserialized", "[", "key", "]", "=", "None", "return", "deserialized", "if", "obj_type", "==", "list", ":", "deserialized", "=", "[", "]", "for", "obj", "in", "attr", ":", "try", ":", "deserialized", ".", "append", "(", "self", ".", "deserialize_object", "(", "obj", ",", "*", "*", "kwargs", ")", ")", "except", "ValueError", ":", "pass", "return", "deserialized", "else", ":", "error", "=", "\"Cannot deserialize generic object with type: \"", "raise", "TypeError", "(", "error", "+", "str", "(", "obj_type", ")", ")" ]
Deserialize a generic object. This will be handled as a dictionary. :param dict attr: Dictionary to be deserialized. :rtype: dict :raises: TypeError if non-builtin datatype encountered.
[ "Deserialize", "a", "generic", "object", ".", "This", "will", "be", "handled", "as", "a", "dictionary", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L1496-L1539
train
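A sketch of the generic-object path, which recursively keeps only builtin types:

    from msrest.serialization import Deserializer

    d = Deserializer()
    d.deserialize_object({'name': 'Rex', 'tags': ['dog', 1], 'active': True})
    # expected: an equivalent nested dict/list/scalar structure
    d.deserialize_object(object())  # raises TypeError: non-builtin datatype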
Azure/msrest-for-python
msrest/serialization.py
Deserializer.deserialize_basic
def deserialize_basic(self, attr, data_type): """Deserialize basic builtin data type from string. Will attempt to convert to str, int, float and bool. This function will also accept '1', '0', 'true' and 'false' as valid bool values. :param str attr: response string to be deserialized. :param str data_type: deserialization data type. :rtype: str, int, float or bool :raises: TypeError if string format is not valid. """ # If we're here, data is supposed to be a basic type. # If it's still an XML node, take the text if isinstance(attr, ET.Element): attr = attr.text if not attr: if data_type == "str": # None or '', node <a/> is empty string. return '' else: # None or '', node <a/> with a strong type is None. # Don't try to model "empty bool" or "empty int" return None if data_type == 'bool': if attr in [True, False, 1, 0]: return bool(attr) elif isinstance(attr, basestring): if attr.lower() in ['true', '1']: return True elif attr.lower() in ['false', '0']: return False raise TypeError("Invalid boolean value: {}".format(attr)) if data_type == 'str': return self.deserialize_unicode(attr) return eval(data_type)(attr)
python
def deserialize_basic(self, attr, data_type): """Deserialize basic builtin data type from string. Will attempt to convert to str, int, float and bool. This function will also accept '1', '0', 'true' and 'false' as valid bool values. :param str attr: response string to be deserialized. :param str data_type: deserialization data type. :rtype: str, int, float or bool :raises: TypeError if string format is not valid. """ # If we're here, data is supposed to be a basic type. # If it's still an XML node, take the text if isinstance(attr, ET.Element): attr = attr.text if not attr: if data_type == "str": # None or '', node <a/> is empty string. return '' else: # None or '', node <a/> with a strong type is None. # Don't try to model "empty bool" or "empty int" return None if data_type == 'bool': if attr in [True, False, 1, 0]: return bool(attr) elif isinstance(attr, basestring): if attr.lower() in ['true', '1']: return True elif attr.lower() in ['false', '0']: return False raise TypeError("Invalid boolean value: {}".format(attr)) if data_type == 'str': return self.deserialize_unicode(attr) return eval(data_type)(attr)
[ "def", "deserialize_basic", "(", "self", ",", "attr", ",", "data_type", ")", ":", "# If we're here, data is supposed to be a basic type.", "# If it's still an XML node, take the text", "if", "isinstance", "(", "attr", ",", "ET", ".", "Element", ")", ":", "attr", "=", "attr", ".", "text", "if", "not", "attr", ":", "if", "data_type", "==", "\"str\"", ":", "# None or '', node <a/> is empty string.", "return", "''", "else", ":", "# None or '', node <a/> with a strong type is None.", "# Don't try to model \"empty bool\" or \"empty int\"", "return", "None", "if", "data_type", "==", "'bool'", ":", "if", "attr", "in", "[", "True", ",", "False", ",", "1", ",", "0", "]", ":", "return", "bool", "(", "attr", ")", "elif", "isinstance", "(", "attr", ",", "basestring", ")", ":", "if", "attr", ".", "lower", "(", ")", "in", "[", "'true'", ",", "'1'", "]", ":", "return", "True", "elif", "attr", ".", "lower", "(", ")", "in", "[", "'false'", ",", "'0'", "]", ":", "return", "False", "raise", "TypeError", "(", "\"Invalid boolean value: {}\"", ".", "format", "(", "attr", ")", ")", "if", "data_type", "==", "'str'", ":", "return", "self", ".", "deserialize_unicode", "(", "attr", ")", "return", "eval", "(", "data_type", ")", "(", "attr", ")" ]
Deserialize basic builtin data type from string. Will attempt to convert to str, int, float and bool. This function will also accept '1', '0', 'true' and 'false' as valid bool values. :param str attr: response string to be deserialized. :param str data_type: deserialization data type. :rtype: str, int, float or bool :raises: TypeError if string format is not valid.
[ "Deserialize", "baisc", "builtin", "data", "type", "from", "string", ".", "Will", "attempt", "to", "convert", "to", "str", "int", "float", "and", "bool", ".", "This", "function", "will", "also", "accept", "1", "0", "true", "and", "false", "as", "valid", "bool", "values", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L1541-L1577
train
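A sketch of the basic-type coercions, including the permissive bool parsing described in the docstring:

    from msrest.serialization import Deserializer

    d = Deserializer()
    d.deserialize_basic('true', 'bool')  # expected: True
    d.deserialize_basic('0', 'bool')     # expected: False
    d.deserialize_basic('1.5', 'float')  # expected: 1.5
    d.deserialize_basic('', 'str')       # expected: '' (empty XML node semantics)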
Azure/msrest-for-python
msrest/serialization.py
Deserializer.deserialize_unicode
def deserialize_unicode(data): """Preserve unicode objects in Python 2, otherwise return data as a string. :param str data: response string to be deserialized. :rtype: str or unicode """ # We might be here because we have an enum modeled as a string, # and we try to deserialize a partial dict with enum inside if isinstance(data, Enum): return data # Consider this is a real string try: if isinstance(data, unicode): return data except NameError: return str(data) else: return str(data)
python
def deserialize_unicode(data): """Preserve unicode objects in Python 2, otherwise return data as a string. :param str data: response string to be deserialized. :rtype: str or unicode """ # We might be here because we have an enum modeled as a string, # and we try to deserialize a partial dict with enum inside if isinstance(data, Enum): return data # Consider this is a real string try: if isinstance(data, unicode): return data except NameError: return str(data) else: return str(data)
[ "def", "deserialize_unicode", "(", "data", ")", ":", "# We might be here because we have an enum modeled as string,", "# and we try to deserialize a partial dict with enum inside", "if", "isinstance", "(", "data", ",", "Enum", ")", ":", "return", "data", "# Consider this is real string", "try", ":", "if", "isinstance", "(", "data", ",", "unicode", ")", ":", "return", "data", "except", "NameError", ":", "return", "str", "(", "data", ")", "else", ":", "return", "str", "(", "data", ")" ]
Preserve unicode objects in Python 2, otherwise return data as a string. :param str data: response string to be deserialized. :rtype: str or unicode
[ "Preserve", "unicode", "objects", "in", "Python", "2", "otherwise", "return", "data", "as", "a", "string", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L1580-L1599
train
Azure/msrest-for-python
msrest/serialization.py
Deserializer.deserialize_enum
def deserialize_enum(data, enum_obj): """Deserialize string into enum object. :param str data: response string to be deserialized. :param Enum enum_obj: Enum object to deserialize to. :rtype: Enum :raises: DeserializationError if string is not a valid enum value. """ if isinstance(data, enum_obj): return data if isinstance(data, Enum): data = data.value if isinstance(data, int): # Workaround. We might consider removing it in the future. # https://github.com/Azure/azure-rest-api-specs/issues/141 try: return list(enum_obj.__members__.values())[data] except IndexError: error = "{!r} is not a valid index for enum {!r}" raise DeserializationError(error.format(data, enum_obj)) try: return enum_obj(str(data)) except ValueError: for enum_value in enum_obj: if enum_value.value.lower() == str(data).lower(): return enum_value # We don't fail anymore for an unknown value, we deserialize it as a string _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) return Deserializer.deserialize_unicode(data)
python
def deserialize_enum(data, enum_obj): """Deserialize string into enum object. :param str data: response string to be deserialized. :param Enum enum_obj: Enum object to deserialize to. :rtype: Enum :raises: DeserializationError if string is not a valid enum value. """ if isinstance(data, enum_obj): return data if isinstance(data, Enum): data = data.value if isinstance(data, int): # Workaround. We might consider removing it in the future. # https://github.com/Azure/azure-rest-api-specs/issues/141 try: return list(enum_obj.__members__.values())[data] except IndexError: error = "{!r} is not a valid index for enum {!r}" raise DeserializationError(error.format(data, enum_obj)) try: return enum_obj(str(data)) except ValueError: for enum_value in enum_obj: if enum_value.value.lower() == str(data).lower(): return enum_value # We don't fail anymore for an unknown value, we deserialize it as a string _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) return Deserializer.deserialize_unicode(data)
[ "def", "deserialize_enum", "(", "data", ",", "enum_obj", ")", ":", "if", "isinstance", "(", "data", ",", "enum_obj", ")", ":", "return", "data", "if", "isinstance", "(", "data", ",", "Enum", ")", ":", "data", "=", "data", ".", "value", "if", "isinstance", "(", "data", ",", "int", ")", ":", "# Workaround. We might consider remove it in the future.", "# https://github.com/Azure/azure-rest-api-specs/issues/141", "try", ":", "return", "list", "(", "enum_obj", ".", "__members__", ".", "values", "(", ")", ")", "[", "data", "]", "except", "IndexError", ":", "error", "=", "\"{!r} is not a valid index for enum {!r}\"", "raise", "DeserializationError", "(", "error", ".", "format", "(", "data", ",", "enum_obj", ")", ")", "try", ":", "return", "enum_obj", "(", "str", "(", "data", ")", ")", "except", "ValueError", ":", "for", "enum_value", "in", "enum_obj", ":", "if", "enum_value", ".", "value", ".", "lower", "(", ")", "==", "str", "(", "data", ")", ".", "lower", "(", ")", ":", "return", "enum_value", "# We don't fail anymore for unknown value, we deserialize as a string", "_LOGGER", ".", "warning", "(", "\"Deserializer is not able to find %s as valid enum in %s\"", ",", "data", ",", "enum_obj", ")", "return", "Deserializer", ".", "deserialize_unicode", "(", "data", ")" ]
Deserialize string into enum object. :param str data: response string to be deserialized. :param Enum enum_obj: Enum object to deserialize to. :rtype: Enum :raises: DeserializationError if string is not a valid enum value.
[ "Deserialize", "string", "into", "enum", "object", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L1602-L1630
train
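A sketch with a hypothetical enum (not part of msrest), covering the integer-index workaround, the case-insensitive match, and the string fallback for unknown values:

    from enum import Enum
    from msrest.serialization import Deserializer

    class Color(Enum):  # hypothetical enum, for illustration only
        red = 'Red'
        blue = 'Blue'

    Deserializer.deserialize_enum('red', Color)    # expected: Color.red (case-insensitive)
    Deserializer.deserialize_enum(1, Color)        # expected: Color.blue (index workaround)
    Deserializer.deserialize_enum('green', Color)  # expected: 'green', plus a logged warning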
Azure/msrest-for-python
msrest/serialization.py
Deserializer.deserialize_bytearray
def deserialize_bytearray(attr): """Deserialize string into bytearray. :param str attr: response string to be deserialized. :rtype: bytearray :raises: TypeError if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text return bytearray(b64decode(attr))
python
def deserialize_bytearray(attr): """Deserialize string into bytearray. :param str attr: response string to be deserialized. :rtype: bytearray :raises: TypeError if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text return bytearray(b64decode(attr))
[ "def", "deserialize_bytearray", "(", "attr", ")", ":", "if", "isinstance", "(", "attr", ",", "ET", ".", "Element", ")", ":", "attr", "=", "attr", ".", "text", "return", "bytearray", "(", "b64decode", "(", "attr", ")", ")" ]
Deserialize string into bytearray. :param str attr: response string to be deserialized. :rtype: bytearray :raises: TypeError if string format invalid.
[ "Deserialize", "string", "into", "bytearray", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L1633-L1642
train
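A one-line sketch of the bytearray decoder:

    from msrest.serialization import Deserializer

    Deserializer.deserialize_bytearray('aGVsbG8=')  # expected: bytearray(b'hello')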
Azure/msrest-for-python
msrest/serialization.py
Deserializer.deserialize_base64
def deserialize_base64(attr): """Deserialize base64 encoded string into bytes. :param str attr: response string to be deserialized. :rtype: bytes :raises: TypeError if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text padding = '=' * (3 - (len(attr) + 3) % 4) attr = attr + padding encoded = attr.replace('-', '+').replace('_', '/') return b64decode(encoded)
python
def deserialize_base64(attr): """Deserialize base64 encoded string into bytes. :param str attr: response string to be deserialized. :rtype: bytes :raises: TypeError if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text padding = '=' * (3 - (len(attr) + 3) % 4) attr = attr + padding encoded = attr.replace('-', '+').replace('_', '/') return b64decode(encoded)
[ "def", "deserialize_base64", "(", "attr", ")", ":", "if", "isinstance", "(", "attr", ",", "ET", ".", "Element", ")", ":", "attr", "=", "attr", ".", "text", "padding", "=", "'='", "*", "(", "3", "-", "(", "len", "(", "attr", ")", "+", "3", ")", "%", "4", ")", "attr", "=", "attr", "+", "padding", "encoded", "=", "attr", ".", "replace", "(", "'-'", ",", "'+'", ")", ".", "replace", "(", "'_'", ",", "'/'", ")", "return", "b64decode", "(", "encoded", ")" ]
Deserialize base64 encoded string into bytes. :param str attr: response string to be deserialized. :rtype: bytes :raises: TypeError if string format invalid.
[ "Deserialize", "base64", "encoded", "string", "into", "string", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L1645-L1657
train
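A sketch of the base64url decoder above; the padding arithmetic restores '=' and the '-'/'_' characters are mapped back to '+'/'/':

    from msrest.serialization import Deserializer

    Deserializer.deserialize_base64('aGVsbG8')  # expected: b'hello' (padding recomputed)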
Azure/msrest-for-python
msrest/serialization.py
Deserializer.deserialize_decimal
def deserialize_decimal(attr): """Deserialize string into Decimal object. :param str attr: response string to be deserialized. :rtype: Decimal :raises: DeserializationError if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text try: return decimal.Decimal(attr) except decimal.DecimalException as err: msg = "Invalid decimal {}".format(attr) raise_with_traceback(DeserializationError, msg, err)
python
def deserialize_decimal(attr): """Deserialize string into Decimal object. :param str attr: response string to be deserialized. :rtype: Decimal :raises: DeserializationError if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text try: return decimal.Decimal(attr) except decimal.DecimalException as err: msg = "Invalid decimal {}".format(attr) raise_with_traceback(DeserializationError, msg, err)
[ "def", "deserialize_decimal", "(", "attr", ")", ":", "if", "isinstance", "(", "attr", ",", "ET", ".", "Element", ")", ":", "attr", "=", "attr", ".", "text", "try", ":", "return", "decimal", ".", "Decimal", "(", "attr", ")", "except", "decimal", ".", "DecimalException", "as", "err", ":", "msg", "=", "\"Invalid decimal {}\"", ".", "format", "(", "attr", ")", "raise_with_traceback", "(", "DeserializationError", ",", "msg", ",", "err", ")" ]
Deserialize string into Decimal object. :param str attr: response string to be deserialized. :rtype: Decimal :raises: DeserializationError if string format invalid.
[ "Deserialize", "string", "into", "Decimal", "object", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L1660-L1673
train
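A sketch of the decimal decoder; Decimal preserves the exact textual value, unlike float:

    from msrest.serialization import Deserializer

    Deserializer.deserialize_decimal('1.10')  # expected: Decimal('1.10')
    Deserializer.deserialize_decimal('oops')  # raises DeserializationError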
Azure/msrest-for-python
msrest/serialization.py
Deserializer.deserialize_duration
def deserialize_duration(attr): """Deserialize ISO-8601 formatted string into TimeDelta object. :param str attr: response string to be deserialized. :rtype: TimeDelta :raises: DeserializationError if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text try: duration = isodate.parse_duration(attr) except(ValueError, OverflowError, AttributeError) as err: msg = "Cannot deserialize duration object." raise_with_traceback(DeserializationError, msg, err) else: return duration
python
def deserialize_duration(attr): """Deserialize ISO-8601 formatted string into TimeDelta object. :param str attr: response string to be deserialized. :rtype: TimeDelta :raises: DeserializationError if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text try: duration = isodate.parse_duration(attr) except(ValueError, OverflowError, AttributeError) as err: msg = "Cannot deserialize duration object." raise_with_traceback(DeserializationError, msg, err) else: return duration
[ "def", "deserialize_duration", "(", "attr", ")", ":", "if", "isinstance", "(", "attr", ",", "ET", ".", "Element", ")", ":", "attr", "=", "attr", ".", "text", "try", ":", "duration", "=", "isodate", ".", "parse_duration", "(", "attr", ")", "except", "(", "ValueError", ",", "OverflowError", ",", "AttributeError", ")", "as", "err", ":", "msg", "=", "\"Cannot deserialize duration object.\"", "raise_with_traceback", "(", "DeserializationError", ",", "msg", ",", "err", ")", "else", ":", "return", "duration" ]
Deserialize ISO-8601 formatted string into TimeDelta object. :param str attr: response string to be deserialized. :rtype: TimeDelta :raises: DeserializationError if string format invalid.
[ "Deserialize", "ISO", "-", "8601", "formatted", "string", "into", "TimeDelta", "object", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L1688-L1703
train
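A minimal sketch of the parse deserialize_duration delegates to, assuming the third-party isodate package is installed:

import datetime
import isodate

d = isodate.parse_duration("PT1H30M")            # ISO-8601 duration string
assert d == datetime.timedelta(hours=1, minutes=30)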
Azure/msrest-for-python
msrest/serialization.py
Deserializer.deserialize_date
def deserialize_date(attr): """Deserialize ISO-8601 formatted string into Date object. :param str attr: response string to be deserialized. :rtype: Date :raises: DeserializationError if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text if re.search(r"[^\W\d_]", attr, re.I + re.U): raise DeserializationError("Date must have only digits and -. Received: %s" % attr) # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. return isodate.parse_date(attr, defaultmonth=None, defaultday=None)
python
def deserialize_date(attr): """Deserialize ISO-8601 formatted string into Date object. :param str attr: response string to be deserialized. :rtype: Date :raises: DeserializationError if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text if re.search(r"[^\W\d_]", attr, re.I + re.U): raise DeserializationError("Date must have only digits and -. Received: %s" % attr) # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. return isodate.parse_date(attr, defaultmonth=None, defaultday=None)
[ "def", "deserialize_date", "(", "attr", ")", ":", "if", "isinstance", "(", "attr", ",", "ET", ".", "Element", ")", ":", "attr", "=", "attr", ".", "text", "if", "re", ".", "search", "(", "r\"[^\\W\\d_]\"", ",", "attr", ",", "re", ".", "I", "+", "re", ".", "U", ")", ":", "raise", "DeserializationError", "(", "\"Date must have only digits and -. Received: %s\"", "%", "attr", ")", "# This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception.", "return", "isodate", ".", "parse_date", "(", "attr", ",", "defaultmonth", "=", "None", ",", "defaultday", "=", "None", ")" ]
Deserialize ISO-8601 formatted string into Date object. :param str attr: response string to be deserialized. :rtype: Date :raises: DeserializationError if string format invalid.
[ "Deserialize", "ISO", "-", "8601", "formatted", "string", "into", "Date", "object", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L1706-L1718
train
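The letter-rejection guard in deserialize_date, checked standalone; [^\W\d_] matches exactly the letter characters:

import re

assert not re.search(r"[^\W\d_]", "2018-12-27", re.I + re.U)   # digits and '-' only: accepted
assert re.search(r"[^\W\d_]", "27 Dec 2018", re.I + re.U)      # contains letters: rejected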
Azure/msrest-for-python
msrest/serialization.py
Deserializer.deserialize_rfc
def deserialize_rfc(attr): """Deserialize RFC-1123 formatted string into Datetime object. :param str attr: response string to be deserialized. :rtype: Datetime :raises: DeserializationError if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text try: date_obj = datetime.datetime.strptime( attr, "%a, %d %b %Y %H:%M:%S %Z") if not date_obj.tzinfo: date_obj = date_obj.replace(tzinfo=TZ_UTC) except ValueError as err: msg = "Cannot deserialize to rfc datetime object." raise_with_traceback(DeserializationError, msg, err) else: return date_obj
python
def deserialize_rfc(attr): """Deserialize RFC-1123 formatted string into Datetime object. :param str attr: response string to be deserialized. :rtype: Datetime :raises: DeserializationError if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text try: date_obj = datetime.datetime.strptime( attr, "%a, %d %b %Y %H:%M:%S %Z") if not date_obj.tzinfo: date_obj = date_obj.replace(tzinfo=TZ_UTC) except ValueError as err: msg = "Cannot deserialize to rfc datetime object." raise_with_traceback(DeserializationError, msg, err) else: return date_obj
[ "def", "deserialize_rfc", "(", "attr", ")", ":", "if", "isinstance", "(", "attr", ",", "ET", ".", "Element", ")", ":", "attr", "=", "attr", ".", "text", "try", ":", "date_obj", "=", "datetime", ".", "datetime", ".", "strptime", "(", "attr", ",", "\"%a, %d %b %Y %H:%M:%S %Z\"", ")", "if", "not", "date_obj", ".", "tzinfo", ":", "date_obj", "=", "date_obj", ".", "replace", "(", "tzinfo", "=", "TZ_UTC", ")", "except", "ValueError", "as", "err", ":", "msg", "=", "\"Cannot deserialize to rfc datetime object.\"", "raise_with_traceback", "(", "DeserializationError", ",", "msg", ",", "err", ")", "else", ":", "return", "date_obj" ]
Deserialize RFC-1123 formatted string into Datetime object. :param str attr: response string to be deserialized. :rtype: Datetime :raises: DeserializationError if string format invalid.
[ "Deserialize", "RFC", "-", "1123", "formatted", "string", "into", "Datetime", "object", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L1721-L1739
train
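Standalone illustration of the RFC-1123 parse above; strptime returns a naive datetime, which is why the code attaches TZ_UTC afterwards:

import datetime

dt = datetime.datetime.strptime("Mon, 25 May 2015 14:59:04 GMT",
                                "%a, %d %b %Y %H:%M:%S %Z")
assert dt.tzinfo is None                         # naive until TZ_UTC is attached
assert dt == datetime.datetime(2015, 5, 25, 14, 59, 4)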
Azure/msrest-for-python
msrest/serialization.py
Deserializer.deserialize_iso
def deserialize_iso(attr): """Deserialize ISO-8601 formatted string into Datetime object. :param str attr: response string to be deserialized. :rtype: Datetime :raises: DeserializationError if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text try: attr = attr.upper() match = Deserializer.valid_date.match(attr) if not match: raise ValueError("Invalid datetime string: " + attr) check_decimal = attr.split('.') if len(check_decimal) > 1: decimal_str = "" for digit in check_decimal[1]: if digit.isdigit(): decimal_str += digit else: break if len(decimal_str) > 6: attr = attr.replace(decimal_str, decimal_str[0:6]) date_obj = isodate.parse_datetime(attr) test_utc = date_obj.utctimetuple() if test_utc.tm_year > 9999 or test_utc.tm_year < 1: raise OverflowError("Hit max or min date") except(ValueError, OverflowError, AttributeError) as err: msg = "Cannot deserialize datetime object." raise_with_traceback(DeserializationError, msg, err) else: return date_obj
python
def deserialize_iso(attr): """Deserialize ISO-8601 formatted string into Datetime object. :param str attr: response string to be deserialized. :rtype: Datetime :raises: DeserializationError if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text try: attr = attr.upper() match = Deserializer.valid_date.match(attr) if not match: raise ValueError("Invalid datetime string: " + attr) check_decimal = attr.split('.') if len(check_decimal) > 1: decimal_str = "" for digit in check_decimal[1]: if digit.isdigit(): decimal_str += digit else: break if len(decimal_str) > 6: attr = attr.replace(decimal_str, decimal_str[0:6]) date_obj = isodate.parse_datetime(attr) test_utc = date_obj.utctimetuple() if test_utc.tm_year > 9999 or test_utc.tm_year < 1: raise OverflowError("Hit max or min date") except(ValueError, OverflowError, AttributeError) as err: msg = "Cannot deserialize datetime object." raise_with_traceback(DeserializationError, msg, err) else: return date_obj
[ "def", "deserialize_iso", "(", "attr", ")", ":", "if", "isinstance", "(", "attr", ",", "ET", ".", "Element", ")", ":", "attr", "=", "attr", ".", "text", "try", ":", "attr", "=", "attr", ".", "upper", "(", ")", "match", "=", "Deserializer", ".", "valid_date", ".", "match", "(", "attr", ")", "if", "not", "match", ":", "raise", "ValueError", "(", "\"Invalid datetime string: \"", "+", "attr", ")", "check_decimal", "=", "attr", ".", "split", "(", "'.'", ")", "if", "len", "(", "check_decimal", ")", ">", "1", ":", "decimal_str", "=", "\"\"", "for", "digit", "in", "check_decimal", "[", "1", "]", ":", "if", "digit", ".", "isdigit", "(", ")", ":", "decimal_str", "+=", "digit", "else", ":", "break", "if", "len", "(", "decimal_str", ")", ">", "6", ":", "attr", "=", "attr", ".", "replace", "(", "decimal_str", ",", "decimal_str", "[", "0", ":", "6", "]", ")", "date_obj", "=", "isodate", ".", "parse_datetime", "(", "attr", ")", "test_utc", "=", "date_obj", ".", "utctimetuple", "(", ")", "if", "test_utc", ".", "tm_year", ">", "9999", "or", "test_utc", ".", "tm_year", "<", "1", ":", "raise", "OverflowError", "(", "\"Hit max or min date\"", ")", "except", "(", "ValueError", ",", "OverflowError", ",", "AttributeError", ")", "as", "err", ":", "msg", "=", "\"Cannot deserialize datetime object.\"", "raise_with_traceback", "(", "DeserializationError", ",", "msg", ",", "err", ")", "else", ":", "return", "date_obj" ]
Deserialize ISO-8601 formatted string into Datetime object. :param str attr: response string to be deserialized. :rtype: Datetime :raises: DeserializationError if string format invalid.
[ "Deserialize", "ISO", "-", "8601", "formatted", "string", "into", "Datetime", "object", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L1742-L1776
train
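The fractional-second truncation inside deserialize_iso, traced standalone: digits beyond microsecond precision are dropped before handing the string to isodate:

attr = "2018-12-27T00:00:00.1234567890Z"
decimal_str = ""
for digit in attr.split('.')[1]:
    if digit.isdigit():
        decimal_str += digit
    else:
        break                                    # stops at the trailing 'Z'
attr = attr.replace(decimal_str, decimal_str[0:6])
assert attr == "2018-12-27T00:00:00.123456Z"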
Azure/msrest-for-python
msrest/paging.py
Paged.raw
def raw(self): # type: () -> ClientRawResponse """Get current page as ClientRawResponse. :rtype: ClientRawResponse """ raw = ClientRawResponse(self.current_page, self._response) if self._raw_headers: raw.add_headers(self._raw_headers) return raw
python
def raw(self): # type: () -> ClientRawResponse """Get current page as ClientRawResponse. :rtype: ClientRawResponse """ raw = ClientRawResponse(self.current_page, self._response) if self._raw_headers: raw.add_headers(self._raw_headers) return raw
[ "def", "raw", "(", "self", ")", ":", "# type: () -> ClientRawResponse", "raw", "=", "ClientRawResponse", "(", "self", ".", "current_page", ",", "self", ".", "_response", ")", "if", "self", ".", "_raw_headers", ":", "raw", ".", "add_headers", "(", "self", ".", "_raw_headers", ")", "return", "raw" ]
Get current page as ClientRawResponse. :rtype: ClientRawResponse
[ "Get", "current", "page", "as", "ClientRawResponse", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/paging.py#L85-L94
train
Azure/msrest-for-python
msrest/paging.py
Paged.advance_page
def advance_page(self): # type: () -> List[Model] """Force moving the cursor to the next azure call. This method is for advanced usage; the iterator protocol is preferred. :raises: StopIteration if no further page :return: The current page list :rtype: list """ if self.next_link is None: raise StopIteration("End of paging") self._current_page_iter_index = 0 self._response = self._get_next(self.next_link) self._derserializer(self, self._response) return self.current_page
python
def advance_page(self): # type: () -> List[Model] """Force moving the cursor to the next azure call. This method is for advanced usage; the iterator protocol is preferred. :raises: StopIteration if no further page :return: The current page list :rtype: list """ if self.next_link is None: raise StopIteration("End of paging") self._current_page_iter_index = 0 self._response = self._get_next(self.next_link) self._derserializer(self, self._response) return self.current_page
[ "def", "advance_page", "(", "self", ")", ":", "# type: () -> List[Model]", "if", "self", ".", "next_link", "is", "None", ":", "raise", "StopIteration", "(", "\"End of paging\"", ")", "self", ".", "_current_page_iter_index", "=", "0", "self", ".", "_response", "=", "self", ".", "_get_next", "(", "self", ".", "next_link", ")", "self", ".", "_derserializer", "(", "self", ",", "self", ".", "_response", ")", "return", "self", ".", "current_page" ]
Force moving the cursor to the next azure call. This method is for advanced usage; the iterator protocol is preferred. :raises: StopIteration if no further page :return: The current page list :rtype: list
[ "Force", "moving", "the", "cursor", "to", "the", "next", "azure", "call", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/paging.py#L116-L131
train
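A hedged usage sketch; 'pager' and 'handle' below are hypothetical stand-ins for a Paged instance returned by a list operation and a caller-supplied callback. Iteration is preferred, but manual paging follows this shape:

while True:
    try:
        page = pager.advance_page()              # raises StopIteration when next_link is None
    except StopIteration:
        break
    handle(page)                                 # process one page (a list of models)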
soravux/scoop
scoop/shared.py
_ensureAtomicity
def _ensureAtomicity(fn): """Ensure atomicity of passed elements on the whole worker pool""" @ensureScoopStartedProperly def wrapper(*args, **kwargs): """setConst(**kwargs) Set a constant that will be shared with every worker. This call blocks until the constant has propagated to at least one worker. :param \*\*kwargs: One or more combination(s) key=value. Key being the variable name and value the object to share. :returns: None. Usage: setConst(name=value) """ # Note that the docstring is the one of setConst. # This is because of the documentation framework (sphinx) limitations. from . import _control # Enforce retrieval of currently awaiting constants _control.execQueue.socket.pumpInfoSocket() for key, value in kwargs.items(): # Object name existence check if key in itertools.chain(*(elem.keys() for elem in elements.values())): raise TypeError("This constant already exists: {0}.".format(key)) # Retry element propagation until it is returned while all(key in elements.get(scoop.worker, []) for key in kwargs.keys()) is not True: scoop.logger.debug("Sending global variables {0}...".format( list(kwargs.keys()) )) # Call the function fn(*args, **kwargs) # Enforce retrieval of currently awaiting constants _control.execQueue.socket.pumpInfoSocket() # TODO: Make previous blocking instead of sleep time.sleep(0.1) # Atomicity check elementNames = list(itertools.chain(*(elem.keys() for elem in elements.values()))) if len(elementNames) != len(set(elementNames)): raise TypeError("This constant already exists: {0}.".format(key)) return wrapper
python
def _ensureAtomicity(fn): """Ensure atomicity of passed elements on the whole worker pool""" @ensureScoopStartedProperly def wrapper(*args, **kwargs): """setConst(**kwargs) Set a constant that will be shared with every worker. This call blocks until the constant has propagated to at least one worker. :param \*\*kwargs: One or more combination(s) key=value. Key being the variable name and value the object to share. :returns: None. Usage: setConst(name=value) """ # Note that the docstring is the one of setConst. # This is because of the documentation framework (sphinx) limitations. from . import _control # Enforce retrieval of currently awaiting constants _control.execQueue.socket.pumpInfoSocket() for key, value in kwargs.items(): # Object name existence check if key in itertools.chain(*(elem.keys() for elem in elements.values())): raise TypeError("This constant already exists: {0}.".format(key)) # Retry element propagation until it is returned while all(key in elements.get(scoop.worker, []) for key in kwargs.keys()) is not True: scoop.logger.debug("Sending global variables {0}...".format( list(kwargs.keys()) )) # Call the function fn(*args, **kwargs) # Enforce retrieval of currently awaiting constants _control.execQueue.socket.pumpInfoSocket() # TODO: Make previous blocking instead of sleep time.sleep(0.1) # Atomicity check elementNames = list(itertools.chain(*(elem.keys() for elem in elements.values()))) if len(elementNames) != len(set(elementNames)): raise TypeError("This constant already exists: {0}.".format(key)) return wrapper
[ "def", "_ensureAtomicity", "(", "fn", ")", ":", "@", "ensureScoopStartedProperly", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "\"\"\"setConst(**kwargs)\n Set a constant that will be shared to every workers.\n This call blocks until the constant has propagated to at least one\n worker.\n\n :param \\*\\*kwargs: One or more combination(s) key=value. Key being the\n variable name and value the object to share.\n\n :returns: None.\n\n Usage: setConst(name=value)\n \"\"\"", "# Note that the docstring is the one of setConst.", "# This is because of the documentation framework (sphinx) limitations.", "from", ".", "import", "_control", "# Enforce retrieval of currently awaiting constants", "_control", ".", "execQueue", ".", "socket", ".", "pumpInfoSocket", "(", ")", "for", "key", ",", "value", "in", "kwargs", ".", "items", "(", ")", ":", "# Object name existence check", "if", "key", "in", "itertools", ".", "chain", "(", "*", "(", "elem", ".", "keys", "(", ")", "for", "elem", "in", "elements", ".", "values", "(", ")", ")", ")", ":", "raise", "TypeError", "(", "\"This constant already exists: {0}.\"", ".", "format", "(", "key", ")", ")", "# Retry element propagation until it is returned", "while", "all", "(", "key", "in", "elements", ".", "get", "(", "scoop", ".", "worker", ",", "[", "]", ")", "for", "key", "in", "kwargs", ".", "keys", "(", ")", ")", "is", "not", "True", ":", "scoop", ".", "logger", ".", "debug", "(", "\"Sending global variables {0}...\"", ".", "format", "(", "list", "(", "kwargs", ".", "keys", "(", ")", ")", ")", ")", "# Call the function", "fn", "(", "*", "args", ",", "*", "*", "kwargs", ")", "# Enforce retrieval of currently awaiting constants", "_control", ".", "execQueue", ".", "socket", ".", "pumpInfoSocket", "(", ")", "# TODO: Make previous blocking instead of sleep", "time", ".", "sleep", "(", "0.1", ")", "# Atomicity check", "elementNames", "=", "list", "(", "itertools", ".", "chain", "(", "*", "(", "elem", ".", "keys", "(", ")", "for", "elem", "in", "elements", ".", "values", "(", ")", ")", ")", ")", "if", "len", "(", "elementNames", ")", "!=", "len", "(", "set", "(", "elementNames", ")", ")", ":", "raise", "TypeError", "(", "\"This constant already exists: {0}.\"", ".", "format", "(", "key", ")", ")", "return", "wrapper" ]
Ensure atomicity of passed elements on the whole worker pool
[ "Ensure", "atomicity", "of", "passed", "elements", "on", "the", "whole", "worker", "pool" ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/shared.py#L32-L80
train
soravux/scoop
scoop/shared.py
getConst
def getConst(name, timeout=0.1): """Get a shared constant. :param name: The name of the shared variable to retrieve. :param timeout: The maximum time to wait in seconds for the propagation of the constant. :returns: The shared object. Usage: value = getConst('name') """ from . import _control import time timeStamp = time.time() while True: # Enforce retrieval of currently awaiting constants _control.execQueue.socket.pumpInfoSocket() # Constants concatenation constants = dict(reduce( lambda x, y: x + list(y.items()), elements.values(), [] )) timeoutHappened = time.time() - timeStamp > timeout if constants.get(name) is not None or timeoutHappened: return constants.get(name) time.sleep(0.01)
python
def getConst(name, timeout=0.1): """Get a shared constant. :param name: The name of the shared variable to retrieve. :param timeout: The maximum time to wait in seconds for the propagation of the constant. :returns: The shared object. Usage: value = getConst('name') """ from . import _control import time timeStamp = time.time() while True: # Enforce retrieval of currently awaiting constants _control.execQueue.socket.pumpInfoSocket() # Constants concatenation constants = dict(reduce( lambda x, y: x + list(y.items()), elements.values(), [] )) timeoutHappened = time.time() - timeStamp > timeout if constants.get(name) is not None or timeoutHappened: return constants.get(name) time.sleep(0.01)
[ "def", "getConst", "(", "name", ",", "timeout", "=", "0.1", ")", ":", "from", ".", "import", "_control", "import", "time", "timeStamp", "=", "time", ".", "time", "(", ")", "while", "True", ":", "# Enforce retrieval of currently awaiting constants", "_control", ".", "execQueue", ".", "socket", ".", "pumpInfoSocket", "(", ")", "# Constants concatenation", "constants", "=", "dict", "(", "reduce", "(", "lambda", "x", ",", "y", ":", "x", "+", "list", "(", "y", ".", "items", "(", ")", ")", ",", "elements", ".", "values", "(", ")", ",", "[", "]", ")", ")", "timeoutHappened", "=", "time", ".", "time", "(", ")", "-", "timeStamp", ">", "timeout", "if", "constants", ".", "get", "(", "name", ")", "is", "not", "None", "or", "timeoutHappened", ":", "return", "constants", ".", "get", "(", "name", ")", "time", ".", "sleep", "(", "0.01", ")" ]
Get a shared constant. :param name: The name of the shared variable to retrieve. :param timeout: The maximum time to wait in seconds for the propagation of the constant. :returns: The shared object. Usage: value = getConst('name')
[ "Get", "a", "shared", "constant", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/shared.py#L109-L137
train
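A minimal sketch of the setConst/getConst pair documented above, assuming the script is launched with `python -m scoop` so a worker pool exists:

from scoop import futures, shared

def work(x):
    return x + shared.getConst('offset')         # blocks up to `timeout` while propagating

if __name__ == '__main__':
    shared.setConst(offset=10)                   # write-once broadcast to the pool
    print(list(futures.map(work, range(3))))     # [10, 11, 12]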
soravux/scoop
scoop/launch/__main__.py
launchBootstraps
def launchBootstraps(): """Launch the bootstrap instances in separate subprocesses""" global processes worker_amount, verbosity, args = getArgs() was_origin = False if verbosity >= 1: sys.stderr.write("Launching {0} worker(s) using {1}.\n".format( worker_amount, os.environ['SHELL'] if 'SHELL' in os.environ else "an unknown shell", ) ) sys.stderr.flush() processes = [] for _ in range(worker_amount): command = [sys.executable, "-m", BOOTSTRAP_MODULE] + args if verbosity >= 3: sys.stderr.write("Executing '{0}'...\n".format(command)) sys.stderr.flush() processes.append(Popen(command)) # Only have a single origin try: args.remove("--origin") except ValueError: pass else: was_origin = True if was_origin: # Only wait on the origin, this will return and notify the launcher # that the job has finished and start the cleanup phase try: processes[0].wait() except KeyboardInterrupt: pass else: for p in processes: p.wait()
python
def launchBootstraps(): """Launch the bootstrap instances in separate subprocesses""" global processes worker_amount, verbosity, args = getArgs() was_origin = False if verbosity >= 1: sys.stderr.write("Launching {0} worker(s) using {1}.\n".format( worker_amount, os.environ['SHELL'] if 'SHELL' in os.environ else "an unknown shell", ) ) sys.stderr.flush() processes = [] for _ in range(worker_amount): command = [sys.executable, "-m", BOOTSTRAP_MODULE] + args if verbosity >= 3: sys.stderr.write("Executing '{0}'...\n".format(command)) sys.stderr.flush() processes.append(Popen(command)) # Only have a single origin try: args.remove("--origin") except ValueError: pass else: was_origin = True if was_origin: # Only wait on the origin, this will return and notify the launcher # that the job has finished and start the cleanup phase try: processes[0].wait() except KeyboardInterrupt: pass else: for p in processes: p.wait()
[ "def", "launchBootstraps", "(", ")", ":", "global", "processes", "worker_amount", ",", "verbosity", ",", "args", "=", "getArgs", "(", ")", "was_origin", "=", "False", "if", "verbosity", ">=", "1", ":", "sys", ".", "stderr", ".", "write", "(", "\"Launching {0} worker(s) using {1}.\\n\"", ".", "format", "(", "worker_amount", ",", "os", ".", "environ", "[", "'SHELL'", "]", "if", "'SHELL'", "in", "os", ".", "environ", "else", "\"an unknown shell\"", ",", ")", ")", "sys", ".", "stderr", ".", "flush", "(", ")", "processes", "=", "[", "]", "for", "_", "in", "range", "(", "worker_amount", ")", ":", "command", "=", "[", "sys", ".", "executable", ",", "\"-m\"", ",", "BOOTSTRAP_MODULE", "]", "+", "args", "if", "verbosity", ">=", "3", ":", "sys", ".", "stderr", ".", "write", "(", "\"Executing '{0}'...\\n\"", ".", "format", "(", "command", ")", ")", "sys", ".", "stderr", ".", "flush", "(", ")", "processes", ".", "append", "(", "Popen", "(", "command", ")", ")", "# Only have a single origin", "try", ":", "args", ".", "remove", "(", "\"--origin\"", ")", "except", "ValueError", ":", "pass", "else", ":", "was_origin", "=", "True", "if", "was_origin", ":", "# Only wait on the origin, this will return and notify the launcher", "# the the job has finished and start the cleanup phase", "try", ":", "processes", "[", "0", "]", ".", "wait", "(", ")", "except", "KeyboardInterrupt", ":", "pass", "else", ":", "for", "p", "in", "processes", ":", "p", ".", "wait", "(", ")" ]
Launch the bootstrap instances in separate subprocesses
[ "Launch", "the", "bootstrap", "instances", "in", "separate", "subprocesses" ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/launch/__main__.py#L66-L105
train
soravux/scoop
scoop/backports/dictconfig.py
BaseConfigurator.resolve
def resolve(self, s): """ Resolve strings to objects using standard import and attribute syntax. """ name = s.split('.') used = name.pop(0) try: found = self.importer(used) for frag in name: used += '.' + frag try: found = getattr(found, frag) except AttributeError: self.importer(used) found = getattr(found, frag) return found except ImportError: e, tb = sys.exc_info()[1:] v = ValueError('Cannot resolve %r: %s' % (s, e)) v.__cause__, v.__traceback__ = e, tb raise v
python
def resolve(self, s): """ Resolve strings to objects using standard import and attribute syntax. """ name = s.split('.') used = name.pop(0) try: found = self.importer(used) for frag in name: used += '.' + frag try: found = getattr(found, frag) except AttributeError: self.importer(used) found = getattr(found, frag) return found except ImportError: e, tb = sys.exc_info()[1:] v = ValueError('Cannot resolve %r: %s' % (s, e)) v.__cause__, v.__traceback__ = e, tb raise v
[ "def", "resolve", "(", "self", ",", "s", ")", ":", "name", "=", "s", ".", "split", "(", "'.'", ")", "used", "=", "name", ".", "pop", "(", "0", ")", "try", ":", "found", "=", "self", ".", "importer", "(", "used", ")", "for", "frag", "in", "name", ":", "used", "+=", "'.'", "+", "frag", "try", ":", "found", "=", "getattr", "(", "found", ",", "frag", ")", "except", "AttributeError", ":", "self", ".", "importer", "(", "used", ")", "found", "=", "getattr", "(", "found", ",", "frag", ")", "return", "found", "except", "ImportError", ":", "e", ",", "tb", "=", "sys", ".", "exc_info", "(", ")", "[", "1", ":", "]", "v", "=", "ValueError", "(", "'Cannot resolve %r: %s'", "%", "(", "s", ",", "e", ")", ")", "v", ".", "__cause__", ",", "v", ".", "__traceback__", "=", "e", ",", "tb", "raise", "v" ]
Resolve strings to objects using standard import and attribute syntax.
[ "Resolve", "strings", "to", "objects", "using", "standard", "import", "and", "attribute", "syntax", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/backports/dictconfig.py#L153-L174
train
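The import-then-getattr walk that resolve() performs, reproduced standalone for 'os.path.join':

found = __import__('os')                         # import the leading package
for frag in ('path', 'join'):
    found = getattr(found, frag)                 # descend attribute by attribute

import os.path
assert found is os.path.join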
soravux/scoop
scoop/backports/dictconfig.py
BaseConfigurator.as_tuple
def as_tuple(self, value): """Utility function which converts lists to tuples.""" if isinstance(value, list): value = tuple(value) return value
python
def as_tuple(self, value): """Utility function which converts lists to tuples.""" if isinstance(value, list): value = tuple(value) return value
[ "def", "as_tuple", "(", "self", ",", "value", ")", ":", "if", "isinstance", "(", "value", ",", "list", ")", ":", "value", "=", "tuple", "(", "value", ")", "return", "value" ]
Utility function which converts lists to tuples.
[ "Utility", "function", "which", "converts", "lists", "to", "tuples", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/backports/dictconfig.py#L256-L260
train
soravux/scoop
scoop/backports/dictconfig.py
DictConfigurator.configure_formatter
def configure_formatter(self, config): """Configure a formatter from a dictionary.""" if '()' in config: factory = config['()'] # for use in exception handler try: result = self.configure_custom(config) except TypeError, te: if "'format'" not in str(te): raise #Name of parameter changed from fmt to format. #Retry with old name. #This is so that code can be used with older Python versions #(e.g. by Django) config['fmt'] = config.pop('format') config['()'] = factory result = self.configure_custom(config) else: fmt = config.get('format', None) dfmt = config.get('datefmt', None) result = logging.Formatter(fmt, dfmt) return result
python
def configure_formatter(self, config): """Configure a formatter from a dictionary.""" if '()' in config: factory = config['()'] # for use in exception handler try: result = self.configure_custom(config) except TypeError, te: if "'format'" not in str(te): raise #Name of parameter changed from fmt to format. #Retry with old name. #This is so that code can be used with older Python versions #(e.g. by Django) config['fmt'] = config.pop('format') config['()'] = factory result = self.configure_custom(config) else: fmt = config.get('format', None) dfmt = config.get('datefmt', None) result = logging.Formatter(fmt, dfmt) return result
[ "def", "configure_formatter", "(", "self", ",", "config", ")", ":", "if", "'()'", "in", "config", ":", "factory", "=", "config", "[", "'()'", "]", "# for use in exception handler", "try", ":", "result", "=", "self", ".", "configure_custom", "(", "config", ")", "except", "TypeError", ",", "te", ":", "if", "\"'format'\"", "not", "in", "str", "(", "te", ")", ":", "raise", "#Name of parameter changed from fmt to format.", "#Retry with old name.", "#This is so that code can be used with older Python versions", "#(e.g. by Django)", "config", "[", "'fmt'", "]", "=", "config", ".", "pop", "(", "'format'", ")", "config", "[", "'()'", "]", "=", "factory", "result", "=", "self", ".", "configure_custom", "(", "config", ")", "else", ":", "fmt", "=", "config", ".", "get", "(", "'format'", ",", "None", ")", "dfmt", "=", "config", ".", "get", "(", "'datefmt'", ",", "None", ")", "result", "=", "logging", ".", "Formatter", "(", "fmt", ",", "dfmt", ")", "return", "result" ]
Configure a formatter from a dictionary.
[ "Configure", "a", "formatter", "from", "a", "dictionary", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/backports/dictconfig.py#L414-L434
train
soravux/scoop
scoop/backports/dictconfig.py
DictConfigurator.configure_filter
def configure_filter(self, config): """Configure a filter from a dictionary.""" if '()' in config: result = self.configure_custom(config) else: name = config.get('name', '') result = logging.Filter(name) return result
python
def configure_filter(self, config): """Configure a filter from a dictionary.""" if '()' in config: result = self.configure_custom(config) else: name = config.get('name', '') result = logging.Filter(name) return result
[ "def", "configure_filter", "(", "self", ",", "config", ")", ":", "if", "'()'", "in", "config", ":", "result", "=", "self", ".", "configure_custom", "(", "config", ")", "else", ":", "name", "=", "config", ".", "get", "(", "'name'", ",", "''", ")", "result", "=", "logging", ".", "Filter", "(", "name", ")", "return", "result" ]
Configure a filter from a dictionary.
[ "Configure", "a", "filter", "from", "a", "dictionary", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/backports/dictconfig.py#L436-L443
train
soravux/scoop
scoop/backports/dictconfig.py
DictConfigurator.configure_logger
def configure_logger(self, name, config, incremental=False): """Configure a non-root logger from a dictionary.""" logger = logging.getLogger(name) self.common_logger_config(logger, config, incremental) propagate = config.get('propagate', None) if propagate is not None: logger.propagate = propagate
python
def configure_logger(self, name, config, incremental=False): """Configure a non-root logger from a dictionary.""" logger = logging.getLogger(name) self.common_logger_config(logger, config, incremental) propagate = config.get('propagate', None) if propagate is not None: logger.propagate = propagate
[ "def", "configure_logger", "(", "self", ",", "name", ",", "config", ",", "incremental", "=", "False", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", "name", ")", "self", ".", "common_logger_config", "(", "logger", ",", "config", ",", "incremental", ")", "propagate", "=", "config", ".", "get", "(", "'propagate'", ",", "None", ")", "if", "propagate", "is", "not", "None", ":", "logger", ".", "propagate", "=", "propagate" ]
Configure a non-root logger from a dictionary.
[ "Configure", "a", "non", "-", "root", "logger", "from", "a", "dictionary", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/backports/dictconfig.py#L532-L538
train
soravux/scoop
scoop/backports/dictconfig.py
DictConfigurator.configure_root
def configure_root(self, config, incremental=False): """Configure a root logger from a dictionary.""" root = logging.getLogger() self.common_logger_config(root, config, incremental)
python
def configure_root(self, config, incremental=False): """Configure a root logger from a dictionary.""" root = logging.getLogger() self.common_logger_config(root, config, incremental)
[ "def", "configure_root", "(", "self", ",", "config", ",", "incremental", "=", "False", ")", ":", "root", "=", "logging", ".", "getLogger", "(", ")", "self", ".", "common_logger_config", "(", "root", ",", "config", ",", "incremental", ")" ]
Configure a root logger from a dictionary.
[ "Configure", "a", "root", "logger", "from", "a", "dictionary", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/backports/dictconfig.py#L540-L543
train
soravux/scoop
examples/image_resize.py
sliceImage
def sliceImage(image, divWidth, divHeight): """Divide the received image in multiple tiles""" w, h = image.size tiles = [] for y in range(0, h - 1 , h/divHeight): my = min(y + h/divHeight, h) for x in range(0, w - 1, w/divWidth): mx = min(x + w/divWidth, w) tiles.append(image.crop((x, y, mx, my))) return tiles
python
def sliceImage(image, divWidth, divHeight): """Divide the received image in multiple tiles""" w, h = image.size tiles = [] for y in range(0, h - 1 , h/divHeight): my = min(y + h/divHeight, h) for x in range(0, w - 1, w/divWidth): mx = min(x + w/divWidth, w) tiles.append(image.crop((x, y, mx, my))) return tiles
[ "def", "sliceImage", "(", "image", ",", "divWidth", ",", "divHeight", ")", ":", "w", ",", "h", "=", "image", ".", "size", "tiles", "=", "[", "]", "for", "y", "in", "range", "(", "0", ",", "h", "-", "1", ",", "h", "/", "divHeight", ")", ":", "my", "=", "min", "(", "y", "+", "h", "/", "divHeight", ",", "h", ")", "for", "x", "in", "range", "(", "0", ",", "w", "-", "1", ",", "w", "/", "divWidth", ")", ":", "mx", "=", "min", "(", "x", "+", "w", "/", "divWidth", ",", "w", ")", "tiles", ".", "append", "(", "image", ".", "crop", "(", "(", "x", ",", "y", ",", "mx", ",", "my", ")", ")", ")", "return", "tiles" ]
Divide the received image in multiple tiles
[ "Divide", "the", "received", "image", "in", "multiple", "tiles" ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/examples/image_resize.py#L49-L58
train
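The tile-boundary arithmetic from sliceImage, checked standalone. The original example targets Python 2, where `/` on ints is integer division; `//` is used here so the check also runs on Python 3:

w, h, divWidth, divHeight = 100, 60, 4, 3
xs = list(range(0, w - 1, w // divWidth))        # left edges of the tile columns
ys = list(range(0, h - 1, h // divHeight))       # top edges of the tile rows
assert xs == [0, 25, 50, 75] and ys == [0, 20, 40]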
soravux/scoop
examples/image_resize.py
resizeTile
def resizeTile(index, size): """Apply Antialiasing resizing to tile""" resized = tiles[index].resize(size, Image.ANTIALIAS) return sImage(resized.tostring(), resized.size, resized.mode)
python
def resizeTile(index, size): """Apply Antialiasing resizing to tile""" resized = tiles[index].resize(size, Image.ANTIALIAS) return sImage(resized.tostring(), resized.size, resized.mode)
[ "def", "resizeTile", "(", "index", ",", "size", ")", ":", "resized", "=", "tiles", "[", "index", "]", ".", "resize", "(", "size", ",", "Image", ".", "ANTIALIAS", ")", "return", "sImage", "(", "resized", ".", "tostring", "(", ")", ",", "resized", ".", "size", ",", "resized", ".", "mode", ")" ]
Apply Antialiasing resizing to tile
[ "Apply", "Antialiasing", "resizing", "to", "tile" ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/examples/image_resize.py#L61-L64
train
soravux/scoop
scoop/utils.py
initLogging
def initLogging(verbosity=0, name="SCOOP"): """Creates a logger.""" global loggingConfig verbose_levels = { -2: "CRITICAL", -1: "ERROR", 0: "WARNING", 1: "INFO", 2: "DEBUG", 3: "DEBUG", 4: "NOTSET", } log_handlers = { "console": { "class": "logging.StreamHandler", "formatter": "{name}Formatter".format(name=name), "stream": "ext://sys.stderr", }, } loggingConfig.update({ "{name}Logger".format(name=name): { "handlers": ["console"], "level": verbose_levels[verbosity], }, }) dict_log_config = { "version": 1, "handlers": log_handlers, "loggers": loggingConfig, "formatters": { "{name}Formatter".format(name=name): { "format": "[%(asctime)-15s] %(module)-9s " "%(levelname)-7s %(message)s", }, }, } dictConfig(dict_log_config) return logging.getLogger("{name}Logger".format(name=name))
python
def initLogging(verbosity=0, name="SCOOP"): """Creates a logger.""" global loggingConfig verbose_levels = { -2: "CRITICAL", -1: "ERROR", 0: "WARNING", 1: "INFO", 2: "DEBUG", 3: "DEBUG", 4: "NOTSET", } log_handlers = { "console": { "class": "logging.StreamHandler", "formatter": "{name}Formatter".format(name=name), "stream": "ext://sys.stderr", }, } loggingConfig.update({ "{name}Logger".format(name=name): { "handlers": ["console"], "level": verbose_levels[verbosity], }, }) dict_log_config = { "version": 1, "handlers": log_handlers, "loggers": loggingConfig, "formatters": { "{name}Formatter".format(name=name): { "format": "[%(asctime)-15s] %(module)-9s " "%(levelname)-7s %(message)s", }, }, } dictConfig(dict_log_config) return logging.getLogger("{name}Logger".format(name=name))
[ "def", "initLogging", "(", "verbosity", "=", "0", ",", "name", "=", "\"SCOOP\"", ")", ":", "global", "loggingConfig", "verbose_levels", "=", "{", "-", "2", ":", "\"CRITICAL\"", ",", "-", "1", ":", "\"ERROR\"", ",", "0", ":", "\"WARNING\"", ",", "1", ":", "\"INFO\"", ",", "2", ":", "\"DEBUG\"", ",", "3", ":", "\"DEBUG\"", ",", "4", ":", "\"NOSET\"", ",", "}", "log_handlers", "=", "{", "\"console\"", ":", "{", "\"class\"", ":", "\"logging.StreamHandler\"", ",", "\"formatter\"", ":", "\"{name}Formatter\"", ".", "format", "(", "name", "=", "name", ")", ",", "\"stream\"", ":", "\"ext://sys.stderr\"", ",", "}", ",", "}", "loggingConfig", ".", "update", "(", "{", "\"{name}Logger\"", ".", "format", "(", "name", "=", "name", ")", ":", "{", "\"handlers\"", ":", "[", "\"console\"", "]", ",", "\"level\"", ":", "verbose_levels", "[", "verbosity", "]", ",", "}", ",", "}", ")", "dict_log_config", "=", "{", "\"version\"", ":", "1", ",", "\"handlers\"", ":", "log_handlers", ",", "\"loggers\"", ":", "loggingConfig", ",", "\"formatters\"", ":", "{", "\"{name}Formatter\"", ".", "format", "(", "name", "=", "name", ")", ":", "{", "\"format\"", ":", "\"[%(asctime)-15s] %(module)-9s \"", "\"%(levelname)-7s %(message)s\"", ",", "}", ",", "}", ",", "}", "dictConfig", "(", "dict_log_config", ")", "return", "logging", ".", "getLogger", "(", "\"{name}Logger\"", ".", "format", "(", "name", "=", "name", ")", ")" ]
Creates a logger.
[ "Creates", "a", "logger", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/utils.py#L54-L96
train
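An illustrative call, assuming scoop.utils above is importable:

from scoop.utils import initLogging

logger = initLogging(verbosity=1)                # verbosity 1 maps to level "INFO"
logger.info("worker ready")                      # goes to stderr via the console handler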
soravux/scoop
scoop/utils.py
externalHostname
def externalHostname(hosts): """Ensure external hostname is routable.""" hostname = hosts[0][0] if hostname in localHostnames and len(hosts) > 1: hostname = socket.getfqdn().split(".")[0] try: socket.getaddrinfo(hostname, None) except socket.gaierror: raise Exception("\nThe first host (containing a broker) is not" " routable.\nMake sure the address is correct.") return hostname
python
def externalHostname(hosts): """Ensure external hostname is routable.""" hostname = hosts[0][0] if hostname in localHostnames and len(hosts) > 1: hostname = socket.getfqdn().split(".")[0] try: socket.getaddrinfo(hostname, None) except socket.gaierror: raise Exception("\nThe first host (containing a broker) is not" " routable.\nMake sure the address is correct.") return hostname
[ "def", "externalHostname", "(", "hosts", ")", ":", "hostname", "=", "hosts", "[", "0", "]", "[", "0", "]", "if", "hostname", "in", "localHostnames", "and", "len", "(", "hosts", ")", ">", "1", ":", "hostname", "=", "socket", ".", "getfqdn", "(", ")", ".", "split", "(", "\".\"", ")", "[", "0", "]", "try", ":", "socket", ".", "getaddrinfo", "(", "hostname", ",", "None", ")", "except", "socket", ".", "gaierror", ":", "raise", "Exception", "(", "\"\\nThe first host (containing a broker) is not\"", "\" routable.\\nMake sure the address is correct.\"", ")", "return", "hostname" ]
Ensure external hostname is routable.
[ "Ensure", "external", "hostname", "is", "routable", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/utils.py#L99-L109
train
soravux/scoop
scoop/utils.py
getHosts
def getHosts(filename=None, hostlist=None): """Return a list of hosts depending on the environment""" if filename: return getHostsFromFile(filename) elif hostlist: return getHostsFromList(hostlist) elif getEnv() == "SLURM": return getHostsFromSLURM() elif getEnv() == "PBS": return getHostsFromPBS() elif getEnv() == "SGE": return getHostsFromSGE() else: return getDefaultHosts()
python
def getHosts(filename=None, hostlist=None): """Return a list of hosts depending on the environment""" if filename: return getHostsFromFile(filename) elif hostlist: return getHostsFromList(hostlist) elif getEnv() == "SLURM": return getHostsFromSLURM() elif getEnv() == "PBS": return getHostsFromPBS() elif getEnv() == "SGE": return getHostsFromSGE() else: return getDefaultHosts()
[ "def", "getHosts", "(", "filename", "=", "None", ",", "hostlist", "=", "None", ")", ":", "if", "filename", ":", "return", "getHostsFromFile", "(", "filename", ")", "elif", "hostlist", ":", "return", "getHostsFromList", "(", "hostlist", ")", "elif", "getEnv", "(", ")", "==", "\"SLURM\"", ":", "return", "getHostsFromSLURM", "(", ")", "elif", "getEnv", "(", ")", "==", "\"PBS\"", ":", "return", "getHostsFromPBS", "(", ")", "elif", "getEnv", "(", ")", "==", "\"SGE\"", ":", "return", "getHostsFromSGE", "(", ")", "else", ":", "return", "getDefaultHosts", "(", ")" ]
Return a list of hosts depending on the environment
[ "Return", "a", "list", "of", "hosts", "depending", "on", "the", "environment" ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/utils.py#L144-L157
train
soravux/scoop
scoop/utils.py
getHostsFromFile
def getHostsFromFile(filename): """Parse a file to return a list of hosts.""" valid_hostname = r"^[^ /\t=\n]+" workers = r"\d+" hostname_re = re.compile(valid_hostname) worker_re = re.compile(workers) hosts = [] with open(filename) as f: for line in f: # check to see if it is a SLURM grouping instead of a # regular list of hosts if re.search('[\[\]]', line): hosts = hosts + parseSLURM(line.strip()) else: host = hostname_re.search(line.strip()) if host: hostname = host.group() n = worker_re.search(line[host.end():]) if n: n = n.group() else: # Automatically assign based on CPU count n = 0 hosts.append((hostname, int(n))) return hosts
python
def getHostsFromFile(filename): """Parse a file to return a list of hosts.""" valid_hostname = r"^[^ /\t=\n]+" workers = r"\d+" hostname_re = re.compile(valid_hostname) worker_re = re.compile(workers) hosts = [] with open(filename) as f: for line in f: # check to see if it is a SLURM grouping instead of a # regular list of hosts if re.search('[\[\]]', line): hosts = hosts + parseSLURM(line.strip()) else: host = hostname_re.search(line.strip()) if host: hostname = host.group() n = worker_re.search(line[host.end():]) if n: n = n.group() else: # Automatically assign based on CPU count n = 0 hosts.append((hostname, int(n))) return hosts
[ "def", "getHostsFromFile", "(", "filename", ")", ":", "valid_hostname", "=", "r\"^[^ /\\t=\\n]+\"", "workers", "=", "r\"\\d+\"", "hostname_re", "=", "re", ".", "compile", "(", "valid_hostname", ")", "worker_re", "=", "re", ".", "compile", "(", "workers", ")", "hosts", "=", "[", "]", "with", "open", "(", "filename", ")", "as", "f", ":", "for", "line", "in", "f", ":", "# check to see if it is a SLURM grouping instead of a", "# regular list of hosts", "if", "re", ".", "search", "(", "'[\\[\\]]'", ",", "line", ")", ":", "hosts", "=", "hosts", "+", "parseSLURM", "(", "line", ".", "strip", "(", ")", ")", "else", ":", "host", "=", "hostname_re", ".", "search", "(", "line", ".", "strip", "(", ")", ")", "if", "host", ":", "hostname", "=", "host", ".", "group", "(", ")", "n", "=", "worker_re", ".", "search", "(", "line", "[", "host", ".", "end", "(", ")", ":", "]", ")", "if", "n", ":", "n", "=", "n", ".", "group", "(", ")", "else", ":", "# Automatically assign based on CPU count", "n", "=", "0", "hosts", ".", "append", "(", "(", "hostname", ",", "int", "(", "n", ")", ")", ")", "return", "hosts" ]
Parse a file to return a list of hosts.
[ "Parse", "a", "file", "to", "return", "a", "list", "of", "hosts", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/utils.py#L160-L184
train
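A round trip through getHostsFromFile with a hypothetical hostfile; a missing worker count parses as 0, which callers treat as auto-detect by CPU count (per the comment above):

import os
import tempfile
from scoop.utils import getHostsFromFile         # assuming scoop is importable

with tempfile.NamedTemporaryFile('w', delete=False) as f:
    f.write("node1 4\nnode2\n")
try:
    assert getHostsFromFile(f.name) == [('node1', 4), ('node2', 0)]
finally:
    os.unlink(f.name)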
soravux/scoop
scoop/utils.py
getHostsFromList
def getHostsFromList(hostlist): """Return the hosts from the command line""" # check to see if it is a SLURM grouping instead of a # regular list of hosts if any(re.search('[\[\]]', x) for x in hostlist): return parseSLURM(str(hostlist)) # Counter would be more efficient but: # 1. Won't be Python 2.6 compatible # 2. Won't be ordered hostlist = groupTogether(hostlist) retVal = [] for key, group in groupby(hostlist): retVal.append((key, len(list(group)))) return retVal
python
def getHostsFromList(hostlist): """Return the hosts from the command line""" # check to see if it is a SLURM grouping instead of a # regular list of hosts if any(re.search('[\[\]]', x) for x in hostlist): return parseSLURM(str(hostlist)) # Counter would be more efficient but: # 1. Won't be Python 2.6 compatible # 2. Won't be ordered hostlist = groupTogether(hostlist) retVal = [] for key, group in groupby(hostlist): retVal.append((key, len(list(group)))) return retVal
[ "def", "getHostsFromList", "(", "hostlist", ")", ":", "# check to see if it is a SLURM grouping instead of a", "# regular list of hosts", "if", "any", "(", "re", ".", "search", "(", "'[\\[\\]]'", ",", "x", ")", "for", "x", "in", "hostlist", ")", ":", "return", "parseSLURM", "(", "str", "(", "hostlist", ")", ")", "# Counter would be more efficient but:", "# 1. Won't be Python 2.6 compatible", "# 2. Won't be ordered", "hostlist", "=", "groupTogether", "(", "hostlist", ")", "retVal", "=", "[", "]", "for", "key", ",", "group", "in", "groupby", "(", "hostlist", ")", ":", "retVal", ".", "append", "(", "(", "key", ",", "len", "(", "list", "(", "group", ")", ")", ")", ")", "return", "retVal" ]
Return the hosts from the command line
[ "Return", "the", "hosts", "from", "the", "command", "line" ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/utils.py#L187-L201
train
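The groupby counting step shown standalone; groupTogether (defined elsewhere in scoop.utils) moves duplicate hostnames next to each other while keeping first-seen order — the ordered alternative to Counter mentioned in the comments — so groupby can count runs:

from itertools import groupby

hostlist = ['node1', 'node1', 'node2']           # already grouped
counts = [(key, len(list(group))) for key, group in groupby(hostlist)]
assert counts == [('node1', 2), ('node2', 1)]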
soravux/scoop
scoop/utils.py
parseSLURM
def parseSLURM(string): """Return a host list from a SLURM string""" # Use scontrol utility to get the hosts list import subprocess, os hostsstr = subprocess.check_output(["scontrol", "show", "hostnames", string]) if sys.version_info.major > 2: hostsstr = hostsstr.decode() # Split using endline hosts = hostsstr.split(os.linesep) # Take out last empty host hosts = filter(None, hosts) # Create the desired pair of host and number of hosts hosts = [(host, 1) for host in hosts] return hosts
python
def parseSLURM(string): """Return a host list from a SLURM string""" # Use scontrol utility to get the hosts list import subprocess, os hostsstr = subprocess.check_output(["scontrol", "show", "hostnames", string]) if sys.version_info.major > 2: hostsstr = hostsstr.decode() # Split using endline hosts = hostsstr.split(os.linesep) # Take out last empty host hosts = filter(None, hosts) # Create the desired pair of host and number of hosts hosts = [(host, 1) for host in hosts] return hosts
[ "def", "parseSLURM", "(", "string", ")", ":", "# Use scontrol utility to get the hosts list", "import", "subprocess", ",", "os", "hostsstr", "=", "subprocess", ".", "check_output", "(", "[", "\"scontrol\"", ",", "\"show\"", ",", "\"hostnames\"", ",", "string", "]", ")", "if", "sys", ".", "version_info", ".", "major", ">", "2", ":", "hostsstr", "=", "hostsstr", ".", "decode", "(", ")", "# Split using endline", "hosts", "=", "hostsstr", ".", "split", "(", "os", ".", "linesep", ")", "# Take out last empty host", "hosts", "=", "filter", "(", "None", ",", "hosts", ")", "# Create the desired pair of host and number of hosts", "hosts", "=", "[", "(", "host", ",", "1", ")", "for", "host", "in", "hosts", "]", "return", "hosts" ]
Return a host list from a SLURM string
[ "Return", "a", "host", "list", "from", "a", "SLURM", "string" ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/utils.py#L204-L217
train
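A hedged sketch of the post-processing in parseSLURM; the string below stands in for scontrol's output, which expands SLURM bracket notation one hostname per line ("\n" is used here in place of os.linesep):

hostsstr = "node1\nnode2\nnode3\n"               # stand-in for scontrol output
hosts = [(host, 1) for host in filter(None, hostsstr.split("\n"))]
assert hosts == [('node1', 1), ('node2', 1), ('node3', 1)]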
soravux/scoop
scoop/utils.py
getHostsFromPBS
def getHostsFromPBS(): """Return a host list in a PBS environment""" # See above comment about Counter with open(os.environ["PBS_NODEFILE"], 'r') as hosts: hostlist = groupTogether(hosts.read().split()) retVal = [] for key, group in groupby(hostlist): retVal.append((key, len(list(group)))) return retVal
python
def getHostsFromPBS(): """Return a host list in a PBS environment""" # See above comment about Counter with open(os.environ["PBS_NODEFILE"], 'r') as hosts: hostlist = groupTogether(hosts.read().split()) retVal = [] for key, group in groupby(hostlist): retVal.append((key, len(list(group)))) return retVal
[ "def", "getHostsFromPBS", "(", ")", ":", "# See above comment about Counter", "with", "open", "(", "os", ".", "environ", "[", "\"PBS_NODEFILE\"", "]", ",", "'r'", ")", "as", "hosts", ":", "hostlist", "=", "groupTogether", "(", "hosts", ".", "read", "(", ")", ".", "split", "(", ")", ")", "retVal", "=", "[", "]", "for", "key", ",", "group", "in", "groupby", "(", "hostlist", ")", ":", "retVal", ".", "append", "(", "(", "key", ",", "len", "(", "list", "(", "group", ")", ")", ")", ")", "return", "retVal" ]
Return a host list in a PBS environment
[ "Return", "a", "host", "list", "in", "a", "PBS", "environment" ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/utils.py#L225-L233
train
soravux/scoop
scoop/utils.py
getHostsFromSGE
def getHostsFromSGE(): """Return a host list in a SGE environment""" with open(os.environ["PE_HOSTFILE"], 'r') as hosts: return [(host.split()[0], int(host.split()[1])) for host in hosts]
python
def getHostsFromSGE(): """Return a host list in a SGE environment""" with open(os.environ["PE_HOSTFILE"], 'r') as hosts: return [(host.split()[0], int(host.split()[1])) for host in hosts]
[ "def", "getHostsFromSGE", "(", ")", ":", "with", "open", "(", "os", ".", "environ", "[", "\"PE_HOSTFILE\"", "]", ",", "'r'", ")", "as", "hosts", ":", "return", "[", "(", "host", ".", "split", "(", ")", "[", "0", "]", ",", "int", "(", "host", ".", "split", "(", ")", "[", "1", "]", ")", ")", "for", "host", "in", "hosts", "]" ]
Return a host list in a SGE environment
[ "Return", "a", "host", "list", "in", "a", "SGE", "environment" ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/utils.py#L236-L239
train
soravux/scoop
scoop/utils.py
getWorkerQte
def getWorkerQte(hosts): """Return the number of workers to launch depending on the environment""" if "SLURM_NTASKS" in os.environ: return int(os.environ["SLURM_NTASKS"]) elif "PBS_NP" in os.environ: return int(os.environ["PBS_NP"]) elif "NSLOTS" in os.environ: return int(os.environ["NSLOTS"]) else: return sum(host[1] for host in hosts)
python
def getWorkerQte(hosts): """Return the number of workers to launch depending on the environment""" if "SLURM_NTASKS" in os.environ: return int(os.environ["SLURM_NTASKS"]) elif "PBS_NP" in os.environ: return int(os.environ["PBS_NP"]) elif "NSLOTS" in os.environ: return int(os.environ["NSLOTS"]) else: return sum(host[1] for host in hosts)
[ "def", "getWorkerQte", "(", "hosts", ")", ":", "if", "\"SLURM_NTASKS\"", "in", "os", ".", "environ", ":", "return", "int", "(", "os", ".", "environ", "[", "\"SLURM_NTASKS\"", "]", ")", "elif", "\"PBS_NP\"", "in", "os", ".", "environ", ":", "return", "int", "(", "os", ".", "environ", "[", "\"PBS_NP\"", "]", ")", "elif", "\"NSLOTS\"", "in", "os", ".", "environ", ":", "return", "int", "(", "os", ".", "environ", "[", "\"NSLOTS\"", "]", ")", "else", ":", "return", "sum", "(", "host", "[", "1", "]", "for", "host", "in", "hosts", ")" ]
Return the number of workers to launch depending on the environment
[ "Return", "the", "number", "of", "workers", "to", "launch", "depending", "on", "the", "environment" ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/utils.py#L242-L251
train
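The precedence getWorkerQte implements, exercised with a faked SLURM environment (again assuming scoop.utils is importable):

import os
from scoop.utils import getWorkerQte

os.environ['SLURM_NTASKS'] = '8'
assert getWorkerQte([('node1', 4), ('node2', 4)]) == 8   # env var beats the host-list sum
del os.environ['SLURM_NTASKS']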
soravux/scoop
scoop/encapsulation.py
functionFactory
def functionFactory(in_code, name, defaults, globals_, imports): """Creates a function at runtime using binary compiled in_code""" def generatedFunction(): pass generatedFunction.__code__ = marshal.loads(in_code) generatedFunction.__name__ = name generatedFunction.__defaults__ = defaults generatedFunction.__globals__.update(pickle.loads(globals_)) for key, value in imports.items(): imported_module = __import__(value) scoop.logger.debug("Dynamically loaded module {0}".format(value)) generatedFunction.__globals__.update({key: imported_module}) return generatedFunction
python
def functionFactory(in_code, name, defaults, globals_, imports): """Creates a function at runtime using binary compiled in_code""" def generatedFunction(): pass generatedFunction.__code__ = marshal.loads(in_code) generatedFunction.__name__ = name generatedFunction.__defaults__ = defaults generatedFunction.__globals__.update(pickle.loads(globals_)) for key, value in imports.items(): imported_module = __import__(value) scoop.logger.debug("Dynamically loaded module {0}".format(value)) generatedFunction.__globals__.update({key: imported_module}) return generatedFunction
[ "def", "functionFactory", "(", "in_code", ",", "name", ",", "defaults", ",", "globals_", ",", "imports", ")", ":", "def", "generatedFunction", "(", ")", ":", "pass", "generatedFunction", ".", "__code__", "=", "marshal", ".", "loads", "(", "in_code", ")", "generatedFunction", ".", "__name__", "=", "name", "generatedFunction", ".", "__defaults", "=", "defaults", "generatedFunction", ".", "__globals__", ".", "update", "(", "pickle", ".", "loads", "(", "globals_", ")", ")", "for", "key", ",", "value", "in", "imports", ".", "items", "(", ")", ":", "imported_module", "=", "__import__", "(", "value", ")", "scoop", ".", "logger", ".", "debug", "(", "\"Dynamically loaded module {0}\"", ".", "format", "(", "value", ")", ")", "generatedFunction", ".", "__globals__", ".", "update", "(", "{", "key", ":", "imported_module", "}", ")", "return", "generatedFunction" ]
Creates a function at runtime using binary compiled in_code
[ "Creates", "a", "function", "at", "runtime", "using", "binary", "compiled", "inCode" ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/encapsulation.py#L41-L53
train
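Not scoop's exact mechanism (which patches __code__ onto a placeholder function, as above), but the marshal round trip it relies on, shown standalone:

import marshal
import types

def add(a, b):
    return a + b

payload = marshal.dumps(add.__code__)            # what travels between workers
rebuilt = types.FunctionType(marshal.loads(payload), globals())
assert rebuilt(2, 3) == 5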
soravux/scoop
scoop/encapsulation.py
makeLambdaPicklable
def makeLambdaPicklable(lambda_function): """Take an input lambda function and make it picklable.""" if isinstance(lambda_function, type(lambda: None)) and lambda_function.__name__ == '<lambda>': def __reduce_ex__(proto): # TODO: argdefs, closure return unpickleLambda, (marshal.dumps(lambda_function.__code__), ) lambda_function.__reduce_ex__ = __reduce_ex__ return lambda_function
python
def makeLambdaPicklable(lambda_function): """Take an input lambda function and make it picklable.""" if isinstance(lambda_function, type(lambda: None)) and lambda_function.__name__ == '<lambda>': def __reduce_ex__(proto): # TODO: argdefs, closure return unpickleLambda, (marshal.dumps(lambda_function.__code__), ) lambda_function.__reduce_ex__ = __reduce_ex__ return lambda_function
[ "def", "makeLambdaPicklable", "(", "lambda_function", ")", ":", "if", "isinstance", "(", "lambda_function", ",", "type", "(", "lambda", ":", "None", ")", ")", "and", "lambda_function", ".", "__name__", "==", "'<lambda>'", ":", "def", "__reduce_ex__", "(", "proto", ")", ":", "# TODO: argdefs, closure", "return", "unpickleLambda", ",", "(", "marshal", ".", "dumps", "(", "lambda_function", ".", "__code__", ")", ",", ")", "lambda_function", ".", "__reduce_ex__", "=", "__reduce_ex__", "return", "lambda_function" ]
Take an input lambda function and make it picklable.
[ "Take", "input", "lambda", "function", "l", "and", "makes", "it", "picklable", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/encapsulation.py#L143-L151
train
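unpickleLambda is referenced above but not shown in this record; a plausible counterpart, assuming it only has to reverse the marshal step (the TODO in the record means default arguments and closures are not restored):

import marshal
import types

def unpickleLambda(code_bytes):
    # Rebuild a function object from marshalled bytecode.
    # Globals come from this module; argdefs/closure are lost,
    # matching the TODO noted in makeLambdaPicklable.
    code = marshal.loads(code_bytes)
    return types.FunctionType(code, globals(), '<lambda>')

double = lambda x: x * 2
restored = unpickleLambda(marshal.dumps(double.__code__))
assert restored(21) == 42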
soravux/scoop
examples/dependency/sortingnetwork.py
SortingNetwork.addConnector
def addConnector(self, wire1, wire2):
    """Add a connector between wire1 and wire2 in the network."""
    if wire1 == wire2:
        return

    if wire1 > wire2:
        wire1, wire2 = wire2, wire1

    try:
        last_level = self[-1]
    except IndexError:
        # Empty network, create new level and connector
        self.append([(wire1, wire2)])
        return

    for wires in last_level:
        if wires[1] >= wire1 and wires[0] <= wire2:
            self.append([(wire1, wire2)])
            return

    last_level.append((wire1, wire2))
python
def addConnector(self, wire1, wire2):
    """Add a connector between wire1 and wire2 in the network."""
    if wire1 == wire2:
        return

    if wire1 > wire2:
        wire1, wire2 = wire2, wire1

    try:
        last_level = self[-1]
    except IndexError:
        # Empty network, create new level and connector
        self.append([(wire1, wire2)])
        return

    for wires in last_level:
        if wires[1] >= wire1 and wires[0] <= wire2:
            self.append([(wire1, wire2)])
            return

    last_level.append((wire1, wire2))
[ "def", "addConnector", "(", "self", ",", "wire1", ",", "wire2", ")", ":", "if", "wire1", "==", "wire2", ":", "return", "if", "wire1", ">", "wire2", ":", "wire1", ",", "wire2", "=", "wire2", ",", "wire1", "try", ":", "last_level", "=", "self", "[", "-", "1", "]", "except", "IndexError", ":", "# Empty network, create new level and connector", "self", ".", "append", "(", "[", "(", "wire1", ",", "wire2", ")", "]", ")", "return", "for", "wires", "in", "last_level", ":", "if", "wires", "[", "1", "]", ">=", "wire1", "and", "wires", "[", "0", "]", "<=", "wire2", ":", "self", ".", "append", "(", "[", "(", "wire1", ",", "wire2", ")", "]", ")", "return", "last_level", ".", "append", "(", "(", "wire1", ",", "wire2", ")", ")" ]
Add a connector between wire1 and wire2 in the network.
[ "Add", "a", "connector", "between", "wire1", "and", "wire2", "in", "the", "network", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/examples/dependency/sortingnetwork.py#L43-L63
train
soravux/scoop
examples/dependency/sortingnetwork.py
SortingNetwork.sort
def sort(self, values):
    """Sort the values in-place based on the connectors in the network."""
    for level in self:
        for wire1, wire2 in level:
            if values[wire1] > values[wire2]:
                values[wire1], values[wire2] = values[wire2], values[wire1]
python
def sort(self, values):
    """Sort the values in-place based on the connectors in the network."""
    for level in self:
        for wire1, wire2 in level:
            if values[wire1] > values[wire2]:
                values[wire1], values[wire2] = values[wire2], values[wire1]
[ "def", "sort", "(", "self", ",", "values", ")", ":", "for", "level", "in", "self", ":", "for", "wire1", ",", "wire2", "in", "level", ":", "if", "values", "[", "wire1", "]", ">", "values", "[", "wire2", "]", ":", "values", "[", "wire1", "]", ",", "values", "[", "wire2", "]", "=", "values", "[", "wire2", "]", ",", "values", "[", "wire1", "]" ]
Sort the values in-place based on the connectors in the network.
[ "Sort", "the", "values", "in", "-", "place", "based", "on", "the", "connectors", "in", "the", "network", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/examples/dependency/sortingnetwork.py#L65-L70
train
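Both methods above belong to a list subclass whose items are levels of (wire1, wire2) comparators; a trimmed-down, runnable stand-in (MiniNetwork is illustrative, the real SortingNetwork also tracks dimension and supports draw):

class MiniNetwork(list):
    """Levels of (wire1, wire2) comparators, mirroring SortingNetwork."""

    def addConnector(self, wire1, wire2):
        if wire1 == wire2:
            return
        if wire1 > wire2:
            wire1, wire2 = wire2, wire1
        if not self:                      # empty network: open first level
            self.append([(wire1, wire2)])
            return
        for w1, w2 in self[-1]:           # conflict with the last level?
            if w2 >= wire1 and w1 <= wire2:
                self.append([(wire1, wire2)])
                return
        self[-1].append((wire1, wire2))

    def sort(self, values):
        for level in self:
            for wire1, wire2 in level:
                if values[wire1] > values[wire2]:
                    values[wire1], values[wire2] = values[wire2], values[wire1]

net = MiniNetwork()
for a, b in [(0, 1), (1, 2), (0, 1)]:     # a valid 3-wire sorting network
    net.addConnector(a, b)
values = [3, 1, 2]
net.sort(values)
assert values == [1, 2, 3]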
soravux/scoop
examples/dependency/sortingnetwork.py
SortingNetwork.draw
def draw(self):
    """Return an ASCII representation of the network."""
    str_wires = [["-"]*7 * self.depth]
    str_wires[0][0] = "0"
    str_wires[0][1] = " o"

    str_spaces = []

    for i in range(1, self.dimension):
        str_wires.append(["-"]*7 * self.depth)
        str_spaces.append([" "]*7 * self.depth)
        str_wires[i][0] = str(i)
        str_wires[i][1] = " o"

    for index, level in enumerate(self):
        for wire1, wire2 in level:
            str_wires[wire1][(index+1)*6] = "x"
            str_wires[wire2][(index+1)*6] = "x"
            for i in range(wire1, wire2):
                str_spaces[i][(index+1)*6+1] = "|"
            for i in range(wire1+1, wire2):
                str_wires[i][(index+1)*6] = "|"

    network_draw = "".join(str_wires[0])
    for line, space in zip(str_wires[1:], str_spaces):
        network_draw += "\n"
        network_draw += "".join(space)
        network_draw += "\n"
        network_draw += "".join(line)
    return network_draw
python
def draw(self):
    """Return an ASCII representation of the network."""
    str_wires = [["-"]*7 * self.depth]
    str_wires[0][0] = "0"
    str_wires[0][1] = " o"

    str_spaces = []

    for i in range(1, self.dimension):
        str_wires.append(["-"]*7 * self.depth)
        str_spaces.append([" "]*7 * self.depth)
        str_wires[i][0] = str(i)
        str_wires[i][1] = " o"

    for index, level in enumerate(self):
        for wire1, wire2 in level:
            str_wires[wire1][(index+1)*6] = "x"
            str_wires[wire2][(index+1)*6] = "x"
            for i in range(wire1, wire2):
                str_spaces[i][(index+1)*6+1] = "|"
            for i in range(wire1+1, wire2):
                str_wires[i][(index+1)*6] = "|"

    network_draw = "".join(str_wires[0])
    for line, space in zip(str_wires[1:], str_spaces):
        network_draw += "\n"
        network_draw += "".join(space)
        network_draw += "\n"
        network_draw += "".join(line)
    return network_draw
[ "def", "draw", "(", "self", ")", ":", "str_wires", "=", "[", "[", "\"-\"", "]", "*", "7", "*", "self", ".", "depth", "]", "str_wires", "[", "0", "]", "[", "0", "]", "=", "\"0\"", "str_wires", "[", "0", "]", "[", "1", "]", "=", "\" o\"", "str_spaces", "=", "[", "]", "for", "i", "in", "range", "(", "1", ",", "self", ".", "dimension", ")", ":", "str_wires", ".", "append", "(", "[", "\"-\"", "]", "*", "7", "*", "self", ".", "depth", ")", "str_spaces", ".", "append", "(", "[", "\" \"", "]", "*", "7", "*", "self", ".", "depth", ")", "str_wires", "[", "i", "]", "[", "0", "]", "=", "str", "(", "i", ")", "str_wires", "[", "i", "]", "[", "1", "]", "=", "\" o\"", "for", "index", ",", "level", "in", "enumerate", "(", "self", ")", ":", "for", "wire1", ",", "wire2", "in", "level", ":", "str_wires", "[", "wire1", "]", "[", "(", "index", "+", "1", ")", "*", "6", "]", "=", "\"x\"", "str_wires", "[", "wire2", "]", "[", "(", "index", "+", "1", ")", "*", "6", "]", "=", "\"x\"", "for", "i", "in", "range", "(", "wire1", ",", "wire2", ")", ":", "str_spaces", "[", "i", "]", "[", "(", "index", "+", "1", ")", "*", "6", "+", "1", "]", "=", "\"|\"", "for", "i", "in", "range", "(", "wire1", "+", "1", ",", "wire2", ")", ":", "str_wires", "[", "i", "]", "[", "(", "index", "+", "1", ")", "*", "6", "]", "=", "\"|\"", "network_draw", "=", "\"\"", ".", "join", "(", "str_wires", "[", "0", "]", ")", "for", "line", ",", "space", "in", "zip", "(", "str_wires", "[", "1", ":", "]", ",", "str_spaces", ")", ":", "network_draw", "+=", "\"\\n\"", "network_draw", "+=", "\"\"", ".", "join", "(", "space", ")", "network_draw", "+=", "\"\\n\"", "network_draw", "+=", "\"\"", ".", "join", "(", "line", ")", "return", "network_draw" ]
Return an ASCII representation of the network.
[ "Return", "an", "ASCII", "representation", "of", "the", "network", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/examples/dependency/sortingnetwork.py#L88-L116
train
soravux/scoop
bench/process_debug.py
getWorkersName
def getWorkersName(data):
    """Returns the list of the names of the workers sorted alphabetically"""
    names = [fichier for fichier in data.keys()]
    names.sort()
    try:
        names.remove("broker")
    except ValueError:
        pass
    return names
python
def getWorkersName(data):
    """Returns the list of the names of the workers sorted alphabetically"""
    names = [fichier for fichier in data.keys()]
    names.sort()
    try:
        names.remove("broker")
    except ValueError:
        pass
    return names
[ "def", "getWorkersName", "(", "data", ")", ":", "names", "=", "[", "fichier", "for", "fichier", "in", "data", ".", "keys", "(", ")", "]", "names", ".", "sort", "(", ")", "try", ":", "names", ".", "remove", "(", "\"broker\"", ")", "except", "ValueError", ":", "pass", "return", "names" ]
Returns the list of the names of the workers sorted alphabetically
[ "Returns", "the", "list", "of", "the", "names", "of", "the", "workers", "sorted", "alphabetically" ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/bench/process_debug.py#L41-L49
train
soravux/scoop
bench/process_debug.py
importData
def importData(directory):
    """Parse the input files and return two dictionaries"""
    dataTask = OrderedDict()
    dataQueue = OrderedDict()
    for fichier in sorted(os.listdir(directory)):
        try:
            with open("{directory}/{fichier}".format(**locals()), 'rb') as f:
                fileName, fileType = fichier.rsplit('-', 1)
                if fileType == "QUEUE":
                    dataQueue[fileName] = pickle.load(f)
                else:
                    dataTask[fileName] = pickle.load(f)
        except:
            # Can be a directory
            pass
    return dataTask, dataQueue
python
def importData(directory):
    """Parse the input files and return two dictionaries"""
    dataTask = OrderedDict()
    dataQueue = OrderedDict()
    for fichier in sorted(os.listdir(directory)):
        try:
            with open("{directory}/{fichier}".format(**locals()), 'rb') as f:
                fileName, fileType = fichier.rsplit('-', 1)
                if fileType == "QUEUE":
                    dataQueue[fileName] = pickle.load(f)
                else:
                    dataTask[fileName] = pickle.load(f)
        except:
            # Can be a directory
            pass
    return dataTask, dataQueue
[ "def", "importData", "(", "directory", ")", ":", "dataTask", "=", "OrderedDict", "(", ")", "dataQueue", "=", "OrderedDict", "(", ")", "for", "fichier", "in", "sorted", "(", "os", ".", "listdir", "(", "directory", ")", ")", ":", "try", ":", "with", "open", "(", "\"{directory}/{fichier}\"", ".", "format", "(", "*", "*", "locals", "(", ")", ")", ",", "'rb'", ")", "as", "f", ":", "fileName", ",", "fileType", "=", "fichier", ".", "rsplit", "(", "'-'", ",", "1", ")", "if", "fileType", "==", "\"QUEUE\"", ":", "dataQueue", "[", "fileName", "]", "=", "pickle", ".", "load", "(", "f", ")", "else", ":", "dataTask", "[", "fileName", "]", "=", "pickle", ".", "load", "(", "f", ")", "except", ":", "# Can be a directory", "pass", "return", "dataTask", ",", "dataQueue" ]
Parse the input files and return two dictionaries
[ "Parse", "the", "input", "files", "and", "return", "two", "dictionnaries" ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/bench/process_debug.py#L51-L66
train
soravux/scoop
bench/process_debug.py
getTimes
def getTimes(dataTasks):
    """Get the start time and the end time of data in milliseconds"""
    global begin_time
    start_time, end_time = float('inf'), 0
    for fichier, vals in dataTasks.items():
        try:
            if hasattr(vals, 'values'):
                tmp_start_time = min([a['start_time'] for a in vals.values()])[0]
                if tmp_start_time < start_time:
                    start_time = tmp_start_time
                tmp_end_time = max([a['end_time'] for a in vals.values()])[0]
                if tmp_end_time > end_time:
                    end_time = tmp_end_time
        except ValueError:
            continue
    begin_time = 1000 * start_time
    return 1000 * start_time, 1000 * end_time
python
def getTimes(dataTasks):
    """Get the start time and the end time of data in milliseconds"""
    global begin_time
    start_time, end_time = float('inf'), 0
    for fichier, vals in dataTasks.items():
        try:
            if hasattr(vals, 'values'):
                tmp_start_time = min([a['start_time'] for a in vals.values()])[0]
                if tmp_start_time < start_time:
                    start_time = tmp_start_time
                tmp_end_time = max([a['end_time'] for a in vals.values()])[0]
                if tmp_end_time > end_time:
                    end_time = tmp_end_time
        except ValueError:
            continue
    begin_time = 1000 * start_time
    return 1000 * start_time, 1000 * end_time
[ "def", "getTimes", "(", "dataTasks", ")", ":", "global", "begin_time", "start_time", ",", "end_time", "=", "float", "(", "'inf'", ")", ",", "0", "for", "fichier", ",", "vals", "in", "dataTask", ".", "items", "(", ")", ":", "try", ":", "if", "hasattr", "(", "vals", ",", "'values'", ")", ":", "tmp_start_time", "=", "min", "(", "[", "a", "[", "'start_time'", "]", "for", "a", "in", "vals", ".", "values", "(", ")", "]", ")", "[", "0", "]", "if", "tmp_start_time", "<", "start_time", ":", "start_time", "=", "tmp_start_time", "tmp_end_time", "=", "max", "(", "[", "a", "[", "'end_time'", "]", "for", "a", "in", "vals", ".", "values", "(", ")", "]", ")", "[", "0", "]", "if", "tmp_end_time", ">", "end_time", ":", "end_time", "=", "tmp_end_time", "except", "ValueError", ":", "continue", "begin_time", "=", "1000", "*", "start_time", "return", "1000", "*", "start_time", ",", "1000", "*", "end_time" ]
Get the start time and the end time of data in milliseconds
[ "Get", "the", "start", "time", "and", "the", "end", "time", "of", "data", "in", "milliseconds" ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/bench/process_debug.py#L79-L95
train
soravux/scoop
bench/process_debug.py
WorkersDensity
def WorkersDensity(dataTasks):
    """Return the worker density data for the graph."""
    start_time, end_time = getTimes(dataTasks)
    graphdata = []
    for name in getWorkersName(dataTasks):
        vals = dataTasks[name]
        if hasattr(vals, 'values'):
            # Data from worker
            workerdata = []
            print("Plotting density map for {}".format(name))
            # We only have 800 pixels
            try:
                for graphtime in timeRange(start_time, end_time,
                                           DENSITY_MAP_TIME_AXIS_LENGTH):
                    for a in vals.values():
                        if not all((a['start_time'], a['end_time'])):
                            print("Invalid data:", a['start_time'], a['end_time'])
                        #print(a['start_time'], a['end_time'])
                    workerdata.append(
                        sum([a['start_time'][0] <= float(graphtime) / 1000. <= a['end_time'][0]
                             for a in vals.values()
                             if a['start_time'] and a['end_time']])
                    )
            except OverflowError:
                print("Error processing {0} or {1}".format(start_time, end_time))
            graphdata.append(workerdata)
            if args.binarydensity:
                # Normalize [...]
                maxval = max(graphdata[-1])
                if maxval > 1:
                    maxval = maxval - 1
                    graphdata[-1] = [x - maxval for x in graphdata[-1]]
    return graphdata
python
def WorkersDensity(dataTasks):
    """Return the worker density data for the graph."""
    start_time, end_time = getTimes(dataTasks)
    graphdata = []
    for name in getWorkersName(dataTasks):
        vals = dataTasks[name]
        if hasattr(vals, 'values'):
            # Data from worker
            workerdata = []
            print("Plotting density map for {}".format(name))
            # We only have 800 pixels
            try:
                for graphtime in timeRange(start_time, end_time,
                                           DENSITY_MAP_TIME_AXIS_LENGTH):
                    for a in vals.values():
                        if not all((a['start_time'], a['end_time'])):
                            print("Invalid data:", a['start_time'], a['end_time'])
                        #print(a['start_time'], a['end_time'])
                    workerdata.append(
                        sum([a['start_time'][0] <= float(graphtime) / 1000. <= a['end_time'][0]
                             for a in vals.values()
                             if a['start_time'] and a['end_time']])
                    )
            except OverflowError:
                print("Error processing {0} or {1}".format(start_time, end_time))
            graphdata.append(workerdata)
            if args.binarydensity:
                # Normalize [...]
                maxval = max(graphdata[-1])
                if maxval > 1:
                    maxval = maxval - 1
                    graphdata[-1] = [x - maxval for x in graphdata[-1]]
    return graphdata
[ "def", "WorkersDensity", "(", "dataTasks", ")", ":", "start_time", ",", "end_time", "=", "getTimes", "(", "dataTasks", ")", "graphdata", "=", "[", "]", "for", "name", "in", "getWorkersName", "(", "dataTasks", ")", ":", "vals", "=", "dataTasks", "[", "name", "]", "if", "hasattr", "(", "vals", ",", "'values'", ")", ":", "# Data from worker", "workerdata", "=", "[", "]", "print", "(", "\"Plotting density map for {}\"", ".", "format", "(", "name", ")", ")", "# We only have 800 pixels", "try", ":", "for", "graphtime", "in", "timeRange", "(", "start_time", ",", "end_time", ",", "DENSITY_MAP_TIME_AXIS_LENGTH", ")", ":", "for", "a", "in", "vals", ".", "values", "(", ")", ":", "if", "not", "all", "(", "(", "a", "[", "'start_time'", "]", ",", "a", "[", "'end_time'", "]", ")", ")", ":", "print", "(", "\"Invalid data:\"", ",", "a", "[", "'start_time'", "]", ",", "a", "[", "'end_time'", "]", ")", "#print(a['start_time'], a['end_time'])", "workerdata", ".", "append", "(", "sum", "(", "[", "a", "[", "'start_time'", "]", "[", "0", "]", "<=", "float", "(", "graphtime", ")", "/", "1000.", "<=", "a", "[", "'end_time'", "]", "[", "0", "]", "for", "a", "in", "vals", ".", "values", "(", ")", "if", "a", "[", "'start_time'", "]", "and", "a", "[", "'end_time'", "]", "]", ")", ")", "except", "OverflowError", ":", "print", "(", "\"Error processing {0} or {1}\"", ".", "format", "(", "start_time", ",", "end_time", ")", ")", "graphdata", ".", "append", "(", "workerdata", ")", "if", "args", ".", "binarydensity", ":", "# Normalize [...]", "maxval", "=", "max", "(", "graphdata", "[", "-", "1", "]", ")", "if", "maxval", ">", "1", ":", "maxval", "=", "maxval", "-", "1", "graphdata", "[", "-", "1", "]", "=", "[", "x", "-", "maxval", "for", "x", "in", "graphdata", "[", "-", "1", "]", "]", "return", "graphdata" ]
Return the worker density data for the graph.
[ "Return", "the", "worker", "density", "data", "for", "the", "graph", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/bench/process_debug.py#L97-L130
train
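Stripped of plotting and logging, the density computation above reduces to counting how many (start, end) task intervals cover each sampled instant; a dependency-free sketch:

tasks = [(0.0, 2.0), (1.0, 4.0), (3.0, 5.0)]   # (start_s, end_s) per future

def density(tasks, t0, t1, samples):
    # One count per sample: how many tasks were running at that instant.
    step = (t1 - t0) / samples
    out = []
    for i in range(samples):
        t = t0 + i * step
        out.append(sum(start <= t <= end for start, end in tasks))
    return out

print(density(tasks, 0.0, 5.0, 10))
# [1, 1, 2, 2, 2, 1, 2, 2, 2, 1] -- concurrent tasks per time sample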
soravux/scoop
bench/process_debug.py
plotDensity
def plotDensity(dataTask, filename):
    """Plot the worker density graph"""
    #def format_worker(x, pos=None):
    #    """Formats the worker name"""
    #    #workers = filter (lambda a: a[:6] != "broker", dataTask.keys())
    #    workers = [a for a in dataTask.keys() if a[:6] != "broker"]
    #    return workers[x]

    def format_time(x, pos=None):
        """Formats the time"""
        start_time, end_time = [(a - begin_time) / 1000 for a in getTimes(dataTask)]
        return int(end_time * x / DENSITY_MAP_TIME_AXIS_LENGTH)

    graphdata = WorkersDensity(dataTask)
    if len(graphdata):
        fig = plt.figure()
        ax = fig.add_subplot(111)
        box = ax.get_position()
        ax.set_position([box.x0 + 0.15 * box.width, box.y0, box.width, box.height])
        #cax = ax.imshow(graphdata, interpolation='nearest', aspect='auto')
        if args.binarydensity:
            cmap = ListedColormap(['r', 'g'])
            norm = BoundaryNorm([0, 0.5, 1], cmap.N)
            cax = ax.imshow(graphdata, interpolation='nearest', aspect='auto',
                            cmap=cmap, norm=norm)
        else:
            cax = ax.imshow(graphdata, interpolation='nearest', aspect='auto')
        plt.xlabel('time (s)'); plt.ylabel('Worker'); ax.set_title('Work density')
        ax.yaxis.set_ticks(range(len(graphdata)))
        ax.tick_params(axis='both', which='major', labelsize=6)
        #ax.yaxis.set_major_formatter(ticker.FuncFormatter(format_worker))
        interval_size = DENSITY_MAP_TIME_AXIS_LENGTH // 4
        ax.xaxis.set_ticks(range(0, DENSITY_MAP_TIME_AXIS_LENGTH + interval_size,
                                 interval_size))
        ax.xaxis.set_major_formatter(ticker.FuncFormatter(format_time))
        if args.binarydensity:
            cax.set_clim(0, 1)
            cbar = fig.colorbar(cax, ticks=[0, 1])
        else:
            cbar = fig.colorbar(cax)
        fig.savefig(filename)
python
def plotDensity(dataTask, filename):
    """Plot the worker density graph"""
    #def format_worker(x, pos=None):
    #    """Formats the worker name"""
    #    #workers = filter (lambda a: a[:6] != "broker", dataTask.keys())
    #    workers = [a for a in dataTask.keys() if a[:6] != "broker"]
    #    return workers[x]

    def format_time(x, pos=None):
        """Formats the time"""
        start_time, end_time = [(a - begin_time) / 1000 for a in getTimes(dataTask)]
        return int(end_time * x / DENSITY_MAP_TIME_AXIS_LENGTH)

    graphdata = WorkersDensity(dataTask)
    if len(graphdata):
        fig = plt.figure()
        ax = fig.add_subplot(111)
        box = ax.get_position()
        ax.set_position([box.x0 + 0.15 * box.width, box.y0, box.width, box.height])
        #cax = ax.imshow(graphdata, interpolation='nearest', aspect='auto')
        if args.binarydensity:
            cmap = ListedColormap(['r', 'g'])
            norm = BoundaryNorm([0, 0.5, 1], cmap.N)
            cax = ax.imshow(graphdata, interpolation='nearest', aspect='auto',
                            cmap=cmap, norm=norm)
        else:
            cax = ax.imshow(graphdata, interpolation='nearest', aspect='auto')
        plt.xlabel('time (s)'); plt.ylabel('Worker'); ax.set_title('Work density')
        ax.yaxis.set_ticks(range(len(graphdata)))
        ax.tick_params(axis='both', which='major', labelsize=6)
        #ax.yaxis.set_major_formatter(ticker.FuncFormatter(format_worker))
        interval_size = DENSITY_MAP_TIME_AXIS_LENGTH // 4
        ax.xaxis.set_ticks(range(0, DENSITY_MAP_TIME_AXIS_LENGTH + interval_size,
                                 interval_size))
        ax.xaxis.set_major_formatter(ticker.FuncFormatter(format_time))
        if args.binarydensity:
            cax.set_clim(0, 1)
            cbar = fig.colorbar(cax, ticks=[0, 1])
        else:
            cbar = fig.colorbar(cax)
        fig.savefig(filename)
[ "def", "plotDensity", "(", "dataTask", ",", "filename", ")", ":", "#def format_worker(x, pos=None):", "# \"\"\"Formats the worker name\"\"\"", "# #workers = filter (lambda a: a[:6] != \"broker\", dataTask.keys())", "# workers = [a for a in dataTask.keys() if a[:6] != \"broker\"]", "# return workers[x]", "def", "format_time", "(", "x", ",", "pos", "=", "None", ")", ":", "\"\"\"Formats the time\"\"\"", "start_time", ",", "end_time", "=", "[", "(", "a", "-", "begin_time", ")", "/", "1000", "for", "a", "in", "getTimes", "(", "dataTask", ")", "]", "return", "int", "(", "end_time", "*", "x", "/", "DENSITY_MAP_TIME_AXIS_LENGTH", ")", "graphdata", "=", "WorkersDensity", "(", "dataTask", ")", "if", "len", "(", "graphdata", ")", ":", "fig", "=", "plt", ".", "figure", "(", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "box", "=", "ax", ".", "get_position", "(", ")", "ax", ".", "set_position", "(", "[", "box", ".", "x0", "+", "0.15", "*", "box", ".", "width", ",", "box", ".", "y0", ",", "box", ".", "width", ",", "box", ".", "height", "]", ")", "#cax = ax.imshow(graphdata, interpolation='nearest', aspect='auto')", "if", "args", ".", "binarydensity", ":", "cmap", "=", "ListedColormap", "(", "[", "'r'", ",", "'g'", "]", ")", "norm", "=", "BoundaryNorm", "(", "[", "0", ",", "0.5", ",", "1", "]", ",", "cmap", ".", "N", ")", "cax", "=", "ax", ".", "imshow", "(", "graphdata", ",", "interpolation", "=", "'nearest'", ",", "aspect", "=", "'auto'", ",", "cmap", "=", "cmap", ",", "norm", "=", "norm", ")", "else", ":", "cax", "=", "ax", ".", "imshow", "(", "graphdata", ",", "interpolation", "=", "'nearest'", ",", "aspect", "=", "'auto'", ")", "plt", ".", "xlabel", "(", "'time (s)'", ")", "plt", ".", "ylabel", "(", "'Worker'", ")", "ax", ".", "set_title", "(", "'Work density'", ")", "ax", ".", "yaxis", ".", "set_ticks", "(", "range", "(", "len", "(", "graphdata", ")", ")", ")", "ax", ".", "tick_params", "(", "axis", "=", "'both'", ",", "which", "=", "'major'", ",", "labelsize", "=", "6", ")", "#ax.yaxis.set_major_formatter(ticker.FuncFormatter(format_worker))", "interval_size", "=", "DENSITY_MAP_TIME_AXIS_LENGTH", "//", "4", "ax", ".", "xaxis", ".", "set_ticks", "(", "range", "(", "0", ",", "DENSITY_MAP_TIME_AXIS_LENGTH", "+", "interval_size", ",", "interval_size", ")", ")", "ax", ".", "xaxis", ".", "set_major_formatter", "(", "ticker", ".", "FuncFormatter", "(", "format_time", ")", ")", "if", "args", ".", "binarydensity", ":", "cax", ".", "set_clim", "(", "0", ",", "1", ")", "cbar", "=", "fig", ".", "colorbar", "(", "cax", ",", "ticks", "=", "[", "0", ",", "1", "]", ")", "else", ":", "cbar", "=", "fig", ".", "colorbar", "(", "cax", ")", "fig", ".", "savefig", "(", "filename", ")" ]
Plot the worker density graph
[ "Plot", "the", "worker", "density", "graph" ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/bench/process_debug.py#L132-L173
train
soravux/scoop
bench/process_debug.py
plotBrokerQueue
def plotBrokerQueue(dataTask, filename):
    """Generates the broker queue length graphic."""
    print("Plotting broker queue length for {0}.".format(filename))
    plt.figure()

    # Queue length
    plt.subplot(211)
    for fichier, vals in dataTask.items():
        if type(vals) == list:
            timestamps = list(map(datetime.fromtimestamp,
                                  map(int, list(zip(*vals))[0])))
            # Data is from broker
            plt.plot_date(timestamps,
                          list(zip(*vals))[2],
                          linewidth=1.0,
                          marker='o',
                          markersize=2,
                          label=fichier)
    plt.title('Broker queue length')
    plt.ylabel('Tasks')

    # Requests received
    plt.subplot(212)
    for fichier, vals in dataTask.items():
        if type(vals) == list:
            timestamps = list(map(datetime.fromtimestamp,
                                  map(int, list(zip(*vals))[0])))
            # Data is from broker
            plt.plot_date(timestamps,
                          list(zip(*vals))[3],
                          linewidth=1.0,
                          marker='o',
                          markersize=2,
                          label=fichier)
    plt.title('Broker pending requests')
    plt.xlabel('time (s)')
    plt.ylabel('Requests')

    plt.savefig(filename)
python
def plotBrokerQueue(dataTask, filename):
    """Generates the broker queue length graphic."""
    print("Plotting broker queue length for {0}.".format(filename))
    plt.figure()

    # Queue length
    plt.subplot(211)
    for fichier, vals in dataTask.items():
        if type(vals) == list:
            timestamps = list(map(datetime.fromtimestamp,
                                  map(int, list(zip(*vals))[0])))
            # Data is from broker
            plt.plot_date(timestamps,
                          list(zip(*vals))[2],
                          linewidth=1.0,
                          marker='o',
                          markersize=2,
                          label=fichier)
    plt.title('Broker queue length')
    plt.ylabel('Tasks')

    # Requests received
    plt.subplot(212)
    for fichier, vals in dataTask.items():
        if type(vals) == list:
            timestamps = list(map(datetime.fromtimestamp,
                                  map(int, list(zip(*vals))[0])))
            # Data is from broker
            plt.plot_date(timestamps,
                          list(zip(*vals))[3],
                          linewidth=1.0,
                          marker='o',
                          markersize=2,
                          label=fichier)
    plt.title('Broker pending requests')
    plt.xlabel('time (s)')
    plt.ylabel('Requests')

    plt.savefig(filename)
[ "def", "plotBrokerQueue", "(", "dataTask", ",", "filename", ")", ":", "print", "(", "\"Plotting broker queue length for {0}.\"", ".", "format", "(", "filename", ")", ")", "plt", ".", "figure", "(", ")", "# Queue length", "plt", ".", "subplot", "(", "211", ")", "for", "fichier", ",", "vals", "in", "dataTask", ".", "items", "(", ")", ":", "if", "type", "(", "vals", ")", "==", "list", ":", "timestamps", "=", "list", "(", "map", "(", "datetime", ".", "fromtimestamp", ",", "map", "(", "int", ",", "list", "(", "zip", "(", "*", "vals", ")", ")", "[", "0", "]", ")", ")", ")", "# Data is from broker", "plt", ".", "plot_date", "(", "timestamps", ",", "list", "(", "zip", "(", "*", "vals", ")", ")", "[", "2", "]", ",", "linewidth", "=", "1.0", ",", "marker", "=", "'o'", ",", "markersize", "=", "2", ",", "label", "=", "fichier", ")", "plt", ".", "title", "(", "'Broker queue length'", ")", "plt", ".", "ylabel", "(", "'Tasks'", ")", "# Requests received", "plt", ".", "subplot", "(", "212", ")", "for", "fichier", ",", "vals", "in", "dataTask", ".", "items", "(", ")", ":", "if", "type", "(", "vals", ")", "==", "list", ":", "timestamps", "=", "list", "(", "map", "(", "datetime", ".", "fromtimestamp", ",", "map", "(", "int", ",", "list", "(", "zip", "(", "*", "vals", ")", ")", "[", "0", "]", ")", ")", ")", "# Data is from broker", "plt", ".", "plot_date", "(", "timestamps", ",", "list", "(", "zip", "(", "*", "vals", ")", ")", "[", "3", "]", ",", "linewidth", "=", "1.0", ",", "marker", "=", "'o'", ",", "markersize", "=", "2", ",", "label", "=", "fichier", ")", "plt", ".", "title", "(", "'Broker pending requests'", ")", "plt", ".", "xlabel", "(", "'time (s)'", ")", "plt", ".", "ylabel", "(", "'Requests'", ")", "plt", ".", "savefig", "(", "filename", ")" ]
Generates the broker queue length graphic.
[ "Generates", "the", "broker", "queue", "length", "graphic", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/bench/process_debug.py#L175-L209
train
soravux/scoop
bench/process_debug.py
getWorkerInfo
def getWorkerInfo(dataTask):
    """Returns the total execution time and task quantity by worker"""
    workertime = []
    workertasks = []
    for fichier, vals in dataTask.items():
        if hasattr(vals, 'values'):
            #workers_names.append(fichier)
            # Data from worker
            totaltime = sum([a['executionTime'] for a in vals.values()])
            totaltasks = sum([1 for a in vals.values()])
            workertime.append(totaltime)
            workertasks.append(totaltasks)
    return workertime, workertasks
python
def getWorkerInfo(dataTask):
    """Returns the total execution time and task quantity by worker"""
    workertime = []
    workertasks = []
    for fichier, vals in dataTask.items():
        if hasattr(vals, 'values'):
            #workers_names.append(fichier)
            # Data from worker
            totaltime = sum([a['executionTime'] for a in vals.values()])
            totaltasks = sum([1 for a in vals.values()])
            workertime.append(totaltime)
            workertasks.append(totaltasks)
    return workertime, workertasks
[ "def", "getWorkerInfo", "(", "dataTask", ")", ":", "workertime", "=", "[", "]", "workertasks", "=", "[", "]", "for", "fichier", ",", "vals", "in", "dataTask", ".", "items", "(", ")", ":", "if", "hasattr", "(", "vals", ",", "'values'", ")", ":", "#workers_names.append(fichier)", "# Data from worker", "totaltime", "=", "sum", "(", "[", "a", "[", "'executionTime'", "]", "for", "a", "in", "vals", ".", "values", "(", ")", "]", ")", "totaltasks", "=", "sum", "(", "[", "1", "for", "a", "in", "vals", ".", "values", "(", ")", "]", ")", "workertime", ".", "append", "(", "totaltime", ")", "workertasks", ".", "append", "(", "totaltasks", ")", "return", "workertime", ",", "workertasks" ]
Returns the total execution time and task quantity by worker
[ "Returns", "the", "total", "execution", "time", "and", "task", "quantity", "by", "worker" ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/bench/process_debug.py#L234-L246
train
soravux/scoop
bench/process_debug.py
timelines
def timelines(fig, y, xstart, xstop, color='b'):
    """Plot timelines at y from xstart to xstop with given color."""
    fig.hlines(y, xstart, xstop, color, lw=4)
    fig.vlines(xstart, y+0.03, y-0.03, color, lw=2)
    fig.vlines(xstop, y+0.03, y-0.03, color, lw=2)
python
def timelines(fig, y, xstart, xstop, color='b'):
    """Plot timelines at y from xstart to xstop with given color."""
    fig.hlines(y, xstart, xstop, color, lw=4)
    fig.vlines(xstart, y+0.03, y-0.03, color, lw=2)
    fig.vlines(xstop, y+0.03, y-0.03, color, lw=2)
[ "def", "timelines", "(", "fig", ",", "y", ",", "xstart", ",", "xstop", ",", "color", "=", "'b'", ")", ":", "fig", ".", "hlines", "(", "y", ",", "xstart", ",", "xstop", ",", "color", ",", "lw", "=", "4", ")", "fig", ".", "vlines", "(", "xstart", ",", "y", "+", "0.03", ",", "y", "-", "0.03", ",", "color", ",", "lw", "=", "2", ")", "fig", ".", "vlines", "(", "xstop", ",", "y", "+", "0.03", ",", "y", "-", "0.03", ",", "color", ",", "lw", "=", "2", ")" ]
Plot timelines at y from xstart to xstop with given color.
[ "Plot", "timelines", "at", "y", "from", "xstart", "to", "xstop", "with", "given", "color", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/bench/process_debug.py#L331-L335
train
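A self-contained smoke test for the helper above; note that despite the parameter name `fig`, only hlines/vlines are called on it, so it is really used as an Axes, matching how plotTimeline passes `ax` in the record after next. Assumes matplotlib is installed:

import matplotlib
matplotlib.use("Agg")          # headless backend for scripted use
import matplotlib.pyplot as plt

def timelines(ax, y, xstart, xstop, color='b'):
    # Same body as the record above, parameter renamed for clarity.
    ax.hlines(y, xstart, xstop, color, lw=4)
    ax.vlines(xstart, y + 0.03, y - 0.03, color, lw=2)
    ax.vlines(xstop, y + 0.03, y - 0.03, color, lw=2)

fig, ax = plt.subplots()
timelines(ax, 0.5, 1.0, 3.0)            # one task bar from t=1s to t=3s
timelines(ax, 0.7, 2.0, 4.0, color='g') # a second, overlapping bar
fig.savefig("timelines_demo.png")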
soravux/scoop
bench/process_debug.py
plotTimeline
def plotTimeline(dataTask, filename):
    """Build a timeline"""
    fig = plt.figure()
    ax = fig.gca()

    worker_names = [x for x in dataTask.keys() if "broker" not in x]
    min_time = getMinimumTime(dataTask)

    ystep = 1. / (len(worker_names) + 1)
    y = 0
    for worker, vals in dataTask.items():
        if "broker" in worker:
            continue
        y += ystep
        if hasattr(vals, 'values'):
            for future in vals.values():
                start_time = [future['start_time'][0] - min_time]
                end_time = [future['end_time'][0] - min_time]
                timelines(ax, y, start_time, end_time)

    #ax.xaxis_date()
    #myFmt = DateFormatter('%H:%M:%S')
    #ax.xaxis.set_major_formatter(myFmt)
    #ax.xaxis.set_major_locator(SecondLocator(0, interval=20))

    #delta = (stop.max() - start.min())/10

    ax.set_yticks(np.arange(ystep, 1, ystep))
    ax.set_yticklabels(worker_names)
    ax.set_ylim(0, 1)
    #fig.xlim()
    ax.set_xlabel('Time')
    fig.savefig(filename)
python
def plotTimeline(dataTask, filename):
    """Build a timeline"""
    fig = plt.figure()
    ax = fig.gca()

    worker_names = [x for x in dataTask.keys() if "broker" not in x]
    min_time = getMinimumTime(dataTask)

    ystep = 1. / (len(worker_names) + 1)
    y = 0
    for worker, vals in dataTask.items():
        if "broker" in worker:
            continue
        y += ystep
        if hasattr(vals, 'values'):
            for future in vals.values():
                start_time = [future['start_time'][0] - min_time]
                end_time = [future['end_time'][0] - min_time]
                timelines(ax, y, start_time, end_time)

    #ax.xaxis_date()
    #myFmt = DateFormatter('%H:%M:%S')
    #ax.xaxis.set_major_formatter(myFmt)
    #ax.xaxis.set_major_locator(SecondLocator(0, interval=20))

    #delta = (stop.max() - start.min())/10

    ax.set_yticks(np.arange(ystep, 1, ystep))
    ax.set_yticklabels(worker_names)
    ax.set_ylim(0, 1)
    #fig.xlim()
    ax.set_xlabel('Time')
    fig.savefig(filename)
[ "def", "plotTimeline", "(", "dataTask", ",", "filename", ")", ":", "fig", "=", "plt", ".", "figure", "(", ")", "ax", "=", "fig", ".", "gca", "(", ")", "worker_names", "=", "[", "x", "for", "x", "in", "dataTask", ".", "keys", "(", ")", "if", "\"broker\"", "not", "in", "x", "]", "min_time", "=", "getMinimumTime", "(", "dataTask", ")", "ystep", "=", "1.", "/", "(", "len", "(", "worker_names", ")", "+", "1", ")", "y", "=", "0", "for", "worker", ",", "vals", "in", "dataTask", ".", "items", "(", ")", ":", "if", "\"broker\"", "in", "worker", ":", "continue", "y", "+=", "ystep", "if", "hasattr", "(", "vals", ",", "'values'", ")", ":", "for", "future", "in", "vals", ".", "values", "(", ")", ":", "start_time", "=", "[", "future", "[", "'start_time'", "]", "[", "0", "]", "-", "min_time", "]", "end_time", "=", "[", "future", "[", "'end_time'", "]", "[", "0", "]", "-", "min_time", "]", "timelines", "(", "ax", ",", "y", ",", "start_time", ",", "end_time", ")", "#ax.xaxis_date()", "#myFmt = DateFormatter('%H:%M:%S')", "#ax.xaxis.set_major_formatter(myFmt)", "#ax.xaxis.set_major_locator(SecondLocator(0, interval=20))", "#delta = (stop.max() - start.min())/10", "ax", ".", "set_yticks", "(", "np", ".", "arange", "(", "ystep", ",", "1", ",", "ystep", ")", ")", "ax", ".", "set_yticklabels", "(", "worker_names", ")", "ax", ".", "set_ylim", "(", "0", ",", "1", ")", "#fig.xlim()", "ax", ".", "set_xlabel", "(", "'Time'", ")", "fig", ".", "savefig", "(", "filename", ")" ]
Build a timeline
[ "Build", "a", "timeline" ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/bench/process_debug.py#L350-L383
train
soravux/scoop
scoop/launch/workerLaunch.py
Host.setWorker
def setWorker(self, *args, **kwargs):
    """Add a worker assignment
    Arguments and order to pass are defined in LAUNCHING_ARGUMENTS
    Using named args is advised.
    """
    try:
        la = self.LAUNCHING_ARGUMENTS(*args, **kwargs)
    except TypeError as e:
        scoop.logger.error(("addWorker failed to convert args %s and kwargs %s "
                            "to namedtuple (requires %s arguments (names %s))")
                           % (args, kwargs,
                              len(self.LAUNCHING_ARGUMENTS._fields),
                              self.LAUNCHING_ARGUMENTS._fields))
        raise
    self.workersArguments = la
python
def setWorker(self, *args, **kwargs):
    """Add a worker assignment
    Arguments and order to pass are defined in LAUNCHING_ARGUMENTS
    Using named args is advised.
    """
    try:
        la = self.LAUNCHING_ARGUMENTS(*args, **kwargs)
    except TypeError as e:
        scoop.logger.error(("addWorker failed to convert args %s and kwargs %s "
                            "to namedtuple (requires %s arguments (names %s))")
                           % (args, kwargs,
                              len(self.LAUNCHING_ARGUMENTS._fields),
                              self.LAUNCHING_ARGUMENTS._fields))
        raise
    self.workersArguments = la
[ "def", "setWorker", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "try", ":", "la", "=", "self", ".", "LAUNCHING_ARGUMENTS", "(", "*", "args", ",", "*", "*", "kwargs", ")", "except", "TypeError", "as", "e", ":", "scoop", ".", "logger", ".", "error", "(", "(", "\"addWorker failed to convert args %s and kwargs %s \"", "\"to namedtuple (requires %s arguments (names %s)\"", ")", "%", "(", "args", ",", "kwargs", ",", "len", "(", "self", ".", "LAUNCHING_ARGUMENTS", ".", "_fields", ")", ",", "self", ".", "LAUNCHING_ARGUMENTS", ".", "_fields", ")", ")", "self", ".", "workersArguments", "=", "la" ]
Add a worker assignment Arguments and order to pass are defined in LAUNCHING_ARGUMENTS Using named args is advised.
[ "Add", "a", "worker", "assignation", "Arguments", "and", "order", "to", "pass", "are", "defined", "in", "LAUNCHING_ARGUMENTS", "Using", "named", "args", "is", "advised", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/launch/workerLaunch.py#L61-L74
train
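A sketch of the namedtuple-based validation used above; the field names here are a reduced, illustrative subset of scoop's real LAUNCHING_ARGUMENTS, which has many more fields:

from collections import namedtuple

LAUNCHING_ARGUMENTS = namedtuple(
    'LAUNCHING_ARGUMENTS',
    ['pythonExecutable', 'size', 'nice'],   # illustrative subset only
)

ok = LAUNCHING_ARGUMENTS(pythonExecutable='python3', size=4, nice=None)
print(ok.size)                       # 4

try:
    LAUNCHING_ARGUMENTS(size=4)      # missing fields
except TypeError as e:
    # This is the TypeError setWorker catches and logs, using
    # LAUNCHING_ARGUMENTS._fields to report the expected names.
    print("rejected:", e)
    print("expected fields:", LAUNCHING_ARGUMENTS._fields)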
soravux/scoop
scoop/launch/workerLaunch.py
Host._WorkerCommand_environment
def _WorkerCommand_environment(self):
    """Return list of shell commands to prepare the environment for bootstrap."""
    worker = self.workersArguments
    c = []
    if worker.prolog:
        c.extend([
            "source",
            worker.prolog,
            "&&",
        ])
    if worker.pythonPath and not self.isLocal():
        # Tried to make it compliant to all shell variants.
        c.extend([
            "env",
            "PYTHONPATH={0}:$PYTHONPATH".format(worker.pythonPath),
        ])
    elif worker.pythonPath and self.isLocal():
        # Tried to make it compliant to all shell variants.
        c.extend([
            "env",
            "PYTHONPATH={0}:{1}".format(
                worker.pythonPath,
                os.environ.get("PYTHONPATH", ""),
            ),
        ])
    return c
python
def _WorkerCommand_environment(self):
    """Return list of shell commands to prepare the environment for bootstrap."""
    worker = self.workersArguments
    c = []
    if worker.prolog:
        c.extend([
            "source",
            worker.prolog,
            "&&",
        ])
    if worker.pythonPath and not self.isLocal():
        # Tried to make it compliant to all shell variants.
        c.extend([
            "env",
            "PYTHONPATH={0}:$PYTHONPATH".format(worker.pythonPath),
        ])
    elif worker.pythonPath and self.isLocal():
        # Tried to make it compliant to all shell variants.
        c.extend([
            "env",
            "PYTHONPATH={0}:{1}".format(
                worker.pythonPath,
                os.environ.get("PYTHONPATH", ""),
            ),
        ])
    return c
[ "def", "_WorkerCommand_environment", "(", "self", ")", ":", "worker", "=", "self", ".", "workersArguments", "c", "=", "[", "]", "if", "worker", ".", "prolog", ":", "c", ".", "extend", "(", "[", "\"source\"", ",", "worker", ".", "prolog", ",", "\"&&\"", ",", "]", ")", "if", "worker", ".", "pythonPath", "and", "not", "self", ".", "isLocal", "(", ")", ":", "# Tried to make it compliant to all shell variants.", "c", ".", "extend", "(", "[", "\"env\"", ",", "\"PYTHONPATH={0}:$PYTHONPATH\"", ".", "format", "(", "worker", ".", "pythonPath", ")", ",", "]", ")", "elif", "worker", ".", "pythonPath", "and", "self", ".", "isLocal", "(", ")", ":", "# Tried to make it compliant to all shell variants.", "c", ".", "extend", "(", "[", "\"env\"", ",", "\"PYTHONPATH={0}:{1}\"", ".", "format", "(", "worker", ".", "pythonPath", ",", "os", ".", "environ", ".", "get", "(", "\"PYTHONPATH\"", ",", "\"\"", ")", ",", ")", ",", "]", ")", "return", "c" ]
Return list of shell commands to prepare the environment for bootstrap.
[ "Return", "list", "of", "shell", "commands", "to", "prepare", "the", "environment", "for", "bootstrap", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/launch/workerLaunch.py#L80-L106
train
soravux/scoop
scoop/launch/workerLaunch.py
Host._WorkerCommand_launcher
def _WorkerCommand_launcher(self):
    """Return list of commands to start the bootstrap process"""
    return [
        self.workersArguments.pythonExecutable,
        '-m', 'scoop.launch.__main__',
        str(self.workerAmount),
        str(self.workersArguments.verbose),
    ]
python
def _WorkerCommand_launcher(self):
    """Return list of commands to start the bootstrap process"""
    return [
        self.workersArguments.pythonExecutable,
        '-m', 'scoop.launch.__main__',
        str(self.workerAmount),
        str(self.workersArguments.verbose),
    ]
[ "def", "_WorkerCommand_launcher", "(", "self", ")", ":", "return", "[", "self", ".", "workersArguments", ".", "pythonExecutable", ",", "'-m'", ",", "'scoop.launch.__main__'", ",", "str", "(", "self", ".", "workerAmount", ")", ",", "str", "(", "self", ".", "workersArguments", ".", "verbose", ")", ",", "]" ]
Return list of commands to start the bootstrap process
[ "Return", "list", "commands", "to", "start", "the", "bootstrap", "process" ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/launch/workerLaunch.py#L108-L116
train
soravux/scoop
scoop/launch/workerLaunch.py
Host._WorkerCommand_options
def _WorkerCommand_options(self):
    """Return list of options for bootstrap"""
    worker = self.workersArguments
    c = []
    # If broker is on localhost
    if self.hostname == worker.brokerHostname:
        broker = "127.0.0.1"
    else:
        broker = worker.brokerHostname
    if worker.nice is not None:
        c.extend(['--nice', str(worker.nice)])
    c.extend(['--size', str(worker.size)])
    if self.isLocal():
        c.extend(['--workingDirectory', str(worker.path)])
    else:
        c.extend(['--workingDirectory', '"{0}"'.format(str(worker.path))])
    c.extend(['--brokerHostname', broker])
    c.extend(['--externalBrokerHostname', worker.brokerHostname])
    c.extend(['--taskPort', str(worker.brokerPorts[0])])
    c.extend(['--metaPort', str(worker.brokerPorts[1])])
    if worker.origin and worker.executable:
        c.append('--origin')
    if worker.debug:
        c.append('--debug')
    if worker.profiling:
        c.append('--profile')
    if worker.backend:
        c.append('--backend={0}'.format(worker.backend))
    if worker.verbose >= 1:
        c.append('-' + 'v' * worker.verbose)
    return c
python
def _WorkerCommand_options(self):
    """Return list of options for bootstrap"""
    worker = self.workersArguments
    c = []
    # If broker is on localhost
    if self.hostname == worker.brokerHostname:
        broker = "127.0.0.1"
    else:
        broker = worker.brokerHostname
    if worker.nice is not None:
        c.extend(['--nice', str(worker.nice)])
    c.extend(['--size', str(worker.size)])
    if self.isLocal():
        c.extend(['--workingDirectory', str(worker.path)])
    else:
        c.extend(['--workingDirectory', '"{0}"'.format(str(worker.path))])
    c.extend(['--brokerHostname', broker])
    c.extend(['--externalBrokerHostname', worker.brokerHostname])
    c.extend(['--taskPort', str(worker.brokerPorts[0])])
    c.extend(['--metaPort', str(worker.brokerPorts[1])])
    if worker.origin and worker.executable:
        c.append('--origin')
    if worker.debug:
        c.append('--debug')
    if worker.profiling:
        c.append('--profile')
    if worker.backend:
        c.append('--backend={0}'.format(worker.backend))
    if worker.verbose >= 1:
        c.append('-' + 'v' * worker.verbose)
    return c
[ "def", "_WorkerCommand_options", "(", "self", ")", ":", "worker", "=", "self", ".", "workersArguments", "c", "=", "[", "]", "# If broker is on localhost", "if", "self", ".", "hostname", "==", "worker", ".", "brokerHostname", ":", "broker", "=", "\"127.0.0.1\"", "else", ":", "broker", "=", "worker", ".", "brokerHostname", "if", "worker", ".", "nice", "is", "not", "None", ":", "c", ".", "extend", "(", "[", "'--nice'", ",", "str", "(", "worker", ".", "nice", ")", "]", ")", "c", ".", "extend", "(", "[", "'--size'", ",", "str", "(", "worker", ".", "size", ")", "]", ")", "if", "self", ".", "isLocal", "(", ")", ":", "c", ".", "extend", "(", "[", "'--workingDirectory'", ",", "str", "(", "worker", ".", "path", ")", "]", ")", "else", ":", "c", ".", "extend", "(", "[", "'--workingDirectory'", ",", "'\"{0}\"'", ".", "format", "(", "str", "(", "worker", ".", "path", ")", ")", "]", ")", "c", ".", "extend", "(", "[", "'--brokerHostname'", ",", "broker", "]", ")", "c", ".", "extend", "(", "[", "'--externalBrokerHostname'", ",", "worker", ".", "brokerHostname", "]", ")", "c", ".", "extend", "(", "[", "'--taskPort'", ",", "str", "(", "worker", ".", "brokerPorts", "[", "0", "]", ")", "]", ")", "c", ".", "extend", "(", "[", "'--metaPort'", ",", "str", "(", "worker", ".", "brokerPorts", "[", "1", "]", ")", "]", ")", "if", "worker", ".", "origin", "and", "worker", ".", "executable", ":", "c", ".", "append", "(", "'--origin'", ")", "if", "worker", ".", "debug", ":", "c", ".", "append", "(", "'--debug'", ")", "if", "worker", ".", "profiling", ":", "c", ".", "append", "(", "'--profile'", ")", "if", "worker", ".", "backend", ":", "c", ".", "append", "(", "'--backend={0}'", ".", "format", "(", "worker", ".", "backend", ")", ")", "if", "worker", ".", "verbose", ">=", "1", ":", "c", ".", "append", "(", "'-'", "+", "'v'", "*", "worker", ".", "verbose", ")", "return", "c" ]
Return list of options for bootstrap
[ "Return", "list", "of", "options", "for", "bootstrap" ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/launch/workerLaunch.py#L118-L150
train
soravux/scoop
scoop/launch/workerLaunch.py
Host._WorkerCommand_executable
def _WorkerCommand_executable(self):
    """Return executable and any options to be executed by bootstrap"""
    worker = self.workersArguments
    c = []
    if worker.executable:
        c.append(worker.executable)
    # This trick is used to parse correctly quotes
    # (ie. myScript.py 'arg1 "arg2" arg3')
    # Because it is parsed by a shell, every quote gets re-interpreted.
    # It replaces simple quotation marks with \\\" which gets evaluated to
    # \" by the second shell which prints it out as a double quote.
    if worker.args:
        if self.isLocal():
            # If local, no shell is used so no escaping needed
            c.extend([
                '{0}'.format(a) for a in worker.args
            ])
        else:
            c.extend([
                '"{0}"'.format(a.replace('"', '\\\"')) for a in worker.args
            ])
    return c
python
def _WorkerCommand_executable(self):
    """Return executable and any options to be executed by bootstrap"""
    worker = self.workersArguments
    c = []
    if worker.executable:
        c.append(worker.executable)
    # This trick is used to parse correctly quotes
    # (ie. myScript.py 'arg1 "arg2" arg3')
    # Because it is parsed by a shell, every quote gets re-interpreted.
    # It replaces simple quotation marks with \\\" which gets evaluated to
    # \" by the second shell which prints it out as a double quote.
    if worker.args:
        if self.isLocal():
            # If local, no shell is used so no escaping needed
            c.extend([
                '{0}'.format(a) for a in worker.args
            ])
        else:
            c.extend([
                '"{0}"'.format(a.replace('"', '\\\"')) for a in worker.args
            ])
    return c
[ "def", "_WorkerCommand_executable", "(", "self", ")", ":", "worker", "=", "self", ".", "workersArguments", "c", "=", "[", "]", "if", "worker", ".", "executable", ":", "c", ".", "append", "(", "worker", ".", "executable", ")", "# This trick is used to parse correctly quotes", "# (ie. myScript.py 'arg1 \"arg2\" arg3')", "# Because it is parsed by a shell, every quote gets re-interpreted.", "# It replaces simple quotation marks with \\\\\\\" which gets evaluated to", "# \\\" by the second shell which prints it out as a double quote.", "if", "worker", ".", "args", ":", "if", "self", ".", "isLocal", "(", ")", ":", "# If local, no shell is used so no escaping needed", "c", ".", "extend", "(", "[", "'{0}'", ".", "format", "(", "a", ")", "for", "a", "in", "worker", ".", "args", "]", ")", "else", ":", "c", ".", "extend", "(", "[", "'\"{0}\"'", ".", "format", "(", "a", ".", "replace", "(", "'\"'", ",", "'\\\\\\\"'", ")", ")", "for", "a", "in", "worker", ".", "args", "]", ")", "return", "c" ]
Return executable and any options to be executed by bootstrap
[ "Return", "executable", "and", "any", "options", "to", "be", "executed", "by", "bootstrap" ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/launch/workerLaunch.py#L152-L174
train
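A quick illustration of the quote-escaping trick described above, outside of any class; `args` stands in for worker.args:

args = ['arg1', 'say "hi"', 'arg3']

# Replace every double quote with a backslash-escaped one, then wrap
# the whole argument in double quotes, as the remote branch does.
remote = ['"{0}"'.format(a.replace('"', '\\\"')) for a in args]
print(remote)
# ['"arg1"', '"say \\"hi\\""', '"arg3"']

# Joined into an ssh command line, the remote shell strips one layer of
# quoting and the worker still sees the embedded double quotes intact.
print(" ".join(remote))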
soravux/scoop
scoop/launch/workerLaunch.py
Host._getWorkerCommandList
def _getWorkerCommandList(self):
    """Generate the workerCommand as list"""
    c = []

    c.extend(self._WorkerCommand_environment())

    c.extend(self._WorkerCommand_launcher())
    c.extend(self._WorkerCommand_options())
    c.extend(self._WorkerCommand_executable())
    return c
python
def _getWorkerCommandList(self):
    """Generate the workerCommand as list"""
    c = []

    c.extend(self._WorkerCommand_environment())

    c.extend(self._WorkerCommand_launcher())
    c.extend(self._WorkerCommand_options())
    c.extend(self._WorkerCommand_executable())
    return c
[ "def", "_getWorkerCommandList", "(", "self", ")", ":", "c", "=", "[", "]", "c", ".", "extend", "(", "self", ".", "_WorkerCommand_environment", "(", ")", ")", "c", ".", "extend", "(", "self", ".", "_WorkerCommand_launcher", "(", ")", ")", "c", ".", "extend", "(", "self", ".", "_WorkerCommand_options", "(", ")", ")", "c", ".", "extend", "(", "self", ".", "_WorkerCommand_executable", "(", ")", ")", "return", "c" ]
Generate the workerCommand as list
[ "Generate", "the", "workerCommand", "as", "list" ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/launch/workerLaunch.py#L176-L184
train
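For context, the four helpers are concatenated in a fixed order; a hypothetical rendering with made-up values (none of these strings come from a real run):

environment = ["env", "PYTHONPATH=/opt/proj:$PYTHONPATH"]
launcher = ["python3", "-m", "scoop.launch.__main__", "2", "0"]
options = ["--size", "8", "--brokerHostname", "127.0.0.1",
           "--taskPort", "5555", "--metaPort", "5556"]
executable = ["myScript.py", '"arg1"']

command = []
for part in (environment, launcher, options, executable):
    command.extend(part)           # same order as _getWorkerCommandList
print(" ".join(command))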
soravux/scoop
scoop/launch/workerLaunch.py
Host.launch
def launch(self, tunnelPorts=None):
    """Launch every worker assigned on this host."""
    if self.isLocal():
        # Launching local workers
        c = self._getWorkerCommandList()
        self.subprocesses.append(subprocess.Popen(c))
    else:
        # Launching remotely
        BASE_SSH[0] = self.ssh_executable
        sshCmd = BASE_SSH if not self.rsh else BASE_RSH
        if tunnelPorts is not None:
            sshCmd += [
                '-R {0}:127.0.0.1:{0}'.format(tunnelPorts[0]),
                '-R {0}:127.0.0.1:{0}'.format(tunnelPorts[1]),
            ]
        self.subprocesses.append(
            subprocess.Popen(sshCmd + [self.hostname, self.getCommand()],
                             bufsize=-1,
                             stdout=None,
                             stderr=None,
                             stdin=subprocess.PIPE
                             )
        )
    return self.subprocesses
python
def launch(self, tunnelPorts=None):
    """Launch every worker assigned on this host."""
    if self.isLocal():
        # Launching local workers
        c = self._getWorkerCommandList()
        self.subprocesses.append(subprocess.Popen(c))
    else:
        # Launching remotely
        BASE_SSH[0] = self.ssh_executable
        sshCmd = BASE_SSH if not self.rsh else BASE_RSH
        if tunnelPorts is not None:
            sshCmd += [
                '-R {0}:127.0.0.1:{0}'.format(tunnelPorts[0]),
                '-R {0}:127.0.0.1:{0}'.format(tunnelPorts[1]),
            ]
        self.subprocesses.append(
            subprocess.Popen(sshCmd + [self.hostname, self.getCommand()],
                             bufsize=-1,
                             stdout=None,
                             stderr=None,
                             stdin=subprocess.PIPE
                             )
        )
    return self.subprocesses
[ "def", "launch", "(", "self", ",", "tunnelPorts", "=", "None", ")", ":", "if", "self", ".", "isLocal", "(", ")", ":", "# Launching local workers", "c", "=", "self", ".", "_getWorkerCommandList", "(", ")", "self", ".", "subprocesses", ".", "append", "(", "subprocess", ".", "Popen", "(", "c", ")", ")", "else", ":", "# Launching remotely", "BASE_SSH", "[", "0", "]", "=", "self", ".", "ssh_executable", "sshCmd", "=", "BASE_SSH", "if", "not", "self", ".", "rsh", "else", "BASE_RSH", "if", "tunnelPorts", "is", "not", "None", ":", "sshCmd", "+=", "[", "'-R {0}:127.0.0.1:{0}'", ".", "format", "(", "tunnelPorts", "[", "0", "]", ")", ",", "'-R {0}:127.0.0.1:{0}'", ".", "format", "(", "tunnelPorts", "[", "1", "]", ")", ",", "]", "self", ".", "subprocesses", ".", "append", "(", "subprocess", ".", "Popen", "(", "sshCmd", "+", "[", "self", ".", "hostname", ",", "self", ".", "getCommand", "(", ")", "]", ",", "bufsize", "=", "-", "1", ",", "stdout", "=", "None", ",", "stderr", "=", "None", ",", "stdin", "=", "subprocess", ".", "PIPE", ")", ")", "return", "self", ".", "subprocesses" ]
Launch every worker assigned on this host.
[ "Launch", "every", "worker", "assigned", "on", "this", "host", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/launch/workerLaunch.py#L190-L214
train
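The remote branch above composes an ssh command with reverse tunnels; a made-up rendering of what that looks like (BASE_SSH's real contents live elsewhere in scoop, the value below is only a placeholder):

BASE_SSH = ["ssh", "-x", "-n"]   # placeholder; scoop defines its own list
tunnelPorts = (5555, 5556)
hostname = "node01"
remote_command = "python3 -m scoop.launch.__main__ 2 0 --size 8"

sshCmd = list(BASE_SSH)
sshCmd += ['-R {0}:127.0.0.1:{0}'.format(p) for p in tunnelPorts]
print(" ".join(sshCmd + [hostname, remote_command]))
# ssh -x -n -R 5555:127.0.0.1:5555 -R 5556:127.0.0.1:5556 node01 python3 -m scoop.launch.__main__ 2 0 --size 8
# Each -R publishes a remote port that loops back to the same port on the
# launching machine, so remote workers can reach a locally running broker.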
soravux/scoop
scoop/_types.py
Future._switch
def _switch(self, future):
    """Switch greenlet."""
    scoop._control.current = self
    assert self.greenlet is not None, ("No greenlet to switch to:"
                                       "\n{0}".format(self.__dict__))
    return self.greenlet.switch(future)
python
def _switch(self, future):
    """Switch greenlet."""
    scoop._control.current = self
    assert self.greenlet is not None, ("No greenlet to switch to:"
                                       "\n{0}".format(self.__dict__))
    return self.greenlet.switch(future)
[ "def", "_switch", "(", "self", ",", "future", ")", ":", "scoop", ".", "_control", ".", "current", "=", "self", "assert", "self", ".", "greenlet", "is", "not", "None", ",", "(", "\"No greenlet to switch to:\"", "\"\\n{0}\"", ".", "format", "(", "self", ".", "__dict__", ")", ")", "return", "self", ".", "greenlet", ".", "switch", "(", "future", ")" ]
Switch greenlet.
[ "Switch", "greenlet", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/_types.py#L134-L139
train
soravux/scoop
scoop/_types.py
Future.cancel
def cancel(self):
    """If the call is currently being executed or sent for remote
    execution, then it cannot be cancelled and the method will return
    False, otherwise the call will be cancelled and the method will
    return True."""
    if self in scoop._control.execQueue.movable:
        self.exceptionValue = CancelledError()
        scoop._control.futureDict[self.id]._delete()
        scoop._control.execQueue.remove(self)
        return True
    return False
python
def cancel(self):
    """If the call is currently being executed or sent for remote
    execution, then it cannot be cancelled and the method will return
    False, otherwise the call will be cancelled and the method will
    return True."""
    if self in scoop._control.execQueue.movable:
        self.exceptionValue = CancelledError()
        scoop._control.futureDict[self.id]._delete()
        scoop._control.execQueue.remove(self)
        return True
    return False
[ "def", "cancel", "(", "self", ")", ":", "if", "self", "in", "scoop", ".", "_control", ".", "execQueue", ".", "movable", ":", "self", ".", "exceptionValue", "=", "CancelledError", "(", ")", "scoop", ".", "_control", ".", "futureDict", "[", "self", ".", "id", "]", ".", "_delete", "(", ")", "scoop", ".", "_control", ".", "execQueue", ".", "remove", "(", "self", ")", "return", "True", "return", "False" ]
If the call is currently being executed or sent for remote execution, then it cannot be cancelled and the method will return False, otherwise the call will be cancelled and the method will return True.
[ "If", "the", "call", "is", "currently", "being", "executed", "or", "sent", "for", "remote", "execution", "then", "it", "cannot", "be", "cancelled", "and", "the", "method", "will", "return", "False", "otherwise", "the", "call", "will", "be", "cancelled", "and", "the", "method", "will", "return", "True", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/_types.py#L141-L151
train
soravux/scoop
scoop/_types.py
Future.done
def done(self):
    """Returns True if the call was successfully cancelled or finished
    running, False otherwise. This function updates the executionQueue
    so it receives all the awaiting messages."""
    # Flush the current future in the local buffer (potential deadlock
    # otherwise)
    try:
        scoop._control.execQueue.remove(self)
        scoop._control.execQueue.socket.sendFuture(self)
    except ValueError as e:
        # Future was not in the local queue, everything is fine
        pass
    # Process buffers
    scoop._control.execQueue.updateQueue()
    return self._ended()
python
def done(self):
    """Returns True if the call was successfully cancelled or finished
    running, False otherwise. This function updates the executionQueue
    so it receives all the awaiting messages."""
    # Flush the current future in the local buffer (potential deadlock
    # otherwise)
    try:
        scoop._control.execQueue.remove(self)
        scoop._control.execQueue.socket.sendFuture(self)
    except ValueError as e:
        # Future was not in the local queue, everything is fine
        pass
    # Process buffers
    scoop._control.execQueue.updateQueue()
    return self._ended()
[ "def", "done", "(", "self", ")", ":", "# Flush the current future in the local buffer (potential deadlock", "# otherwise)", "try", ":", "scoop", ".", "_control", ".", "execQueue", ".", "remove", "(", "self", ")", "scoop", ".", "_control", ".", "execQueue", ".", "socket", ".", "sendFuture", "(", "self", ")", "except", "ValueError", "as", "e", ":", "# Future was not in the local queue, everything is fine", "pass", "# Process buffers", "scoop", ".", "_control", ".", "execQueue", ".", "updateQueue", "(", ")", "return", "self", ".", "_ended", "(", ")" ]
Returns True if the call was successfully cancelled or finished running, False otherwise. This function updates the executionQueue so it receives all the awaiting messages.
[ "Returns", "True", "if", "the", "call", "was", "successfully", "cancelled", "or", "finished", "running", "False", "otherwise", ".", "This", "function", "updates", "the", "executionQueue", "so", "it", "receives", "all", "the", "awaiting", "message", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/_types.py#L163-L177
train
soravux/scoop
scoop/_types.py
Future.add_done_callback
def add_done_callback(self, callable_,
                      inCallbackType=CallbackType.standard,
                      inCallbackGroup=None):
    """Attach a callable to the future that will be called when the
    future is cancelled or finishes running. Callable will be called
    with the future as its only argument.

    Added callables are called in the order that they were added and
    are always called in a thread belonging to the process that added
    them. If the callable raises an Exception then it will be logged
    and ignored. If the callable raises another BaseException then
    behavior is not defined.

    If the future has already completed or been cancelled then callable
    will be called immediately."""
    self.callback.append(callbackEntry(callable_,
                                       inCallbackType,
                                       inCallbackGroup))

    # If already completed or cancelled, execute it immediately
    if self._ended():
        self.callback[-1].func(self)
python
def add_done_callback(self, callable_,
                      inCallbackType=CallbackType.standard,
                      inCallbackGroup=None):
    """Attach a callable to the future that will be called when the
    future is cancelled or finishes running. Callable will be called
    with the future as its only argument.

    Added callables are called in the order that they were added and
    are always called in a thread belonging to the process that added
    them. If the callable raises an Exception then it will be logged
    and ignored. If the callable raises another BaseException then
    behavior is not defined.

    If the future has already completed or been cancelled then callable
    will be called immediately."""
    self.callback.append(callbackEntry(callable_,
                                       inCallbackType,
                                       inCallbackGroup))

    # If already completed or cancelled, execute it immediately
    if self._ended():
        self.callback[-1].func(self)
[ "def", "add_done_callback", "(", "self", ",", "callable_", ",", "inCallbackType", "=", "CallbackType", ".", "standard", ",", "inCallbackGroup", "=", "None", ")", ":", "self", ".", "callback", ".", "append", "(", "callbackEntry", "(", "callable_", ",", "inCallbackType", ",", "inCallbackGroup", ")", ")", "# If already completed or cancelled, execute it immediately", "if", "self", ".", "_ended", "(", ")", ":", "self", ".", "callback", "[", "-", "1", "]", ".", "func", "(", "self", ")" ]
Attach a callable to the future that will be called when the future is cancelled or finishes running. Callable will be called with the future as its only argument. Added callables are called in the order that they were added and are always called in a thread belonging to the process that added them. If the callable raises an Exception then it will be logged and ignored. If the callable raises another BaseException then behavior is not defined. If the future has already completed or been cancelled then callable will be called immediately.
[ "Attach", "a", "callable", "to", "the", "future", "that", "will", "be", "called", "when", "the", "future", "is", "cancelled", "or", "finishes", "running", ".", "Callable", "will", "be", "called", "with", "the", "future", "as", "its", "only", "argument", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/_types.py#L221-L241
train
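A hedged usage sketch for add_done_callback() with its default CallbackType.standard; work and report are hypothetical, and the blocking result() call at the end is only there so the callback can fire before the script exits:

from scoop import futures


def work(x):
    return x + 1


def report(future):
    # The callback receives the future itself as its only argument
    print('finished:', future.result())


if __name__ == '__main__':
    fut = futures.submit(work, 41)
    fut.add_done_callback(report)   # fires immediately if fut already ended
    fut.result()                    # block so the callback has a chance to run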
soravux/scoop
scoop/_types.py
FutureQueue.append
def append(self, future): """Append a future to the queue.""" if future._ended() and future.index is None: self.inprogress.add(future) elif future._ended() and future.index is not None: self.ready.append(future) elif future.greenlet is not None: self.inprogress.add(future) else: self.movable.append(future) # Send the oldest future in the movable deque until under the hwm over_hwm = self.timelen(self.movable) > self.highwatermark while over_hwm and len(self.movable) > 1: sending_future = self.movable.popleft() if sending_future.id[0] != scoop.worker: sending_future._delete() self.socket.sendFuture(sending_future) over_hwm = self.timelen(self.movable) > self.highwatermark
python
def append(self, future): """Append a future to the queue.""" if future._ended() and future.index is None: self.inprogress.add(future) elif future._ended() and future.index is not None: self.ready.append(future) elif future.greenlet is not None: self.inprogress.add(future) else: self.movable.append(future) # Send the oldest future in the movable deque until under the hwm over_hwm = self.timelen(self.movable) > self.highwatermark while over_hwm and len(self.movable) > 1: sending_future = self.movable.popleft() if sending_future.id[0] != scoop.worker: sending_future._delete() self.socket.sendFuture(sending_future) over_hwm = self.timelen(self.movable) > self.highwatermark
[ "def", "append", "(", "self", ",", "future", ")", ":", "if", "future", ".", "_ended", "(", ")", "and", "future", ".", "index", "is", "None", ":", "self", ".", "inprogress", ".", "add", "(", "future", ")", "elif", "future", ".", "_ended", "(", ")", "and", "future", ".", "index", "is", "not", "None", ":", "self", ".", "ready", ".", "append", "(", "future", ")", "elif", "future", ".", "greenlet", "is", "not", "None", ":", "self", ".", "inprogress", ".", "add", "(", "future", ")", "else", ":", "self", ".", "movable", ".", "append", "(", "future", ")", "# Send the oldest future in the movable deque until under the hwm", "over_hwm", "=", "self", ".", "timelen", "(", "self", ".", "movable", ")", ">", "self", ".", "highwatermark", "while", "over_hwm", "and", "len", "(", "self", ".", "movable", ")", ">", "1", ":", "sending_future", "=", "self", ".", "movable", ".", "popleft", "(", ")", "if", "sending_future", ".", "id", "[", "0", "]", "!=", "scoop", ".", "worker", ":", "sending_future", ".", "_delete", "(", ")", "self", ".", "socket", ".", "sendFuture", "(", "sending_future", ")", "over_hwm", "=", "self", ".", "timelen", "(", "self", ".", "movable", ")", ">", "self", ".", "highwatermark" ]
Append a future to the queue.
[ "Append", "a", "future", "to", "the", "queue", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/_types.py#L299-L317
train
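The high-watermark offloading in append() can be illustrated stand-alone, as below; a plain deque and len() stand in for SCOOP's movable queue and its timelen() estimate, so every name here is illustrative rather than SCOOP's:

from collections import deque


def append_with_hwm(movable, item, hwm, send):
    movable.append(item)
    # Ship the oldest items elsewhere until the backlog is back under the
    # high watermark; always keep at least one item locally.
    while len(movable) > hwm and len(movable) > 1:
        send(movable.popleft())


q = deque()
for i in range(6):
    append_with_hwm(q, i, hwm=3, send=lambda x: print('offloaded', x))
print(list(q))   # the newest items stay local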
soravux/scoop
scoop/_types.py
FutureQueue.askForPreviousFutures
def askForPreviousFutures(self):
        """Request a status for every future to the broker."""
        # Don't request it too often (otherwise it ping-pongs because
        # the broker answer triggers the _poll of pop())
        if time.time() < self.lastStatus + POLLING_TIME / 1000:
            return
        self.lastStatus = time.time()

        for future in scoop._control.futureDict.values():
            # Skip the root future
            if scoop.IS_ORIGIN and future.id == (scoop.worker, 0):
                continue

            if future not in self.inprogress:
                self.socket.sendStatusRequest(future)
python
def askForPreviousFutures(self):
        """Request a status for every future to the broker."""
        # Don't request it too often (otherwise it ping-pongs because
        # the broker answer triggers the _poll of pop())
        if time.time() < self.lastStatus + POLLING_TIME / 1000:
            return
        self.lastStatus = time.time()

        for future in scoop._control.futureDict.values():
            # Skip the root future
            if scoop.IS_ORIGIN and future.id == (scoop.worker, 0):
                continue

            if future not in self.inprogress:
                self.socket.sendStatusRequest(future)
[ "def", "askForPreviousFutures", "(", "self", ")", ":", "# Don't request it too often (otherwise it ping-pongs because)", "# the broker answer triggers the _poll of pop()", "if", "time", ".", "time", "(", ")", "<", "self", ".", "lastStatus", "+", "POLLING_TIME", "/", "1000", ":", "return", "self", ".", "lastStatus", "=", "time", ".", "time", "(", ")", "for", "future", "in", "scoop", ".", "_control", ".", "futureDict", ".", "values", "(", ")", ":", "# Skip the root future", "if", "scoop", ".", "IS_ORIGIN", "and", "future", ".", "id", "==", "(", "scoop", ".", "worker", ",", "0", ")", ":", "continue", "if", "future", "not", "in", "self", ".", "inprogress", ":", "self", ".", "socket", ".", "sendStatusRequest", "(", "future", ")" ]
Request a status for every future to the broker.
[ "Request", "a", "status", "for", "every", "future", "to", "the", "broker", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/_types.py#L319-L333
train
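The throttling idiom above (skip the broker round-trip unless POLLING_TIME milliseconds have elapsed since the last one) in a stand-alone sketch; the names and the POLLING_TIME value are hypothetical:

import time

POLLING_TIME = 500      # hypothetical value, in milliseconds as in SCOOP


class StatusPoller:
    def __init__(self):
        self.last_status = 0.0

    def maybe_poll(self, do_poll):
        # Return early if the previous request was too recent
        if time.time() < self.last_status + POLLING_TIME / 1000:
            return False
        self.last_status = time.time()
        do_poll()
        return True


poller = StatusPoller()
poller.maybe_poll(lambda: print('asked broker'))   # runs
poller.maybe_poll(lambda: print('asked broker'))   # throttled, returns False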
soravux/scoop
scoop/_types.py
FutureQueue.pop
def pop(self):
        """Pop the next future from the queue;
        in progress futures have priority over those that have not yet started;
        higher level futures have priority over lower level ones.
        """
        self.updateQueue()

        # If our buffer is underflowing, request more Futures
        if self.timelen(self) < self.lowwatermark:
            self.requestFuture()

        # If an unmovable Future is ready to be executed, return it
        if len(self.ready) != 0:
            return self.ready.popleft()
        # Then, use Futures in the movable queue
        elif len(self.movable) != 0:
            return self.movable.popleft()
        else:
            # Otherwise, block until a new task arrives
            self.lastStatus = time.time()
            while len(self) == 0:
                # Block until message arrives
                self.askForPreviousFutures()
                self.socket._poll(POLLING_TIME)
                self.updateQueue()
            if len(self.ready) != 0:
                return self.ready.popleft()
            elif len(self.movable) != 0:
                return self.movable.popleft()
python
def pop(self):
        """Pop the next future from the queue;
        in progress futures have priority over those that have not yet started;
        higher level futures have priority over lower level ones.
        """
        self.updateQueue()

        # If our buffer is underflowing, request more Futures
        if self.timelen(self) < self.lowwatermark:
            self.requestFuture()

        # If an unmovable Future is ready to be executed, return it
        if len(self.ready) != 0:
            return self.ready.popleft()
        # Then, use Futures in the movable queue
        elif len(self.movable) != 0:
            return self.movable.popleft()
        else:
            # Otherwise, block until a new task arrives
            self.lastStatus = time.time()
            while len(self) == 0:
                # Block until message arrives
                self.askForPreviousFutures()
                self.socket._poll(POLLING_TIME)
                self.updateQueue()
            if len(self.ready) != 0:
                return self.ready.popleft()
            elif len(self.movable) != 0:
                return self.movable.popleft()
[ "def", "pop", "(", "self", ")", ":", "self", ".", "updateQueue", "(", ")", "# If our buffer is underflowing, request more Futures", "if", "self", ".", "timelen", "(", "self", ")", "<", "self", ".", "lowwatermark", ":", "self", ".", "requestFuture", "(", ")", "# If an unmovable Future is ready to be executed, return it", "if", "len", "(", "self", ".", "ready", ")", "!=", "0", ":", "return", "self", ".", "ready", ".", "popleft", "(", ")", "# Then, use Futures in the movable queue", "elif", "len", "(", "self", ".", "movable", ")", "!=", "0", ":", "return", "self", ".", "movable", ".", "popleft", "(", ")", "else", ":", "# Otherwise, block until a new task arrives", "self", ".", "lastStatus", "=", "time", ".", "time", "(", ")", "while", "len", "(", "self", ")", "==", "0", ":", "# Block until message arrives", "self", ".", "askForPreviousFutures", "(", ")", "self", ".", "socket", ".", "_poll", "(", "POLLING_TIME", ")", "self", ".", "updateQueue", "(", ")", "if", "len", "(", "self", ".", "ready", ")", "!=", "0", ":", "return", "self", ".", "ready", ".", "popleft", "(", ")", "elif", "len", "(", "self", ".", "movable", ")", "!=", "0", ":", "return", "self", ".", "movable", ".", "popleft", "(", ")" ]
Pop the next future from the queue; in progress futures have priority over those that have not yet started; higher level futures have priority over lower level ones.
[ "Pop", "the", "next", "future", "from", "the", "queue", ";", "in", "progress", "futures", "have", "priority", "over", "those", "that", "have", "not", "yet", "started", ";", "higher", "level", "futures", "have", "priority", "over", "lower", "level", "ones", ";" ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/_types.py#L335-L363
train
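A stand-alone sketch of pop()'s priority order — ready (unmovable) futures first, then movable ones, otherwise block; plain deques stand in for SCOOP's queues and the blocking branch is stubbed out:

from collections import deque


def pop_next(ready, movable):
    if ready:                 # unmovable futures ready to run win
        return ready.popleft()
    if movable:               # then anything in the movable queue
        return movable.popleft()
    return None               # real code would poll the socket until work arrives


ready, movable = deque(['r1']), deque(['m1', 'm2'])
print(pop_next(ready, movable))   # r1 -- ready futures have priority
print(pop_next(ready, movable))   # m1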
soravux/scoop
scoop/_types.py
FutureQueue.flush
def flush(self): """Empty the local queue and send its elements to be executed remotely. """ for elem in self: if elem.id[0] != scoop.worker: elem._delete() self.socket.sendFuture(elem) self.ready.clear() self.movable.clear()
python
def flush(self): """Empty the local queue and send its elements to be executed remotely. """ for elem in self: if elem.id[0] != scoop.worker: elem._delete() self.socket.sendFuture(elem) self.ready.clear() self.movable.clear()
[ "def", "flush", "(", "self", ")", ":", "for", "elem", "in", "self", ":", "if", "elem", ".", "id", "[", "0", "]", "!=", "scoop", ".", "worker", ":", "elem", ".", "_delete", "(", ")", "self", ".", "socket", ".", "sendFuture", "(", "elem", ")", "self", ".", "ready", ".", "clear", "(", ")", "self", ".", "movable", ".", "clear", "(", ")" ]
Empty the local queue and send its elements to be executed remotely.
[ "Empty", "the", "local", "queue", "and", "send", "its", "elements", "to", "be", "executed", "remotely", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/_types.py#L365-L373
train
soravux/scoop
scoop/_types.py
FutureQueue.updateQueue
def updateQueue(self): """Process inbound communication buffer. Updates the local queue with elements from the broker.""" for future in self.socket.recvFuture(): if future._ended(): # If the answer is coming back, update its entry try: thisFuture = scoop._control.futureDict[future.id] except KeyError: # Already received? scoop.logger.warn('{0}: Received an unexpected future: ' '{1}'.format(scoop.worker, future.id)) continue thisFuture.resultValue = future.resultValue thisFuture.exceptionValue = future.exceptionValue thisFuture.executor = future.executor thisFuture.isDone = future.isDone # Execute standard callbacks here (on parent) thisFuture._execute_callbacks(CallbackType.standard) self.append(thisFuture) future._delete() elif future.id not in scoop._control.futureDict: scoop._control.futureDict[future.id] = future self.append(scoop._control.futureDict[future.id]) else: self.append(scoop._control.futureDict[future.id])
python
def updateQueue(self): """Process inbound communication buffer. Updates the local queue with elements from the broker.""" for future in self.socket.recvFuture(): if future._ended(): # If the answer is coming back, update its entry try: thisFuture = scoop._control.futureDict[future.id] except KeyError: # Already received? scoop.logger.warn('{0}: Received an unexpected future: ' '{1}'.format(scoop.worker, future.id)) continue thisFuture.resultValue = future.resultValue thisFuture.exceptionValue = future.exceptionValue thisFuture.executor = future.executor thisFuture.isDone = future.isDone # Execute standard callbacks here (on parent) thisFuture._execute_callbacks(CallbackType.standard) self.append(thisFuture) future._delete() elif future.id not in scoop._control.futureDict: scoop._control.futureDict[future.id] = future self.append(scoop._control.futureDict[future.id]) else: self.append(scoop._control.futureDict[future.id])
[ "def", "updateQueue", "(", "self", ")", ":", "for", "future", "in", "self", ".", "socket", ".", "recvFuture", "(", ")", ":", "if", "future", ".", "_ended", "(", ")", ":", "# If the answer is coming back, update its entry", "try", ":", "thisFuture", "=", "scoop", ".", "_control", ".", "futureDict", "[", "future", ".", "id", "]", "except", "KeyError", ":", "# Already received?", "scoop", ".", "logger", ".", "warn", "(", "'{0}: Received an unexpected future: '", "'{1}'", ".", "format", "(", "scoop", ".", "worker", ",", "future", ".", "id", ")", ")", "continue", "thisFuture", ".", "resultValue", "=", "future", ".", "resultValue", "thisFuture", ".", "exceptionValue", "=", "future", ".", "exceptionValue", "thisFuture", ".", "executor", "=", "future", ".", "executor", "thisFuture", ".", "isDone", "=", "future", ".", "isDone", "# Execute standard callbacks here (on parent)", "thisFuture", ".", "_execute_callbacks", "(", "CallbackType", ".", "standard", ")", "self", ".", "append", "(", "thisFuture", ")", "future", ".", "_delete", "(", ")", "elif", "future", ".", "id", "not", "in", "scoop", ".", "_control", ".", "futureDict", ":", "scoop", ".", "_control", ".", "futureDict", "[", "future", ".", "id", "]", "=", "future", "self", ".", "append", "(", "scoop", ".", "_control", ".", "futureDict", "[", "future", ".", "id", "]", ")", "else", ":", "self", ".", "append", "(", "scoop", ".", "_control", ".", "futureDict", "[", "future", ".", "id", "]", ")" ]
Process inbound communication buffer. Updates the local queue with elements from the broker.
[ "Process", "inbound", "communication", "buffer", ".", "Updates", "the", "local", "queue", "with", "elements", "from", "the", "broker", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/_types.py#L379-L404
train
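The merge step in updateQueue() — copying the fields of an inbound, finished future onto the locally kept instance and discarding the transport copy — reduced to a stand-alone sketch; the Fut class and the dictionary are hypothetical stand-ins for SCOOP's future objects and futureDict:

class Fut:
    def __init__(self, fid, result=None):
        self.id, self.resultValue = fid, result


local = {('worker1', 3): Fut(('worker1', 3))}


def merge_inbound(inbound):
    try:
        mine = local[inbound.id]
    except KeyError:
        print('unexpected future', inbound.id)   # already received?
        return None
    mine.resultValue = inbound.resultValue       # copy the result onto the parent's copy
    return mine


merged = merge_inbound(Fut(('worker1', 3), result=42))
print(merged.resultValue)   # 42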
soravux/scoop
scoop/_types.py
FutureQueue.sendResult
def sendResult(self, future): """Send back results to broker for distribution to parent task.""" # Greenlets cannot be pickled future.greenlet = None assert future._ended(), "The results are not valid" self.socket.sendResult(future)
python
def sendResult(self, future): """Send back results to broker for distribution to parent task.""" # Greenlets cannot be pickled future.greenlet = None assert future._ended(), "The results are not valid" self.socket.sendResult(future)
[ "def", "sendResult", "(", "self", ",", "future", ")", ":", "# Greenlets cannot be pickled", "future", ".", "greenlet", "=", "None", "assert", "future", ".", "_ended", "(", ")", ",", "\"The results are not valid\"", "self", ".", "socket", ".", "sendResult", "(", "future", ")" ]
Send back results to broker for distribution to parent task.
[ "Send", "back", "results", "to", "broker", "for", "distribution", "to", "parent", "task", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/_types.py#L411-L416
train
soravux/scoop
scoop/_types.py
FutureQueue.shutdown
def shutdown(self):
        """Shut down the resources used by the queue."""
        self.socket.shutdown()
        if scoop:
            if scoop.DEBUG:
                from scoop import _debug
                _debug.writeWorkerDebug(
                    scoop._control.debug_stats,
                    scoop._control.QueueLength,
                )
python
def shutdown(self):
        """Shut down the resources used by the queue."""
        self.socket.shutdown()
        if scoop:
            if scoop.DEBUG:
                from scoop import _debug
                _debug.writeWorkerDebug(
                    scoop._control.debug_stats,
                    scoop._control.QueueLength,
                )
[ "def", "shutdown", "(", "self", ")", ":", "self", ".", "socket", ".", "shutdown", "(", ")", "if", "scoop", ":", "if", "scoop", ".", "DEBUG", ":", "from", "scoop", "import", "_debug", "_debug", ".", "writeWorkerDebug", "(", "scoop", ".", "_control", ".", "debug_stats", ",", "scoop", ".", "_control", ".", "QueueLength", ",", ")" ]
Shut down the resources used by the queue.
[ "Shutdown", "the", "ressources", "used", "by", "the", "queue" ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/_types.py#L418-L428
train
soravux/scoop
scoop/_debug.py
redirectSTDOUTtoDebugFile
def redirectSTDOUTtoDebugFile():
    """Redirects the stdout and stderr of the current process each to a
    dedicated file."""
    import sys
    kwargs = {}
    if sys.version_info >= (3,):
        kwargs["encoding"] = "utf8"
    sys.stdout = open(
        os.path.join(
            getDebugDirectory(),
            "{0}.stdout".format(getDebugIdentifier()),
        ),
        "w",
        1,  # Buffering by line
        **kwargs
    )
    sys.stderr = open(
        os.path.join(
            getDebugDirectory(),
            "{0}.stderr".format(getDebugIdentifier()),
        ),
        "w",
        1,  # Buffering by line
        **kwargs
    )
python
def redirectSTDOUTtoDebugFile():
    """Redirects the stdout and stderr of the current process each to a
    dedicated file."""
    import sys
    kwargs = {}
    if sys.version_info >= (3,):
        kwargs["encoding"] = "utf8"
    sys.stdout = open(
        os.path.join(
            getDebugDirectory(),
            "{0}.stdout".format(getDebugIdentifier()),
        ),
        "w",
        1,  # Buffering by line
        **kwargs
    )
    sys.stderr = open(
        os.path.join(
            getDebugDirectory(),
            "{0}.stderr".format(getDebugIdentifier()),
        ),
        "w",
        1,  # Buffering by line
        **kwargs
    )
[ "def", "redirectSTDOUTtoDebugFile", "(", ")", ":", "import", "sys", "kwargs", "=", "{", "}", "if", "sys", ".", "version_info", ">=", "(", "3", ",", ")", ":", "kwargs", "[", "\"encoding\"", "]", "=", "\"utf8\"", "sys", ".", "stdout", "=", "open", "(", "os", ".", "path", ".", "join", "(", "getDebugDirectory", "(", ")", ",", "\"{0}.stdout\"", ".", "format", "(", "getDebugIdentifier", "(", ")", ")", ",", ")", ",", "\"w\"", ",", "1", ",", "# Buffering by line", "*", "*", "kwargs", ")", "sys", ".", "stderr", "=", "open", "(", "os", ".", "path", ".", "join", "(", "getDebugDirectory", "(", ")", ",", "\"{0}.stderr\"", ".", "format", "(", "getDebugIdentifier", "(", ")", ")", ",", ")", ",", "\"w\"", ",", "1", ",", "# Buffering by line", "*", "*", "kwargs", ")" ]
Redirects the stdout and stderr of the current process each to a dedicated file.
[ "Redirects", "the", "stdout", "and", "stderr", "of", "the", "current", "process", "to", "a", "file", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/_debug.py#L46-L69
train
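The redirection idiom itself — reopen sys.stdout as a line-buffered text file (buffering=1) so every print is flushed promptly — in a minimal stand-alone form; the path is hypothetical, not the getDebugDirectory()-derived one:

import sys

log_path = 'worker.stdout'           # stand-in for the debug-directory path
sys.stdout = open(log_path, 'w', 1, encoding='utf8')
print('this line lands in worker.stdout immediately')
sys.stdout.close()
sys.stdout = sys.__stdout__          # restore the real stdout afterwards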
soravux/scoop
scoop/_debug.py
writeWorkerDebug
def writeWorkerDebug(debugStats, queueLength, path_suffix=""):
    """Serializes the execution data using pickle and writes it into the
    debug directory."""
    createDirectory(path_suffix)
    origin_prefix = "origin-" if scoop.IS_ORIGIN else ""
    statsFilename = os.path.join(
        getDebugDirectory(),
        path_suffix,
        "{1}worker-{0}-STATS".format(getDebugIdentifier(), origin_prefix)
    )
    lengthFilename = os.path.join(
        getDebugDirectory(),
        path_suffix,
        "{1}worker-{0}-QUEUE".format(getDebugIdentifier(), origin_prefix)
    )
    with open(statsFilename, 'wb') as f:
        pickle.dump(debugStats, f)
    with open(lengthFilename, 'wb') as f:
        pickle.dump(queueLength, f)
python
def writeWorkerDebug(debugStats, queueLength, path_suffix=""):
    """Serializes the execution data using pickle and writes it into the
    debug directory."""
    createDirectory(path_suffix)
    origin_prefix = "origin-" if scoop.IS_ORIGIN else ""
    statsFilename = os.path.join(
        getDebugDirectory(),
        path_suffix,
        "{1}worker-{0}-STATS".format(getDebugIdentifier(), origin_prefix)
    )
    lengthFilename = os.path.join(
        getDebugDirectory(),
        path_suffix,
        "{1}worker-{0}-QUEUE".format(getDebugIdentifier(), origin_prefix)
    )
    with open(statsFilename, 'wb') as f:
        pickle.dump(debugStats, f)
    with open(lengthFilename, 'wb') as f:
        pickle.dump(queueLength, f)
[ "def", "writeWorkerDebug", "(", "debugStats", ",", "queueLength", ",", "path_suffix", "=", "\"\"", ")", ":", "createDirectory", "(", "path_suffix", ")", "origin_prefix", "=", "\"origin-\"", "if", "scoop", ".", "IS_ORIGIN", "else", "\"\"", "statsFilename", "=", "os", ".", "path", ".", "join", "(", "getDebugDirectory", "(", ")", ",", "path_suffix", ",", "\"{1}worker-{0}-STATS\"", ".", "format", "(", "getDebugIdentifier", "(", ")", ",", "origin_prefix", ")", ")", "lengthFilename", "=", "os", ".", "path", ".", "join", "(", "getDebugDirectory", "(", ")", ",", "path_suffix", ",", "\"{1}worker-{0}-QUEUE\"", ".", "format", "(", "getDebugIdentifier", "(", ")", ",", "origin_prefix", ")", ")", "with", "open", "(", "statsFilename", ",", "'wb'", ")", "as", "f", ":", "pickle", ".", "dump", "(", "debugStats", ",", "f", ")", "with", "open", "(", "lengthFilename", ",", "'wb'", ")", "as", "f", ":", "pickle", ".", "dump", "(", "queueLength", ",", "f", ")" ]
Serializes the execution data using pickle and writes it into the debug directory.
[ "Serialize", "the", "execution", "data", "using", "pickle", "and", "writes", "it", "into", "the", "debug", "directory", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/_debug.py#L72-L90
train
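Since the dumps are plain pickles, reading them back is symmetric; a hedged companion sketch with a hypothetical file name rather than SCOOP's getDebugIdentifier()-derived one:

import pickle

stats = {'worker-1': [0.5, 1.2]}                 # stand-in debug data
with open('worker-1-STATS', 'wb') as f:
    pickle.dump(stats, f)

with open('worker-1-STATS', 'rb') as f:          # later, e.g. in an analysis script
    print(pickle.load(f))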
soravux/scoop
scoop/launcher.py
main
def main():
    """Execution of the SCOOP module. Parses its command-line arguments and
    launches the needed resources."""
    # Generate an argparse parser and parse the command-line arguments
    parser = makeParser()
    args = parser.parse_args()

    # Get a list of resources to launch worker(s) on
    hosts = utils.getHosts(args.hostfile, args.hosts)
    if args.n:
        n = args.n
    else:
        n = utils.getWorkerQte(hosts)
    assert n >= 0, (
        "Scoop couldn't determine the number of workers to start.\n"
        "Use the '-n' flag to set it manually."
    )

    if not args.external_hostname:
        args.external_hostname = [utils.externalHostname(hosts)]

    # Launch SCOOP
    thisScoopApp = ScoopApp(hosts, n, args.b,
                            args.verbose if not args.quiet else 0,
                            args.python_interpreter,
                            args.external_hostname[0],
                            args.executable, args.args, args.tunnel,
                            args.path, args.debug, args.nice,
                            utils.getEnv(), args.profile, args.pythonpath[0],
                            args.prolog[0], args.backend, args.rsh,
                            args.ssh_executable)

    rootTaskExitCode = False
    interruptPreventer = Thread(target=thisScoopApp.close)
    try:
        rootTaskExitCode = thisScoopApp.run()
    except Exception as e:
        logging.error('Error while launching SCOOP subprocesses:')
        logging.error(traceback.format_exc())
        rootTaskExitCode = -1
    finally:
        # This should not be interrupted (i.e. by a KeyboardInterrupt)
        # The only cross-platform way to do it I found was by using a thread.
        interruptPreventer.start()
        interruptPreventer.join()

    # Exit with the proper exit code
    if rootTaskExitCode:
        sys.exit(rootTaskExitCode)
python
def main():
    """Execution of the SCOOP module. Parses its command-line arguments and
    launches the needed resources."""
    # Generate an argparse parser and parse the command-line arguments
    parser = makeParser()
    args = parser.parse_args()

    # Get a list of resources to launch worker(s) on
    hosts = utils.getHosts(args.hostfile, args.hosts)
    if args.n:
        n = args.n
    else:
        n = utils.getWorkerQte(hosts)
    assert n >= 0, (
        "Scoop couldn't determine the number of workers to start.\n"
        "Use the '-n' flag to set it manually."
    )

    if not args.external_hostname:
        args.external_hostname = [utils.externalHostname(hosts)]

    # Launch SCOOP
    thisScoopApp = ScoopApp(hosts, n, args.b,
                            args.verbose if not args.quiet else 0,
                            args.python_interpreter,
                            args.external_hostname[0],
                            args.executable, args.args, args.tunnel,
                            args.path, args.debug, args.nice,
                            utils.getEnv(), args.profile, args.pythonpath[0],
                            args.prolog[0], args.backend, args.rsh,
                            args.ssh_executable)

    rootTaskExitCode = False
    interruptPreventer = Thread(target=thisScoopApp.close)
    try:
        rootTaskExitCode = thisScoopApp.run()
    except Exception as e:
        logging.error('Error while launching SCOOP subprocesses:')
        logging.error(traceback.format_exc())
        rootTaskExitCode = -1
    finally:
        # This should not be interrupted (i.e. by a KeyboardInterrupt)
        # The only cross-platform way to do it I found was by using a thread.
        interruptPreventer.start()
        interruptPreventer.join()

    # Exit with the proper exit code
    if rootTaskExitCode:
        sys.exit(rootTaskExitCode)
[ "def", "main", "(", ")", ":", "# Generate a argparse parser and parse the command-line arguments", "parser", "=", "makeParser", "(", ")", "args", "=", "parser", ".", "parse_args", "(", ")", "# Get a list of resources to launch worker(s) on", "hosts", "=", "utils", ".", "getHosts", "(", "args", ".", "hostfile", ",", "args", ".", "hosts", ")", "if", "args", ".", "n", ":", "n", "=", "args", ".", "n", "else", ":", "n", "=", "utils", ".", "getWorkerQte", "(", "hosts", ")", "assert", "n", ">=", "0", ",", "(", "\"Scoop couldn't determine the number of worker to start.\\n\"", "\"Use the '-n' flag to set it manually.\"", ")", "if", "not", "args", ".", "external_hostname", ":", "args", ".", "external_hostname", "=", "[", "utils", ".", "externalHostname", "(", "hosts", ")", "]", "# Launch SCOOP", "thisScoopApp", "=", "ScoopApp", "(", "hosts", ",", "n", ",", "args", ".", "b", ",", "args", ".", "verbose", "if", "not", "args", ".", "quiet", "else", "0", ",", "args", ".", "python_interpreter", ",", "args", ".", "external_hostname", "[", "0", "]", ",", "args", ".", "executable", ",", "args", ".", "args", ",", "args", ".", "tunnel", ",", "args", ".", "path", ",", "args", ".", "debug", ",", "args", ".", "nice", ",", "utils", ".", "getEnv", "(", ")", ",", "args", ".", "profile", ",", "args", ".", "pythonpath", "[", "0", "]", ",", "args", ".", "prolog", "[", "0", "]", ",", "args", ".", "backend", ",", "args", ".", "rsh", ",", "args", ".", "ssh_executable", ")", "rootTaskExitCode", "=", "False", "interruptPreventer", "=", "Thread", "(", "target", "=", "thisScoopApp", ".", "close", ")", "try", ":", "rootTaskExitCode", "=", "thisScoopApp", ".", "run", "(", ")", "except", "Exception", "as", "e", ":", "logging", ".", "error", "(", "'Error while launching SCOOP subprocesses:'", ")", "logging", ".", "error", "(", "traceback", ".", "format_exc", "(", ")", ")", "rootTaskExitCode", "=", "-", "1", "finally", ":", "# This should not be interrupted (ie. by a KeyboadInterrupt)", "# The only cross-platform way to do it I found was by using a thread.", "interruptPreventer", ".", "start", "(", ")", "interruptPreventer", ".", "join", "(", ")", "# Exit with the proper exit code", "if", "rootTaskExitCode", ":", "sys", ".", "exit", "(", "rootTaskExitCode", ")" ]
Execution of the SCOOP module. Parses its command-line arguments and launches the needed resources.
[ "Execution", "of", "the", "SCOOP", "module", ".", "Parses", "its", "command", "-", "line", "arguments", "and", "launch", "needed", "resources", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/launcher.py#L450-L499
train
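main() is the entry point driven by SCOOP's launcher (typically invoked as python -m scoop your_script.py). Its worker-count fallback (-n wins, otherwise sum the counts in the host list) reduced to a stand-alone sketch with hypothetical names and data:

hosts = [('node1', 2), ('node2', 3)]     # stand-in for utils.getHosts() output


def worker_count(n_flag, hosts):
    # Use the -n value when given, otherwise derive it from the host list
    n = n_flag if n_flag else sum(cores for _, cores in hosts)
    assert n >= 0, "couldn't determine the number of workers to start"
    return n


print(worker_count(None, hosts))   # 5, summed from the host file
print(worker_count(8, hosts))      # 8, the -n flag wins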
soravux/scoop
scoop/launcher.py
ScoopApp.initLogging
def initLogging(self): """Configures the logger.""" verbose_levels = { 0: logging.WARNING, 1: logging.INFO, 2: logging.DEBUG, } logging.basicConfig( level=verbose_levels[self.verbose], format="[%(asctime)-15s] %(module)-9s %(levelname)-7s %(message)s" ) return logging.getLogger(self.__class__.__name__)
python
def initLogging(self): """Configures the logger.""" verbose_levels = { 0: logging.WARNING, 1: logging.INFO, 2: logging.DEBUG, } logging.basicConfig( level=verbose_levels[self.verbose], format="[%(asctime)-15s] %(module)-9s %(levelname)-7s %(message)s" ) return logging.getLogger(self.__class__.__name__)
[ "def", "initLogging", "(", "self", ")", ":", "verbose_levels", "=", "{", "0", ":", "logging", ".", "WARNING", ",", "1", ":", "logging", ".", "INFO", ",", "2", ":", "logging", ".", "DEBUG", ",", "}", "logging", ".", "basicConfig", "(", "level", "=", "verbose_levels", "[", "self", ".", "verbose", "]", ",", "format", "=", "\"[%(asctime)-15s] %(module)-9s %(levelname)-7s %(message)s\"", ")", "return", "logging", ".", "getLogger", "(", "self", ".", "__class__", ".", "__name__", ")" ]
Configures the logger.
[ "Configures", "the", "logger", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/launcher.py#L138-L149
train
soravux/scoop
scoop/launcher.py
ScoopApp.divideHosts
def divideHosts(self, hosts, qty):
        """Divide processes among hosts."""
        maximumWorkers = sum(host[1] for host in hosts)

        # If the specified amount of workers is greater than the sum of those specified.
        if qty > maximumWorkers:
            index = 0
            while qty > maximumWorkers:
                hosts[index] = (hosts[index][0], hosts[index][1] + 1)
                index = (index + 1) % len(hosts)
                maximumWorkers += 1

        # If the specified amount of workers is lower than the sum of those specified.
        elif qty < maximumWorkers:
            while qty < maximumWorkers:
                maximumWorkers -= hosts[-1][1]
                if qty > maximumWorkers:
                    hosts[-1] = (hosts[-1][0], qty - maximumWorkers)
                    maximumWorkers += hosts[-1][1]
                else:
                    del hosts[-1]

        # Check if the broker is externally routable
        if self.externalHostname in utils.loopbackReferences and \
                len(hosts) > 1 and \
                not self.tunnel:
            raise Exception("\n"
                            "Could not find route from external worker to the "
                            "broker: Unresolvable hostname or IP address.\n "
                            "Please specify your externally routable hostname "
                            "or IP using the --external-hostname parameter or "
                            "use the --tunnel flag.")
        return hosts
python
def divideHosts(self, hosts, qty):
        """Divide processes among hosts."""
        maximumWorkers = sum(host[1] for host in hosts)

        # If the specified amount of workers is greater than the sum of those specified.
        if qty > maximumWorkers:
            index = 0
            while qty > maximumWorkers:
                hosts[index] = (hosts[index][0], hosts[index][1] + 1)
                index = (index + 1) % len(hosts)
                maximumWorkers += 1

        # If the specified amount of workers is lower than the sum of those specified.
        elif qty < maximumWorkers:
            while qty < maximumWorkers:
                maximumWorkers -= hosts[-1][1]
                if qty > maximumWorkers:
                    hosts[-1] = (hosts[-1][0], qty - maximumWorkers)
                    maximumWorkers += hosts[-1][1]
                else:
                    del hosts[-1]

        # Check if the broker is externally routable
        if self.externalHostname in utils.loopbackReferences and \
                len(hosts) > 1 and \
                not self.tunnel:
            raise Exception("\n"
                            "Could not find route from external worker to the "
                            "broker: Unresolvable hostname or IP address.\n "
                            "Please specify your externally routable hostname "
                            "or IP using the --external-hostname parameter or "
                            "use the --tunnel flag.")
        return hosts
[ "def", "divideHosts", "(", "self", ",", "hosts", ",", "qty", ")", ":", "maximumWorkers", "=", "sum", "(", "host", "[", "1", "]", "for", "host", "in", "hosts", ")", "# If specified amount of workers is greater than sum of each specified.", "if", "qty", ">", "maximumWorkers", ":", "index", "=", "0", "while", "qty", ">", "maximumWorkers", ":", "hosts", "[", "index", "]", "=", "(", "hosts", "[", "index", "]", "[", "0", "]", ",", "hosts", "[", "index", "]", "[", "1", "]", "+", "1", ")", "index", "=", "(", "index", "+", "1", ")", "%", "len", "(", "hosts", ")", "maximumWorkers", "+=", "1", "# If specified amount of workers if lower than sum of each specified.", "elif", "qty", "<", "maximumWorkers", ":", "while", "qty", "<", "maximumWorkers", ":", "maximumWorkers", "-=", "hosts", "[", "-", "1", "]", "[", "1", "]", "if", "qty", ">", "maximumWorkers", ":", "hosts", "[", "-", "1", "]", "=", "(", "hosts", "[", "-", "1", "]", "[", "0", "]", ",", "qty", "-", "maximumWorkers", ")", "maximumWorkers", "+=", "hosts", "[", "-", "1", "]", "[", "1", "]", "else", ":", "del", "hosts", "[", "-", "1", "]", "# Checking if the broker if externally routable", "if", "self", ".", "externalHostname", "in", "utils", ".", "loopbackReferences", "and", "len", "(", "hosts", ")", ">", "1", "and", "not", "self", ".", "tunnel", ":", "raise", "Exception", "(", "\"\\n\"", "\"Could not find route from external worker to the \"", "\"broker: Unresolvable hostname or IP address.\\n \"", "\"Please specify your externally routable hostname \"", "\"or IP using the --external-hostname parameter or \"", "\"use the --tunnel flag.\"", ")", "return", "hosts" ]
Divide processes among hosts.
[ "Divide", "processes", "among", "hosts", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/launcher.py#L151-L185
train
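The two rebalancing loops in divideHosts() are easiest to follow on concrete numbers; a stand-alone re-implementation sketch of the same arithmetic, with (name, workers) tuples and without the routability check:

def divide(hosts, qty):
    total = sum(h[1] for h in hosts)
    i = 0
    while qty > total:                  # too few slots: round-robin +1
        hosts[i] = (hosts[i][0], hosts[i][1] + 1)
        i = (i + 1) % len(hosts)
        total += 1
    while qty < total:                  # too many slots: trim from the tail
        total -= hosts[-1][1]
        if qty > total:
            hosts[-1] = (hosts[-1][0], qty - total)
            total += hosts[-1][1]
        else:
            del hosts[-1]
    return hosts


print(divide([('a', 2), ('b', 2)], 5))   # [('a', 3), ('b', 2)]
print(divide([('a', 2), ('b', 2)], 3))   # [('a', 2), ('b', 1)]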
soravux/scoop
scoop/launcher.py
ScoopApp.showHostDivision
def showHostDivision(self, headless):
        """Show the worker distribution over the hosts."""
        scoop.logger.info('Worker distribution: ')
        for worker, number in self.worker_hosts:
            first_worker = (worker == self.worker_hosts[0][0])
            scoop.logger.info('   {0}:\t{1} {2}'.format(
                worker,
                number - 1 if first_worker or headless else str(number),
                "+ origin" if first_worker or headless else "",
                )
            )
python
def showHostDivision(self, headless):
        """Show the worker distribution over the hosts."""
        scoop.logger.info('Worker distribution: ')
        for worker, number in self.worker_hosts:
            first_worker = (worker == self.worker_hosts[0][0])
            scoop.logger.info('   {0}:\t{1} {2}'.format(
                worker,
                number - 1 if first_worker or headless else str(number),
                "+ origin" if first_worker or headless else "",
                )
            )
[ "def", "showHostDivision", "(", "self", ",", "headless", ")", ":", "scoop", ".", "logger", ".", "info", "(", "'Worker d--istribution: '", ")", "for", "worker", ",", "number", "in", "self", ".", "worker_hosts", ":", "first_worker", "=", "(", "worker", "==", "self", ".", "worker_hosts", "[", "0", "]", "[", "0", "]", ")", "scoop", ".", "logger", ".", "info", "(", "' {0}:\\t{1} {2}'", ".", "format", "(", "worker", ",", "number", "-", "1", "if", "first_worker", "or", "headless", "else", "str", "(", "number", ")", ",", "\"+ origin\"", "if", "first_worker", "or", "headless", "else", "\"\"", ",", ")", ")" ]
Show the worker distribution over the hosts.
[ "Show", "the", "worker", "distribution", "over", "the", "hosts", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/launcher.py#L187-L197
train
soravux/scoop
scoop/launcher.py
ScoopApp.setWorkerInfo
def setWorkerInfo(self, hostname, workerAmount, origin): """Sets the worker information for the current host.""" scoop.logger.debug('Initialising {0}{1} worker {2} [{3}].'.format( "local" if hostname in utils.localHostnames else "remote", " origin" if origin else "", self.workersLeft, hostname, ) ) add_args, add_kwargs = self._setWorker_args(origin) self.workers[-1].setWorker(*add_args, **add_kwargs) self.workers[-1].setWorkerAmount(workerAmount)
python
def setWorkerInfo(self, hostname, workerAmount, origin): """Sets the worker information for the current host.""" scoop.logger.debug('Initialising {0}{1} worker {2} [{3}].'.format( "local" if hostname in utils.localHostnames else "remote", " origin" if origin else "", self.workersLeft, hostname, ) ) add_args, add_kwargs = self._setWorker_args(origin) self.workers[-1].setWorker(*add_args, **add_kwargs) self.workers[-1].setWorkerAmount(workerAmount)
[ "def", "setWorkerInfo", "(", "self", ",", "hostname", ",", "workerAmount", ",", "origin", ")", ":", "scoop", ".", "logger", ".", "debug", "(", "'Initialising {0}{1} worker {2} [{3}].'", ".", "format", "(", "\"local\"", "if", "hostname", "in", "utils", ".", "localHostnames", "else", "\"remote\"", ",", "\" origin\"", "if", "origin", "else", "\"\"", ",", "self", ".", "workersLeft", ",", "hostname", ",", ")", ")", "add_args", ",", "add_kwargs", "=", "self", ".", "_setWorker_args", "(", "origin", ")", "self", ".", "workers", "[", "-", "1", "]", ".", "setWorker", "(", "*", "add_args", ",", "*", "*", "add_kwargs", ")", "self", ".", "workers", "[", "-", "1", "]", ".", "setWorkerAmount", "(", "workerAmount", ")" ]
Sets the worker information for the current host.
[ "Sets", "the", "worker", "information", "for", "the", "current", "host", "." ]
d391dfa62f47e49d48328ee9cf08aa114256fd33
https://github.com/soravux/scoop/blob/d391dfa62f47e49d48328ee9cf08aa114256fd33/scoop/launcher.py#L228-L241
train