repo (string, 7-55 chars) | path (string, 4-127 chars) | func_name (string, 1-88 chars) | original_string (string, 75-19.8k chars) | language (string, 1 class) | code (string, 75-19.8k chars) | code_tokens (sequence) | docstring (string, 3-17.3k chars) | docstring_tokens (sequence) | sha (string, 40 chars) | url (string, 87-242 chars) | partition (string, 1 class) |
---|---|---|---|---|---|---|---|---|---|---|---|
Loudr/pale | pale/endpoint.py | Endpoint._fix_up_fields | def _fix_up_fields(cls):
"""Add names to all of the Endpoint's Arguments.
This method will get called on class declaration because of
Endpoint's metaclass. The functionality is based on Google's NDB
implementation."""
cls._arguments = dict()
if cls.__module__ == __name__: # skip the classes in this file
return
for name in set(dir(cls)):
attr = getattr(cls, name, None)
if isinstance(attr, BaseArgument):
if name.startswith('_'):
raise TypeError("Endpoint argument %s cannot begin with "
"an underscore, as these attributes are reserved "
"for instance variables of the endpoint object, "
"rather than for arguments to your HTTP Endpoint."
% name)
attr._fix_up(cls, name)
cls._arguments[attr.name] = attr | python | def _fix_up_fields(cls):
"""Add names to all of the Endpoint's Arguments.
This method will get called on class declaration because of
Endpoint's metaclass. The functionality is based on Google's NDB
implementation."""
cls._arguments = dict()
if cls.__module__ == __name__: # skip the classes in this file
return
for name in set(dir(cls)):
attr = getattr(cls, name, None)
if isinstance(attr, BaseArgument):
if name.startswith('_'):
raise TypeError("Endpoint argument %s cannot begin with "
"an underscore, as these attributes are reserved "
"for instance variables of the endpoint object, "
"rather than for arguments to your HTTP Endpoint."
% name)
attr._fix_up(cls, name)
cls._arguments[attr.name] = attr | [
"def",
"_fix_up_fields",
"(",
"cls",
")",
":",
"cls",
".",
"_arguments",
"=",
"dict",
"(",
")",
"if",
"cls",
".",
"__module__",
"==",
"__name__",
":",
"# skip the classes in this file",
"return",
"for",
"name",
"in",
"set",
"(",
"dir",
"(",
"cls",
")",
")",
":",
"attr",
"=",
"getattr",
"(",
"cls",
",",
"name",
",",
"None",
")",
"if",
"isinstance",
"(",
"attr",
",",
"BaseArgument",
")",
":",
"if",
"name",
".",
"startswith",
"(",
"'_'",
")",
":",
"raise",
"TypeError",
"(",
"\"Endpoint argument %s cannot begin with \"",
"\"an underscore, as these attributes are reserved \"",
"\"for instance variables of the endpoint object, \"",
"\"rather than for arguments to your HTTP Endpoint.\"",
"%",
"name",
")",
"attr",
".",
"_fix_up",
"(",
"cls",
",",
"name",
")",
"cls",
".",
"_arguments",
"[",
"attr",
".",
"name",
"]",
"=",
"attr"
] | Add names to all of the Endpoint's Arguments.
This method will get called on class declaration because of
Endpoint's metaclass. The functionality is based on Google's NDB
implementation. | [
"Add",
"names",
"to",
"all",
"of",
"the",
"Endpoint",
"s",
"Arguments",
"."
] | dc002ee6032c856551143af222ff8f71ed9853fe | https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/endpoint.py#L68-L87 | train |
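The `_fix_up_fields` row above relies on Endpoint's metaclass to name every argument at class-declaration time. The sketch below illustrates the effect; it assumes the `pale` package is installed, that an argument class such as `StringArgument` lives under `pale.arguments`, and the endpoint class and its field are invented for the example.

```python
# Illustrative sketch only: ListUsersEndpoint, its `query` argument, and the
# StringArgument constructor arguments are assumptions, not part of this row.
from pale.endpoint import Endpoint
from pale.arguments import StringArgument  # assumed import path

class ListUsersEndpoint(Endpoint):
    query = StringArgument("free-text search")  # constructor args assumed

# The metaclass has already run _fix_up_fields, so the argument knows its
# own name and is registered in the class-level _arguments dict:
assert "query" in ListUsersEndpoint._arguments
assert ListUsersEndpoint._arguments["query"].name == "query"
```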
Loudr/pale | pale/endpoint.py | Endpoint._execute | def _execute(self, request, **kwargs):
"""The top-level execute function for the endpoint.
This method is intended to remain as-is, and not be overridden.
It gets called by your HTTP framework's route handler, and performs
the following actions to process the request:
``authenticate_request``
Validate the Bearer token, populate the ``current_user``, and make
sure that the token covers the scope needed to call the requested
method.
*
*
``parse arguments``
The argument parser is responsible for:
- First, coercing and patching any parameters that might require
it due to versioning (i.e. the caller is using an old API
version that supports `index` as a parameter for pagination,
but the current version uses the name `offset`)
- Second, iterating through the endpoint's supported arguments
and validating that the params passed in comply with the
endpoint's requirements
- Third, populating the `context.args` array with the validated
arguments
If any of the arguments are invalid, then the Argument parser will
raise an ArgumentError that bubbles up to the `try/catch` block of
the execute method.
*
*
``before handler``
The before_handlers are specified by the Endpoint definition, and
are intended to support DRY-ing up your codebase. Have a set of
Endpoints that all need to grab an object from the ORM based on the
same parameter? Make them inherit from an Endpoint subclass that
performs that task in a before_handler!
*
*
``handle``
The core logic of your API endpoint, as implemented by you in your
Endpoint subclass. The API Framework expects ``handle`` to return
a dictionary specifying the response object and the JSON key that
it should hang off of, or a tuple of a dictionary and an HTTP status
code.
*
*
``after_handler``
Like the before_handlers, the ``after_handlers`` happen after the
handle method, and allow the endpoint developer to re-use code for
post-processing data from an endpoint.
*
*
``render response``
Like the argument parser, the response renderer is responsible for
a few things:
- First, it converts the ORM objects into JSON-serializable
Python dictionaries using the Resource objects defined by the
API implementation,
- Second, it does any version parameter coercion, renaming and
reformatting the edge version of the response to match the
version requested by the API caller,
- and Third, it serializes the Python dictionary into the response
format requested by the API caller (right now, we only support
JSON responses, but it'd be reasonable to support something like
HTML or XML or whatever in the future).
The rendered JSON text is then returned as the response that should
be sent by your HTTP framework's routing handler.
*
*
``_after_response_handler``
The `_after_response_handlers` are specified by the Endpoint
definition, and enable manipulation of the response object before it
is returned to the client, but after the response is rendered.
Because these are instancemethods, they may share instance data
from `self` specified in the endpoint's `_handle` method.
``_finalize_content``
The `_finalize_content` method is overridden by the Endpoint and is called
after the response is rendered into a serializable result.
This method is called with two arguments, the context and the rendered content,
and expected to return updated rendered content.
For in-place modification of dicts, this method will still be expected
to return the given argument.
``_allow_cors``
This value is set to enable CORS for a given endpoint.
When set to a string it supplies an explicit value to
'Access-Control-Allow-Origin'.
Set to True, this will allow access from *all* domains;
Access-Control-Allow-Origin = "*"
"""
try:
self._create_context(request)
self._authenticate()
context = get_current_context()
self._parse_args()
if hasattr(self, '_before_handlers') and \
isinstance(self._before_handlers, (list, tuple)):
for handler in self._before_handlers:
handler(context)
context.handler_result = self._handle(context)
if hasattr(self, '_after_handlers') and \
isinstance(self._after_handlers, (list, tuple)):
for handler in self._after_handlers:
handler(context)
self._render()
response = context.response
# After calling ._render(), the response is ready to go, so we
# shouldn't need to handle any other exceptions beyond this point.
except AuthenticationError as e:
if hasattr(e, 'message') and e.message is not None:
message = e.message
else:
message = "You don't have permission to do that."
err = APIError.Forbidden(message)
response = self._response_class(*err.response)
response.headers["Content-Type"] = 'application/json'
except ArgumentError as e:
err = APIError.UnprocessableEntity(e.message)
response = self._response_class(*err.response)
response.headers["Content-Type"] = 'application/json'
except APIError as e:
response = self._response_class(*e.response)
response.headers["Content-Type"] = 'application/json'
except PaleRaisedResponse as r:
response = self._response_class(*r.response)
response.headers["Content-Type"] = 'application/json'
except Exception as e:
logging.exception("Failed to handle Pale Endpoint %s: %r", self.__class__.__name__,
e)
err = APIError.Exception(repr(e))
response = self._response_class(*err.response)
response.headers["Content-Type"] = 'application/json'
allow_cors = getattr(self, "_allow_cors", None)
if allow_cors is True:
response.headers['Access-Control-Allow-Origin'] = '*'
elif isinstance(allow_cors, basestring):
response.headers['Access-Control-Allow-Origin'] = allow_cors
context.response = response
try:
if hasattr(self, '_after_response_handlers') and \
isinstance(self._after_response_handlers, (list, tuple)):
for handler in self._after_response_handlers:
handler(context, response)
except Exception as e:
logging.exception(
"Failed to process _after_response_handlers for Endpoint %s",
self.__class__.__name__)
raise
return response | python | def _execute(self, request, **kwargs):
"""The top-level execute function for the endpoint.
This method is intended to remain as-is, and not be overridden.
It gets called by your HTTP framework's route handler, and performs
the following actions to process the request:
``authenticate_request``
Validate the Bearer token, populate the ``current_user``, and make
sure that the token covers the scope needed to call the requested
method.
*
*
``parse arguments``
The argument parser is responsible for:
- First, coercing and patching any parameters that might require
it due to versioning (i.e. the caller is using an old API
version that supports `index` as a parameter for pagination,
but the current version uses the name `offset`)
- Second, iterating through the endpoint's supported arguments
and validating that the params passed in comply with the
endpoint's requirements
- Third, populating the `context.args` array with the validated
arguments
If any of the arguments are invalid, then the Argument parser will
raise an ArgumentError that bubbles up to the `try/catch` block of
the execute method.
*
*
``before handler``
The before_handlers are specified by the Endpoint definition, and
are intended to support DRY-ing up your codebase. Have a set of
Endpoints that all need to grab an object from the ORM based on the
same parameter? Make them inherit from an Endpoint subclass that
performs that task in a before_handler!
*
*
``handle``
The core logic of your API endpoint, as implemented by you in your
Endpoint subclass. The API Framework expects ``handle`` to return
a dictionary specifying the response object and the JSON key that
it should hang off of, or a tuple of a dictionary and an HTTP status
code.
*
*
``after_handler``
Like the before_handlers, the ``after_handlers`` happen after the
handle method, and allow the endpoint developer to re-use code for
post-processing data from an endpoint.
*
*
``render response``
Like the argument parser, the response renderer is responsible for
a few things:
- First, it converts the ORM objects into JSON-serializable
Python dictionaries using the Resource objects defined by the
API implementation,
- Second, it does any version parameter coercion, renaming and
reformatting the edge version of the response to match the
version requested by the API caller,
- and Third, it serializes the Python dictionary into the response
format requested by the API caller (right now, we only support
JSON responses, but it'd be reasonable to support something like
HTML or XML or whatever in the future).
The rendered JSON text is then returned as the response that should
be sent by your HTTP framework's routing handler.
*
*
``_after_response_handler``
The `_after_response_handlers` are specified by the Endpoint
definition, and enable manipulation of the response object before it
is returned to the client, but after the response is rendered.
Because these are instancemethods, they may share instance data
from `self` specified in the endpoint's `_handle` method.
``_finalize_content``
The `_finalize_content` method is overridden by the Endpoint and is called
after the response is rendered into a serializable result.
This method is called with two arguments, the context and the rendered content,
and expected to return updated rendered content.
For in-place modification of dicts, this method will still be expected
to return the given argument.
``_allow_cors``
This value is set to enable CORS for a given endpoint.
When set to a string it supplies an explicit value to
'Access-Control-Allow-Origin'.
Set to True, this will allow access from *all* domains;
Access-Control-Allow-Origin = "*"
"""
try:
self._create_context(request)
self._authenticate()
context = get_current_context()
self._parse_args()
if hasattr(self, '_before_handlers') and \
isinstance(self._before_handlers, (list, tuple)):
for handler in self._before_handlers:
handler(context)
context.handler_result = self._handle(context)
if hasattr(self, '_after_handlers') and \
isinstance(self._after_handlers, (list, tuple)):
for handler in self._after_handlers:
handler(context)
self._render()
response = context.response
# After calling ._render(), the response is ready to go, so we
# shouldn't need to handle any other exceptions beyond this point.
except AuthenticationError as e:
if hasattr(e, 'message') and e.message is not None:
message = e.message
else:
message = "You don't have permission to do that."
err = APIError.Forbidden(message)
response = self._response_class(*err.response)
response.headers["Content-Type"] = 'application/json'
except ArgumentError as e:
err = APIError.UnprocessableEntity(e.message)
response = self._response_class(*err.response)
response.headers["Content-Type"] = 'application/json'
except APIError as e:
response = self._response_class(*e.response)
response.headers["Content-Type"] = 'application/json'
except PaleRaisedResponse as r:
response = self._response_class(*r.response)
response.headers["Content-Type"] = 'application/json'
except Exception as e:
logging.exception("Failed to handle Pale Endpoint %s: %r", self.__class__.__name__,
e)
err = APIError.Exception(repr(e))
response = self._response_class(*err.response)
response.headers["Content-Type"] = 'application/json'
allow_cors = getattr(self, "_allow_cors", None)
if allow_cors is True:
response.headers['Access-Control-Allow-Origin'] = '*'
elif isinstance(allow_cors, basestring):
response.headers['Access-Control-Allow-Origin'] = allow_cors
context.response = response
try:
if hasattr(self, '_after_response_handlers') and \
isinstance(self._after_response_handlers, (list, tuple)):
for handler in self._after_response_handlers:
handler(context, response)
except Exception as e:
logging.exception(
"Failed to process _after_response_handlers for Endpoint %s",
self.__class__.__name__)
raise
return response | [
"def",
"_execute",
"(",
"self",
",",
"request",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"self",
".",
"_create_context",
"(",
"request",
")",
"self",
".",
"_authenticate",
"(",
")",
"context",
"=",
"get_current_context",
"(",
")",
"self",
".",
"_parse_args",
"(",
")",
"if",
"hasattr",
"(",
"self",
",",
"'_before_handlers'",
")",
"and",
"isinstance",
"(",
"self",
".",
"_before_handlers",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"for",
"handler",
"in",
"self",
".",
"_before_handlers",
":",
"handler",
"(",
"context",
")",
"context",
".",
"handler_result",
"=",
"self",
".",
"_handle",
"(",
"context",
")",
"if",
"hasattr",
"(",
"self",
",",
"'_after_handlers'",
")",
"and",
"isinstance",
"(",
"self",
".",
"_after_handlers",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"for",
"handler",
"in",
"self",
".",
"_after_handlers",
":",
"handler",
"(",
"context",
")",
"self",
".",
"_render",
"(",
")",
"response",
"=",
"context",
".",
"response",
"# After calling ._render(), the response is ready to go, so we",
"# shouldn't need to handle any other exceptions beyond this point.",
"except",
"AuthenticationError",
"as",
"e",
":",
"if",
"hasattr",
"(",
"e",
",",
"'message'",
")",
"and",
"e",
".",
"message",
"is",
"not",
"None",
":",
"message",
"=",
"e",
".",
"message",
"else",
":",
"message",
"=",
"\"You don't have permission to do that.\"",
"err",
"=",
"APIError",
".",
"Forbidden",
"(",
"message",
")",
"response",
"=",
"self",
".",
"_response_class",
"(",
"*",
"err",
".",
"response",
")",
"response",
".",
"headers",
"[",
"\"Content-Type\"",
"]",
"=",
"'application/json'",
"except",
"ArgumentError",
"as",
"e",
":",
"err",
"=",
"APIError",
".",
"UnprocessableEntity",
"(",
"e",
".",
"message",
")",
"response",
"=",
"self",
".",
"_response_class",
"(",
"*",
"err",
".",
"response",
")",
"response",
".",
"headers",
"[",
"\"Content-Type\"",
"]",
"=",
"'application/json'",
"except",
"APIError",
"as",
"e",
":",
"response",
"=",
"self",
".",
"_response_class",
"(",
"*",
"e",
".",
"response",
")",
"response",
".",
"headers",
"[",
"\"Content-Type\"",
"]",
"=",
"'application/json'",
"except",
"PaleRaisedResponse",
"as",
"r",
":",
"response",
"=",
"self",
".",
"_response_class",
"(",
"*",
"r",
".",
"response",
")",
"response",
".",
"headers",
"[",
"\"Content-Type\"",
"]",
"=",
"'application/json'",
"except",
"Exception",
"as",
"e",
":",
"logging",
".",
"exception",
"(",
"\"Failed to handle Pale Endpoint %s: %r\"",
",",
"self",
".",
"__class__",
".",
"__name__",
",",
"e",
")",
"err",
"=",
"APIError",
".",
"Exception",
"(",
"repr",
"(",
"e",
")",
")",
"response",
"=",
"self",
".",
"_response_class",
"(",
"*",
"err",
".",
"response",
")",
"response",
".",
"headers",
"[",
"\"Content-Type\"",
"]",
"=",
"'application/json'",
"allow_cors",
"=",
"getattr",
"(",
"self",
",",
"\"_allow_cors\"",
",",
"None",
")",
"if",
"allow_cors",
"is",
"True",
":",
"response",
".",
"headers",
"[",
"'Access-Control-Allow-Origin'",
"]",
"=",
"'*'",
"elif",
"isinstance",
"(",
"allow_cors",
",",
"basestring",
")",
":",
"response",
".",
"headers",
"[",
"'Access-Control-Allow-Origin'",
"]",
"=",
"allow_cors",
"context",
".",
"response",
"=",
"response",
"try",
":",
"if",
"hasattr",
"(",
"self",
",",
"'_after_response_handlers'",
")",
"and",
"isinstance",
"(",
"self",
".",
"_after_response_handlers",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"for",
"handler",
"in",
"self",
".",
"_after_response_handlers",
":",
"handler",
"(",
"context",
",",
"response",
")",
"except",
"Exception",
"as",
"e",
":",
"logging",
".",
"exception",
"(",
"\"Failed to process _after_response_handlers for Endpoint %s\"",
",",
"self",
".",
"__class__",
".",
"__name__",
")",
"raise",
"return",
"response"
] | The top-level execute function for the endpoint.
This method is intended to remain as-is, and not be overridden.
It gets called by your HTTP framework's route handler, and performs
the following actions to process the request:
``authenticate_request``
Validate the Bearer token, populate the ``current_user``, and make
sure that the token covers the scope needed to call the requested
method.
*
*
``parse arguments``
The argument parser is responsible for:
- First, coercing and patching any parameters that might require
it due to versioning (i.e. the caller is using an old API
version that supports `index` as a parameter for pagination,
but the current version uses the name `offset`)
- Second, iterating through the endpoint's supported arguments
and validating that the params passed in comply with the
endpoint's requirements
- Third, populating the `context.args` array with the validated
arguments
If any of the arguments are invalid, then the Argument parser will
raise an ArgumentError that bubbles up to the `try/catch` block of
the execute method.
*
*
``before handler``
The before_handlers are specified by the Endpoint definition, and
are intended to support DRY-ing up your codebase. Have a set of
Endpoints that all need to grab an object from the ORM based on the
same parameter? Make them inherit from an Endpoint subclass that
performs that task in a before_handler!
*
*
``handle``
The core logic of your API endpoint, as implemented by you in your
Endpoint subclass. The API Framework expects ``handle`` to return
a dictionary specifying the response object and the JSON key that
it should hang off of, or a tuple of a dictionary and an HTTP status
code.
*
*
``after_handler``
Like the before_handlers, the ``after_handlers`` happen after the
handle method, and allow the endpoint developer to re-use code for
post-processing data from an endpoint.
*
*
``render response``
Like the argument parser, the response renderer is responsible for
a few things:
- First, it converts the ORM objects into JSON-serializable
Python dictionaries using the Resource objects defined by the
API implementation,
- Second, it does any version parameter coercion, renaming and
reformatting the edge version of the response to match the
version requested by the API caller,
- and Third, it serializes the Python dictionary into the response
format requested by the API caller (right now, we only support
JSON responses, but it'd be reasonable to support something like
HTML or XML or whatever in the future).
The rendered JSON text is then returned as the response that should
be sent by your HTTP framework's routing handler.
*
*
``_after_response_handler``
The `_after_response_handlers` are specified by the Endpoint
definition, and enable manipulation of the response object before it
is returned to the client, but after the response is rendered.
Because these are instancemethods, they may share instance data
from `self` specified in the endpoint's `_handle` method.
``_finalize_content``
The `_finalize_content` method is overridden by the Endpoint and is called
after the response is rendered into a serializable result.
This method is called with two arguments, the context and the rendered content,
and expected to return updated rendered content.
For in-place modification of dicts, this method will still be expected
to return the given argument.
``_allow_cors``
This value is set to enable CORS for a given endpoint.
When set to a string it supplies an explicit value to
'Access-Control-Allow-Origin'.
Set to True, this will allow access from *all* domains;
Access-Control-Allow-Origin = "*" | [
"The",
"top",
"-",
"level",
"execute",
"function",
"for",
"the",
"endpoint",
"."
] | dc002ee6032c856551143af222ff8f71ed9853fe | https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/endpoint.py#L122-L286 | train |
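The `_execute` docstring above describes a fixed request lifecycle: authenticate, parse arguments, run `_before_handlers`, call `_handle`, run `_after_handlers`, render, then run `_after_response_handlers`. A minimal sketch of an endpoint that plugs into that lifecycle follows; the class name, the handler body, and the shape of the route-handler call are assumptions for illustration.

```python
# Hypothetical endpoint that participates in the lifecycle described above.
from pale.endpoint import Endpoint

def _audit(context):
    # A before-handler runs after argument parsing and before _handle;
    # which attributes `context` exposes here is an assumption.
    pass

class GetAccountEndpoint(Endpoint):      # illustrative name
    _before_handlers = [_audit]

    def _handle(self, context):
        # _execute expects a dict (or a (dict, http_status) tuple) back.
        return {"account": {"id": "acct_123"}}

# An HTTP framework's route handler would then call something like:
#   response = GetAccountEndpoint()._execute(request)
```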
AASHE/python-membersuite-api-client | membersuite_api_client/client.py | ConciergeClient.construct_concierge_header | def construct_concierge_header(self, url):
"""
Constructs the Concierge Request Header lxml object to be used as the
'_soapheaders' argument for WSDL methods.
"""
concierge_request_header = (
etree.Element(
etree.QName(XHTML_NAMESPACE, "ConciergeRequestHeader"),
nsmap={'sch': XHTML_NAMESPACE}))
if self.session_id:
session = (
etree.SubElement(concierge_request_header,
etree.QName(XHTML_NAMESPACE, "SessionId")))
session.text = self.session_id
access_key = (
etree.SubElement(concierge_request_header,
etree.QName(XHTML_NAMESPACE, "AccessKeyId")))
access_key.text = self.access_key
association_id = (etree.SubElement(concierge_request_header,
etree.QName(XHTML_NAMESPACE,
"AssociationId")))
association_id.text = self.association_id
signature = (
etree.SubElement(concierge_request_header,
etree.QName(XHTML_NAMESPACE, "Signature")))
signature.text = self.get_hashed_signature(url=url)
return concierge_request_header | python | def construct_concierge_header(self, url):
"""
Constructs the Concierge Request Header lxml object to be used as the
'_soapheaders' argument for WSDL methods.
"""
concierge_request_header = (
etree.Element(
etree.QName(XHTML_NAMESPACE, "ConciergeRequestHeader"),
nsmap={'sch': XHTML_NAMESPACE}))
if self.session_id:
session = (
etree.SubElement(concierge_request_header,
etree.QName(XHTML_NAMESPACE, "SessionId")))
session.text = self.session_id
access_key = (
etree.SubElement(concierge_request_header,
etree.QName(XHTML_NAMESPACE, "AccessKeyId")))
access_key.text = self.access_key
association_id = (etree.SubElement(concierge_request_header,
etree.QName(XHTML_NAMESPACE,
"AssociationId")))
association_id.text = self.association_id
signature = (
etree.SubElement(concierge_request_header,
etree.QName(XHTML_NAMESPACE, "Signature")))
signature.text = self.get_hashed_signature(url=url)
return concierge_request_header | [
"def",
"construct_concierge_header",
"(",
"self",
",",
"url",
")",
":",
"concierge_request_header",
"=",
"(",
"etree",
".",
"Element",
"(",
"etree",
".",
"QName",
"(",
"XHTML_NAMESPACE",
",",
"\"ConciergeRequestHeader\"",
")",
",",
"nsmap",
"=",
"{",
"'sch'",
":",
"XHTML_NAMESPACE",
"}",
")",
")",
"if",
"self",
".",
"session_id",
":",
"session",
"=",
"(",
"etree",
".",
"SubElement",
"(",
"concierge_request_header",
",",
"etree",
".",
"QName",
"(",
"XHTML_NAMESPACE",
",",
"\"SessionId\"",
")",
")",
")",
"session",
".",
"text",
"=",
"self",
".",
"session_id",
"access_key",
"=",
"(",
"etree",
".",
"SubElement",
"(",
"concierge_request_header",
",",
"etree",
".",
"QName",
"(",
"XHTML_NAMESPACE",
",",
"\"AccessKeyId\"",
")",
")",
")",
"access_key",
".",
"text",
"=",
"self",
".",
"access_key",
"association_id",
"=",
"(",
"etree",
".",
"SubElement",
"(",
"concierge_request_header",
",",
"etree",
".",
"QName",
"(",
"XHTML_NAMESPACE",
",",
"\"AssociationId\"",
")",
")",
")",
"association_id",
".",
"text",
"=",
"self",
".",
"association_id",
"signature",
"=",
"(",
"etree",
".",
"SubElement",
"(",
"concierge_request_header",
",",
"etree",
".",
"QName",
"(",
"XHTML_NAMESPACE",
",",
"\"Signature\"",
")",
")",
")",
"signature",
".",
"text",
"=",
"self",
".",
"get_hashed_signature",
"(",
"url",
"=",
"url",
")",
"return",
"concierge_request_header"
] | Constructs the Concierge Request Header lxml object to be used as the
'_soapheaders' argument for WSDL methods. | [
"Constructs",
"the",
"Concierge",
"Request",
"Header",
"lxml",
"object",
"to",
"be",
"used",
"as",
"the",
"_soapheaders",
"argument",
"for",
"WSDL",
"methods",
"."
] | 221f5ed8bc7d4424237a4669c5af9edc11819ee9 | https://github.com/AASHE/python-membersuite-api-client/blob/221f5ed8bc7d4424237a4669c5af9edc11819ee9/membersuite_api_client/client.py#L65-L96 | train |
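The header returned by `construct_concierge_header` is meant to be passed as `_soapheaders` on WSDL calls. A rough call-site sketch follows; the `ConciergeClient` constructor keywords, the internal `client.service` proxy attribute, the `WhoAmI` operation, and the URL are all assumptions, since only this one method appears in the row.

```python
from membersuite_api_client.client import ConciergeClient  # module path from the row

# Constructor arguments are assumed; substitute real credentials.
client = ConciergeClient(access_key="...", secret_key="...", association_id="...")

url = "http://membersuite.com/contracts/IConciergeAPIService/WhoAmI"  # example URL only
header = client.construct_concierge_header(url=url)

# Passed as _soapheaders on a zeep-style operation (proxy attribute assumed):
result = client.client.service.WhoAmI(_soapheaders=[header])
```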
ashmastaflash/kal-wrapper | kalibrate/fn.py | options_string_builder | def options_string_builder(option_mapping, args):
"""Return arguments for CLI invocation of kal."""
options_string = ""
for option, flag in option_mapping.items():
if option in args:
options_string += str(" %s %s" % (flag, str(args[option])))
return options_string | python | def options_string_builder(option_mapping, args):
"""Return arguments for CLI invocation of kal."""
options_string = ""
for option, flag in option_mapping.items():
if option in args:
options_string += str(" %s %s" % (flag, str(args[option])))
return options_string | [
"def",
"options_string_builder",
"(",
"option_mapping",
",",
"args",
")",
":",
"options_string",
"=",
"\"\"",
"for",
"option",
",",
"flag",
"in",
"option_mapping",
".",
"items",
"(",
")",
":",
"if",
"option",
"in",
"args",
":",
"options_string",
"+=",
"str",
"(",
"\" %s %s\"",
"%",
"(",
"flag",
",",
"str",
"(",
"args",
"[",
"option",
"]",
")",
")",
")",
"return",
"options_string"
] | Return arguments for CLI invocation of kal. | [
"Return",
"arguments",
"for",
"CLI",
"invocation",
"of",
"kal",
"."
] | 80ee03ab7bd3172ac26b769d6b442960f3424b0e | https://github.com/ashmastaflash/kal-wrapper/blob/80ee03ab7bd3172ac26b769d6b442960f3424b0e/kalibrate/fn.py#L6-L12 | train |
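A quick illustration of what `options_string_builder` returns; the `kalibrate.fn` import path follows the row's `kalibrate/fn.py` path, and flag order is whatever dict iteration yields.

```python
from kalibrate.fn import options_string_builder

option_mapping = {"gain": "-g", "device": "-d", "error": "-e"}
args = {"gain": 40, "error": 22}            # no "device" key, so -d is omitted
options_string_builder(option_mapping, args)
# -> " -g 40 -e 22"   (order follows dict iteration)
```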
ashmastaflash/kal-wrapper | kalibrate/fn.py | build_kal_scan_band_string | def build_kal_scan_band_string(kal_bin, band, args):
"""Return string for CLI invocation of kal, for band scan."""
option_mapping = {"gain": "-g",
"device": "-d",
"error": "-e"}
if not sanity.scan_band_is_valid(band):
err_txt = "Unsupported band designation: %" % band
raise ValueError(err_txt)
base_string = "%s -v -s %s" % (kal_bin, band)
base_string += options_string_builder(option_mapping, args)
return(base_string) | python | def build_kal_scan_band_string(kal_bin, band, args):
"""Return string for CLI invocation of kal, for band scan."""
option_mapping = {"gain": "-g",
"device": "-d",
"error": "-e"}
if not sanity.scan_band_is_valid(band):
err_txt = "Unsupported band designation: %" % band
raise ValueError(err_txt)
base_string = "%s -v -s %s" % (kal_bin, band)
base_string += options_string_builder(option_mapping, args)
return(base_string) | [
"def",
"build_kal_scan_band_string",
"(",
"kal_bin",
",",
"band",
",",
"args",
")",
":",
"option_mapping",
"=",
"{",
"\"gain\"",
":",
"\"-g\"",
",",
"\"device\"",
":",
"\"-d\"",
",",
"\"error\"",
":",
"\"-e\"",
"}",
"if",
"not",
"sanity",
".",
"scan_band_is_valid",
"(",
"band",
")",
":",
"err_txt",
"=",
"\"Unsupported band designation: %\"",
"%",
"band",
"raise",
"ValueError",
"(",
"err_txt",
")",
"base_string",
"=",
"\"%s -v -s %s\"",
"%",
"(",
"kal_bin",
",",
"band",
")",
"base_string",
"+=",
"options_string_builder",
"(",
"option_mapping",
",",
"args",
")",
"return",
"(",
"base_string",
")"
] | Return string for CLI invocation of kal, for band scan. | [
"Return",
"string",
"for",
"CLI",
"invocation",
"of",
"kal",
"for",
"band",
"scan",
"."
] | 80ee03ab7bd3172ac26b769d6b442960f3424b0e | https://github.com/ashmastaflash/kal-wrapper/blob/80ee03ab7bd3172ac26b769d6b442960f3424b0e/kalibrate/fn.py#L15-L25 | train |
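Building on the helper above, `build_kal_scan_band_string` assembles the full kal band-scan command line. A sketch follows; whether `GSM900` passes `sanity.scan_band_is_valid` is an assumption, because that check is not part of this row.

```python
from kalibrate.fn import build_kal_scan_band_string

build_kal_scan_band_string("kal", "GSM900", {"gain": 40, "error": 22})
# -> "kal -v -s GSM900 -g 40 -e 22"   (option order may vary)
```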
ashmastaflash/kal-wrapper | kalibrate/fn.py | build_kal_scan_channel_string | def build_kal_scan_channel_string(kal_bin, channel, args):
"""Return string for CLI invocation of kal, for channel scan."""
option_mapping = {"gain": "-g",
"device": "-d",
"error": "-e"}
base_string = "%s -v -c %s" % (kal_bin, channel)
base_string += options_string_builder(option_mapping, args)
return(base_string) | python | def build_kal_scan_channel_string(kal_bin, channel, args):
"""Return string for CLI invocation of kal, for channel scan."""
option_mapping = {"gain": "-g",
"device": "-d",
"error": "-e"}
base_string = "%s -v -c %s" % (kal_bin, channel)
base_string += options_string_builder(option_mapping, args)
return(base_string) | [
"def",
"build_kal_scan_channel_string",
"(",
"kal_bin",
",",
"channel",
",",
"args",
")",
":",
"option_mapping",
"=",
"{",
"\"gain\"",
":",
"\"-g\"",
",",
"\"device\"",
":",
"\"-d\"",
",",
"\"error\"",
":",
"\"-e\"",
"}",
"base_string",
"=",
"\"%s -v -c %s\"",
"%",
"(",
"kal_bin",
",",
"channel",
")",
"base_string",
"+=",
"options_string_builder",
"(",
"option_mapping",
",",
"args",
")",
"return",
"(",
"base_string",
")"
] | Return string for CLI invocation of kal, for channel scan. | [
"Return",
"string",
"for",
"CLI",
"invocation",
"of",
"kal",
"for",
"channel",
"scan",
"."
] | 80ee03ab7bd3172ac26b769d6b442960f3424b0e | https://github.com/ashmastaflash/kal-wrapper/blob/80ee03ab7bd3172ac26b769d6b442960f3424b0e/kalibrate/fn.py#L28-L35 | train |
ashmastaflash/kal-wrapper | kalibrate/fn.py | determine_final_freq | def determine_final_freq(base, direction, modifier):
"""Return integer for frequency."""
result = 0
if direction == "+":
result = base + modifier
elif direction == "-":
result = base - modifier
return(result) | python | def determine_final_freq(base, direction, modifier):
"""Return integer for frequency."""
result = 0
if direction == "+":
result = base + modifier
elif direction == "-":
result = base - modifier
return(result) | [
"def",
"determine_final_freq",
"(",
"base",
",",
"direction",
",",
"modifier",
")",
":",
"result",
"=",
"0",
"if",
"direction",
"==",
"\"+\"",
":",
"result",
"=",
"base",
"+",
"modifier",
"elif",
"direction",
"==",
"\"-\"",
":",
"result",
"=",
"base",
"-",
"modifier",
"return",
"(",
"result",
")"
] | Return integer for frequency. | [
"Return",
"integer",
"for",
"frequency",
"."
] | 80ee03ab7bd3172ac26b769d6b442960f3424b0e | https://github.com/ashmastaflash/kal-wrapper/blob/80ee03ab7bd3172ac26b769d6b442960f3424b0e/kalibrate/fn.py#L55-L62 | train |
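`determine_final_freq` simply applies the scan's `+`/`-` offset to the base frequency, falling back to 0 for an unrecognized direction:

```python
from kalibrate.fn import determine_final_freq

determine_final_freq(939200000, "+", 4400)   # 939204400
determine_final_freq(939200000, "-", 4400)   # 939195600
determine_final_freq(939200000, "?", 4400)   # 0 (unknown direction)
```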
ashmastaflash/kal-wrapper | kalibrate/fn.py | to_eng | def to_eng(num_in):
"""Return number in engineering notation."""
x = decimal.Decimal(str(num_in))
eng_not = x.normalize().to_eng_string()
return(eng_not) | python | def to_eng(num_in):
"""Return number in engineering notation."""
x = decimal.Decimal(str(num_in))
eng_not = x.normalize().to_eng_string()
return(eng_not) | [
"def",
"to_eng",
"(",
"num_in",
")",
":",
"x",
"=",
"decimal",
".",
"Decimal",
"(",
"str",
"(",
"num_in",
")",
")",
"eng_not",
"=",
"x",
".",
"normalize",
"(",
")",
".",
"to_eng_string",
"(",
")",
"return",
"(",
"eng_not",
")"
] | Return number in engineering notation. | [
"Return",
"number",
"in",
"engineering",
"notation",
"."
] | 80ee03ab7bd3172ac26b769d6b442960f3424b0e | https://github.com/ashmastaflash/kal-wrapper/blob/80ee03ab7bd3172ac26b769d6b442960f3424b0e/kalibrate/fn.py#L65-L69 | train |
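`to_eng` leans on `decimal.Decimal.to_eng_string`, so frequencies come back with exponents in multiples of three, for example:

```python
from kalibrate.fn import to_eng

to_eng(940000000)    # '940E+6'
to_eng(939204400)    # '939.2044E+6'
```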
ashmastaflash/kal-wrapper | kalibrate/fn.py | determine_device | def determine_device(kal_out):
"""Extract and return device from scan results."""
device = ""
while device == "":
for line in kal_out.splitlines():
if "Using device " in line:
device = str(line.split(' ', 2)[-1])
if device == "":
device = None
return device | python | def determine_device(kal_out):
"""Extract and return device from scan results."""
device = ""
while device == "":
for line in kal_out.splitlines():
if "Using device " in line:
device = str(line.split(' ', 2)[-1])
if device == "":
device = None
return device | [
"def",
"determine_device",
"(",
"kal_out",
")",
":",
"device",
"=",
"\"\"",
"while",
"device",
"==",
"\"\"",
":",
"for",
"line",
"in",
"kal_out",
".",
"splitlines",
"(",
")",
":",
"if",
"\"Using device \"",
"in",
"line",
":",
"device",
"=",
"str",
"(",
"line",
".",
"split",
"(",
"' '",
",",
"2",
")",
"[",
"-",
"1",
"]",
")",
"if",
"device",
"==",
"\"\"",
":",
"device",
"=",
"None",
"return",
"device"
] | Extract and return device from scan results. | [
"Extract",
"and",
"return",
"device",
"from",
"scan",
"results",
"."
] | 80ee03ab7bd3172ac26b769d6b442960f3424b0e | https://github.com/ashmastaflash/kal-wrapper/blob/80ee03ab7bd3172ac26b769d6b442960f3424b0e/kalibrate/fn.py#L81-L90 | train |
ashmastaflash/kal-wrapper | kalibrate/fn.py | extract_value_from_output | def extract_value_from_output(canary, split_offset, kal_out):
"""Return value parsed from output.
Args:
canary(str): This string must exist in the target line.
split_offset(int): Split offset for target value in string.
kal_out(str): Output from kal.
"""
retval = ""
while retval == "":
for line in kal_out.splitlines():
if canary in line:
retval = str(line.split()[split_offset])
if retval == "":
retval = None
return retval | python | def extract_value_from_output(canary, split_offset, kal_out):
"""Return value parsed from output.
Args:
canary(str): This string must exist in the target line.
split_offset(int): Split offset for target value in string.
kal_out(str): Output from kal.
"""
retval = ""
while retval == "":
for line in kal_out.splitlines():
if canary in line:
retval = str(line.split()[split_offset])
if retval == "":
retval = None
return retval | [
"def",
"extract_value_from_output",
"(",
"canary",
",",
"split_offset",
",",
"kal_out",
")",
":",
"retval",
"=",
"\"\"",
"while",
"retval",
"==",
"\"\"",
":",
"for",
"line",
"in",
"kal_out",
".",
"splitlines",
"(",
")",
":",
"if",
"canary",
"in",
"line",
":",
"retval",
"=",
"str",
"(",
"line",
".",
"split",
"(",
")",
"[",
"split_offset",
"]",
")",
"if",
"retval",
"==",
"\"\"",
":",
"retval",
"=",
"None",
"return",
"retval"
] | Return value parsed from output.
Args:
canary(str): This string must exist in the target line.
split_offset(int): Split offset for target value in string.
kal_out(str): Output from kal. | [
"Return",
"value",
"parsed",
"from",
"output",
"."
] | 80ee03ab7bd3172ac26b769d6b442960f3424b0e | https://github.com/ashmastaflash/kal-wrapper/blob/80ee03ab7bd3172ac26b769d6b442960f3424b0e/kalibrate/fn.py#L103-L118 | train |
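`extract_value_from_output` is a generic line-grepper over kal's stdout. The sample text below is shaped to satisfy the canary/offset contract rather than copied from a real kal run:

```python
from kalibrate.fn import extract_value_from_output

sample = ("Using device 0: Generic RTL2832U OEM\n"
          "Using GSM-900 channel 21 (939.2MHz)\n")   # invented, parser-shaped lines
extract_value_from_output(" channel ", 3, sample)     # -> '21'
```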
ashmastaflash/kal-wrapper | kalibrate/fn.py | determine_chan_detect_threshold | def determine_chan_detect_threshold(kal_out):
"""Return channel detect threshold from kal output."""
channel_detect_threshold = ""
while channel_detect_threshold == "":
for line in kal_out.splitlines():
if "channel detect threshold: " in line:
channel_detect_threshold = str(line.split()[-1])
if channel_detect_threshold == "":
print("Unable to parse sample rate")
channel_detect_threshold = None
return channel_detect_threshold | python | def determine_chan_detect_threshold(kal_out):
"""Return channel detect threshold from kal output."""
channel_detect_threshold = ""
while channel_detect_threshold == "":
for line in kal_out.splitlines():
if "channel detect threshold: " in line:
channel_detect_threshold = str(line.split()[-1])
if channel_detect_threshold == "":
print("Unable to parse sample rate")
channel_detect_threshold = None
return channel_detect_threshold | [
"def",
"determine_chan_detect_threshold",
"(",
"kal_out",
")",
":",
"channel_detect_threshold",
"=",
"\"\"",
"while",
"channel_detect_threshold",
"==",
"\"\"",
":",
"for",
"line",
"in",
"kal_out",
".",
"splitlines",
"(",
")",
":",
"if",
"\"channel detect threshold: \"",
"in",
"line",
":",
"channel_detect_threshold",
"=",
"str",
"(",
"line",
".",
"split",
"(",
")",
"[",
"-",
"1",
"]",
")",
"if",
"channel_detect_threshold",
"==",
"\"\"",
":",
"print",
"(",
"\"Unable to parse sample rate\"",
")",
"channel_detect_threshold",
"=",
"None",
"return",
"channel_detect_threshold"
] | Return channel detect threshold from kal output. | [
"Return",
"channel",
"detect",
"threshold",
"from",
"kal",
"output",
"."
] | 80ee03ab7bd3172ac26b769d6b442960f3424b0e | https://github.com/ashmastaflash/kal-wrapper/blob/80ee03ab7bd3172ac26b769d6b442960f3424b0e/kalibrate/fn.py#L127-L137 | train |
ashmastaflash/kal-wrapper | kalibrate/fn.py | determine_band_channel | def determine_band_channel(kal_out):
"""Return band, channel, target frequency from kal output."""
band = ""
channel = ""
tgt_freq = ""
while band == "":
for line in kal_out.splitlines():
if "Using " in line and " channel " in line:
band = str(line.split()[1])
channel = str(line.split()[3])
tgt_freq = str(line.split()[4]).replace(
"(", "").replace(")", "")
if band == "":
band = None
return(band, channel, tgt_freq) | python | def determine_band_channel(kal_out):
"""Return band, channel, target frequency from kal output."""
band = ""
channel = ""
tgt_freq = ""
while band == "":
for line in kal_out.splitlines():
if "Using " in line and " channel " in line:
band = str(line.split()[1])
channel = str(line.split()[3])
tgt_freq = str(line.split()[4]).replace(
"(", "").replace(")", "")
if band == "":
band = None
return(band, channel, tgt_freq) | [
"def",
"determine_band_channel",
"(",
"kal_out",
")",
":",
"band",
"=",
"\"\"",
"channel",
"=",
"\"\"",
"tgt_freq",
"=",
"\"\"",
"while",
"band",
"==",
"\"\"",
":",
"for",
"line",
"in",
"kal_out",
".",
"splitlines",
"(",
")",
":",
"if",
"\"Using \"",
"in",
"line",
"and",
"\" channel \"",
"in",
"line",
":",
"band",
"=",
"str",
"(",
"line",
".",
"split",
"(",
")",
"[",
"1",
"]",
")",
"channel",
"=",
"str",
"(",
"line",
".",
"split",
"(",
")",
"[",
"3",
"]",
")",
"tgt_freq",
"=",
"str",
"(",
"line",
".",
"split",
"(",
")",
"[",
"4",
"]",
")",
".",
"replace",
"(",
"\"(\"",
",",
"\"\"",
")",
".",
"replace",
"(",
"\")\"",
",",
"\"\"",
")",
"if",
"band",
"==",
"\"\"",
":",
"band",
"=",
"None",
"return",
"(",
"band",
",",
"channel",
",",
"tgt_freq",
")"
] | Return band, channel, target frequency from kal output. | [
"Return",
"band",
"channel",
"target",
"frequency",
"from",
"kal",
"output",
"."
] | 80ee03ab7bd3172ac26b769d6b442960f3424b0e | https://github.com/ashmastaflash/kal-wrapper/blob/80ee03ab7bd3172ac26b769d6b442960f3424b0e/kalibrate/fn.py#L140-L154 | train |
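`determine_band_channel` looks for a line of the form `Using <band> channel <n> (<freq>)`; with a line constructed to match that shape (not a captured transcript):

```python
from kalibrate.fn import determine_band_channel

kal_out = "Using GSM-900 channel 21 (939.2MHz)\n"   # invented, parser-shaped line
determine_band_channel(kal_out)   # -> ('GSM-900', '21', '939.2MHz')
```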
ashmastaflash/kal-wrapper | kalibrate/fn.py | parse_kal_scan | def parse_kal_scan(kal_out):
"""Parse kal band scan output."""
kal_data = []
scan_band = determine_scan_band(kal_out)
scan_gain = determine_scan_gain(kal_out)
scan_device = determine_device(kal_out)
sample_rate = determine_sample_rate(kal_out)
chan_detect_threshold = determine_chan_detect_threshold(kal_out)
for line in kal_out.splitlines():
if "chan:" in line:
p_line = line.split(' ')
chan = str(p_line[1])
modifier = str(p_line[3])
power = str(p_line[5])
mod_raw = str(p_line[4]).replace(')\tpower:', '')
base_raw = str((p_line[2]).replace('(', ''))
mod_freq = herz_me(mod_raw)
base_freq = herz_me(base_raw)
final_freq = to_eng(determine_final_freq(base_freq, modifier,
mod_freq))
kal_run = {"channel": chan,
"base_freq": base_freq,
"mod_freq": mod_freq,
"modifier": modifier,
"final_freq": final_freq,
"power": power,
"band": scan_band,
"gain": scan_gain,
"device": scan_device,
"sample_rate": sample_rate,
"channel_detect_threshold": chan_detect_threshold}
kal_data.append(kal_run.copy())
return kal_data | python | def parse_kal_scan(kal_out):
"""Parse kal band scan output."""
kal_data = []
scan_band = determine_scan_band(kal_out)
scan_gain = determine_scan_gain(kal_out)
scan_device = determine_device(kal_out)
sample_rate = determine_sample_rate(kal_out)
chan_detect_threshold = determine_chan_detect_threshold(kal_out)
for line in kal_out.splitlines():
if "chan:" in line:
p_line = line.split(' ')
chan = str(p_line[1])
modifier = str(p_line[3])
power = str(p_line[5])
mod_raw = str(p_line[4]).replace(')\tpower:', '')
base_raw = str((p_line[2]).replace('(', ''))
mod_freq = herz_me(mod_raw)
base_freq = herz_me(base_raw)
final_freq = to_eng(determine_final_freq(base_freq, modifier,
mod_freq))
kal_run = {"channel": chan,
"base_freq": base_freq,
"mod_freq": mod_freq,
"modifier": modifier,
"final_freq": final_freq,
"power": power,
"band": scan_band,
"gain": scan_gain,
"device": scan_device,
"sample_rate": sample_rate,
"channel_detect_threshold": chan_detect_threshold}
kal_data.append(kal_run.copy())
return kal_data | [
"def",
"parse_kal_scan",
"(",
"kal_out",
")",
":",
"kal_data",
"=",
"[",
"]",
"scan_band",
"=",
"determine_scan_band",
"(",
"kal_out",
")",
"scan_gain",
"=",
"determine_scan_gain",
"(",
"kal_out",
")",
"scan_device",
"=",
"determine_device",
"(",
"kal_out",
")",
"sample_rate",
"=",
"determine_sample_rate",
"(",
"kal_out",
")",
"chan_detect_threshold",
"=",
"determine_chan_detect_threshold",
"(",
"kal_out",
")",
"for",
"line",
"in",
"kal_out",
".",
"splitlines",
"(",
")",
":",
"if",
"\"chan:\"",
"in",
"line",
":",
"p_line",
"=",
"line",
".",
"split",
"(",
"' '",
")",
"chan",
"=",
"str",
"(",
"p_line",
"[",
"1",
"]",
")",
"modifier",
"=",
"str",
"(",
"p_line",
"[",
"3",
"]",
")",
"power",
"=",
"str",
"(",
"p_line",
"[",
"5",
"]",
")",
"mod_raw",
"=",
"str",
"(",
"p_line",
"[",
"4",
"]",
")",
".",
"replace",
"(",
"')\\tpower:'",
",",
"''",
")",
"base_raw",
"=",
"str",
"(",
"(",
"p_line",
"[",
"2",
"]",
")",
".",
"replace",
"(",
"'('",
",",
"''",
")",
")",
"mod_freq",
"=",
"herz_me",
"(",
"mod_raw",
")",
"base_freq",
"=",
"herz_me",
"(",
"base_raw",
")",
"final_freq",
"=",
"to_eng",
"(",
"determine_final_freq",
"(",
"base_freq",
",",
"modifier",
",",
"mod_freq",
")",
")",
"kal_run",
"=",
"{",
"\"channel\"",
":",
"chan",
",",
"\"base_freq\"",
":",
"base_freq",
",",
"\"mod_freq\"",
":",
"mod_freq",
",",
"\"modifier\"",
":",
"modifier",
",",
"\"final_freq\"",
":",
"final_freq",
",",
"\"power\"",
":",
"power",
",",
"\"band\"",
":",
"scan_band",
",",
"\"gain\"",
":",
"scan_gain",
",",
"\"device\"",
":",
"scan_device",
",",
"\"sample_rate\"",
":",
"sample_rate",
",",
"\"channel_detect_threshold\"",
":",
"chan_detect_threshold",
"}",
"kal_data",
".",
"append",
"(",
"kal_run",
".",
"copy",
"(",
")",
")",
"return",
"kal_data"
] | Parse kal band scan output. | [
"Parse",
"kal",
"band",
"scan",
"output",
"."
] | 80ee03ab7bd3172ac26b769d6b442960f3424b0e | https://github.com/ashmastaflash/kal-wrapper/blob/80ee03ab7bd3172ac26b769d6b442960f3424b0e/kalibrate/fn.py#L157-L189 | train |
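`parse_kal_scan` stitches the helpers above together over a full band-scan transcript. Rather than fabricating a transcript, the sketch below runs kal itself and parses whatever it prints; it assumes the `kal` binary and an SDR are available and that `GSM900` is an accepted band name.

```python
import subprocess

from kalibrate import fn

cmd = fn.build_kal_scan_band_string("kal", "GSM900", {"gain": 40})
kal_out = subprocess.check_output(cmd, shell=True).decode()

for hit in fn.parse_kal_scan(kal_out):
    print(hit["channel"], hit["final_freq"], hit["power"])
```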
ashmastaflash/kal-wrapper | kalibrate/fn.py | parse_kal_channel | def parse_kal_channel(kal_out):
"""Parse kal channel scan output."""
scan_band, scan_channel, tgt_freq = determine_band_channel(kal_out)
kal_data = {"device": determine_device(kal_out),
"sample_rate": determine_sample_rate(kal_out),
"gain": determine_scan_gain(kal_out),
"band": scan_band,
"channel": scan_channel,
"frequency": tgt_freq,
"avg_absolute_error": determine_avg_absolute_error(kal_out),
"measurements" : get_measurements_from_kal_scan(kal_out),
"raw_scan_result": kal_out}
return kal_data | python | def parse_kal_channel(kal_out):
"""Parse kal channel scan output."""
scan_band, scan_channel, tgt_freq = determine_band_channel(kal_out)
kal_data = {"device": determine_device(kal_out),
"sample_rate": determine_sample_rate(kal_out),
"gain": determine_scan_gain(kal_out),
"band": scan_band,
"channel": scan_channel,
"frequency": tgt_freq,
"avg_absolute_error": determine_avg_absolute_error(kal_out),
"measurements" : get_measurements_from_kal_scan(kal_out),
"raw_scan_result": kal_out}
return kal_data | [
"def",
"parse_kal_channel",
"(",
"kal_out",
")",
":",
"scan_band",
",",
"scan_channel",
",",
"tgt_freq",
"=",
"determine_band_channel",
"(",
"kal_out",
")",
"kal_data",
"=",
"{",
"\"device\"",
":",
"determine_device",
"(",
"kal_out",
")",
",",
"\"sample_rate\"",
":",
"determine_sample_rate",
"(",
"kal_out",
")",
",",
"\"gain\"",
":",
"determine_scan_gain",
"(",
"kal_out",
")",
",",
"\"band\"",
":",
"scan_band",
",",
"\"channel\"",
":",
"scan_channel",
",",
"\"frequency\"",
":",
"tgt_freq",
",",
"\"avg_absolute_error\"",
":",
"determine_avg_absolute_error",
"(",
"kal_out",
")",
",",
"\"measurements\"",
":",
"get_measurements_from_kal_scan",
"(",
"kal_out",
")",
",",
"\"raw_scan_result\"",
":",
"kal_out",
"}",
"return",
"kal_data"
] | Parse kal channel scan output. | [
"Parse",
"kal",
"channel",
"scan",
"output",
"."
] | 80ee03ab7bd3172ac26b769d6b442960f3424b0e | https://github.com/ashmastaflash/kal-wrapper/blob/80ee03ab7bd3172ac26b769d6b442960f3424b0e/kalibrate/fn.py#L192-L204 | train |
ashmastaflash/kal-wrapper | kalibrate/fn.py | get_measurements_from_kal_scan | def get_measurements_from_kal_scan(kal_out):
"""Return a list of all measurements from kalibrate channel scan."""
result = []
for line in kal_out.splitlines():
if "offset " in line:
p_line = line.split(' ')
result.append(p_line[-1])
return result | python | def get_measurements_from_kal_scan(kal_out):
"""Return a list of all measurements from kalibrate channel scan."""
result = []
for line in kal_out.splitlines():
if "offset " in line:
p_line = line.split(' ')
result.append(p_line[-1])
return result | [
"def",
"get_measurements_from_kal_scan",
"(",
"kal_out",
")",
":",
"result",
"=",
"[",
"]",
"for",
"line",
"in",
"kal_out",
".",
"splitlines",
"(",
")",
":",
"if",
"\"offset \"",
"in",
"line",
":",
"p_line",
"=",
"line",
".",
"split",
"(",
"' '",
")",
"result",
".",
"append",
"(",
"p_line",
"[",
"-",
"1",
"]",
")",
"return",
"result"
] | Return a list of all measurements from kalibrate channel scan. | [
"Return",
"a",
"list",
"of",
"all",
"measurements",
"from",
"kalibrate",
"channel",
"scan",
"."
] | 80ee03ab7bd3172ac26b769d6b442960f3424b0e | https://github.com/ashmastaflash/kal-wrapper/blob/80ee03ab7bd3172ac26b769d6b442960f3424b0e/kalibrate/fn.py#L206-L213 | train |
Loudr/pale | pale/fields/base.py | BaseField.render | def render(self, obj, name, context):
"""The default field renderer.
This basic renderer assumes that the object has an attribute with
the same name as the field, unless a different field is specified
as a `property_name`.
The renderer is also passed the context so that it can be
propagated to the `_render_serializable` method of nested
resources (or, for example, if you decide to implement attribute
hiding at the field level instead of at the object level).
Callable attributes of `obj` will be called to fetch value.
This is useful for fields computed from lambda functions
or instance methods.
"""
if self.value_lambda is not None:
val = self.value_lambda(obj)
else:
attr_name = name
if self.property_name is not None:
attr_name = self.property_name
if isinstance(obj, dict):
val = obj.get(attr_name, None)
else:
val = getattr(obj, attr_name, None)
if callable(val):
try:
val = val()
except:
logging.exception("Attempted to call `%s` on obj of type %s.",
attr_name, type(obj))
raise
return val | python | def render(self, obj, name, context):
"""The default field renderer.
This basic renderer assumes that the object has an attribute with
the same name as the field, unless a different field is specified
as a `property_name`.
The renderer is also passed the context so that it can be
propagated to the `_render_serializable` method of nested
resources (or, for example, if you decide to implement attribute
hiding at the field level instead of at the object level).
Callable attributes of `obj` will be called to fetch value.
This is useful for fields computed from lambda functions
or instance methods.
"""
if self.value_lambda is not None:
val = self.value_lambda(obj)
else:
attr_name = name
if self.property_name is not None:
attr_name = self.property_name
if isinstance(obj, dict):
val = obj.get(attr_name, None)
else:
val = getattr(obj, attr_name, None)
if callable(val):
try:
val = val()
except:
logging.exception("Attempted to call `%s` on obj of type %s.",
attr_name, type(obj))
raise
return val | [
"def",
"render",
"(",
"self",
",",
"obj",
",",
"name",
",",
"context",
")",
":",
"if",
"self",
".",
"value_lambda",
"is",
"not",
"None",
":",
"val",
"=",
"self",
".",
"value_lambda",
"(",
"obj",
")",
"else",
":",
"attr_name",
"=",
"name",
"if",
"self",
".",
"property_name",
"is",
"not",
"None",
":",
"attr_name",
"=",
"self",
".",
"property_name",
"if",
"isinstance",
"(",
"obj",
",",
"dict",
")",
":",
"val",
"=",
"obj",
".",
"get",
"(",
"attr_name",
",",
"None",
")",
"else",
":",
"val",
"=",
"getattr",
"(",
"obj",
",",
"attr_name",
",",
"None",
")",
"if",
"callable",
"(",
"val",
")",
":",
"try",
":",
"val",
"=",
"val",
"(",
")",
"except",
":",
"logging",
".",
"exception",
"(",
"\"Attempted to call `%s` on obj of type %s.\"",
",",
"attr_name",
",",
"type",
"(",
"obj",
")",
")",
"raise",
"return",
"val"
] | The default field renderer.
This basic renderer assumes that the object has an attribute with
the same name as the field, unless a different field is specified
as a `property_name`.
The renderer is also passed the context so that it can be
propagated to the `_render_serializable` method of nested
resources (or, for example, if you decide to implement attribute
hiding at the field level instead of at the object level).
Callable attributes of `obj` will be called to fetch value.
This is useful for fields computed from lambda functions
or instance methods. | [
"The",
"default",
"field",
"renderer",
"."
] | dc002ee6032c856551143af222ff8f71ed9853fe | https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/fields/base.py#L58-L93 | train |
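`BaseField.render` resolves a value in a fixed order: an explicit `value_lambda` wins, then the attribute named by `property_name`, then the attribute matching the field name; dict sources are read with `.get()` and callables are invoked. Because the field constructor is not part of this row, the stand-in below re-creates only that resolution logic instead of instantiating the real class:

```python
# Minimal stand-in mirroring the resolution rules of BaseField.render above;
# it is not the pale class itself.
class FieldSketch:
    def __init__(self, property_name=None, value_lambda=None):
        self.property_name = property_name
        self.value_lambda = value_lambda

    def render(self, obj, name, context=None):
        if self.value_lambda is not None:
            return self.value_lambda(obj)
        attr_name = self.property_name or name
        val = obj.get(attr_name) if isinstance(obj, dict) else getattr(obj, attr_name, None)
        return val() if callable(val) else val

user = {"display_name": "Ada", "full_name": lambda: "Ada Lovelace"}
FieldSketch().render(user, "display_name")                     # 'Ada'
FieldSketch(property_name="full_name").render(user, "name")    # 'Ada Lovelace' (callable invoked)
FieldSketch(value_lambda=lambda o: o["display_name"].upper()).render(user, "x")  # 'ADA'
```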
Loudr/pale | pale/fields/base.py | BaseField.doc_dict | def doc_dict(self):
"""Generate the documentation for this field."""
doc = {
'type': self.value_type,
'description': self.description,
'extended_description': self.details
}
return doc | python | def doc_dict(self):
"""Generate the documentation for this field."""
doc = {
'type': self.value_type,
'description': self.description,
'extended_description': self.details
}
return doc | [
"def",
"doc_dict",
"(",
"self",
")",
":",
"doc",
"=",
"{",
"'type'",
":",
"self",
".",
"value_type",
",",
"'description'",
":",
"self",
".",
"description",
",",
"'extended_description'",
":",
"self",
".",
"details",
"}",
"return",
"doc"
] | Generate the documentation for this field. | [
"Generate",
"the",
"documentation",
"for",
"this",
"field",
"."
] | dc002ee6032c856551143af222ff8f71ed9853fe | https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/fields/base.py#L96-L103 | train |
a1ezzz/wasp-general | wasp_general/capability.py | WCapabilitiesHolder.capability | def capability(self, cap_name):
""" Return capability by its name
:param cap_name: name of a capability to return
:return: bounded method or None (if a capability is not found)
"""
if cap_name in self.__class_capabilities__:
function_name = self.__class_capabilities__[cap_name]
return getattr(self, function_name) | python | def capability(self, cap_name):
""" Return capability by its name
:param cap_name: name of a capability to return
:return: bounded method or None (if a capability is not found)
"""
if cap_name in self.__class_capabilities__:
function_name = self.__class_capabilities__[cap_name]
return getattr(self, function_name) | [
"def",
"capability",
"(",
"self",
",",
"cap_name",
")",
":",
"if",
"cap_name",
"in",
"self",
".",
"__class_capabilities__",
":",
"function_name",
"=",
"self",
".",
"__class_capabilities__",
"[",
"cap_name",
"]",
"return",
"getattr",
"(",
"self",
",",
"function_name",
")"
] | Return capability by its name
:param cap_name: name of a capability to return
:return: bounded method or None (if a capability is not found) | [
"Return",
"capability",
"by",
"its",
"name"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/capability.py#L100-L108 | train |
a1ezzz/wasp-general | wasp_general/capability.py | WCapabilitiesHolder.has_capabilities | def has_capabilities(self, *cap_names):
""" Check if class has all of the specified capabilities
:param cap_names: capabilities names to check
:return: bool
"""
for name in cap_names:
if name not in self.__class_capabilities__:
return False
return True | python | def has_capabilities(self, *cap_names):
""" Check if class has all of the specified capabilities
:param cap_names: capabilities names to check
:return: bool
"""
for name in cap_names:
if name not in self.__class_capabilities__:
return False
return True | [
"def",
"has_capabilities",
"(",
"self",
",",
"*",
"cap_names",
")",
":",
"for",
"name",
"in",
"cap_names",
":",
"if",
"name",
"not",
"in",
"self",
".",
"__class_capabilities__",
":",
"return",
"False",
"return",
"True"
] | Check if class has all of the specified capabilities
:param cap_names: capabilities names to check
:return: bool | [
"Check",
"if",
"class",
"has",
"all",
"of",
"the",
"specified",
"capabilities"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/capability.py#L111-L121 | train |
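Both `WCapabilitiesHolder` rows above resolve capabilities through a class-level `__class_capabilities__` dict mapping capability names to method names. How that dict is normally populated (a decorator or metaclass in the real package) is not shown in these rows, so the sketch below fills it in by hand purely to exercise `capability()` and `has_capabilities()`:

```python
from wasp_general.capability import WCapabilitiesHolder  # module path from the row

class Downloader(WCapabilitiesHolder):
    # Populated manually for illustration; the real package presumably
    # provides its own registration mechanism for this mapping.
    __class_capabilities__ = {"fetch": "fetch_over_http"}

    def fetch_over_http(self, url):
        return "GET %s" % url

d = Downloader()
d.has_capabilities("fetch")              # True
d.has_capabilities("fetch", "upload")    # False
fetch = d.capability("fetch")            # bound method (None if unknown name)
fetch("https://example.com")             # 'GET https://example.com'
```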
projectshift/shift-schema | shiftschema/result.py | Result.add_entity_errors | def add_entity_errors(
self,
property_name,
direct_errors=None,
schema_errors=None
):
"""
Attach nested entity errors
Accepts a list errors coming from validators attached directly,
or a dict of errors produced by a nested schema.
:param property_name: str, property name
:param direct_errors: list, errors from validators attached directly
:param schema_errors: dict, errors from nested schema
:return: shiftschema.result.Result
"""
if direct_errors is None and schema_errors is None:
return self
# direct errors
if direct_errors is not None:
if property_name not in self.errors:
self.errors[property_name] = dict()
if 'direct' not in self.errors[property_name]:
self.errors[property_name]['direct'] = []
if type(direct_errors) is not list:
direct_errors = [direct_errors]
for error in direct_errors:
if not isinstance(error, Error):
err = 'Error must be of type {}'
raise x.InvalidErrorType(err.format(Error))
self.errors[property_name]['direct'].append(error)
# schema errors
if schema_errors is not None:
if isinstance(schema_errors, Result):
schema_errors = schema_errors.errors
if not schema_errors:
return self
if property_name not in self.errors:
self.errors[property_name] = dict()
if 'schema' not in self.errors[property_name]:
self.errors[property_name]['schema'] = schema_errors
else:
self.errors[property_name]['schema'] = self.merge_errors(
self.errors[property_name]['schema'],
schema_errors
)
return self | python | def add_entity_errors(
self,
property_name,
direct_errors=None,
schema_errors=None
):
"""
Attach nested entity errors
Accepts a list errors coming from validators attached directly,
or a dict of errors produced by a nested schema.
:param property_name: str, property name
:param direct_errors: list, errors from validators attached directly
:param schema_errors: dict, errors from nested schema
:return: shiftschema.result.Result
"""
if direct_errors is None and schema_errors is None:
return self
# direct errors
if direct_errors is not None:
if property_name not in self.errors:
self.errors[property_name] = dict()
if 'direct' not in self.errors[property_name]:
self.errors[property_name]['direct'] = []
if type(direct_errors) is not list:
direct_errors = [direct_errors]
for error in direct_errors:
if not isinstance(error, Error):
err = 'Error must be of type {}'
raise x.InvalidErrorType(err.format(Error))
self.errors[property_name]['direct'].append(error)
# schema errors
if schema_errors is not None:
if isinstance(schema_errors, Result):
schema_errors = schema_errors.errors
if not schema_errors:
return self
if property_name not in self.errors:
self.errors[property_name] = dict()
if 'schema' not in self.errors[property_name]:
self.errors[property_name]['schema'] = schema_errors
else:
self.errors[property_name]['schema'] = self.merge_errors(
self.errors[property_name]['schema'],
schema_errors
)
return self | [
"def",
"add_entity_errors",
"(",
"self",
",",
"property_name",
",",
"direct_errors",
"=",
"None",
",",
"schema_errors",
"=",
"None",
")",
":",
"if",
"direct_errors",
"is",
"None",
"and",
"schema_errors",
"is",
"None",
":",
"return",
"self",
"# direct errors",
"if",
"direct_errors",
"is",
"not",
"None",
":",
"if",
"property_name",
"not",
"in",
"self",
".",
"errors",
":",
"self",
".",
"errors",
"[",
"property_name",
"]",
"=",
"dict",
"(",
")",
"if",
"'direct'",
"not",
"in",
"self",
".",
"errors",
"[",
"property_name",
"]",
":",
"self",
".",
"errors",
"[",
"property_name",
"]",
"[",
"'direct'",
"]",
"=",
"[",
"]",
"if",
"type",
"(",
"direct_errors",
")",
"is",
"not",
"list",
":",
"direct_errors",
"=",
"[",
"direct_errors",
"]",
"for",
"error",
"in",
"direct_errors",
":",
"if",
"not",
"isinstance",
"(",
"error",
",",
"Error",
")",
":",
"err",
"=",
"'Error must be of type {}'",
"raise",
"x",
".",
"InvalidErrorType",
"(",
"err",
".",
"format",
"(",
"Error",
")",
")",
"self",
".",
"errors",
"[",
"property_name",
"]",
"[",
"'direct'",
"]",
".",
"append",
"(",
"error",
")",
"# schema errors",
"if",
"schema_errors",
"is",
"not",
"None",
":",
"if",
"isinstance",
"(",
"schema_errors",
",",
"Result",
")",
":",
"schema_errors",
"=",
"schema_errors",
".",
"errors",
"if",
"not",
"schema_errors",
":",
"return",
"self",
"if",
"property_name",
"not",
"in",
"self",
".",
"errors",
":",
"self",
".",
"errors",
"[",
"property_name",
"]",
"=",
"dict",
"(",
")",
"if",
"'schema'",
"not",
"in",
"self",
".",
"errors",
"[",
"property_name",
"]",
":",
"self",
".",
"errors",
"[",
"property_name",
"]",
"[",
"'schema'",
"]",
"=",
"schema_errors",
"else",
":",
"self",
".",
"errors",
"[",
"property_name",
"]",
"[",
"'schema'",
"]",
"=",
"self",
".",
"merge_errors",
"(",
"self",
".",
"errors",
"[",
"property_name",
"]",
"[",
"'schema'",
"]",
",",
"schema_errors",
")",
"return",
"self"
] | Attach nested entity errors
Accepts a list of errors coming from validators attached directly,
or a dict of errors produced by a nested schema.
:param property_name: str, property name
:param direct_errors: list, errors from validators attached directly
:param schema_errors: dict, errors from nested schema
:return: shiftschema.result.Result | [
"Attach",
"nested",
"entity",
"errors",
"Accepts",
"a",
"list",
"errors",
"coming",
"from",
"validators",
"attached",
"directly",
"or",
"a",
"dict",
"of",
"errors",
"produced",
"by",
"a",
"nested",
"schema",
"."
] | 07787b540d3369bb37217ffbfbe629118edaf0eb | https://github.com/projectshift/shift-schema/blob/07787b540d3369bb37217ffbfbe629118edaf0eb/shiftschema/result.py#L106-L161 | train |
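A hypothetical usage sketch for add_entity_errors; it assumes Result and Error are importable from shiftschema.result and that Error accepts a message string, and the property names are examples only:

from shiftschema.result import Result, Error

result = Result()
result.add_entity_errors(
    'author',
    direct_errors=[Error('author_invalid')],
    schema_errors={'email': [Error('email_invalid')]},
)
# result.errors['author'] now holds {'direct': [...], 'schema': {'email': [...]}}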
projectshift/shift-schema | shiftschema/result.py | Result.add_collection_errors | def add_collection_errors(
self,
property_name,
direct_errors=None,
collection_errors=None
):
"""
Add collection errors
Accepts a list of errors coming from validators attached directly,
or a list of schema results for each item in the collection.
:param property_name: str, property name
:param direct_errors: list, errors from validators attached directly
:param collection_errors: list of results for collection members
:return: shiftschema.result.Result
"""
if direct_errors is None and collection_errors is None:
return self
# direct errors
if direct_errors is not None:
if type(direct_errors) is not list:
direct_errors = [direct_errors]
if property_name not in self.errors:
self.errors[property_name] = dict()
if 'direct' not in self.errors[property_name]:
self.errors[property_name]['direct'] = []
for error in direct_errors:
if not isinstance(error, Error):
err = 'Error must be of type {}'
raise x.InvalidErrorType(err.format(Error))
self.errors[property_name]['direct'].append(error)
# collection errors
if collection_errors:
enum = enumerate(collection_errors)
errors_dict = {i: e for i, e in enum if not bool(e)}
if not errors_dict:
return self
if property_name not in self.errors:
self.errors[property_name] = dict()
if 'collection' not in self.errors[property_name]:
self.errors[property_name]['collection'] = errors_dict
else:
local = self.errors[property_name]['collection']
remote = errors_dict
for index, result in remote.items():
if index not in local:
self.errors[property_name]['collection'][index] = result
else:
merged = self.merge_errors(
local[index].errors,
remote[index].errors
)
self.errors[property_name]['collection'][index] = merged
return self | python | def add_collection_errors(
self,
property_name,
direct_errors=None,
collection_errors=None
):
"""
Add collection errors
Accepts a list of errors coming from validators attached directly,
or a list of schema results for each item in the collection.
:param property_name: str, property name
:param direct_errors: list, errors from validators attached directly
:param collection_errors: list of results for collection members
:return: shiftschema.result.Result
"""
if direct_errors is None and collection_errors is None:
return self
# direct errors
if direct_errors is not None:
if type(direct_errors) is not list:
direct_errors = [direct_errors]
if property_name not in self.errors:
self.errors[property_name] = dict()
if 'direct' not in self.errors[property_name]:
self.errors[property_name]['direct'] = []
for error in direct_errors:
if not isinstance(error, Error):
err = 'Error must be of type {}'
raise x.InvalidErrorType(err.format(Error))
self.errors[property_name]['direct'].append(error)
# collection errors
if collection_errors:
enum = enumerate(collection_errors)
errors_dict = {i: e for i, e in enum if not bool(e)}
if not errors_dict:
return self
if property_name not in self.errors:
self.errors[property_name] = dict()
if 'collection' not in self.errors[property_name]:
self.errors[property_name]['collection'] = errors_dict
else:
local = self.errors[property_name]['collection']
remote = errors_dict
for index, result in remote.items():
if index not in local:
self.errors[property_name]['collection'][index] = result
else:
merged = self.merge_errors(
local[index].errors,
remote[index].errors
)
self.errors[property_name]['collection'][index] = merged
return self | [
"def",
"add_collection_errors",
"(",
"self",
",",
"property_name",
",",
"direct_errors",
"=",
"None",
",",
"collection_errors",
"=",
"None",
")",
":",
"if",
"direct_errors",
"is",
"None",
"and",
"collection_errors",
"is",
"None",
":",
"return",
"self",
"# direct errors",
"if",
"direct_errors",
"is",
"not",
"None",
":",
"if",
"type",
"(",
"direct_errors",
")",
"is",
"not",
"list",
":",
"direct_errors",
"=",
"[",
"direct_errors",
"]",
"if",
"property_name",
"not",
"in",
"self",
".",
"errors",
":",
"self",
".",
"errors",
"[",
"property_name",
"]",
"=",
"dict",
"(",
")",
"if",
"'direct'",
"not",
"in",
"self",
".",
"errors",
"[",
"property_name",
"]",
":",
"self",
".",
"errors",
"[",
"property_name",
"]",
"[",
"'direct'",
"]",
"=",
"[",
"]",
"for",
"error",
"in",
"direct_errors",
":",
"if",
"not",
"isinstance",
"(",
"error",
",",
"Error",
")",
":",
"err",
"=",
"'Error must be of type {}'",
"raise",
"x",
".",
"InvalidErrorType",
"(",
"err",
".",
"format",
"(",
"Error",
")",
")",
"self",
".",
"errors",
"[",
"property_name",
"]",
"[",
"'direct'",
"]",
".",
"append",
"(",
"error",
")",
"# collection errors",
"if",
"collection_errors",
":",
"enum",
"=",
"enumerate",
"(",
"collection_errors",
")",
"errors_dict",
"=",
"{",
"i",
":",
"e",
"for",
"i",
",",
"e",
"in",
"enum",
"if",
"not",
"bool",
"(",
"e",
")",
"}",
"if",
"not",
"errors_dict",
":",
"return",
"self",
"if",
"property_name",
"not",
"in",
"self",
".",
"errors",
":",
"self",
".",
"errors",
"[",
"property_name",
"]",
"=",
"dict",
"(",
")",
"if",
"'collection'",
"not",
"in",
"self",
".",
"errors",
"[",
"property_name",
"]",
":",
"self",
".",
"errors",
"[",
"property_name",
"]",
"[",
"'collection'",
"]",
"=",
"errors_dict",
"else",
":",
"local",
"=",
"self",
".",
"errors",
"[",
"property_name",
"]",
"[",
"'collection'",
"]",
"remote",
"=",
"errors_dict",
"for",
"index",
",",
"result",
"in",
"remote",
".",
"items",
"(",
")",
":",
"if",
"index",
"not",
"in",
"local",
":",
"self",
".",
"errors",
"[",
"property_name",
"]",
"[",
"'collection'",
"]",
"[",
"index",
"]",
"=",
"result",
"else",
":",
"merged",
"=",
"self",
".",
"merge_errors",
"(",
"local",
"[",
"index",
"]",
".",
"errors",
",",
"remote",
"[",
"index",
"]",
".",
"errors",
")",
"self",
".",
"errors",
"[",
"property_name",
"]",
"[",
"'collection'",
"]",
"[",
"index",
"]",
"=",
"merged",
"return",
"self"
] | Add collection errors
Accepts a list of errors coming from validators attached directly,
or a list of schema results for each item in the collection.
:param property_name: str, property name
:param direct_errors: list, errors from validators attached directly
:param collection_errors: list of results for collection members
:return: shiftschema.result.Result | [
"Add",
"collection",
"errors",
"Accepts",
"a",
"list",
"errors",
"coming",
"from",
"validators",
"attached",
"directly",
"or",
"a",
"list",
"of",
"schema",
"results",
"for",
"each",
"item",
"in",
"the",
"collection",
"."
] | 07787b540d3369bb37217ffbfbe629118edaf0eb | https://github.com/projectshift/shift-schema/blob/07787b540d3369bb37217ffbfbe629118edaf0eb/shiftschema/result.py#L163-L220 | train |
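For add_collection_errors, collection_errors is expected to be a list of per-item validation results; only failed items are kept, keyed by their index. A hypothetical sketch (schema, model and the property name below are examples, not part of the record above):

item_results = [schema.validate(item) for item in model.comments]
result.add_collection_errors('comments', collection_errors=item_results)
# result.errors['comments']['collection'] maps the index of each failed item
# to its Result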
projectshift/shift-schema | shiftschema/result.py | Result.merge_errors | def merge_errors(self, errors_local, errors_remote):
"""
Merge errors
Recursively traverses error graph to merge remote errors into local
errors to return a new joined graph.
:param errors_local: dict, local errors, will be updated
:param errors_remote: dict, remote errors, provides updates
:return: dict
"""
for prop in errors_remote:
# create if doesn't exist
if prop not in errors_local:
errors_local[prop] = errors_remote[prop]
continue
local = errors_local[prop]
local = local.errors if isinstance(local, Result) else local
remote = errors_remote[prop]
remote = remote.errors if isinstance(remote, Result) else remote
# check compatibility
if not isinstance(local, type(remote)):
msg = 'Type mismatch on property [{}] when merging errors. '
msg += 'Unable to merge [{}] into [{}]'
raise x.UnableToMergeResultsType(msg.format(
prop,
type(errors_remote[prop]),
type(self.errors[prop])
))
mismatch = 'Unable to merge nested entity errors with nested '
mismatch += 'collection errors on property [{}]'
if 'schema' in local and 'collection' in remote:
raise x.UnableToMergeResultsType(mismatch.format(prop))
if 'collection' in local and 'schema' in remote:
raise x.UnableToMergeResultsType(mismatch.format(prop))
# merge simple & state
if type(remote) is list:
errors_local[prop].extend(remote)
continue
# merge direct errors on nested entities and collection
if 'direct' in remote and 'direct' in local:
errors_local[prop]['direct'].extend(remote['direct'])
# merge nested schema errors
if 'schema' in remote and 'schema' in local:
errors_local[prop]['schema'] = self.merge_errors(
errors_local[prop]['schema'],
remote['schema']
)
# merge nested collections errors
if 'collection' in remote and 'collection' in local:
for index, result in remote['collection'].items():
if index not in local['collection']:
errors_local[prop]['collection'][index] = result
else:
merged = self.merge_errors(
errors_local[prop]['collection'][index].errors,
errors_remote[prop]['collection'][index].errors,
)
errors_local[prop]['collection'][index] = merged
# and return
return errors_local | python | def merge_errors(self, errors_local, errors_remote):
"""
Merge errors
Recursively traverses error graph to merge remote errors into local
errors to return a new joined graph.
:param errors_local: dict, local errors, will be updated
:param errors_remote: dict, remote errors, provides updates
:return: dict
"""
for prop in errors_remote:
# create if doesn't exist
if prop not in errors_local:
errors_local[prop] = errors_remote[prop]
continue
local = errors_local[prop]
local = local.errors if isinstance(local, Result) else local
remote = errors_remote[prop]
remote = remote.errors if isinstance(remote, Result) else remote
# check compatibility
if not isinstance(local, type(remote)):
msg = 'Type mismatch on property [{}] when merging errors. '
msg += 'Unable to merge [{}] into [{}]'
raise x.UnableToMergeResultsType(msg.format(
prop,
type(errors_remote[prop]),
type(self.errors[prop])
))
mismatch = 'Unable to merge nested entity errors with nested '
mismatch += 'collection errors on property [{}]'
if 'schema' in local and 'collection' in remote:
raise x.UnableToMergeResultsType(mismatch.format(prop))
if 'collection' in local and 'schema' in remote:
raise x.UnableToMergeResultsType(mismatch.format(prop))
# merge simple & state
if type(remote) is list:
errors_local[prop].extend(remote)
continue
# merge direct errors on nested entities and collection
if 'direct' in remote and 'direct' in local:
errors_local[prop]['direct'].extend(remote['direct'])
# merge nested schema errors
if 'schema' in remote and 'schema' in local:
errors_local[prop]['schema'] = self.merge_errors(
errors_local[prop]['schema'],
remote['schema']
)
# merge nested collections errors
if 'collection' in remote and 'collection' in local:
for index, result in remote['collection'].items():
if index not in local['collection']:
errors_local[prop]['collection'][index] = result
else:
merged = self.merge_errors(
errors_local[prop]['collection'][index].errors,
errors_remote[prop]['collection'][index].errors,
)
errors_local[prop]['collection'][index] = merged
# and return
return errors_local | [
"def",
"merge_errors",
"(",
"self",
",",
"errors_local",
",",
"errors_remote",
")",
":",
"for",
"prop",
"in",
"errors_remote",
":",
"# create if doesn't exist",
"if",
"prop",
"not",
"in",
"errors_local",
":",
"errors_local",
"[",
"prop",
"]",
"=",
"errors_remote",
"[",
"prop",
"]",
"continue",
"local",
"=",
"errors_local",
"[",
"prop",
"]",
"local",
"=",
"local",
".",
"errors",
"if",
"isinstance",
"(",
"local",
",",
"Result",
")",
"else",
"local",
"remote",
"=",
"errors_remote",
"[",
"prop",
"]",
"remote",
"=",
"remote",
".",
"errors",
"if",
"isinstance",
"(",
"remote",
",",
"Result",
")",
"else",
"remote",
"# check compatibility",
"if",
"not",
"isinstance",
"(",
"local",
",",
"type",
"(",
"remote",
")",
")",
":",
"msg",
"=",
"'Type mismatch on property [{}] when merging errors. '",
"msg",
"+=",
"'Unable to merge [{}] into [{}]'",
"raise",
"x",
".",
"UnableToMergeResultsType",
"(",
"msg",
".",
"format",
"(",
"prop",
",",
"type",
"(",
"errors_remote",
"[",
"prop",
"]",
")",
",",
"type",
"(",
"self",
".",
"errors",
"[",
"prop",
"]",
")",
")",
")",
"mismatch",
"=",
"'Unable to merge nested entity errors with nested '",
"mismatch",
"+=",
"'collection errors on property [{}]'",
"if",
"'schema'",
"in",
"local",
"and",
"'collection'",
"in",
"remote",
":",
"raise",
"x",
".",
"UnableToMergeResultsType",
"(",
"mismatch",
".",
"format",
"(",
"prop",
")",
")",
"if",
"'collection'",
"in",
"local",
"and",
"'schema'",
"in",
"remote",
":",
"raise",
"x",
".",
"UnableToMergeResultsType",
"(",
"mismatch",
".",
"format",
"(",
"prop",
")",
")",
"# merge simple & state",
"if",
"type",
"(",
"remote",
")",
"is",
"list",
":",
"errors_local",
"[",
"prop",
"]",
".",
"extend",
"(",
"remote",
")",
"continue",
"# merge direct errors on nested entities and collection",
"if",
"'direct'",
"in",
"remote",
"and",
"'direct'",
"in",
"local",
":",
"errors_local",
"[",
"prop",
"]",
"[",
"'direct'",
"]",
".",
"extend",
"(",
"remote",
"[",
"'direct'",
"]",
")",
"# merge nested schema errors",
"if",
"'schema'",
"in",
"remote",
"and",
"'schema'",
"in",
"local",
":",
"errors_local",
"[",
"prop",
"]",
"[",
"'schema'",
"]",
"=",
"self",
".",
"merge_errors",
"(",
"errors_local",
"[",
"prop",
"]",
"[",
"'schema'",
"]",
",",
"remote",
"[",
"'schema'",
"]",
")",
"# merge nested collections errors",
"if",
"'collection'",
"in",
"remote",
"and",
"'collection'",
"in",
"local",
":",
"for",
"index",
",",
"result",
"in",
"remote",
"[",
"'collection'",
"]",
".",
"items",
"(",
")",
":",
"if",
"index",
"not",
"in",
"local",
"[",
"'collection'",
"]",
":",
"errors_local",
"[",
"prop",
"]",
"[",
"'collection'",
"]",
"[",
"index",
"]",
"=",
"result",
"else",
":",
"merged",
"=",
"self",
".",
"merge_errors",
"(",
"errors_local",
"[",
"prop",
"]",
"[",
"'collection'",
"]",
"[",
"index",
"]",
".",
"errors",
",",
"errors_remote",
"[",
"prop",
"]",
"[",
"'collection'",
"]",
"[",
"index",
"]",
".",
"errors",
",",
")",
"errors_local",
"[",
"prop",
"]",
"[",
"'collection'",
"]",
"[",
"index",
"]",
"=",
"merged",
"# and return",
"return",
"errors_local"
] | Merge errors
Recursively traverses error graph to merge remote errors into local
errors to return a new joined graph.
:param errors_local: dict, local errors, will be updated
:param errors_remote: dict, remote errors, provides updates
:return: dict | [
"Merge",
"errors",
"Recursively",
"traverses",
"error",
"graph",
"to",
"merge",
"remote",
"errors",
"into",
"local",
"errors",
"to",
"return",
"a",
"new",
"joined",
"graph",
"."
] | 07787b540d3369bb37217ffbfbe629118edaf0eb | https://github.com/projectshift/shift-schema/blob/07787b540d3369bb37217ffbfbe629118edaf0eb/shiftschema/result.py#L222-L290 | train |
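For plain property errors (lists), merge_errors simply concatenates the two sides. A small illustration, assuming result is an existing Result instance and using bare strings in place of Error objects:

local = {'title': ['too_short']}
remote = {'title': ['not_unique'], 'body': ['required']}
merged = result.merge_errors(local, remote)
# merged == {'title': ['too_short', 'not_unique'], 'body': ['required']}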
projectshift/shift-schema | shiftschema/result.py | Result.merge | def merge(self, another):
""" Merges another validation result graph into itself"""
if isinstance(another, Result):
another = another.errors
self.errors = self.merge_errors(self.errors, another) | python | def merge(self, another):
""" Merges another validation result graph into itself"""
if isinstance(another, Result):
another = another.errors
self.errors = self.merge_errors(self.errors, another) | [
"def",
"merge",
"(",
"self",
",",
"another",
")",
":",
"if",
"isinstance",
"(",
"another",
",",
"Result",
")",
":",
"another",
"=",
"another",
".",
"errors",
"self",
".",
"errors",
"=",
"self",
".",
"merge_errors",
"(",
"self",
".",
"errors",
",",
"another",
")"
] | Merges another validation result graph into itself | [
"Merges",
"another",
"validation",
"result",
"graph",
"into",
"itself"
] | 07787b540d3369bb37217ffbfbe629118edaf0eb | https://github.com/projectshift/shift-schema/blob/07787b540d3369bb37217ffbfbe629118edaf0eb/shiftschema/result.py#L292-L296 | train |
projectshift/shift-schema | shiftschema/result.py | Result.get_messages | def get_messages(self, locale=None):
""" Get a dictionary of translated messages """
if locale is None:
locale = self.locale
if self.translator:
def translate(error):
return self.translator.translate(error, locale)
else:
def translate(error):
return error
errors = deepcopy(self.errors)
errors = self._translate_errors(errors, translate)
return errors | python | def get_messages(self, locale=None):
""" Get a dictionary of translated messages """
if locale is None:
locale = self.locale
if self.translator:
def translate(error):
return self.translator.translate(error, locale)
else:
def translate(error):
return error
errors = deepcopy(self.errors)
errors = self._translate_errors(errors, translate)
return errors | [
"def",
"get_messages",
"(",
"self",
",",
"locale",
"=",
"None",
")",
":",
"if",
"locale",
"is",
"None",
":",
"locale",
"=",
"self",
".",
"locale",
"if",
"self",
".",
"translator",
":",
"def",
"translate",
"(",
"error",
")",
":",
"return",
"self",
".",
"translator",
".",
"translate",
"(",
"error",
",",
"locale",
")",
"else",
":",
"def",
"translate",
"(",
"error",
")",
":",
"return",
"error",
"errors",
"=",
"deepcopy",
"(",
"self",
".",
"errors",
")",
"errors",
"=",
"self",
".",
"_translate_errors",
"(",
"errors",
",",
"translate",
")",
"return",
"errors"
] | Get a dictionary of translated messages | [
"Get",
"a",
"dictionary",
"of",
"translated",
"messages"
] | 07787b540d3369bb37217ffbfbe629118edaf0eb | https://github.com/projectshift/shift-schema/blob/07787b540d3369bb37217ffbfbe629118edaf0eb/shiftschema/result.py#L298-L312 | train |
projectshift/shift-schema | shiftschema/result.py | Result._translate_errors | def _translate_errors(self, errors, translate):
""" Recursively apply translate callback to each error message"""
for prop in errors:
prop_errors = errors[prop]
# state and simple
if type(prop_errors) is list:
for index, error in enumerate(prop_errors):
message = translate(error.message)
message = self.format_error(message, error.kwargs)
errors[prop][index] = message
# entity and collection direct
if type(prop_errors) is dict and 'direct' in prop_errors:
for index, error in enumerate(prop_errors['direct']):
message = translate(error.message)
message = self.format_error(message, error.kwargs)
errors[prop]['direct'][index] = message
# entity schema
if type(prop_errors) is dict and 'schema' in prop_errors:
errors[prop]['schema'] = self._translate_errors(
prop_errors['schema'],
translate
)
# collection schema
if type(prop_errors) is dict and 'collection' in prop_errors:
translated = dict()
for index, result in prop_errors['collection'].items():
translated[index] = self._translate_errors(
result.errors,
translate
)
errors[prop]['collection'] = translated
return errors | python | def _translate_errors(self, errors, translate):
""" Recursively apply translate callback to each error message"""
for prop in errors:
prop_errors = errors[prop]
# state and simple
if type(prop_errors) is list:
for index, error in enumerate(prop_errors):
message = translate(error.message)
message = self.format_error(message, error.kwargs)
errors[prop][index] = message
# entity and collection direct
if type(prop_errors) is dict and 'direct' in prop_errors:
for index, error in enumerate(prop_errors['direct']):
message = translate(error.message)
message = self.format_error(message, error.kwargs)
errors[prop]['direct'][index] = message
# entity schema
if type(prop_errors) is dict and 'schema' in prop_errors:
errors[prop]['schema'] = self._translate_errors(
prop_errors['schema'],
translate
)
# collection schema
if type(prop_errors) is dict and 'collection' in prop_errors:
translated = dict()
for index, result in prop_errors['collection'].items():
translated[index] = self._translate_errors(
result.errors,
translate
)
errors[prop]['collection'] = translated
return errors | [
"def",
"_translate_errors",
"(",
"self",
",",
"errors",
",",
"translate",
")",
":",
"for",
"prop",
"in",
"errors",
":",
"prop_errors",
"=",
"errors",
"[",
"prop",
"]",
"# state and simple",
"if",
"type",
"(",
"prop_errors",
")",
"is",
"list",
":",
"for",
"index",
",",
"error",
"in",
"enumerate",
"(",
"prop_errors",
")",
":",
"message",
"=",
"translate",
"(",
"error",
".",
"message",
")",
"message",
"=",
"self",
".",
"format_error",
"(",
"message",
",",
"error",
".",
"kwargs",
")",
"errors",
"[",
"prop",
"]",
"[",
"index",
"]",
"=",
"message",
"# entity and collection direct",
"if",
"type",
"(",
"prop_errors",
")",
"is",
"dict",
"and",
"'direct'",
"in",
"prop_errors",
":",
"for",
"index",
",",
"error",
"in",
"enumerate",
"(",
"prop_errors",
"[",
"'direct'",
"]",
")",
":",
"message",
"=",
"translate",
"(",
"error",
".",
"message",
")",
"message",
"=",
"self",
".",
"format_error",
"(",
"message",
",",
"error",
".",
"kwargs",
")",
"errors",
"[",
"prop",
"]",
"[",
"'direct'",
"]",
"[",
"index",
"]",
"=",
"message",
"# entity schema",
"if",
"type",
"(",
"prop_errors",
")",
"is",
"dict",
"and",
"'schema'",
"in",
"prop_errors",
":",
"errors",
"[",
"prop",
"]",
"[",
"'schema'",
"]",
"=",
"self",
".",
"_translate_errors",
"(",
"prop_errors",
"[",
"'schema'",
"]",
",",
"translate",
")",
"# collection schema",
"if",
"type",
"(",
"prop_errors",
")",
"is",
"dict",
"and",
"'collection'",
"in",
"prop_errors",
":",
"translated",
"=",
"dict",
"(",
")",
"for",
"index",
",",
"result",
"in",
"prop_errors",
"[",
"'collection'",
"]",
".",
"items",
"(",
")",
":",
"translated",
"[",
"index",
"]",
"=",
"self",
".",
"_translate_errors",
"(",
"result",
".",
"errors",
",",
"translate",
")",
"errors",
"[",
"prop",
"]",
"[",
"'collection'",
"]",
"=",
"translated",
"return",
"errors"
] | Recursively apply translate callback to each error message | [
"Recursively",
"apply",
"translate",
"callback",
"to",
"each",
"error",
"message"
] | 07787b540d3369bb37217ffbfbe629118edaf0eb | https://github.com/projectshift/shift-schema/blob/07787b540d3369bb37217ffbfbe629118edaf0eb/shiftschema/result.py#L314-L350 | train |
sublee/etc | etc/adapters/etcd.py | EtcdAdapter.make_url | def make_url(self, path, api_root=u'/v2/'):
"""Gets a full URL from just path."""
return urljoin(urljoin(self.url, api_root), path) | python | def make_url(self, path, api_root=u'/v2/'):
"""Gets a full URL from just path."""
return urljoin(urljoin(self.url, api_root), path) | [
"def",
"make_url",
"(",
"self",
",",
"path",
",",
"api_root",
"=",
"u'/v2/'",
")",
":",
"return",
"urljoin",
"(",
"urljoin",
"(",
"self",
".",
"url",
",",
"api_root",
")",
",",
"path",
")"
] | Gets a full URL from just path. | [
"Gets",
"a",
"full",
"URL",
"from",
"just",
"path",
"."
] | f2be64604da5af0d7739cfacf36f55712f0fc5cb | https://github.com/sublee/etc/blob/f2be64604da5af0d7739cfacf36f55712f0fc5cb/etc/adapters/etcd.py#L40-L42 | train |
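Given the nested urljoin calls above, a base URL and a relative path combine as follows (the host and port are placeholders):

from urllib.parse import urljoin

url = 'http://127.0.0.1:2379'
print(urljoin(urljoin(url, '/v2/'), 'keys/foo'))
# -> http://127.0.0.1:2379/v2/keys/foo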
sublee/etc | etc/adapters/etcd.py | EtcdAdapter.make_key_url | def make_key_url(self, key):
"""Gets a URL for a key."""
if type(key) is bytes:
key = key.decode('utf-8')
buf = io.StringIO()
buf.write(u'keys')
if not key.startswith(u'/'):
buf.write(u'/')
buf.write(key)
return self.make_url(buf.getvalue()) | python | def make_key_url(self, key):
"""Gets a URL for a key."""
if type(key) is bytes:
key = key.decode('utf-8')
buf = io.StringIO()
buf.write(u'keys')
if not key.startswith(u'/'):
buf.write(u'/')
buf.write(key)
return self.make_url(buf.getvalue()) | [
"def",
"make_key_url",
"(",
"self",
",",
"key",
")",
":",
"if",
"type",
"(",
"key",
")",
"is",
"bytes",
":",
"key",
"=",
"key",
".",
"decode",
"(",
"'utf-8'",
")",
"buf",
"=",
"io",
".",
"StringIO",
"(",
")",
"buf",
".",
"write",
"(",
"u'keys'",
")",
"if",
"not",
"key",
".",
"startswith",
"(",
"u'/'",
")",
":",
"buf",
".",
"write",
"(",
"u'/'",
")",
"buf",
".",
"write",
"(",
"key",
")",
"return",
"self",
".",
"make_url",
"(",
"buf",
".",
"getvalue",
"(",
")",
")"
] | Gets a URL for a key. | [
"Gets",
"a",
"URL",
"for",
"a",
"key",
"."
] | f2be64604da5af0d7739cfacf36f55712f0fc5cb | https://github.com/sublee/etc/blob/f2be64604da5af0d7739cfacf36f55712f0fc5cb/etc/adapters/etcd.py#L44-L53 | train |
sublee/etc | etc/adapters/etcd.py | EtcdAdapter.get | def get(self, key, recursive=False, sorted=False, quorum=False,
wait=False, wait_index=None, timeout=None):
"""Requests to get a node by the given key."""
url = self.make_key_url(key)
params = self.build_args({
'recursive': (bool, recursive or None),
'sorted': (bool, sorted or None),
'quorum': (bool, quorum or None),
'wait': (bool, wait or None),
'waitIndex': (int, wait_index),
})
if timeout is None:
# Try again when :exc:`TimedOut` thrown.
while True:
try:
try:
res = self.session.get(url, params=params)
except:
self.erred()
except (TimedOut, ChunkedEncodingError):
continue
else:
break
else:
try:
res = self.session.get(url, params=params, timeout=timeout)
except ChunkedEncodingError:
raise TimedOut
except:
self.erred()
return self.wrap_response(res) | python | def get(self, key, recursive=False, sorted=False, quorum=False,
wait=False, wait_index=None, timeout=None):
"""Requests to get a node by the given key."""
url = self.make_key_url(key)
params = self.build_args({
'recursive': (bool, recursive or None),
'sorted': (bool, sorted or None),
'quorum': (bool, quorum or None),
'wait': (bool, wait or None),
'waitIndex': (int, wait_index),
})
if timeout is None:
# Try again when :exc:`TimedOut` thrown.
while True:
try:
try:
res = self.session.get(url, params=params)
except:
self.erred()
except (TimedOut, ChunkedEncodingError):
continue
else:
break
else:
try:
res = self.session.get(url, params=params, timeout=timeout)
except ChunkedEncodingError:
raise TimedOut
except:
self.erred()
return self.wrap_response(res) | [
"def",
"get",
"(",
"self",
",",
"key",
",",
"recursive",
"=",
"False",
",",
"sorted",
"=",
"False",
",",
"quorum",
"=",
"False",
",",
"wait",
"=",
"False",
",",
"wait_index",
"=",
"None",
",",
"timeout",
"=",
"None",
")",
":",
"url",
"=",
"self",
".",
"make_key_url",
"(",
"key",
")",
"params",
"=",
"self",
".",
"build_args",
"(",
"{",
"'recursive'",
":",
"(",
"bool",
",",
"recursive",
"or",
"None",
")",
",",
"'sorted'",
":",
"(",
"bool",
",",
"sorted",
"or",
"None",
")",
",",
"'quorum'",
":",
"(",
"bool",
",",
"quorum",
"or",
"None",
")",
",",
"'wait'",
":",
"(",
"bool",
",",
"wait",
"or",
"None",
")",
",",
"'waitIndex'",
":",
"(",
"int",
",",
"wait_index",
")",
",",
"}",
")",
"if",
"timeout",
"is",
"None",
":",
"# Try again when :exc:`TimedOut` thrown.",
"while",
"True",
":",
"try",
":",
"try",
":",
"res",
"=",
"self",
".",
"session",
".",
"get",
"(",
"url",
",",
"params",
"=",
"params",
")",
"except",
":",
"self",
".",
"erred",
"(",
")",
"except",
"(",
"TimedOut",
",",
"ChunkedEncodingError",
")",
":",
"continue",
"else",
":",
"break",
"else",
":",
"try",
":",
"res",
"=",
"self",
".",
"session",
".",
"get",
"(",
"url",
",",
"params",
"=",
"params",
",",
"timeout",
"=",
"timeout",
")",
"except",
"ChunkedEncodingError",
":",
"raise",
"TimedOut",
"except",
":",
"self",
".",
"erred",
"(",
")",
"return",
"self",
".",
"wrap_response",
"(",
"res",
")"
] | Requests to get a node by the given key. | [
"Requests",
"to",
"get",
"a",
"node",
"by",
"the",
"given",
"key",
"."
] | f2be64604da5af0d7739cfacf36f55712f0fc5cb | https://github.com/sublee/etc/blob/f2be64604da5af0d7739cfacf36f55712f0fc5cb/etc/adapters/etcd.py#L155-L185 | train |
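A hypothetical usage sketch for EtcdAdapter.get; the constructor arguments shown are an assumption, not taken from the code above:

adapter = EtcdAdapter(url='http://127.0.0.1:2379')
node = adapter.get('/config/feature-flag', quorum=True, timeout=5)
# Long-poll for the next change after a known index:
update = adapter.get('/config/feature-flag', wait=True, wait_index=42, timeout=30)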
sublee/etc | etc/adapters/etcd.py | EtcdAdapter.delete | def delete(self, key, dir=False, recursive=False,
prev_value=None, prev_index=None, timeout=None):
"""Requests to delete a node by the given key."""
url = self.make_key_url(key)
params = self.build_args({
'dir': (bool, dir or None),
'recursive': (bool, recursive or None),
'prevValue': (six.text_type, prev_value),
'prevIndex': (int, prev_index),
})
try:
res = self.session.delete(url, params=params, timeout=timeout)
except:
self.erred()
return self.wrap_response(res) | python | def delete(self, key, dir=False, recursive=False,
prev_value=None, prev_index=None, timeout=None):
"""Requests to delete a node by the given key."""
url = self.make_key_url(key)
params = self.build_args({
'dir': (bool, dir or None),
'recursive': (bool, recursive or None),
'prevValue': (six.text_type, prev_value),
'prevIndex': (int, prev_index),
})
try:
res = self.session.delete(url, params=params, timeout=timeout)
except:
self.erred()
return self.wrap_response(res) | [
"def",
"delete",
"(",
"self",
",",
"key",
",",
"dir",
"=",
"False",
",",
"recursive",
"=",
"False",
",",
"prev_value",
"=",
"None",
",",
"prev_index",
"=",
"None",
",",
"timeout",
"=",
"None",
")",
":",
"url",
"=",
"self",
".",
"make_key_url",
"(",
"key",
")",
"params",
"=",
"self",
".",
"build_args",
"(",
"{",
"'dir'",
":",
"(",
"bool",
",",
"dir",
"or",
"None",
")",
",",
"'recursive'",
":",
"(",
"bool",
",",
"recursive",
"or",
"None",
")",
",",
"'prevValue'",
":",
"(",
"six",
".",
"text_type",
",",
"prev_value",
")",
",",
"'prevIndex'",
":",
"(",
"int",
",",
"prev_index",
")",
",",
"}",
")",
"try",
":",
"res",
"=",
"self",
".",
"session",
".",
"delete",
"(",
"url",
",",
"params",
"=",
"params",
",",
"timeout",
"=",
"timeout",
")",
"except",
":",
"self",
".",
"erred",
"(",
")",
"return",
"self",
".",
"wrap_response",
"(",
"res",
")"
] | Requests to delete a node by the given key. | [
"Requests",
"to",
"delete",
"a",
"node",
"by",
"the",
"given",
"key",
"."
] | f2be64604da5af0d7739cfacf36f55712f0fc5cb | https://github.com/sublee/etc/blob/f2be64604da5af0d7739cfacf36f55712f0fc5cb/etc/adapters/etcd.py#L220-L234 | train |
AASHE/python-membersuite-api-client | membersuite_api_client/security/services.py | login_to_portal | def login_to_portal(username, password, client, retries=2, delay=0):
"""Log `username` into the MemberSuite Portal.
Returns a PortalUser object if successful, raises
LoginToPortalError if not.
Will retry logging in if a GeneralException occurs, up to `retries`.
Will pause `delay` seconds between retries.
"""
if not client.session_id:
client.request_session()
concierge_request_header = client.construct_concierge_header(
url=("http://membersuite.com/contracts/IConciergeAPIService/"
"LoginToPortal"))
attempts = 0
while attempts < retries:
if attempts:
time.sleep(delay)
result = client.client.service.LoginToPortal(
_soapheaders=[concierge_request_header],
portalUserName=username,
portalPassword=password)
login_to_portal_result = result["body"]["LoginToPortalResult"]
if login_to_portal_result["Success"]:
portal_user = login_to_portal_result["ResultValue"]["PortalUser"]
session_id = get_session_id(result=result)
return PortalUser(membersuite_object_data=portal_user,
session_id=session_id)
else:
attempts += 1
try:
error_code = login_to_portal_result[
"Errors"]["ConciergeError"][0]["Code"]
except IndexError: # Not a ConciergeError
continue
else:
if attempts < retries and error_code == "GeneralException":
continue
raise LoginToPortalError(result=result) | python | def login_to_portal(username, password, client, retries=2, delay=0):
"""Log `username` into the MemberSuite Portal.
Returns a PortalUser object if successful, raises
LoginToPortalError if not.
Will retry logging in if a GeneralException occurs, up to `retries`.
Will pause `delay` seconds between retries.
"""
if not client.session_id:
client.request_session()
concierge_request_header = client.construct_concierge_header(
url=("http://membersuite.com/contracts/IConciergeAPIService/"
"LoginToPortal"))
attempts = 0
while attempts < retries:
if attempts:
time.sleep(delay)
result = client.client.service.LoginToPortal(
_soapheaders=[concierge_request_header],
portalUserName=username,
portalPassword=password)
login_to_portal_result = result["body"]["LoginToPortalResult"]
if login_to_portal_result["Success"]:
portal_user = login_to_portal_result["ResultValue"]["PortalUser"]
session_id = get_session_id(result=result)
return PortalUser(membersuite_object_data=portal_user,
session_id=session_id)
else:
attempts += 1
try:
error_code = login_to_portal_result[
"Errors"]["ConciergeError"][0]["Code"]
except IndexError: # Not a ConciergeError
continue
else:
if attempts < retries and error_code == "GeneralException":
continue
raise LoginToPortalError(result=result) | [
"def",
"login_to_portal",
"(",
"username",
",",
"password",
",",
"client",
",",
"retries",
"=",
"2",
",",
"delay",
"=",
"0",
")",
":",
"if",
"not",
"client",
".",
"session_id",
":",
"client",
".",
"request_session",
"(",
")",
"concierge_request_header",
"=",
"client",
".",
"construct_concierge_header",
"(",
"url",
"=",
"(",
"\"http://membersuite.com/contracts/IConciergeAPIService/\"",
"\"LoginToPortal\"",
")",
")",
"attempts",
"=",
"0",
"while",
"attempts",
"<",
"retries",
":",
"if",
"attempts",
":",
"time",
".",
"sleep",
"(",
"delay",
")",
"result",
"=",
"client",
".",
"client",
".",
"service",
".",
"LoginToPortal",
"(",
"_soapheaders",
"=",
"[",
"concierge_request_header",
"]",
",",
"portalUserName",
"=",
"username",
",",
"portalPassword",
"=",
"password",
")",
"login_to_portal_result",
"=",
"result",
"[",
"\"body\"",
"]",
"[",
"\"LoginToPortalResult\"",
"]",
"if",
"login_to_portal_result",
"[",
"\"Success\"",
"]",
":",
"portal_user",
"=",
"login_to_portal_result",
"[",
"\"ResultValue\"",
"]",
"[",
"\"PortalUser\"",
"]",
"session_id",
"=",
"get_session_id",
"(",
"result",
"=",
"result",
")",
"return",
"PortalUser",
"(",
"membersuite_object_data",
"=",
"portal_user",
",",
"session_id",
"=",
"session_id",
")",
"else",
":",
"attempts",
"+=",
"1",
"try",
":",
"error_code",
"=",
"login_to_portal_result",
"[",
"\"Errors\"",
"]",
"[",
"\"ConciergeError\"",
"]",
"[",
"0",
"]",
"[",
"\"Code\"",
"]",
"except",
"IndexError",
":",
"# Not a ConciergeError",
"continue",
"else",
":",
"if",
"attempts",
"<",
"retries",
"and",
"error_code",
"==",
"\"GeneralException\"",
":",
"continue",
"raise",
"LoginToPortalError",
"(",
"result",
"=",
"result",
")"
] | Log `username` into the MemberSuite Portal.
Returns a PortalUser object if successful, raises
LoginToPortalError if not.
Will retry logging in if a GeneralException occurs, up to `retries`.
Will pause `delay` seconds between retries. | [
"Log",
"username",
"into",
"the",
"MemberSuite",
"Portal",
"."
] | 221f5ed8bc7d4424237a4669c5af9edc11819ee9 | https://github.com/AASHE/python-membersuite-api-client/blob/221f5ed8bc7d4424237a4669c5af9edc11819ee9/membersuite_api_client/security/services.py#L10-L56 | train |
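A hypothetical usage sketch for login_to_portal; client stands for any concierge API client object exposing request_session(), construct_concierge_header() and the underlying SOAP client attribute used above:

portal_user = login_to_portal(
    username='user@example.org',
    password='secret',
    client=client,
    retries=3,
    delay=1,
)
# Returns a PortalUser on success, raises LoginToPortalError otherwise.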
AASHE/python-membersuite-api-client | membersuite_api_client/security/services.py | logout | def logout(client):
"""Log out the currently logged-in user.
There's a really crappy side-effect here - the session_id
attribute of the `client` passed in will be reset to None if the
logout succeeds, which is going to be almost always, let's hope.
"""
if not client.session_id:
client.request_session()
concierge_request_header = client.construct_concierge_header(
url=("http://membersuite.com/contracts/IConciergeAPIService/"
"Logout"))
logout_result = client.client.service.Logout(
_soapheaders=[concierge_request_header])
result = logout_result["body"]["LogoutResult"]
if result["SessionID"] is None: # Success!
client.session_id = None
else: # Failure . . .
raise LogoutError(result=result) | python | def logout(client):
"""Log out the currently logged-in user.
There's a really crappy side-effect here - the session_id
attribute of the `client` passed in will be reset to None if the
logout succeeds, which is going to be almost always, let's hope.
"""
if not client.session_id:
client.request_session()
concierge_request_header = client.construct_concierge_header(
url=("http://membersuite.com/contracts/IConciergeAPIService/"
"Logout"))
logout_result = client.client.service.Logout(
_soapheaders=[concierge_request_header])
result = logout_result["body"]["LogoutResult"]
if result["SessionID"] is None: # Success!
client.session_id = None
else: # Failure . . .
raise LogoutError(result=result) | [
"def",
"logout",
"(",
"client",
")",
":",
"if",
"not",
"client",
".",
"session_id",
":",
"client",
".",
"request_session",
"(",
")",
"concierge_request_header",
"=",
"client",
".",
"construct_concierge_header",
"(",
"url",
"=",
"(",
"\"http://membersuite.com/contracts/IConciergeAPIService/\"",
"\"Logout\"",
")",
")",
"logout_result",
"=",
"client",
".",
"client",
".",
"service",
".",
"Logout",
"(",
"_soapheaders",
"=",
"[",
"concierge_request_header",
"]",
")",
"result",
"=",
"logout_result",
"[",
"\"body\"",
"]",
"[",
"\"LogoutResult\"",
"]",
"if",
"result",
"[",
"\"SessionID\"",
"]",
"is",
"None",
":",
"# Success!",
"client",
".",
"session_id",
"=",
"None",
"else",
":",
"# Failure . . .",
"raise",
"LogoutError",
"(",
"result",
"=",
"result",
")"
] | Log out the currently logged-in user.
There's a really crappy side-effect here - the session_id
attribute of the `client` passed in will be reset to None if the
logout succeeds, which is going to be almost always, let's hope. | [
"Log",
"out",
"the",
"currently",
"logged",
"-",
"in",
"user",
"."
] | 221f5ed8bc7d4424237a4669c5af9edc11819ee9 | https://github.com/AASHE/python-membersuite-api-client/blob/221f5ed8bc7d4424237a4669c5af9edc11819ee9/membersuite_api_client/security/services.py#L59-L82 | train |
AASHE/python-membersuite-api-client | membersuite_api_client/security/services.py | get_user_for_membersuite_entity | def get_user_for_membersuite_entity(membersuite_entity):
"""Returns a User for `membersuite_entity`.
membersuite_entity is any MemberSuite object that has the fields
membersuite_id, email_address, first_name, and last_name, e.g.,
PortalUser or Individual.
"""
user = None
user_created = False
# First, try to match on username.
user_username = generate_username(membersuite_entity)
try:
user = User.objects.get(username=user_username)
except User.DoesNotExist:
pass
# Next, try to match on email address.
if not user:
try:
user = User.objects.filter(
email=membersuite_entity.email_address)[0]
except IndexError:
pass
# No match? Create one.
if not user:
user = User.objects.create(
username=user_username,
email=membersuite_entity.email_address,
first_name=membersuite_entity.first_name,
last_name=membersuite_entity.last_name)
user_created = True
return user, user_created | python | def get_user_for_membersuite_entity(membersuite_entity):
"""Returns a User for `membersuite_entity`.
membersuite_entity is any MemberSuite object that has the fields
membersuite_id, email_address, first_name, and last_name, e.g.,
PortalUser or Individual.
"""
user = None
user_created = False
# First, try to match on username.
user_username = generate_username(membersuite_entity)
try:
user = User.objects.get(username=user_username)
except User.DoesNotExist:
pass
# Next, try to match on email address.
if not user:
try:
user = User.objects.filter(
email=membersuite_entity.email_address)[0]
except IndexError:
pass
# No match? Create one.
if not user:
user = User.objects.create(
username=user_username,
email=membersuite_entity.email_address,
first_name=membersuite_entity.first_name,
last_name=membersuite_entity.last_name)
user_created = True
return user, user_created | [
"def",
"get_user_for_membersuite_entity",
"(",
"membersuite_entity",
")",
":",
"user",
"=",
"None",
"user_created",
"=",
"False",
"# First, try to match on username.",
"user_username",
"=",
"generate_username",
"(",
"membersuite_entity",
")",
"try",
":",
"user",
"=",
"User",
".",
"objects",
".",
"get",
"(",
"username",
"=",
"user_username",
")",
"except",
"User",
".",
"DoesNotExist",
":",
"pass",
"# Next, try to match on email address.",
"if",
"not",
"user",
":",
"try",
":",
"user",
"=",
"User",
".",
"objects",
".",
"filter",
"(",
"email",
"=",
"membersuite_entity",
".",
"email_address",
")",
"[",
"0",
"]",
"except",
"IndexError",
":",
"pass",
"# No match? Create one.",
"if",
"not",
"user",
":",
"user",
"=",
"User",
".",
"objects",
".",
"create",
"(",
"username",
"=",
"user_username",
",",
"email",
"=",
"membersuite_entity",
".",
"email_address",
",",
"first_name",
"=",
"membersuite_entity",
".",
"first_name",
",",
"last_name",
"=",
"membersuite_entity",
".",
"last_name",
")",
"user_created",
"=",
"True",
"return",
"user",
",",
"user_created"
] | Returns a User for `membersuite_entity`.
membersuite_entity is any MemberSuite object that has the fields
membersuite_id, email_address, first_name, and last_name, e.g.,
PortalUser or Individual. | [
"Returns",
"a",
"User",
"for",
"membersuite_entity",
"."
] | 221f5ed8bc7d4424237a4669c5af9edc11819ee9 | https://github.com/AASHE/python-membersuite-api-client/blob/221f5ed8bc7d4424237a4669c5af9edc11819ee9/membersuite_api_client/security/services.py#L89-L124 | train |
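Usage sketch for get_user_for_membersuite_entity; any object carrying membersuite_id, email_address, first_name and last_name qualifies as the entity, for example the PortalUser returned by login_to_portal:

user, created = get_user_for_membersuite_entity(portal_user)
if created:
    user.set_unusable_password()  # optional hardening for auto-created accounts
    user.save()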
projectshift/shift-schema | shiftschema/property.py | SimpleProperty.add_validator | def add_validator(self, validator):
"""
Add validator to property
:param validator: object, extending from AbstractValidator
:return: None
"""
if not isinstance(validator, AbstractValidator):
err = 'Validator must be of type {}'.format(AbstractValidator)
raise InvalidValidator(err)
self.validators.append(validator)
return self | python | def add_validator(self, validator):
"""
Add validator to property
:param validator: object, extending from AbstractValidator
:return: None
"""
if not isinstance(validator, AbstractValidator):
err = 'Validator must be of type {}'.format(AbstractValidator)
raise InvalidValidator(err)
self.validators.append(validator)
return self | [
"def",
"add_validator",
"(",
"self",
",",
"validator",
")",
":",
"if",
"not",
"isinstance",
"(",
"validator",
",",
"AbstractValidator",
")",
":",
"err",
"=",
"'Validator must be of type {}'",
".",
"format",
"(",
"AbstractValidator",
")",
"raise",
"InvalidValidator",
"(",
"err",
")",
"self",
".",
"validators",
".",
"append",
"(",
"validator",
")",
"return",
"self"
] | Add validator to property
:param validator: object, extending from AbstractValidator
:return: None | [
"Add",
"validator",
"to",
"property"
] | 07787b540d3369bb37217ffbfbe629118edaf0eb | https://github.com/projectshift/shift-schema/blob/07787b540d3369bb37217ffbfbe629118edaf0eb/shiftschema/property.py#L42-L54 | train |
projectshift/shift-schema | shiftschema/property.py | SimpleProperty.filter | def filter(self, value=None, model=None, context=None):
"""
Sequentially applies all the filters to provided value
:param value: a value to filter
:param model: parent entity
:param context: filtering context, usually parent entity
:return: filtered value
"""
if value is None:
return value
for filter_obj in self.filters:
value = filter_obj.filter(
value=value,
model=model,
context=context if self.use_context else None
)
return value | python | def filter(self, value=None, model=None, context=None):
"""
Sequentially applies all the filters to provided value
:param value: a value to filter
:param model: parent entity
:param context: filtering context, usually parent entity
:return: filtered value
"""
if value is None:
return value
for filter_obj in self.filters:
value = filter_obj.filter(
value=value,
model=model,
context=context if self.use_context else None
)
return value | [
"def",
"filter",
"(",
"self",
",",
"value",
"=",
"None",
",",
"model",
"=",
"None",
",",
"context",
"=",
"None",
")",
":",
"if",
"value",
"is",
"None",
":",
"return",
"value",
"for",
"filter_obj",
"in",
"self",
".",
"filters",
":",
"value",
"=",
"filter_obj",
".",
"filter",
"(",
"value",
"=",
"value",
",",
"model",
"=",
"model",
",",
"context",
"=",
"context",
"if",
"self",
".",
"use_context",
"else",
"None",
")",
"return",
"value"
] | Sequentially applies all the filters to provided value
:param value: a value to filter
:param model: parent entity
:param context: filtering context, usually parent entity
:return: filtered value | [
"Sequentially",
"applies",
"all",
"the",
"filters",
"to",
"provided",
"value"
] | 07787b540d3369bb37217ffbfbe629118edaf0eb | https://github.com/projectshift/shift-schema/blob/07787b540d3369bb37217ffbfbe629118edaf0eb/shiftschema/property.py#L56-L73 | train |
projectshift/shift-schema | shiftschema/property.py | SimpleProperty.validate | def validate(self, value=None, model=None, context=None):
"""
Sequentially apply each validator to value and collect errors.
:param value: a value to validate
:param model: parent entity
:param context: validation context, usually parent entity
:return: list of errors (if any)
"""
errors = []
for validator in self.validators:
if value is None and not isinstance(validator, Required):
continue
error = validator.run(
value=value,
model=model,
context=context if self.use_context else None
)
if error:
errors.append(error)
return errors | python | def validate(self, value=None, model=None, context=None):
"""
Sequentially apply each validator to value and collect errors.
:param value: a value to validate
:param model: parent entity
:param context: validation context, usually parent entity
:return: list of errors (if any)
"""
errors = []
for validator in self.validators:
if value is None and not isinstance(validator, Required):
continue
error = validator.run(
value=value,
model=model,
context=context if self.use_context else None
)
if error:
errors.append(error)
return errors | [
"def",
"validate",
"(",
"self",
",",
"value",
"=",
"None",
",",
"model",
"=",
"None",
",",
"context",
"=",
"None",
")",
":",
"errors",
"=",
"[",
"]",
"for",
"validator",
"in",
"self",
".",
"validators",
":",
"if",
"value",
"is",
"None",
"and",
"not",
"isinstance",
"(",
"validator",
",",
"Required",
")",
":",
"continue",
"error",
"=",
"validator",
".",
"run",
"(",
"value",
"=",
"value",
",",
"model",
"=",
"model",
",",
"context",
"=",
"context",
"if",
"self",
".",
"use_context",
"else",
"None",
")",
"if",
"error",
":",
"errors",
".",
"append",
"(",
"error",
")",
"return",
"errors"
] | Sequentially apply each validator to value and collect errors.
:param value: a value to validate
:param model: parent entity
:param context: validation context, usually parent entity
:return: list of errors (if any) | [
"Sequentially",
"apply",
"each",
"validator",
"to",
"value",
"and",
"collect",
"errors",
"."
] | 07787b540d3369bb37217ffbfbe629118edaf0eb | https://github.com/projectshift/shift-schema/blob/07787b540d3369bb37217ffbfbe629118edaf0eb/shiftschema/property.py#L75-L97 | train |
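A standalone sketch of the filter-then-validate flow that SimpleProperty implements; plain callables stand in for shiftschema filter and validator objects:

filters = [str.strip, str.lower]
validators = [lambda value: None if value else 'required']

value = '  Hello  '
for f in filters:
    value = f(value)                # 'hello'
errors = [e for e in (v(value) for v in validators) if e]
print(value, errors)                # hello []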
projectshift/shift-schema | shiftschema/property.py | EntityProperty.filter_with_schema | def filter_with_schema(self, model=None, context=None):
""" Perform model filtering with schema """
if model is None or self.schema is None:
return
self._schema.filter(
model=model,
context=context if self.use_context else None
) | python | def filter_with_schema(self, model=None, context=None):
""" Perform model filtering with schema """
if model is None or self.schema is None:
return
self._schema.filter(
model=model,
context=context if self.use_context else None
) | [
"def",
"filter_with_schema",
"(",
"self",
",",
"model",
"=",
"None",
",",
"context",
"=",
"None",
")",
":",
"if",
"model",
"is",
"None",
"or",
"self",
".",
"schema",
"is",
"None",
":",
"return",
"self",
".",
"_schema",
".",
"filter",
"(",
"model",
"=",
"model",
",",
"context",
"=",
"context",
"if",
"self",
".",
"use_context",
"else",
"None",
")"
] | Perform model filtering with schema | [
"Perform",
"model",
"filtering",
"with",
"schema"
] | 07787b540d3369bb37217ffbfbe629118edaf0eb | https://github.com/projectshift/shift-schema/blob/07787b540d3369bb37217ffbfbe629118edaf0eb/shiftschema/property.py#L125-L133 | train |
projectshift/shift-schema | shiftschema/property.py | EntityProperty.validate_with_schema | def validate_with_schema(self, model=None, context=None):
""" Perform model validation with schema"""
if self._schema is None or model is None:
return
result = self._schema.validate(
model=model,
context=context if self.use_context else None
)
return result | python | def validate_with_schema(self, model=None, context=None):
""" Perform model validation with schema"""
if self._schema is None or model is None:
return
result = self._schema.validate(
model=model,
context=context if self.use_context else None
)
return result | [
"def",
"validate_with_schema",
"(",
"self",
",",
"model",
"=",
"None",
",",
"context",
"=",
"None",
")",
":",
"if",
"self",
".",
"_schema",
"is",
"None",
"or",
"model",
"is",
"None",
":",
"return",
"result",
"=",
"self",
".",
"_schema",
".",
"validate",
"(",
"model",
"=",
"model",
",",
"context",
"=",
"context",
"if",
"self",
".",
"use_context",
"else",
"None",
")",
"return",
"result"
] | Perform model validation with schema | [
"Perform",
"model",
"validation",
"with",
"schema"
] | 07787b540d3369bb37217ffbfbe629118edaf0eb | https://github.com/projectshift/shift-schema/blob/07787b540d3369bb37217ffbfbe629118edaf0eb/shiftschema/property.py#L135-L144 | train |
projectshift/shift-schema | shiftschema/property.py | CollectionProperty.filter_with_schema | def filter_with_schema(self, collection=None, context=None):
""" Perform collection items filtering with schema """
if collection is None or self.schema is None:
return
try:
for item in collection:
self._schema.filter(
model=item,
context=context if self.use_context else None
)
except TypeError:
pass | python | def filter_with_schema(self, collection=None, context=None):
""" Perform collection items filtering with schema """
if collection is None or self.schema is None:
return
try:
for item in collection:
self._schema.filter(
model=item,
context=context if self.use_context else None
)
except TypeError:
pass | [
"def",
"filter_with_schema",
"(",
"self",
",",
"collection",
"=",
"None",
",",
"context",
"=",
"None",
")",
":",
"if",
"collection",
"is",
"None",
"or",
"self",
".",
"schema",
"is",
"None",
":",
"return",
"try",
":",
"for",
"item",
"in",
"collection",
":",
"self",
".",
"_schema",
".",
"filter",
"(",
"model",
"=",
"item",
",",
"context",
"=",
"context",
"if",
"self",
".",
"use_context",
"else",
"None",
")",
"except",
"TypeError",
":",
"pass"
] | Perform collection items filtering with schema | [
"Perform",
"collection",
"items",
"filtering",
"with",
"schema"
] | 07787b540d3369bb37217ffbfbe629118edaf0eb | https://github.com/projectshift/shift-schema/blob/07787b540d3369bb37217ffbfbe629118edaf0eb/shiftschema/property.py#L155-L167 | train |
projectshift/shift-schema | shiftschema/property.py | CollectionProperty.validate_with_schema | def validate_with_schema(self, collection=None, context=None):
""" Validate each item in collection with our schema"""
if self._schema is None or not collection:
return
result = []
try:
for index, item in enumerate(collection):
item_result = self._schema.validate(
model=item,
context=context if self.use_context else None
)
result.append(item_result)
except TypeError:
pass
return result | python | def validate_with_schema(self, collection=None, context=None):
""" Validate each item in collection with our schema"""
if self._schema is None or not collection:
return
result = []
try:
for index, item in enumerate(collection):
item_result = self._schema.validate(
model=item,
context=context if self.use_context else None
)
result.append(item_result)
except TypeError:
pass
return result | [
"def",
"validate_with_schema",
"(",
"self",
",",
"collection",
"=",
"None",
",",
"context",
"=",
"None",
")",
":",
"if",
"self",
".",
"_schema",
"is",
"None",
"or",
"not",
"collection",
":",
"return",
"result",
"=",
"[",
"]",
"try",
":",
"for",
"index",
",",
"item",
"in",
"enumerate",
"(",
"collection",
")",
":",
"item_result",
"=",
"self",
".",
"_schema",
".",
"validate",
"(",
"model",
"=",
"item",
",",
"context",
"=",
"context",
"if",
"self",
".",
"use_context",
"else",
"None",
")",
"result",
".",
"append",
"(",
"item_result",
")",
"except",
"TypeError",
":",
"pass",
"return",
"result"
] | Validate each item in collection with our schema | [
"Validate",
"each",
"item",
"in",
"collection",
"with",
"our",
"schema"
] | 07787b540d3369bb37217ffbfbe629118edaf0eb | https://github.com/projectshift/shift-schema/blob/07787b540d3369bb37217ffbfbe629118edaf0eb/shiftschema/property.py#L169-L185 | train |
shaypal5/strct | strct/hash/_hash.py | json_based_stable_hash | def json_based_stable_hash(obj):
"""Computes a cross-kernel stable hash value for the given object.
The supported data structures are the built-in list, tuple and dict types.
Any included tuple or list, whether outer or nested, may only contain
values of the following built-in types: bool, int, float, complex, str,
list, tuple and dict.
Any included dict, whether outer or nested, may only contain keys of a
single type, which can be one of the following built-in types: bool, int,
float, str, and may only contain values of only the following built-in
types: bool, int, float, complex, str, list, tuple, dict.
Parameters
---------
obj : bool/int/float/complex/str/dict/list/tuple
The object for which to compute a hash value.
Returns
-------
int
The computed hash value.
"""
encoded_str = json.dumps(
obj=obj,
skipkeys=False,
ensure_ascii=False,
check_circular=True,
allow_nan=True,
cls=None,
indent=0,
separators=(',', ':'),
default=None,
sort_keys=True,
).encode('utf-8')
return hashlib.sha256(encoded_str).hexdigest() | python | def json_based_stable_hash(obj):
"""Computes a cross-kernel stable hash value for the given object.
The supported data structures are the built-in list, tuple and dict types.
Any included tuple or list, whether outer or nested, may only contain
values of the following built-in types: bool, int, float, complex, str,
list, tuple and dict.
Any included dict, whether outer or nested, may only contain keys of a
single type, which can be one of the following built-in types: bool, int,
float, str, and may only contain values of only the following built-in
types: bool, int, float, complex, str, list, tuple, dict.
Parameters
---------
obj : bool/int/float/complex/str/dict/list/tuple
The object for which to compute a hash value.
Returns
-------
int
The computed hash value.
"""
encoded_str = json.dumps(
obj=obj,
skipkeys=False,
ensure_ascii=False,
check_circular=True,
allow_nan=True,
cls=None,
indent=0,
separators=(',', ':'),
default=None,
sort_keys=True,
).encode('utf-8')
return hashlib.sha256(encoded_str).hexdigest() | [
"def",
"json_based_stable_hash",
"(",
"obj",
")",
":",
"encoded_str",
"=",
"json",
".",
"dumps",
"(",
"obj",
"=",
"obj",
",",
"skipkeys",
"=",
"False",
",",
"ensure_ascii",
"=",
"False",
",",
"check_circular",
"=",
"True",
",",
"allow_nan",
"=",
"True",
",",
"cls",
"=",
"None",
",",
"indent",
"=",
"0",
",",
"separators",
"=",
"(",
"','",
",",
"':'",
")",
",",
"default",
"=",
"None",
",",
"sort_keys",
"=",
"True",
",",
")",
".",
"encode",
"(",
"'utf-8'",
")",
"return",
"hashlib",
".",
"sha256",
"(",
"encoded_str",
")",
".",
"hexdigest",
"(",
")"
] | Computes a cross-kernel stable hash value for the given object.
The supported data structures are the built-in list, tuple and dict types.
Any included tuple or list, whether outer or nested, may only contain
values of the following built-in types: bool, int, float, complex, str,
list, tuple and dict.
Any included dict, whether outer or nested, may only contain keys of a
single type, which can be one of the following built-in types: bool, int,
float, str, and may only contain values of only the following built-in
types: bool, int, float, complex, str, list, tuple, dict.
Parameters
---------
obj : bool/int/float/complex/str/dict/list/tuple
The object for which to compute a hash value.
Returns
-------
int
The computed hash value. | [
"Computes",
"a",
"cross",
"-",
"kernel",
"stable",
"hash",
"value",
"for",
"the",
"given",
"object",
"."
] | f3a301692d052ddb79331230b3c00625db1d83fc | https://github.com/shaypal5/strct/blob/f3a301692d052ddb79331230b3c00625db1d83fc/strct/hash/_hash.py#L86-L122 | train |
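A small usage sketch for json_based_stable_hash, assuming json and hashlib are imported as in the source module; note that, despite the docstring's "int", the function returns the SHA-256 hex digest as a string.
# Identical nested structures hash identically regardless of key order,
# because the JSON encoding sorts keys before hashing.
a = json_based_stable_hash({'b': [1, 2.5, 'x'], 'a': {'nested': True}})
b = json_based_stable_hash({'a': {'nested': True}, 'b': [1, 2.5, 'x']})
assert a == b
print(a)  # 64-character hexadecimal digest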
a1ezzz/wasp-general | wasp_general/network/web/session.py | WWebSessionBase.read_request_line | def read_request_line(self, request_line):
""" Read HTTP-request line
:param request_line: line to parse
for HTTP/0.9 is GET <Request-URI>
for HTTP/1.0 and 1.1 is <METHOD> <Request-URI> HTTP/<HTTP-Version>, where HTTP-Version is 1.0
or 1.1.
for HTTP/2: binary headers are used
"""
request = self.__request_cls.parse_request_line(self, request_line)
protocol_version = self.protocol_version()
if protocol_version == '0.9':
if request.method() != 'GET':
raise Exception('HTTP/0.9 standard violation')
elif protocol_version == '1.0' or protocol_version == '1.1':
pass
elif protocol_version == '2':
pass
else:
raise RuntimeError('Unsupported HTTP-protocol') | python | def read_request_line(self, request_line):
""" Read HTTP-request line
:param request_line: line to parse
for HTTP/0.9 is GET <Request-URI>
for HTTP/1.0 and 1.1 is <METHOD> <Request-URI> HTTP/<HTTP-Version>, where HTTP-Version is 1.0
or 1.1.
for HTTP/2: binary headers are used
"""
request = self.__request_cls.parse_request_line(self, request_line)
protocol_version = self.protocol_version()
if protocol_version == '0.9':
if request.method() != 'GET':
raise Exception('HTTP/0.9 standard violation')
elif protocol_version == '1.0' or protocol_version == '1.1':
pass
elif protocol_version == '2':
pass
else:
raise RuntimeError('Unsupported HTTP-protocol') | [
"def",
"read_request_line",
"(",
"self",
",",
"request_line",
")",
":",
"request",
"=",
"self",
".",
"__request_cls",
".",
"parse_request_line",
"(",
"self",
",",
"request_line",
")",
"protocol_version",
"=",
"self",
".",
"protocol_version",
"(",
")",
"if",
"protocol_version",
"==",
"'0.9'",
":",
"if",
"request",
".",
"method",
"(",
")",
"!=",
"'GET'",
":",
"raise",
"Exception",
"(",
"'HTTP/0.9 standard violation'",
")",
"elif",
"protocol_version",
"==",
"'1.0'",
"or",
"protocol_version",
"==",
"'1.1'",
":",
"pass",
"elif",
"protocol_version",
"==",
"'2'",
":",
"pass",
"else",
":",
"raise",
"RuntimeError",
"(",
"'Unsupported HTTP-protocol'",
")"
] | Read HTTP-request line
:param request_line: line to parse
for HTTP/0.9 is GET <Request-URI>
for HTTP/1.0 and 1.1 is <METHOD> <Request-URI> HTTP/<HTTP-Version>, where HTTP-Version is 1.0
or 1.1.
for HTTP/2: binary headers are used | [
"Read",
"HTTP",
"-",
"request",
"line"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/web/session.py#L67-L88 | train |
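A standalone illustration of the per-version checks above, using a plain hypothetical helper instead of the WWebSessionBase machinery; extracting the method as the first space-separated token is a simplification.
def check_request_line(request_line, protocol_version):
    # Mirror the version checks of read_request_line on a raw request line.
    method = request_line.split(' ', 1)[0]
    if protocol_version == '0.9':
        if method != 'GET':
            raise Exception('HTTP/0.9 standard violation')
    elif protocol_version in ('1.0', '1.1', '2'):
        pass
    else:
        raise RuntimeError('Unsupported HTTP-protocol')
    return method

print(check_request_line('GET /index.html HTTP/1.1', '1.1'))  # GET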
contains-io/typet | typet/meta.py | metaclass | def metaclass(*metaclasses):
# type: (*type) -> Callable[[type], type]
"""Create the class using all metaclasses.
Args:
metaclasses: A tuple of metaclasses that will be used to generate and
replace a specified class.
Returns:
A decorator that will recreate the class using the specified
metaclasses.
"""
def _inner(cls):
# pragma pylint: disable=unused-variable
metabases = tuple(
collections.OrderedDict( # noqa: F841
(c, None) for c in (metaclasses + (type(cls),))
).keys()
)
# pragma pylint: enable=unused-variable
_Meta = metabases[0]
for base in metabases[1:]:
class _Meta(base, _Meta): # pylint: disable=function-redefined
pass
return six.add_metaclass(_Meta)(cls)
return _inner | python | def metaclass(*metaclasses):
# type: (*type) -> Callable[[type], type]
"""Create the class using all metaclasses.
Args:
metaclasses: A tuple of metaclasses that will be used to generate and
replace a specified class.
Returns:
A decorator that will recreate the class using the specified
metaclasses.
"""
def _inner(cls):
# pragma pylint: disable=unused-variable
metabases = tuple(
collections.OrderedDict( # noqa: F841
(c, None) for c in (metaclasses + (type(cls),))
).keys()
)
# pragma pylint: enable=unused-variable
_Meta = metabases[0]
for base in metabases[1:]:
class _Meta(base, _Meta): # pylint: disable=function-redefined
pass
return six.add_metaclass(_Meta)(cls)
return _inner | [
"def",
"metaclass",
"(",
"*",
"metaclasses",
")",
":",
"# type: (*type) -> Callable[[type], type]",
"def",
"_inner",
"(",
"cls",
")",
":",
"# pragma pylint: disable=unused-variable",
"metabases",
"=",
"tuple",
"(",
"collections",
".",
"OrderedDict",
"(",
"# noqa: F841",
"(",
"c",
",",
"None",
")",
"for",
"c",
"in",
"(",
"metaclasses",
"+",
"(",
"type",
"(",
"cls",
")",
",",
")",
")",
")",
".",
"keys",
"(",
")",
")",
"# pragma pylint: enable=unused-variable",
"_Meta",
"=",
"metabases",
"[",
"0",
"]",
"for",
"base",
"in",
"metabases",
"[",
"1",
":",
"]",
":",
"class",
"_Meta",
"(",
"base",
",",
"_Meta",
")",
":",
"# pylint: disable=function-redefined",
"pass",
"return",
"six",
".",
"add_metaclass",
"(",
"_Meta",
")",
"(",
"cls",
")",
"return",
"_inner"
] | Create the class using all metaclasses.
Args:
metaclasses: A tuple of metaclasses that will be used to generate and
replace a specified class.
Returns:
A decorator that will recreate the class using the specified
metaclasses. | [
"Create",
"the",
"class",
"using",
"all",
"metaclasses",
"."
] | ad5087c567af84db299eca186776e1cee228e442 | https://github.com/contains-io/typet/blob/ad5087c567af84db299eca186776e1cee228e442/typet/meta.py#L38-L67 | train |
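A usage sketch with two toy metaclasses; here the decorated class already carries one metaclass (added via six.add_metaclass) and the decorator folds a second one in. The MetaA/MetaB names are illustrative only, not part of typet.
import six

class MetaA(type):
    pass

class MetaB(type):
    pass

@metaclass(MetaB)
@six.add_metaclass(MetaA)
class Widget(object):
    pass

# The recreated class is an instance of a combined metaclass deriving
# from both MetaA and MetaB.
print(isinstance(Widget, MetaA), isinstance(Widget, MetaB))  # True True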
vecnet/vecnet.openmalaria | vecnet/openmalaria/scenario/interventions.py | ITN.get_attrition_in_years | def get_attrition_in_years(self):
"""
Function for the Basic UI
"""
attrition_of_nets = self.itn.find("attritionOfNets")
function = attrition_of_nets.attrib["function"]
if function != "step":
return None
L = attrition_of_nets.attrib["L"]
return L | python | def get_attrition_in_years(self):
"""
Function for the Basic UI
"""
attrition_of_nets = self.itn.find("attritionOfNets")
function = attrition_of_nets.attrib["function"]
if function != "step":
return None
L = attrition_of_nets.attrib["L"]
return L | [
"def",
"get_attrition_in_years",
"(",
"self",
")",
":",
"attrition_of_nets",
"=",
"self",
".",
"itn",
".",
"find",
"(",
"\"attritionOfNets\"",
")",
"function",
"=",
"attrition_of_nets",
".",
"attrib",
"[",
"\"function\"",
"]",
"if",
"function",
"!=",
"\"step\"",
":",
"return",
"None",
"L",
"=",
"attrition_of_nets",
".",
"attrib",
"[",
"\"L\"",
"]",
"return",
"L"
] | Function for the Basic UI | [
"Function",
"for",
"the",
"Basic",
"UI"
] | 795bc9d1b81a6c664f14879edda7a7c41188e95a | https://github.com/vecnet/vecnet.openmalaria/blob/795bc9d1b81a6c664f14879edda7a7c41188e95a/vecnet/openmalaria/scenario/interventions.py#L374-L383 | train |
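A standalone sketch of the same attribute lookup on a hand-written snippet; the XML below only shows the attritionOfNets element and is not a complete openMalaria ITN section.
from xml.etree import ElementTree

itn = ElementTree.fromstring(
    '<ITN><attritionOfNets function="step" L="2.2"/></ITN>'
)
attrition_of_nets = itn.find("attritionOfNets")
if attrition_of_nets.attrib["function"] == "step":
    print(attrition_of_nets.attrib["L"])  # 2.2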
vecnet/vecnet.openmalaria | vecnet/openmalaria/scenario/interventions.py | VectorPop.add | def add(self, intervention, name=None):
"""
Add an intervention to vectorPop section.
intervention is either ElementTree or xml snippet
"""
if self.et is None:
return
assert isinstance(intervention, six.string_types)
et = ElementTree.fromstring(intervention)
vector_pop = VectorPopIntervention(et)
assert isinstance(vector_pop.name, six.string_types)
if name is not None:
assert isinstance(name, six.string_types)
et.attrib["name"] = name
index = len(self.et.findall("intervention"))
self.et.insert(index, et) | python | def add(self, intervention, name=None):
"""
Add an intervention to vectorPop section.
intervention is either ElementTree or xml snippet
"""
if self.et is None:
return
assert isinstance(intervention, six.string_types)
et = ElementTree.fromstring(intervention)
vector_pop = VectorPopIntervention(et)
assert isinstance(vector_pop.name, six.string_types)
if name is not None:
assert isinstance(name, six.string_types)
et.attrib["name"] = name
index = len(self.et.findall("intervention"))
self.et.insert(index, et) | [
"def",
"add",
"(",
"self",
",",
"intervention",
",",
"name",
"=",
"None",
")",
":",
"if",
"self",
".",
"et",
"is",
"None",
":",
"return",
"assert",
"isinstance",
"(",
"intervention",
",",
"six",
".",
"string_types",
")",
"et",
"=",
"ElementTree",
".",
"fromstring",
"(",
"intervention",
")",
"vector_pop",
"=",
"VectorPopIntervention",
"(",
"et",
")",
"assert",
"isinstance",
"(",
"vector_pop",
".",
"name",
",",
"six",
".",
"string_types",
")",
"if",
"name",
"is",
"not",
"None",
":",
"assert",
"isinstance",
"(",
"name",
",",
"six",
".",
"string_types",
")",
"et",
".",
"attrib",
"[",
"\"name\"",
"]",
"=",
"name",
"index",
"=",
"len",
"(",
"self",
".",
"et",
".",
"findall",
"(",
"\"intervention\"",
")",
")",
"self",
".",
"et",
".",
"insert",
"(",
"index",
",",
"et",
")"
] | Add an intervention to vectorPop section.
intervention is either ElementTree or xml snippet | [
"Add",
"an",
"intervention",
"to",
"vectorPop",
"section",
".",
"intervention",
"is",
"either",
"ElementTree",
"or",
"xml",
"snippet"
] | 795bc9d1b81a6c664f14879edda7a7c41188e95a | https://github.com/vecnet/vecnet.openmalaria/blob/795bc9d1b81a6c664f14879edda7a7c41188e95a/vecnet/openmalaria/scenario/interventions.py#L1046-L1065 | train |
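A minimal sketch of the underlying ElementTree pattern: parse an XML snippet and append it after the existing intervention elements. The snippet is illustrative only and is not schema-complete openMalaria input.
from xml.etree import ElementTree

vector_pop = ElementTree.fromstring("<vectorPop/>")
snippet = '<intervention name="larviciding"/>'
element = ElementTree.fromstring(snippet)
index = len(vector_pop.findall("intervention"))
vector_pop.insert(index, element)
print(ElementTree.tostring(vector_pop).decode())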
a1ezzz/wasp-general | wasp_general/cli/cli.py | WConsoleHistory.add | def add(self, value):
""" Add new record to history. Record will be added to the end
:param value: new record
:return: int record position in history
"""
index = len(self.__history)
self.__history.append(value)
return index | python | def add(self, value):
""" Add new record to history. Record will be added to the end
:param value: new record
:return: int record position in history
"""
index = len(self.__history)
self.__history.append(value)
return index | [
"def",
"add",
"(",
"self",
",",
"value",
")",
":",
"index",
"=",
"len",
"(",
"self",
".",
"__history",
")",
"self",
".",
"__history",
".",
"append",
"(",
"value",
")",
"return",
"index"
] | Add new record to history. Record will be added to the end
:param value: new record
:return: int record position in history | [
"Add",
"new",
"record",
"to",
"history",
".",
"Record",
"will",
"be",
"added",
"to",
"the",
"end"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/cli/cli.py#L69-L77 | train |
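A usage sketch, assuming WConsoleHistory can be constructed without arguments (its constructor is not shown in this record).
history = WConsoleHistory()
print(history.add('help'))    # 0 - position of the first record
print(history.add('status'))  # 1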
a1ezzz/wasp-general | wasp_general/cli/cli.py | WConsoleProto.start_session | def start_session(self):
""" Start new session and prepare environment for new row editing process
:return: None
"""
self.__current_row = ''
self.__history_mode = False
self.__editable_history = deepcopy(self.__history)
self.__prompt_show = True
self.refresh_window() | python | def start_session(self):
""" Start new session and prepare environment for new row editing process
:return: None
"""
self.__current_row = ''
self.__history_mode = False
self.__editable_history = deepcopy(self.__history)
self.__prompt_show = True
self.refresh_window() | [
"def",
"start_session",
"(",
"self",
")",
":",
"self",
".",
"__current_row",
"=",
"''",
"self",
".",
"__history_mode",
"=",
"False",
"self",
".",
"__editable_history",
"=",
"deepcopy",
"(",
"self",
".",
"__history",
")",
"self",
".",
"__prompt_show",
"=",
"True",
"self",
".",
"refresh_window",
"(",
")"
] | Start new session and prepare environment for new row editing process
:return: None | [
"Start",
"new",
"session",
"and",
"prepare",
"environment",
"for",
"new",
"row",
"editing",
"process"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/cli/cli.py#L139-L148 | train |
a1ezzz/wasp-general | wasp_general/cli/cli.py | WConsoleProto.fin_session | def fin_session(self):
""" Finalize current session
:return: None
"""
self.__prompt_show = False
self.__history.add(self.row())
self.exec(self.row()) | python | def fin_session(self):
""" Finalize current session
:return: None
"""
self.__prompt_show = False
self.__history.add(self.row())
self.exec(self.row()) | [
"def",
"fin_session",
"(",
"self",
")",
":",
"self",
".",
"__prompt_show",
"=",
"False",
"self",
".",
"__history",
".",
"add",
"(",
"self",
".",
"row",
"(",
")",
")",
"self",
".",
"exec",
"(",
"self",
".",
"row",
"(",
")",
")"
] | Finalize current session
:return: None | [
"Finalize",
"current",
"session"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/cli/cli.py#L150-L157 | train |
a1ezzz/wasp-general | wasp_general/cli/cli.py | WConsoleWindowProto.data | def data(
self, previous_data=False, prompt=False, console_row=False,
console_row_to_cursor=False, console_row_from_cursor=False
):
""" Return output data. Flags specifies what data to append. If no flags was specified
nul-length string returned
:param previous_data: If True, then previous output appends
:param prompt: If True, then console prompt appends. If console_row or console_row_to_cursor is True, \
then this value is omitted
:param console_row: If True, then console prompt and current input appends.
:param console_row_to_cursor: If True, then console prompt and current input till cursor appends. \
If console_row is True, then this value is omitted
:param console_row_from_cursor: If True, then current input from cursor appends. \
If console_row is True, then this value is omitted
:return: str
"""
result = ''
if previous_data:
result += self.__previous_data
if prompt or console_row or console_row_to_cursor:
result += self.console().prompt()
if console_row or (console_row_from_cursor and console_row_to_cursor):
result += self.console().row()
elif console_row_to_cursor:
result += self.console().row()[:self.cursor()]
elif console_row_from_cursor:
result += self.console().row()[self.cursor():]
return result | python | def data(
self, previous_data=False, prompt=False, console_row=False,
console_row_to_cursor=False, console_row_from_cursor=False
):
""" Return output data. Flags specifies what data to append. If no flags was specified
nul-length string returned
:param previous_data: If True, then previous output appends
:param prompt: If True, then console prompt appends. If console_row or console_row_to_cursor is True, \
then this value is omitted
:param console_row: If True, then console prompt and current input appends.
:param console_row_to_cursor: If True, then console prompt and current input till cursor appends. \
If console_row is True, then this value is omitted
:param console_row_from_cursor: If True, then current input from cursor appends. \
If console_row is True, then this value is omitted
:return: str
"""
result = ''
if previous_data:
result += self.__previous_data
if prompt or console_row or console_row_to_cursor:
result += self.console().prompt()
if console_row or (console_row_from_cursor and console_row_to_cursor):
result += self.console().row()
elif console_row_to_cursor:
result += self.console().row()[:self.cursor()]
elif console_row_from_cursor:
result += self.console().row()[self.cursor():]
return result | [
"def",
"data",
"(",
"self",
",",
"previous_data",
"=",
"False",
",",
"prompt",
"=",
"False",
",",
"console_row",
"=",
"False",
",",
"console_row_to_cursor",
"=",
"False",
",",
"console_row_from_cursor",
"=",
"False",
")",
":",
"result",
"=",
"''",
"if",
"previous_data",
":",
"result",
"+=",
"self",
".",
"__previous_data",
"if",
"prompt",
"or",
"console_row",
"or",
"console_row_to_cursor",
":",
"result",
"+=",
"self",
".",
"console",
"(",
")",
".",
"prompt",
"(",
")",
"if",
"console_row",
"or",
"(",
"console_row_from_cursor",
"and",
"console_row_to_cursor",
")",
":",
"result",
"+=",
"self",
".",
"console",
"(",
")",
".",
"row",
"(",
")",
"elif",
"console_row_to_cursor",
":",
"result",
"+=",
"self",
".",
"console",
"(",
")",
".",
"row",
"(",
")",
"[",
":",
"self",
".",
"cursor",
"(",
")",
"]",
"elif",
"console_row_from_cursor",
":",
"result",
"+=",
"self",
".",
"console",
"(",
")",
".",
"row",
"(",
")",
"[",
"self",
".",
"cursor",
"(",
")",
":",
"]",
"return",
"result"
] | Return output data. Flags specifies what data to append. If no flags was specified
nul-length string returned
:param previous_data: If True, then previous output appends
:param prompt: If True, then console prompt appends. If console_row or console_row_to_cursor is True, \
then this value is omitted
:param console_row: If True, then console prompt and current input appends.
:param console_row_to_cursor: If True, then console prompt and current input till cursor appends. \
If console_row is True, then this value is omitted
:param console_row_from_cursor: If True, then current input from cursor appends. \
If console_row is True, then this value is omitted
:return: str | [
"Return",
"output",
"data",
".",
"Flags",
"specifies",
"what",
"data",
"to",
"append",
".",
"If",
"no",
"flags",
"was",
"specified",
"nul",
"-",
"length",
"string",
"returned"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/cli/cli.py#L299-L332 | train |
a1ezzz/wasp-general | wasp_general/cli/cli.py | WConsoleWindowProto.write_data | def write_data(self, data, start_position=0):
""" Write data from the specified line
:param data: string to write, each one on new line
:param start_position: starting line
:return:
"""
if len(data) > self.height():
raise ValueError('Data too long (too many strings)')
for i in range(len(data)):
self.write_line(start_position + i, data[i]) | python | def write_data(self, data, start_position=0):
""" Write data from the specified line
:param data: string to write, each one on new line
:param start_position: starting line
:return:
"""
if len(data) > self.height():
raise ValueError('Data too long (too many strings)')
for i in range(len(data)):
self.write_line(start_position + i, data[i]) | [
"def",
"write_data",
"(",
"self",
",",
"data",
",",
"start_position",
"=",
"0",
")",
":",
"if",
"len",
"(",
"data",
")",
">",
"self",
".",
"height",
"(",
")",
":",
"raise",
"ValueError",
"(",
"'Data too long (too many strings)'",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"data",
")",
")",
":",
"self",
".",
"write_line",
"(",
"start_position",
"+",
"i",
",",
"data",
"[",
"i",
"]",
")"
] | Write data from the specified line
:param data: string to write, each one on new line
:param start_position: starting line
:return: | [
"Write",
"data",
"from",
"the",
"specified",
"line"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/cli/cli.py#L359-L370 | train |
a1ezzz/wasp-general | wasp_general/cli/cli.py | WConsoleWindowProto.write_feedback | def write_feedback(self, feedback, cr=True):
""" Store feedback. Keep specified feedback as previous output
:param feedback: data to store
:param cr: whether to write carriage return to the end or not
:return: None
"""
self.__previous_data += feedback
if cr is True:
self.__previous_data += '\n' | python | def write_feedback(self, feedback, cr=True):
""" Store feedback. Keep specified feedback as previous output
:param feedback: data to store
:param cr: whether to write carriage return to the end or not
:return: None
"""
self.__previous_data += feedback
if cr is True:
self.__previous_data += '\n' | [
"def",
"write_feedback",
"(",
"self",
",",
"feedback",
",",
"cr",
"=",
"True",
")",
":",
"self",
".",
"__previous_data",
"+=",
"feedback",
"if",
"cr",
"is",
"True",
":",
"self",
".",
"__previous_data",
"+=",
"'\\n'"
] | Store feedback. Keep specified feedback as previous output
:param feedback: data to store
:param cr: whether to write carriage return to the end or not
:return: None | [
"Store",
"feedback",
".",
"Keep",
"specified",
"feedback",
"as",
"previous",
"output"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/cli/cli.py#L421-L430 | train |
a1ezzz/wasp-general | wasp_general/cli/cli.py | WConsoleWindowBase.refresh | def refresh(self, prompt_show=True):
""" Refresh current window. Clear current window and redraw it with one of drawers
:param prompt_show: flag, that specifies, whether to show prompt and current row at the
window's end, or not
:return: None
"""
self.clear()
for drawer in self.__drawers:
if drawer.suitable(self, prompt_show=prompt_show):
drawer.draw(self, prompt_show=prompt_show)
return
raise RuntimeError('No suitable drawer was found') | python | def refresh(self, prompt_show=True):
""" Refresh current window. Clear current window and redraw it with one of drawers
:param prompt_show: flag, that specifies, whether to show prompt and current row at the
window's end, or not
:return: None
"""
self.clear()
for drawer in self.__drawers:
if drawer.suitable(self, prompt_show=prompt_show):
drawer.draw(self, prompt_show=prompt_show)
return
raise RuntimeError('No suitable drawer was found') | [
"def",
"refresh",
"(",
"self",
",",
"prompt_show",
"=",
"True",
")",
":",
"self",
".",
"clear",
"(",
")",
"for",
"drawer",
"in",
"self",
".",
"__drawers",
":",
"if",
"drawer",
".",
"suitable",
"(",
"self",
",",
"prompt_show",
"=",
"prompt_show",
")",
":",
"drawer",
".",
"draw",
"(",
"self",
",",
"prompt_show",
"=",
"prompt_show",
")",
"return",
"raise",
"RuntimeError",
"(",
"'No suitable drawer was found'",
")"
] | Refresh current window. Clear current window and redraw it with one of drawers
:param prompt_show: flag, that specifies, whether to show prompt and current row at the
window's end, or not
:return: None | [
"Refresh",
"current",
"window",
".",
"Clear",
"current",
"window",
"and",
"redraw",
"it",
"with",
"one",
"of",
"drawers"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/cli/cli.py#L484-L498 | train |
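A schematic, framework-free sketch of the drawer dispatch used by refresh; the drawer object below is a dummy stand-in for the real console window drawers.
class DummyDrawer:
    def suitable(self, window, prompt_show=True):
        return True
    def draw(self, window, prompt_show=True):
        print('drawing (prompt_show=%s)' % prompt_show)

def refresh(window, drawers, prompt_show=True):
    for drawer in drawers:
        if drawer.suitable(window, prompt_show=prompt_show):
            drawer.draw(window, prompt_show=prompt_show)
            return
    raise RuntimeError('No suitable drawer was found')

refresh(window=None, drawers=[DummyDrawer()])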
contains-io/typet | typet/path.py | is_dir | def is_dir(path):
"""Determine if a Path or string is a directory on the file system."""
try:
return path.expanduser().absolute().is_dir()
except AttributeError:
return os.path.isdir(os.path.abspath(os.path.expanduser(str(path)))) | python | def is_dir(path):
"""Determine if a Path or string is a directory on the file system."""
try:
return path.expanduser().absolute().is_dir()
except AttributeError:
return os.path.isdir(os.path.abspath(os.path.expanduser(str(path)))) | [
"def",
"is_dir",
"(",
"path",
")",
":",
"try",
":",
"return",
"path",
".",
"expanduser",
"(",
")",
".",
"absolute",
"(",
")",
".",
"is_dir",
"(",
")",
"except",
"AttributeError",
":",
"return",
"os",
".",
"path",
".",
"isdir",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"expanduser",
"(",
"str",
"(",
"path",
")",
")",
")",
")"
] | Determine if a Path or string is a directory on the file system. | [
"Determine",
"if",
"a",
"Path",
"or",
"string",
"is",
"a",
"directory",
"on",
"the",
"file",
"system",
"."
] | ad5087c567af84db299eca186776e1cee228e442 | https://github.com/contains-io/typet/blob/ad5087c567af84db299eca186776e1cee228e442/typet/path.py#L30-L35 | train |
contains-io/typet | typet/path.py | is_file | def is_file(path):
"""Determine if a Path or string is a file on the file system."""
try:
return path.expanduser().absolute().is_file()
except AttributeError:
return os.path.isfile(os.path.abspath(os.path.expanduser(str(path)))) | python | def is_file(path):
"""Determine if a Path or string is a file on the file system."""
try:
return path.expanduser().absolute().is_file()
except AttributeError:
return os.path.isfile(os.path.abspath(os.path.expanduser(str(path)))) | [
"def",
"is_file",
"(",
"path",
")",
":",
"try",
":",
"return",
"path",
".",
"expanduser",
"(",
")",
".",
"absolute",
"(",
")",
".",
"is_file",
"(",
")",
"except",
"AttributeError",
":",
"return",
"os",
".",
"path",
".",
"isfile",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"expanduser",
"(",
"str",
"(",
"path",
")",
")",
")",
")"
] | Determine if a Path or string is a file on the file system. | [
"Determine",
"if",
"a",
"Path",
"or",
"string",
"is",
"a",
"file",
"on",
"the",
"file",
"system",
"."
] | ad5087c567af84db299eca186776e1cee228e442 | https://github.com/contains-io/typet/blob/ad5087c567af84db299eca186776e1cee228e442/typet/path.py#L38-L43 | train |
contains-io/typet | typet/path.py | exists | def exists(path):
"""Determine if a Path or string is an existing path on the file system."""
try:
return path.expanduser().absolute().exists()
except AttributeError:
return os.path.exists(os.path.abspath(os.path.expanduser(str(path)))) | python | def exists(path):
"""Determine if a Path or string is an existing path on the file system."""
try:
return path.expanduser().absolute().exists()
except AttributeError:
return os.path.exists(os.path.abspath(os.path.expanduser(str(path)))) | [
"def",
"exists",
"(",
"path",
")",
":",
"try",
":",
"return",
"path",
".",
"expanduser",
"(",
")",
".",
"absolute",
"(",
")",
".",
"exists",
"(",
")",
"except",
"AttributeError",
":",
"return",
"os",
".",
"path",
".",
"exists",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"expanduser",
"(",
"str",
"(",
"path",
")",
")",
")",
")"
] | Determine if a Path or string is an existing path on the file system. | [
"Determine",
"if",
"a",
"Path",
"or",
"string",
"is",
"an",
"existing",
"path",
"on",
"the",
"file",
"system",
"."
] | ad5087c567af84db299eca186776e1cee228e442 | https://github.com/contains-io/typet/blob/ad5087c567af84db299eca186776e1cee228e442/typet/path.py#L46-L51 | train |
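A usage sketch for the three helpers above (is_dir, is_file, exists); both pathlib.Path objects and plain strings are accepted, and the printed results depend on the machine the snippet runs on.
import pathlib

print(is_dir('~'))                          # usually True
print(is_file(pathlib.Path('~/.bashrc')))   # machine-dependent
print(exists('/no/such/path'))              # False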
olitheolix/qtmacs | qtmacs/extensions/qtmacstextedit_macros.py | YankPop.enableHook | def enableHook(self, msgObj):
"""
Enable yank-pop.
This method is connected to the 'yank-qtmacs_text_edit' hook
(triggered by the yank macro) to ensure that yank-pop only
gets activated afterwards.
"""
self.killListIdx = len(qte_global.kill_list) - 2
self.qteMain.qtesigKeyseqComplete.connect(self.disableHook) | python | def enableHook(self, msgObj):
"""
Enable yank-pop.
This method is connected to the 'yank-qtmacs_text_edit' hook
(triggered by the yank macro) to ensure that yank-pop only
gets activated afterwards.
"""
self.killListIdx = len(qte_global.kill_list) - 2
self.qteMain.qtesigKeyseqComplete.connect(self.disableHook) | [
"def",
"enableHook",
"(",
"self",
",",
"msgObj",
")",
":",
"self",
".",
"killListIdx",
"=",
"len",
"(",
"qte_global",
".",
"kill_list",
")",
"-",
"2",
"self",
".",
"qteMain",
".",
"qtesigKeyseqComplete",
".",
"connect",
"(",
"self",
".",
"disableHook",
")"
] | Enable yank-pop.
This method is connected to the 'yank-qtmacs_text_edit' hook
(triggered by the yank macro) to ensure that yank-pop only
gets activated afterwards. | [
"Enable",
"yank",
"-",
"pop",
"."
] | 36253b082b82590f183fe154b053eb3a1e741be2 | https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacstextedit_macros.py#L483-L492 | train |
olitheolix/qtmacs | qtmacs/extensions/qtmacstextedit_macros.py | BracketMatching.cursorPositionChangedEvent | def cursorPositionChangedEvent(self):
"""
Update the highlighting.
This is an overloaded version of the native Qt slot of
``QTextEdit``.
In this class, the purpose of this slot is to check if the
character to the right of the cursor needs highlighting,
assuming there is a second character to pair with it.
|Args|
* **None**
|Returns|
* **None**
|Raises|
* **None**
"""
# Determine the sender and cursor position.
qteWidget = self.sender()
tc = qteWidget.textCursor()
origin = tc.position()
# Remove all the highlighting. Since this will move the
# cursor, first disconnect this very routine to avoid an
# infinite recursion.
qteWidget.cursorPositionChanged.disconnect(
self.cursorPositionChangedEvent)
self.qteRemoveHighlighting(qteWidget)
qteWidget.cursorPositionChanged.connect(
self.cursorPositionChangedEvent)
# If we are beyond the last character (for instance because
# the cursor was explicitly moved to the end of the buffer)
# then there is no character to the right and will result in
# an error when trying to fetch it.
if origin >= len(qteWidget.toPlainText()):
return
else:
# It is save to retrieve the character to the right of the
# cursor.
char = qteWidget.toPlainText()[origin]
# Return if the character is not in the matching list.
if char not in self.charToHighlight:
return
# Disconnect the 'cursorPositionChanged' signal from this
# function because it will make changes to the cursor position
# and would therefore immediately trigger itself, resulting in
# an infinite recursion.
qteWidget.cursorPositionChanged.disconnect(
self.cursorPositionChangedEvent)
# If we got until here "char" must be one of the two
# characters to highlight.
if char == self.charToHighlight[0]:
start = origin
# Found the first character, so now look for the second
# one. If this second character does not exist the
# function returns '-1' which is safe because the
# ``self.highlightCharacter`` method can deal with this.
stop = qteWidget.toPlainText().find(self.charToHighlight[1],
start + 1)
else:
# Found the second character so the start index is indeed
# the stop index.
stop = origin
# Search for the preceeding first character.
start = qteWidget.toPlainText().rfind(self.charToHighlight[0],
0, stop)
# Highlight the characters.
oldCharFormats = self.highlightCharacters(qteWidget, (start, stop),
QtCore.Qt.blue, 100)
# Store the positions of the changed character in the
# macroData structure of this widget.
data = self.qteMacroData(qteWidget)
data.matchingPositions = (start, stop)
data.oldCharFormats = oldCharFormats
self.qteSaveMacroData(data, qteWidget)
# Reconnect the 'cursorPositionChanged' signal.
qteWidget.cursorPositionChanged.connect(
self.cursorPositionChangedEvent) | python | def cursorPositionChangedEvent(self):
"""
Update the highlighting.
This is an overloaded version of the native Qt slot of
``QTextEdit``.
In this class, the purpose of this slot is to check if the
character to the right of the cursor needs highlighting,
assuming there is a second character to pair with it.
|Args|
* **None**
|Returns|
* **None**
|Raises|
* **None**
"""
# Determine the sender and cursor position.
qteWidget = self.sender()
tc = qteWidget.textCursor()
origin = tc.position()
# Remove all the highlighting. Since this will move the
# cursor, first disconnect this very routine to avoid an
# infinite recursion.
qteWidget.cursorPositionChanged.disconnect(
self.cursorPositionChangedEvent)
self.qteRemoveHighlighting(qteWidget)
qteWidget.cursorPositionChanged.connect(
self.cursorPositionChangedEvent)
# If we are beyond the last character (for instance because
# the cursor was explicitly moved to the end of the buffer)
# then there is no character to the right and will result in
# an error when trying to fetch it.
if origin >= len(qteWidget.toPlainText()):
return
else:
# It is save to retrieve the character to the right of the
# cursor.
char = qteWidget.toPlainText()[origin]
# Return if the character is not in the matching list.
if char not in self.charToHighlight:
return
# Disconnect the 'cursorPositionChanged' signal from this
# function because it will make changes to the cursor position
# and would therefore immediately trigger itself, resulting in
# an infinite recursion.
qteWidget.cursorPositionChanged.disconnect(
self.cursorPositionChangedEvent)
# If we got until here "char" must be one of the two
# characters to highlight.
if char == self.charToHighlight[0]:
start = origin
# Found the first character, so now look for the second
# one. If this second character does not exist the
# function returns '-1' which is safe because the
# ``self.highlightCharacter`` method can deal with this.
stop = qteWidget.toPlainText().find(self.charToHighlight[1],
start + 1)
else:
# Found the second character so the start index is indeed
# the stop index.
stop = origin
# Search for the preceeding first character.
start = qteWidget.toPlainText().rfind(self.charToHighlight[0],
0, stop)
# Highlight the characters.
oldCharFormats = self.highlightCharacters(qteWidget, (start, stop),
QtCore.Qt.blue, 100)
# Store the positions of the changed character in the
# macroData structure of this widget.
data = self.qteMacroData(qteWidget)
data.matchingPositions = (start, stop)
data.oldCharFormats = oldCharFormats
self.qteSaveMacroData(data, qteWidget)
# Reconnect the 'cursorPositionChanged' signal.
qteWidget.cursorPositionChanged.connect(
self.cursorPositionChangedEvent) | [
"def",
"cursorPositionChangedEvent",
"(",
"self",
")",
":",
"# Determine the sender and cursor position.",
"qteWidget",
"=",
"self",
".",
"sender",
"(",
")",
"tc",
"=",
"qteWidget",
".",
"textCursor",
"(",
")",
"origin",
"=",
"tc",
".",
"position",
"(",
")",
"# Remove all the highlighting. Since this will move the",
"# cursor, first disconnect this very routine to avoid an",
"# infinite recursion.",
"qteWidget",
".",
"cursorPositionChanged",
".",
"disconnect",
"(",
"self",
".",
"cursorPositionChangedEvent",
")",
"self",
".",
"qteRemoveHighlighting",
"(",
"qteWidget",
")",
"qteWidget",
".",
"cursorPositionChanged",
".",
"connect",
"(",
"self",
".",
"cursorPositionChangedEvent",
")",
"# If we are beyond the last character (for instance because",
"# the cursor was explicitly moved to the end of the buffer)",
"# then there is no character to the right and will result in",
"# an error when trying to fetch it.",
"if",
"origin",
">=",
"len",
"(",
"qteWidget",
".",
"toPlainText",
"(",
")",
")",
":",
"return",
"else",
":",
"# It is save to retrieve the character to the right of the",
"# cursor.",
"char",
"=",
"qteWidget",
".",
"toPlainText",
"(",
")",
"[",
"origin",
"]",
"# Return if the character is not in the matching list.",
"if",
"char",
"not",
"in",
"self",
".",
"charToHighlight",
":",
"return",
"# Disconnect the 'cursorPositionChanged' signal from this",
"# function because it will make changes to the cursor position",
"# and would therefore immediately trigger itself, resulting in",
"# an infinite recursion.",
"qteWidget",
".",
"cursorPositionChanged",
".",
"disconnect",
"(",
"self",
".",
"cursorPositionChangedEvent",
")",
"# If we got until here \"char\" must be one of the two",
"# characters to highlight.",
"if",
"char",
"==",
"self",
".",
"charToHighlight",
"[",
"0",
"]",
":",
"start",
"=",
"origin",
"# Found the first character, so now look for the second",
"# one. If this second character does not exist the",
"# function returns '-1' which is safe because the",
"# ``self.highlightCharacter`` method can deal with this.",
"stop",
"=",
"qteWidget",
".",
"toPlainText",
"(",
")",
".",
"find",
"(",
"self",
".",
"charToHighlight",
"[",
"1",
"]",
",",
"start",
"+",
"1",
")",
"else",
":",
"# Found the second character so the start index is indeed",
"# the stop index.",
"stop",
"=",
"origin",
"# Search for the preceeding first character.",
"start",
"=",
"qteWidget",
".",
"toPlainText",
"(",
")",
".",
"rfind",
"(",
"self",
".",
"charToHighlight",
"[",
"0",
"]",
",",
"0",
",",
"stop",
")",
"# Highlight the characters.",
"oldCharFormats",
"=",
"self",
".",
"highlightCharacters",
"(",
"qteWidget",
",",
"(",
"start",
",",
"stop",
")",
",",
"QtCore",
".",
"Qt",
".",
"blue",
",",
"100",
")",
"# Store the positions of the changed character in the",
"# macroData structure of this widget.",
"data",
"=",
"self",
".",
"qteMacroData",
"(",
"qteWidget",
")",
"data",
".",
"matchingPositions",
"=",
"(",
"start",
",",
"stop",
")",
"data",
".",
"oldCharFormats",
"=",
"oldCharFormats",
"self",
".",
"qteSaveMacroData",
"(",
"data",
",",
"qteWidget",
")",
"# Reconnect the 'cursorPositionChanged' signal.",
"qteWidget",
".",
"cursorPositionChanged",
".",
"connect",
"(",
"self",
".",
"cursorPositionChangedEvent",
")"
] | Update the highlighting.
This is an overloaded version of the native Qt slot of
``QTextEdit``.
In this class, the purpose of this slot is to check if the
character to the right of the cursor needs highlighting,
assuming there is a second character to pair with it.
|Args|
* **None**
|Returns|
* **None**
|Raises|
* **None** | [
"Update",
"the",
"highlighting",
"."
] | 36253b082b82590f183fe154b053eb3a1e741be2 | https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacstextedit_macros.py#L899-L991 | train |
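The pairing logic above, isolated from Qt: given the text and the position of a character known to be one of the pair, locate its partner with find()/rfind() exactly as the macro does. The '(' / ')' pair is assumed here for illustration.
def matching_span(text, origin, pair=('(', ')')):
    char = text[origin]
    if char == pair[0]:
        start = origin
        stop = text.find(pair[1], start + 1)   # -1 when no partner exists
    else:
        stop = origin
        start = text.rfind(pair[0], 0, stop)   # -1 when no partner exists
    return start, stop

print(matching_span('call(arg)', 4))  # (4, 8)
print(matching_span('call(arg)', 8))  # (4, 8)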
olitheolix/qtmacs | qtmacs/extensions/qtmacstextedit_macros.py | BracketMatching.qteRemoveHighlighting | def qteRemoveHighlighting(self, widgetObj):
"""
Remove the highlighting from previously highlighted characters.
The method accesses instance variables to determine which
characters are currently highlighted and have to be converted
to non-highlighted ones.
|Args|
* ``widgetObj`` (**QWidget**): the ``QTextEdit`` to use.
|Returns|
* **None**
|Raises|
* **None**
"""
# Retrieve the widget specific macro data.
data = self.qteMacroData(widgetObj)
if not data:
return
# If the data structure is empty then no previously
# highlighted characters exist in this particular widget, so
# do nothing.
if not data.matchingPositions:
return
# Restore the original character formats, ie. undo the
# highlighting changes.
self.highlightCharacters(widgetObj, data.matchingPositions,
QtCore.Qt.black, 50, data.oldCharFormats)
# Clear the data structure to indicate that no further
# highlighted characters exist in this particular widget.
data.matchingPositions = None
data.oldCharFormats = None
self.qteSaveMacroData(data, widgetObj) | python | def qteRemoveHighlighting(self, widgetObj):
"""
Remove the highlighting from previously highlighted characters.
The method accesses instance variables to determine which
characters are currently highlighted and have to be converted
to non-highlighted ones.
|Args|
* ``widgetObj`` (**QWidget**): the ``QTextEdit`` to use.
|Returns|
* **None**
|Raises|
* **None**
"""
# Retrieve the widget specific macro data.
data = self.qteMacroData(widgetObj)
if not data:
return
# If the data structure is empty then no previously
# highlighted characters exist in this particular widget, so
# do nothing.
if not data.matchingPositions:
return
# Restore the original character formats, ie. undo the
# highlighting changes.
self.highlightCharacters(widgetObj, data.matchingPositions,
QtCore.Qt.black, 50, data.oldCharFormats)
# Clear the data structure to indicate that no further
# highlighted characters exist in this particular widget.
data.matchingPositions = None
data.oldCharFormats = None
self.qteSaveMacroData(data, widgetObj) | [
"def",
"qteRemoveHighlighting",
"(",
"self",
",",
"widgetObj",
")",
":",
"# Retrieve the widget specific macro data.",
"data",
"=",
"self",
".",
"qteMacroData",
"(",
"widgetObj",
")",
"if",
"not",
"data",
":",
"return",
"# If the data structure is empty then no previously",
"# highlighted characters exist in this particular widget, so",
"# do nothing.",
"if",
"not",
"data",
".",
"matchingPositions",
":",
"return",
"# Restore the original character formats, ie. undo the",
"# highlighting changes.",
"self",
".",
"highlightCharacters",
"(",
"widgetObj",
",",
"data",
".",
"matchingPositions",
",",
"QtCore",
".",
"Qt",
".",
"black",
",",
"50",
",",
"data",
".",
"oldCharFormats",
")",
"# Clear the data structure to indicate that no further",
"# highlighted characters exist in this particular widget.",
"data",
".",
"matchingPositions",
"=",
"None",
"data",
".",
"oldCharFormats",
"=",
"None",
"self",
".",
"qteSaveMacroData",
"(",
"data",
",",
"widgetObj",
")"
] | Remove the highlighting from previously highlighted characters.
The method accesses instance variables to determine which
characters are currently highlighted and have to be converted
to non-highlighted ones.
|Args|
* ``widgetObj`` (**QWidget**): the ``QTextEdit`` to use.
|Returns|
* **None**
|Raises|
* **None** | [
"Remove",
"the",
"highlighting",
"from",
"previously",
"highlighted",
"characters",
"."
] | 36253b082b82590f183fe154b053eb3a1e741be2 | https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacstextedit_macros.py#L993-L1034 | train |
olitheolix/qtmacs | qtmacs/extensions/qtmacstextedit_macros.py | BracketMatching.highlightCharacters | def highlightCharacters(self, widgetObj, setPos, colorCode,
fontWeight, charFormat=None):
"""
Change the character format of one or more characters.
If ``charFormat`` is **None** then only the color and font
weight of the characters are changed to ``colorCode`` and
``fontWeight``, respectively.
|Args|
* ``widgetObj`` (**QWidget**): the ``QTextEdit`` holding
the characters.
* ``setPos`` (**tuple** of **int**): character positions
inside the widget.
* ``colorCode`` (**QColor**): eg. ``QtCore.Qt.blue``
* ``fontWeight`` (**int**): font weight.
* ``charFormat`` (**QTextCharFormat**): the character
format to apply (see Qt documentation for details.)
|Returns|
* **list**: the original character format of the replaced
characters. This list has the same length as ``setPos``.
|Raises|
* **None**
"""
# Get the text cursor and character format.
textCursor = widgetObj.textCursor()
oldPos = textCursor.position()
retVal = []
# Change the character formats of all the characters placed at
# the positions ``setPos``.
for ii, pos in enumerate(setPos):
# Extract the position of the character to modify.
pos = setPos[ii]
# Ignore invalid positions. This can happen if the second
# character does not exist and the find-functions in the
# ``cursorPositionChangedEvent`` method returned
# '-1'. Also, store **None** as the format for this
# non-existent character.
if pos < 0:
retVal.append(None)
continue
# Move the text cursor to the specified character position
# and store its original character format (necessary to
# "undo" the highlighting once the cursor was moved away
# again).
textCursor.setPosition(pos)
retVal.append(textCursor.charFormat())
# Change the character format. Either use the supplied
# one, or use a generic one.
if charFormat:
# Use a specific character format (usually used to
# undo the changes a previous call to
# 'highlightCharacters' has made).
fmt = charFormat[ii]
else:
# Modify the color and weight of the current character format.
fmt = textCursor.charFormat()
# Get the brush and specify its foreground color and
# style. In order to see the characters it is
# necessary to explicitly specify a solidPattern style
# but I have no idea why.
myBrush = fmt.foreground()
myBrush.setColor(colorCode)
myBrush.setStyle(QtCore.Qt.SolidPattern)
fmt.setForeground(myBrush)
fmt.setFontWeight(fontWeight)
# Select the character and apply the selected format.
textCursor.movePosition(QtGui.QTextCursor.NextCharacter,
QtGui.QTextCursor.KeepAnchor)
textCursor.setCharFormat(fmt)
# Apply the textcursor to the current element.
textCursor.setPosition(oldPos)
widgetObj.setTextCursor(textCursor)
return retVal | python | def highlightCharacters(self, widgetObj, setPos, colorCode,
fontWeight, charFormat=None):
"""
Change the character format of one or more characters.
If ``charFormat`` is **None** then only the color and font
weight of the characters are changed to ``colorCode`` and
``fontWeight``, respectively.
|Args|
* ``widgetObj`` (**QWidget**): the ``QTextEdit`` holding
the characters.
* ``setPos`` (**tuple** of **int**): character positions
inside the widget.
* ``colorCode`` (**QColor**): eg. ``QtCore.Qt.blue``
* ``fontWeight`` (**int**): font weight.
* ``charFormat`` (**QTextCharFormat**): the character
format to apply (see Qt documentation for details.)
|Returns|
* **list**: the original character format of the replaced
characters. This list has the same length as ``setPos``.
|Raises|
* **None**
"""
# Get the text cursor and character format.
textCursor = widgetObj.textCursor()
oldPos = textCursor.position()
retVal = []
# Change the character formats of all the characters placed at
# the positions ``setPos``.
for ii, pos in enumerate(setPos):
# Extract the position of the character to modify.
pos = setPos[ii]
# Ignore invalid positions. This can happen if the second
# character does not exist and the find-functions in the
# ``cursorPositionChangedEvent`` method returned
# '-1'. Also, store **None** as the format for this
# non-existent character.
if pos < 0:
retVal.append(None)
continue
# Move the text cursor to the specified character position
# and store its original character format (necessary to
# "undo" the highlighting once the cursor was moved away
# again).
textCursor.setPosition(pos)
retVal.append(textCursor.charFormat())
# Change the character format. Either use the supplied
# one, or use a generic one.
if charFormat:
# Use a specific character format (usually used to
# undo the changes a previous call to
# 'highlightCharacters' has made).
fmt = charFormat[ii]
else:
# Modify the color and weight of the current character format.
fmt = textCursor.charFormat()
# Get the brush and specify its foreground color and
# style. In order to see the characters it is
# necessary to explicitly specify a solidPattern style
# but I have no idea why.
myBrush = fmt.foreground()
myBrush.setColor(colorCode)
myBrush.setStyle(QtCore.Qt.SolidPattern)
fmt.setForeground(myBrush)
fmt.setFontWeight(fontWeight)
# Select the character and apply the selected format.
textCursor.movePosition(QtGui.QTextCursor.NextCharacter,
QtGui.QTextCursor.KeepAnchor)
textCursor.setCharFormat(fmt)
# Apply the textcursor to the current element.
textCursor.setPosition(oldPos)
widgetObj.setTextCursor(textCursor)
return retVal | [
"def",
"highlightCharacters",
"(",
"self",
",",
"widgetObj",
",",
"setPos",
",",
"colorCode",
",",
"fontWeight",
",",
"charFormat",
"=",
"None",
")",
":",
"# Get the text cursor and character format.",
"textCursor",
"=",
"widgetObj",
".",
"textCursor",
"(",
")",
"oldPos",
"=",
"textCursor",
".",
"position",
"(",
")",
"retVal",
"=",
"[",
"]",
"# Change the character formats of all the characters placed at",
"# the positions ``setPos``.",
"for",
"ii",
",",
"pos",
"in",
"enumerate",
"(",
"setPos",
")",
":",
"# Extract the position of the character to modify.",
"pos",
"=",
"setPos",
"[",
"ii",
"]",
"# Ignore invalid positions. This can happen if the second",
"# character does not exist and the find-functions in the",
"# ``cursorPositionChangedEvent`` method returned",
"# '-1'. Also, store **None** as the format for this",
"# non-existent character.",
"if",
"pos",
"<",
"0",
":",
"retVal",
".",
"append",
"(",
"None",
")",
"continue",
"# Move the text cursor to the specified character position",
"# and store its original character format (necessary to",
"# \"undo\" the highlighting once the cursor was moved away",
"# again).",
"textCursor",
".",
"setPosition",
"(",
"pos",
")",
"retVal",
".",
"append",
"(",
"textCursor",
".",
"charFormat",
"(",
")",
")",
"# Change the character format. Either use the supplied",
"# one, or use a generic one.",
"if",
"charFormat",
":",
"# Use a specific character format (usually used to",
"# undo the changes a previous call to",
"# 'highlightCharacters' has made).",
"fmt",
"=",
"charFormat",
"[",
"ii",
"]",
"else",
":",
"# Modify the color and weight of the current character format.",
"fmt",
"=",
"textCursor",
".",
"charFormat",
"(",
")",
"# Get the brush and specify its foreground color and",
"# style. In order to see the characters it is",
"# necessary to explicitly specify a solidPattern style",
"# but I have no idea why.",
"myBrush",
"=",
"fmt",
".",
"foreground",
"(",
")",
"myBrush",
".",
"setColor",
"(",
"colorCode",
")",
"myBrush",
".",
"setStyle",
"(",
"QtCore",
".",
"Qt",
".",
"SolidPattern",
")",
"fmt",
".",
"setForeground",
"(",
"myBrush",
")",
"fmt",
".",
"setFontWeight",
"(",
"fontWeight",
")",
"# Select the character and apply the selected format.",
"textCursor",
".",
"movePosition",
"(",
"QtGui",
".",
"QTextCursor",
".",
"NextCharacter",
",",
"QtGui",
".",
"QTextCursor",
".",
"KeepAnchor",
")",
"textCursor",
".",
"setCharFormat",
"(",
"fmt",
")",
"# Apply the textcursor to the current element.",
"textCursor",
".",
"setPosition",
"(",
"oldPos",
")",
"widgetObj",
".",
"setTextCursor",
"(",
"textCursor",
")",
"return",
"retVal"
] | Change the character format of one or more characters.
If ``charFormat`` is **None** then only the color and font
weight of the characters are changed to ``colorCode`` and
``fontWeight``, respectively.
|Args|
* ``widgetObj`` (**QWidget**): the ``QTextEdit`` holding
the characters.
* ``setPos`` (**tuple** of **int**): character positions
inside the widget.
* ``colorCode`` (**QColor**): eg. ``QtCore.Qt.blue``
* ``fontWeight`` (**int**): font weight.
* ``charFormat`` (**QTextCharFormat**): the character
format to apply (see Qt documentation for details.)
|Returns|
* **list**: the original character format of the replaced
characters. This list has the same length as ``setPos``.
|Raises|
* **None** | [
"Change",
"the",
"character",
"format",
"of",
"one",
"or",
"more",
"characters",
"."
] | 36253b082b82590f183fe154b053eb3a1e741be2 | https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacstextedit_macros.py#L1036-L1122 | train |
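A standalone sketch of the same QTextCursor-based formatting outside the macro class, assuming PyQt4 as used by qtmacs; positions that do not exist (value -1) are skipped just as above.
import sys
from PyQt4 import QtCore, QtGui

def highlight_pair(text_edit, positions, color=QtCore.Qt.blue, weight=100):
    cursor = text_edit.textCursor()
    saved = cursor.position()
    for pos in positions:
        if pos < 0:
            continue  # partner character does not exist
        cursor.setPosition(pos)
        fmt = cursor.charFormat()
        brush = fmt.foreground()
        brush.setColor(color)
        brush.setStyle(QtCore.Qt.SolidPattern)
        fmt.setForeground(brush)
        fmt.setFontWeight(weight)
        cursor.movePosition(QtGui.QTextCursor.NextCharacter,
                            QtGui.QTextCursor.KeepAnchor)
        cursor.setCharFormat(fmt)
    cursor.setPosition(saved)
    text_edit.setTextCursor(cursor)

app = QtGui.QApplication(sys.argv)
edit = QtGui.QTextEdit()
edit.setPlainText('call(arg)')
highlight_pair(edit, (4, 8))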
vecnet/vecnet.openmalaria | vecnet/openmalaria/experiment.py | ExperimentSpecification.scenarios | def scenarios(self, generate_seed=False):
"""
Generator function. Spits out scenarios for this experiment
"""
seed = prime_numbers(1000)
sweeps_all = self.experiment["sweeps"].keys()
if "combinations" in self.experiment:
if isinstance(self.experiment["combinations"], list):
# For backward compatibility with experiments1-4s
combinations_in_experiment = {" ": self.experiment["combinations"]}
# if self.experiment["combinations"] == []:
# # Special notation for fully-factorial experiments
# combinations_in_experiment = {" ":[[],[]]}
else:
# Combinations must be a dictionary in this particular case
combinations_in_experiment = self.experiment["combinations"]
else:
# Support no combinations element:
combinations_in_experiment = dict() # empty dict
# 1) calculate combinations_sweeps (depends on ALL combinations_ items)
# Get the list of fully factorial sweeps
all_combinations_sweeps = []
all_combinations = []
for key, combinations_ in combinations_in_experiment.items():
# generate all permutations of all combinations
if not combinations_:
# Fully factorial experiment, shortcut for "combinations":[[],[]]
combinations_sweeps = []
combinations = [[]]
else:
# First item in combinations list is a list of sweeps
combinations_sweeps = combinations_[0]
# then - all combinations
combinations = combinations_[1:]
for item in combinations_sweeps:
# TODO: error if sweep is already in this list?
all_combinations_sweeps.append(item)
all_combinations.append((combinations_sweeps, combinations))
sweeps_fully_factorial = list(set(sweeps_all) - set(all_combinations_sweeps))
# print "fully fact: %s" % sweeps_fully_factorial
# 2) produce a list of all combinations of fully factorial sweeps
# First sets of "combinations": the fully-factorial sweeps
for sweep in sweeps_fully_factorial:
all_combinations.append(([sweep], [[x] for x in self.experiment["sweeps"][sweep].keys()]))
# 3) take the dot (inner) product of the list above (fully factorial arm combinations)
# with the first combinations list, that with the second combination list, ...
# step-by-step reduce the list of combinations to a single item
# (dot-product of each list of combinations)
# this could use a lot of memory...
red_iter = 0
# print "all combinations:", red_iter, all_combinations
while len(all_combinations) > 1:
comb1 = all_combinations[0]
comb2 = all_combinations[1]
new_sweeps = comb1[0] + comb2[0]
new_combinations = [x+y for x in comb1[1] for y in comb2[1]]
all_combinations = [(new_sweeps, new_combinations)] + all_combinations[2:]
red_iter += 1
# print "all combinations:", red_iter, all_combinations
# 4) write out the document for each in (3), which should specify one arm for each
# sweep with no repetition of combinations
sweep_names = all_combinations[0][0]
combinations = all_combinations[0][1]
for combination in combinations:
scenario = Scenario(self._apply_combination(self.experiment["base"], sweep_names, combination))
scenario.parameters = dict(zip(sweep_names, combination))
if generate_seed:
# Replace seed if requested by the user
if "@seed@" in scenario.xml:
scenario.xml = scenario.xml.replace("@seed@", str(next(seed)))
else:
raise(RuntimeError("@seed@ placeholder is not found"))
yield scenario | python | def scenarios(self, generate_seed=False):
"""
Generator function. Spits out scenarios for this experiment
"""
seed = prime_numbers(1000)
sweeps_all = self.experiment["sweeps"].keys()
if "combinations" in self.experiment:
if isinstance(self.experiment["combinations"], list):
# For backward compatibility with experiments1-4s
combinations_in_experiment = {" ": self.experiment["combinations"]}
# if self.experiment["combinations"] == []:
# # Special notation for fully-factorial experiments
# combinations_in_experiment = {" ":[[],[]]}
else:
# Combinations must be a dictionary in this particular case
combinations_in_experiment = self.experiment["combinations"]
else:
# Support no combinations element:
combinations_in_experiment = dict() # empty dict
# 1) calculate combinations_sweeps (depends on ALL combinations_ items)
# Get the list of fully factorial sweeps
all_combinations_sweeps = []
all_combinations = []
for key, combinations_ in combinations_in_experiment.items():
# generate all permutations of all combinations
if not combinations_:
# Fully factorial experiment, shortcut for "combinations":[[],[]]
combinations_sweeps = []
combinations = [[]]
else:
# First item in combinations list is a list of sweeps
combinations_sweeps = combinations_[0]
# then - all combinations
combinations = combinations_[1:]
for item in combinations_sweeps:
# TODO: error if sweep is already in this list?
all_combinations_sweeps.append(item)
all_combinations.append((combinations_sweeps, combinations))
sweeps_fully_factorial = list(set(sweeps_all) - set(all_combinations_sweeps))
# print "fully fact: %s" % sweeps_fully_factorial
# 2) produce a list of all combinations of fully factorial sweeps
# First sets of "combinations": the fully-factorial sweeps
for sweep in sweeps_fully_factorial:
all_combinations.append(([sweep], [[x] for x in self.experiment["sweeps"][sweep].keys()]))
# 3) take the dot (inner) product of the list above (fully factorial arm combinations)
# with the first combinations list, that with the second combination list, ...
# step-by-step reduce the list of combinations to a single item
# (dot-product of each list of combinations)
# this could use a lot of memory...
red_iter = 0
# print "all combinations:", red_iter, all_combinations
while len(all_combinations) > 1:
comb1 = all_combinations[0]
comb2 = all_combinations[1]
new_sweeps = comb1[0] + comb2[0]
new_combinations = [x+y for x in comb1[1] for y in comb2[1]]
all_combinations = [(new_sweeps, new_combinations)] + all_combinations[2:]
red_iter += 1
# print "all combinations:", red_iter, all_combinations
# 4) write out the document for each in (3), which should specify one arm for each
# sweep with no repetition of combinations
sweep_names = all_combinations[0][0]
combinations = all_combinations[0][1]
for combination in combinations:
scenario = Scenario(self._apply_combination(self.experiment["base"], sweep_names, combination))
scenario.parameters = dict(zip(sweep_names, combination))
if generate_seed:
# Replace seed if requested by the user
if "@seed@" in scenario.xml:
scenario.xml = scenario.xml.replace("@seed@", str(next(seed)))
else:
raise(RuntimeError("@seed@ placeholder is not found"))
yield scenario | [
"def",
"scenarios",
"(",
"self",
",",
"generate_seed",
"=",
"False",
")",
":",
"seed",
"=",
"prime_numbers",
"(",
"1000",
")",
"sweeps_all",
"=",
"self",
".",
"experiment",
"[",
"\"sweeps\"",
"]",
".",
"keys",
"(",
")",
"if",
"\"combinations\"",
"in",
"self",
".",
"experiment",
":",
"if",
"isinstance",
"(",
"self",
".",
"experiment",
"[",
"\"combinations\"",
"]",
",",
"list",
")",
":",
"# For backward compatibility with experiments1-4s",
"combinations_in_experiment",
"=",
"{",
"\" \"",
":",
"self",
".",
"experiment",
"[",
"\"combinations\"",
"]",
"}",
"# if self.experiment[\"combinations\"] == []:",
"# # Special notation for fully-factorial experiments",
"# combinations_in_experiment = {\" \":[[],[]]}",
"else",
":",
"# Combinations must be a dictionary in this particular case",
"combinations_in_experiment",
"=",
"self",
".",
"experiment",
"[",
"\"combinations\"",
"]",
"else",
":",
"# Support no combinations element:",
"combinations_in_experiment",
"=",
"dict",
"(",
")",
"# empty dict ",
"# 1) calculate combinations_sweeps (depends on ALL combinations_ items)",
"# Get the list of fully factorial sweeps",
"all_combinations_sweeps",
"=",
"[",
"]",
"all_combinations",
"=",
"[",
"]",
"for",
"key",
",",
"combinations_",
"in",
"combinations_in_experiment",
".",
"items",
"(",
")",
":",
"# generate all permutations of all combinations",
"if",
"not",
"combinations_",
":",
"# Fully factorial experiment, shortcut for \"combinations\":[[],[]]",
"combinations_sweeps",
"=",
"[",
"]",
"combinations",
"=",
"[",
"[",
"]",
"]",
"else",
":",
"# First item in combinations list is a list of sweeps",
"combinations_sweeps",
"=",
"combinations_",
"[",
"0",
"]",
"# then - all combinations",
"combinations",
"=",
"combinations_",
"[",
"1",
":",
"]",
"for",
"item",
"in",
"combinations_sweeps",
":",
"# TODO: error if sweep is already in this list?",
"all_combinations_sweeps",
".",
"append",
"(",
"item",
")",
"all_combinations",
".",
"append",
"(",
"(",
"combinations_sweeps",
",",
"combinations",
")",
")",
"sweeps_fully_factorial",
"=",
"list",
"(",
"set",
"(",
"sweeps_all",
")",
"-",
"set",
"(",
"all_combinations_sweeps",
")",
")",
"# print \"fully fact: %s\" % sweeps_fully_factorial",
"# 2) produce a list of all combinations of fully factorial sweeps",
"# First sets of \"combinations\": the fully-factorial sweeps",
"for",
"sweep",
"in",
"sweeps_fully_factorial",
":",
"all_combinations",
".",
"append",
"(",
"(",
"[",
"sweep",
"]",
",",
"[",
"[",
"x",
"]",
"for",
"x",
"in",
"self",
".",
"experiment",
"[",
"\"sweeps\"",
"]",
"[",
"sweep",
"]",
".",
"keys",
"(",
")",
"]",
")",
")",
"# 3) take the dot (inner) product of the list above (fully factorial arm combinations)",
"# with the first combinations list, that with the second combination list, ...",
"# step-by-step reduce the list of combinations to a single item",
"# (dot-product of each list of combinations)",
"# this could use a lot of memory...",
"red_iter",
"=",
"0",
"# print \"all combinations:\", red_iter, all_combinations",
"while",
"len",
"(",
"all_combinations",
")",
">",
"1",
":",
"comb1",
"=",
"all_combinations",
"[",
"0",
"]",
"comb2",
"=",
"all_combinations",
"[",
"1",
"]",
"new_sweeps",
"=",
"comb1",
"[",
"0",
"]",
"+",
"comb2",
"[",
"0",
"]",
"new_combinations",
"=",
"[",
"x",
"+",
"y",
"for",
"x",
"in",
"comb1",
"[",
"1",
"]",
"for",
"y",
"in",
"comb2",
"[",
"1",
"]",
"]",
"all_combinations",
"=",
"[",
"(",
"new_sweeps",
",",
"new_combinations",
")",
"]",
"+",
"all_combinations",
"[",
"2",
":",
"]",
"red_iter",
"+=",
"1",
"# print \"all combinations:\", red_iter, all_combinations",
"# 4) write out the document for each in (3), which should specify one arm for each",
"# sweep with no repetition of combinations",
"sweep_names",
"=",
"all_combinations",
"[",
"0",
"]",
"[",
"0",
"]",
"combinations",
"=",
"all_combinations",
"[",
"0",
"]",
"[",
"1",
"]",
"for",
"combination",
"in",
"combinations",
":",
"scenario",
"=",
"Scenario",
"(",
"self",
".",
"_apply_combination",
"(",
"self",
".",
"experiment",
"[",
"\"base\"",
"]",
",",
"sweep_names",
",",
"combination",
")",
")",
"scenario",
".",
"parameters",
"=",
"dict",
"(",
"zip",
"(",
"sweep_names",
",",
"combination",
")",
")",
"if",
"generate_seed",
":",
"# Replace seed if requested by the user",
"if",
"\"@seed@\"",
"in",
"scenario",
".",
"xml",
":",
"scenario",
".",
"xml",
"=",
"scenario",
".",
"xml",
".",
"replace",
"(",
"\"@seed@\"",
",",
"str",
"(",
"next",
"(",
"seed",
")",
")",
")",
"else",
":",
"raise",
"(",
"RuntimeError",
"(",
"\"@seed@ placeholder is not found\"",
")",
")",
"yield",
"scenario"
] | Generator function. Spits out scenarios for this experiment | [
"Generator",
"function",
".",
"Spits",
"out",
"scenarios",
"for",
"this",
"experiment"
] | 795bc9d1b81a6c664f14879edda7a7c41188e95a | https://github.com/vecnet/vecnet.openmalaria/blob/795bc9d1b81a6c664f14879edda7a7c41188e95a/vecnet/openmalaria/experiment.py#L99-L177 | train |
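
A short, hedged usage sketch for the generator above. How an Experiment instance is constructed is not shown in this excerpt, so the function below simply accepts an Experiment-like object; scenario.parameters and scenario.xml are the attributes the generator itself sets.

def write_scenarios(experiment, prefix='scenario'):
    """Materialise every scenario of an Experiment-like object to an XML file (illustrative sketch)."""
    for number, scenario in enumerate(experiment.scenarios(generate_seed=True)):
        # scenario.parameters maps each sweep name to the arm chosen for this combination
        print(number, scenario.parameters)
        with open('%s_%03d.xml' % (prefix, number), 'w') as fh:
            fh.write(scenario.xml)   # the scenario document, with @seed@ already substituted
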
a1ezzz/wasp-general | wasp_general/os/linux/lvm.py | WLVMInfoCommand.lvm_info | def lvm_info(self, name=None):
""" Call a program
:param name: if specified, information is returned only for that LVM entity; otherwise
entries for all available LVM entities are returned
:return: tuple of str (fields)
"""
cmd = [] if self.sudo() is False else ['sudo']
cmd.extend([self.command(), '-c'])
if name is not None:
cmd.append(name)
output = subprocess.check_output(cmd, timeout=self.cmd_timeout())
output = output.decode()
result = []
fields_count = self.fields_count()
for line in output.split('\n'):
line = line.strip()
fields = line.split(':')
if len(fields) == fields_count:
result.append(fields)
if name is not None and len(result) != 1:
raise RuntimeError('Unable to parse command result')
return tuple(result) | python | def lvm_info(self, name=None):
""" Call a program
:param name: if specified, information is returned only for that LVM entity; otherwise
entries for all available LVM entities are returned
:return: tuple of str (fields)
"""
cmd = [] if self.sudo() is False else ['sudo']
cmd.extend([self.command(), '-c'])
if name is not None:
cmd.append(name)
output = subprocess.check_output(cmd, timeout=self.cmd_timeout())
output = output.decode()
result = []
fields_count = self.fields_count()
for line in output.split('\n'):
line = line.strip()
fields = line.split(':')
if len(fields) == fields_count:
result.append(fields)
if name is not None and len(result) != 1:
raise RuntimeError('Unable to parse command result')
return tuple(result) | [
"def",
"lvm_info",
"(",
"self",
",",
"name",
"=",
"None",
")",
":",
"cmd",
"=",
"[",
"]",
"if",
"self",
".",
"sudo",
"(",
")",
"is",
"False",
"else",
"[",
"'sudo'",
"]",
"cmd",
".",
"extend",
"(",
"[",
"self",
".",
"command",
"(",
")",
",",
"'-c'",
"]",
")",
"if",
"name",
"is",
"not",
"None",
":",
"cmd",
".",
"append",
"(",
"name",
")",
"output",
"=",
"subprocess",
".",
"check_output",
"(",
"cmd",
",",
"timeout",
"=",
"self",
".",
"cmd_timeout",
"(",
")",
")",
"output",
"=",
"output",
".",
"decode",
"(",
")",
"result",
"=",
"[",
"]",
"fields_count",
"=",
"self",
".",
"fields_count",
"(",
")",
"for",
"line",
"in",
"output",
".",
"split",
"(",
"'\\n'",
")",
":",
"line",
"=",
"line",
".",
"strip",
"(",
")",
"fields",
"=",
"line",
".",
"split",
"(",
"':'",
")",
"if",
"len",
"(",
"fields",
")",
"==",
"fields_count",
":",
"result",
".",
"append",
"(",
"fields",
")",
"if",
"name",
"is",
"not",
"None",
"and",
"len",
"(",
"result",
")",
"!=",
"1",
":",
"raise",
"RuntimeError",
"(",
"'Unable to parse command result'",
")",
"return",
"tuple",
"(",
"result",
")"
] | Call a program
:param name: if specified, information is returned only for that LVM entity; otherwise
entries for all available LVM entities are returned
:return: tuple of str (fields) | [
"Call",
"a",
"program"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/os/linux/lvm.py#L95-L119 | train |
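
The '-c' flag used above makes the LVM reporting tools print one colon-separated record per line; a small standalone sketch of that parsing step (independent of the class above) looks like this. The sample output and the expected field count are made up for the illustration.

# Standalone illustration of parsing "-c" (colon-separated) LVM report output.
sample_output = """
  /dev/vg0/root:vg0:3:1:-1:1:16777216:2048:-1:0:-1:253:0
  garbage line that does not split into the expected number of fields
""".strip()

records = []
for line in sample_output.split('\n'):
    fields = line.strip().split(':')
    if len(fields) == 13:           # keep only well-formed records (13 fields assumed here)
        records.append(fields)

print(records[0][0])                # '/dev/vg0/root'
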
a1ezzz/wasp-general | wasp_general/os/linux/lvm.py | WLogicalVolume.uuid | def uuid(self):
""" Return UUID of logical volume
:return: str
"""
uuid_file = '/sys/block/%s/dm/uuid' % os.path.basename(os.path.realpath(self.volume_path()))
lv_uuid = open(uuid_file).read().strip()
if lv_uuid.startswith('LVM-') is True:
return lv_uuid[4:]
return lv_uuid | python | def uuid(self):
""" Return UUID of logical volume
:return: str
"""
uuid_file = '/sys/block/%s/dm/uuid' % os.path.basename(os.path.realpath(self.volume_path()))
lv_uuid = open(uuid_file).read().strip()
if lv_uuid.startswith('LVM-') is True:
return lv_uuid[4:]
return lv_uuid | [
"def",
"uuid",
"(",
"self",
")",
":",
"uuid_file",
"=",
"'/sys/block/%s/dm/uuid'",
"%",
"os",
".",
"path",
".",
"basename",
"(",
"os",
".",
"path",
".",
"realpath",
"(",
"self",
".",
"volume_path",
"(",
")",
")",
")",
"lv_uuid",
"=",
"open",
"(",
"uuid_file",
")",
".",
"read",
"(",
")",
".",
"strip",
"(",
")",
"if",
"lv_uuid",
".",
"startswith",
"(",
"'LVM-'",
")",
"is",
"True",
":",
"return",
"lv_uuid",
"[",
"4",
":",
"]",
"return",
"lv_uuid"
] | Return UUID of logical volume
:return: str | [
"Return",
"UUID",
"of",
"logical",
"volume"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/os/linux/lvm.py#L457-L466 | train |
a1ezzz/wasp-general | wasp_general/os/linux/lvm.py | WLogicalVolume.create_snapshot | def create_snapshot(self, snapshot_size, snapshot_suffix):
""" Create snapshot for this logical volume.
:param snapshot_size: size of newly created snapshot volume. This size is a fraction of the source \
logical volume space (of this logical volume)
:param snapshot_suffix: suffix for logical volume name (base part is the same as the original volume \
name)
:return: WLogicalVolume
"""
size_extent = math.ceil(self.extents_count() * snapshot_size)
size_kb = self.volume_group().extent_size() * size_extent
snapshot_name = self.volume_name() + snapshot_suffix
lvcreate_cmd = ['sudo'] if self.lvm_command().sudo() is True else []
lvcreate_cmd.extend([
'lvcreate', '-L', '%iK' % size_kb, '-s', '-n', snapshot_name, '-p', 'r', self.volume_path()
])
subprocess.check_output(lvcreate_cmd, timeout=self.__class__.__lvm_snapshot_create_cmd_timeout__)
return WLogicalVolume(self.volume_path() + snapshot_suffix, sudo=self.lvm_command().sudo()) | python | def create_snapshot(self, snapshot_size, snapshot_suffix):
""" Create snapshot for this logical volume.
:param snapshot_size: size of newly created snapshot volume. This size is a fraction of the source \
logical volume space (of this logical volume)
:param snapshot_suffix: suffix for logical volume name (base part is the same as the original volume \
name)
:return: WLogicalVolume
"""
size_extent = math.ceil(self.extents_count() * snapshot_size)
size_kb = self.volume_group().extent_size() * size_extent
snapshot_name = self.volume_name() + snapshot_suffix
lvcreate_cmd = ['sudo'] if self.lvm_command().sudo() is True else []
lvcreate_cmd.extend([
'lvcreate', '-L', '%iK' % size_kb, '-s', '-n', snapshot_name, '-p', 'r', self.volume_path()
])
subprocess.check_output(lvcreate_cmd, timeout=self.__class__.__lvm_snapshot_create_cmd_timeout__)
return WLogicalVolume(self.volume_path() + snapshot_suffix, sudo=self.lvm_command().sudo()) | [
"def",
"create_snapshot",
"(",
"self",
",",
"snapshot_size",
",",
"snapshot_suffix",
")",
":",
"size_extent",
"=",
"math",
".",
"ceil",
"(",
"self",
".",
"extents_count",
"(",
")",
"*",
"snapshot_size",
")",
"size_kb",
"=",
"self",
".",
"volume_group",
"(",
")",
".",
"extent_size",
"(",
")",
"*",
"size_extent",
"snapshot_name",
"=",
"self",
".",
"volume_name",
"(",
")",
"+",
"snapshot_suffix",
"lvcreate_cmd",
"=",
"[",
"'sudo'",
"]",
"if",
"self",
".",
"lvm_command",
"(",
")",
".",
"sudo",
"(",
")",
"is",
"True",
"else",
"[",
"]",
"lvcreate_cmd",
".",
"extend",
"(",
"[",
"'lvcreate'",
",",
"'-L'",
",",
"'%iK'",
"%",
"size_kb",
",",
"'-s'",
",",
"'-n'",
",",
"snapshot_name",
",",
"'-p'",
",",
"'r'",
",",
"self",
".",
"volume_path",
"(",
")",
"]",
")",
"subprocess",
".",
"check_output",
"(",
"lvcreate_cmd",
",",
"timeout",
"=",
"self",
".",
"__class__",
".",
"__lvm_snapshot_create_cmd_timeout__",
")",
"return",
"WLogicalVolume",
"(",
"self",
".",
"volume_path",
"(",
")",
"+",
"snapshot_suffix",
",",
"sudo",
"=",
"self",
".",
"lvm_command",
"(",
")",
".",
"sudo",
"(",
")",
")"
] | Create snapshot for this logical volume.
:param snapshot_size: size of newly created snapshot volume. This size is a fraction of the source \
logical volume space (of this logical volume)
:param snapshot_suffix: suffix for logical volume name (base part is the same as the original volume \
name)
:return: WLogicalVolume | [
"Create",
"snapshot",
"for",
"this",
"logical",
"volume",
"."
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/os/linux/lvm.py#L470-L490 | train |
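
A hedged usage sketch for the snapshot API above; the device path and the import path (derived from the repository URL) are assumptions, and the code only makes sense on a host with LVM and suitable sudo rights.

from wasp_general.os.linux.lvm import WLogicalVolume   # import path assumed from the repo layout

lv = WLogicalVolume('/dev/mapper/vg0-data', sudo=True)   # hypothetical logical volume
snapshot = lv.create_snapshot(0.1, '-backup')            # read-only snapshot, ~10% of the origin extents
try:
    print(snapshot.volume_path())                        # e.g. '/dev/mapper/vg0-data-backup'
finally:
    snapshot.remove_volume()                             # snapshots should not be left behind
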
a1ezzz/wasp-general | wasp_general/os/linux/lvm.py | WLogicalVolume.remove_volume | def remove_volume(self):
""" Remove this volume
:return: None
"""
lvremove_cmd = ['sudo'] if self.lvm_command().sudo() is True else []
lvremove_cmd.extend(['lvremove', '-f', self.volume_path()])
subprocess.check_output(lvremove_cmd, timeout=self.__class__.__lvm_snapshot_remove_cmd_timeout__) | python | def remove_volume(self):
""" Remove this volume
:return: None
"""
lvremove_cmd = ['sudo'] if self.lvm_command().sudo() is True else []
lvremove_cmd.extend(['lvremove', '-f', self.volume_path()])
subprocess.check_output(lvremove_cmd, timeout=self.__class__.__lvm_snapshot_remove_cmd_timeout__) | [
"def",
"remove_volume",
"(",
"self",
")",
":",
"lvremove_cmd",
"=",
"[",
"'sudo'",
"]",
"if",
"self",
".",
"lvm_command",
"(",
")",
".",
"sudo",
"(",
")",
"is",
"True",
"else",
"[",
"]",
"lvremove_cmd",
".",
"extend",
"(",
"[",
"'lvremove'",
",",
"'-f'",
",",
"self",
".",
"volume_path",
"(",
")",
"]",
")",
"subprocess",
".",
"check_output",
"(",
"lvremove_cmd",
",",
"timeout",
"=",
"self",
".",
"__class__",
".",
"__lvm_snapshot_remove_cmd_timeout__",
")"
] | Remove this volume
:return: None | [
"Remove",
"this",
"volume"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/os/linux/lvm.py#L492-L499 | train |
a1ezzz/wasp-general | wasp_general/os/linux/lvm.py | WLogicalVolume.logical_volume | def logical_volume(cls, file_path, sudo=False):
""" Return logical volume that stores the given path
:param file_path: target path to search
:param sudo: same as 'sudo' in :meth:`.WLogicalVolume.__init__`
:return: WLogicalVolume or None (if file path is outside current mount points)
"""
mp = WMountPoint.mount_point(file_path)
if mp is not None:
name_file = '/sys/block/%s/dm/name' % mp.device_name()
if os.path.exists(name_file):
lv_path = '/dev/mapper/%s' % open(name_file).read().strip()
return WLogicalVolume(lv_path, sudo=sudo) | python | def logical_volume(cls, file_path, sudo=False):
""" Return logical volume that stores the given path
:param file_path: target path to search
:param sudo: same as 'sudo' in :meth:`.WLogicalVolume.__init__`
:return: WLogicalVolume or None (if file path is outside current mount points)
"""
mp = WMountPoint.mount_point(file_path)
if mp is not None:
name_file = '/sys/block/%s/dm/name' % mp.device_name()
if os.path.exists(name_file):
lv_path = '/dev/mapper/%s' % open(name_file).read().strip()
return WLogicalVolume(lv_path, sudo=sudo) | [
"def",
"logical_volume",
"(",
"cls",
",",
"file_path",
",",
"sudo",
"=",
"False",
")",
":",
"mp",
"=",
"WMountPoint",
".",
"mount_point",
"(",
"file_path",
")",
"if",
"mp",
"is",
"not",
"None",
":",
"name_file",
"=",
"'/sys/block/%s/dm/name'",
"%",
"mp",
".",
"device_name",
"(",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"name_file",
")",
":",
"lv_path",
"=",
"'/dev/mapper/%s'",
"%",
"open",
"(",
"name_file",
")",
".",
"read",
"(",
")",
".",
"strip",
"(",
")",
"return",
"WLogicalVolume",
"(",
"lv_path",
",",
"sudo",
"=",
"sudo",
")"
] | Return logical volume that stores the given path
:param file_path: target path to search
:param sudo: same as 'sudo' in :meth:`.WLogicalVolume.__init__`
:return: WLogicalVolume or None (if file path is outside current mount points) | [
"Return",
"logical",
"volume",
"that",
"stores",
"the",
"given",
"path"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/os/linux/lvm.py#L524-L536 | train |
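
A hedged sketch tying the class method above to the other WLogicalVolume helpers shown earlier; the file path is a placeholder and the import path is assumed from the repository layout.

from wasp_general.os.linux.lvm import WLogicalVolume   # import path assumed

lv = WLogicalVolume.logical_volume('/var/lib/backup/archive.tar', sudo=True)  # placeholder path
if lv is None:
    print('the path is not backed by an LVM logical volume')
else:
    print(lv.volume_path(), lv.uuid())
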
a1ezzz/wasp-general | wasp_general/io.py | WAESWriter.write | def write(self, b):
""" Encrypt and write data
:param b: data to encrypt and write
:return: None
"""
self.__buffer += bytes(b)
bytes_written = 0
while len(self.__buffer) >= self.__cipher_block_size:
io.BufferedWriter.write(self, self.__cipher.encrypt_block(self.__buffer[:self.__cipher_block_size]))
self.__buffer = self.__buffer[self.__cipher_block_size:]
bytes_written += self.__cipher_block_size
return len(b) | python | def write(self, b):
""" Encrypt and write data
:param b: data to encrypt and write
:return: None
"""
self.__buffer += bytes(b)
bytes_written = 0
while len(self.__buffer) >= self.__cipher_block_size:
io.BufferedWriter.write(self, self.__cipher.encrypt_block(self.__buffer[:self.__cipher_block_size]))
self.__buffer = self.__buffer[self.__cipher_block_size:]
bytes_written += self.__cipher_block_size
return len(b) | [
"def",
"write",
"(",
"self",
",",
"b",
")",
":",
"self",
".",
"__buffer",
"+=",
"bytes",
"(",
"b",
")",
"bytes_written",
"=",
"0",
"while",
"len",
"(",
"self",
".",
"__buffer",
")",
">=",
"self",
".",
"__cipher_block_size",
":",
"io",
".",
"BufferedWriter",
".",
"write",
"(",
"self",
",",
"self",
".",
"__cipher",
".",
"encrypt_block",
"(",
"self",
".",
"__buffer",
"[",
":",
"self",
".",
"__cipher_block_size",
"]",
")",
")",
"self",
".",
"__buffer",
"=",
"self",
".",
"__buffer",
"[",
"self",
".",
"__cipher_block_size",
":",
"]",
"bytes_written",
"+=",
"self",
".",
"__cipher_block_size",
"return",
"len",
"(",
"b",
")"
] | Encrypt and write data
:param b: data to encrypt and write
:return: None | [
"Encrypt",
"and",
"write",
"data"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/io.py#L205-L218 | train |
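
The writer above only hands complete cipher blocks to the underlying stream and keeps the tail bytes buffered for the next call. A standalone, dependency-free sketch of that same pattern (this is not the WAESWriter API itself):

class BlockBuffer:
    """Accumulate bytes and emit them to a sink one fixed-size block at a time."""

    def __init__(self, block_size, sink):
        self.block_size = block_size
        self.sink = sink      # called once per complete block
        self.buffer = b''

    def write(self, b):
        self.buffer += bytes(b)
        while len(self.buffer) >= self.block_size:
            self.sink(self.buffer[:self.block_size])
            self.buffer = self.buffer[self.block_size:]
        return len(b)

blocks = []
buf = BlockBuffer(16, blocks.append)
buf.write(b'0123456789')
buf.write(b'0123456789')            # 20 bytes total -> exactly one 16-byte block is emitted
assert blocks == [b'0123456789012345']
assert buf.buffer == b'6789'        # the remainder waits for the next write (or a final flush)
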
a1ezzz/wasp-general | wasp_general/uri.py | WURI.reset_component | def reset_component(self, component):
""" Unset component in this URI
:param component: component name (or component type) to reset
:return: None
"""
if isinstance(component, str) is True:
component = WURI.Component(component)
self.__components[component] = None | python | def reset_component(self, component):
""" Unset component in this URI
:param component: component name (or component type) to reset
:return: None
"""
if isinstance(component, str) is True:
component = WURI.Component(component)
self.__components[component] = None | [
"def",
"reset_component",
"(",
"self",
",",
"component",
")",
":",
"if",
"isinstance",
"(",
"component",
",",
"str",
")",
"is",
"True",
":",
"component",
"=",
"WURI",
".",
"Component",
"(",
"component",
")",
"self",
".",
"__components",
"[",
"component",
"]",
"=",
"None"
] | Unset component in this URI
:param component: component name (or component type) to reset
:return: None | [
"Unset",
"component",
"in",
"this",
"URI"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/uri.py#L137-L146 | train |
a1ezzz/wasp-general | wasp_general/uri.py | WURI.parse | def parse(cls, uri):
""" Parse URI-string and return WURI object
:param uri: string to parse
:return: WURI
"""
uri_components = urlsplit(uri)
adapter_fn = lambda x: x if x is not None and (isinstance(x, str) is False or len(x)) > 0 else None
return cls(
scheme=adapter_fn(uri_components.scheme),
username=adapter_fn(uri_components.username),
password=adapter_fn(uri_components.password),
hostname=adapter_fn(uri_components.hostname),
port=adapter_fn(uri_components.port),
path=adapter_fn(uri_components.path),
query=adapter_fn(uri_components.query),
fragment=adapter_fn(uri_components.fragment),
) | python | def parse(cls, uri):
""" Parse URI-string and return WURI object
:param uri: string to parse
:return: WURI
"""
uri_components = urlsplit(uri)
adapter_fn = lambda x: x if x is not None and (isinstance(x, str) is False or len(x)) > 0 else None
return cls(
scheme=adapter_fn(uri_components.scheme),
username=adapter_fn(uri_components.username),
password=adapter_fn(uri_components.password),
hostname=adapter_fn(uri_components.hostname),
port=adapter_fn(uri_components.port),
path=adapter_fn(uri_components.path),
query=adapter_fn(uri_components.query),
fragment=adapter_fn(uri_components.fragment),
) | [
"def",
"parse",
"(",
"cls",
",",
"uri",
")",
":",
"uri_components",
"=",
"urlsplit",
"(",
"uri",
")",
"adapter_fn",
"=",
"lambda",
"x",
":",
"x",
"if",
"x",
"is",
"not",
"None",
"and",
"(",
"isinstance",
"(",
"x",
",",
"str",
")",
"is",
"False",
"or",
"len",
"(",
"x",
")",
")",
">",
"0",
"else",
"None",
"return",
"cls",
"(",
"scheme",
"=",
"adapter_fn",
"(",
"uri_components",
".",
"scheme",
")",
",",
"username",
"=",
"adapter_fn",
"(",
"uri_components",
".",
"username",
")",
",",
"password",
"=",
"adapter_fn",
"(",
"uri_components",
".",
"password",
")",
",",
"hostname",
"=",
"adapter_fn",
"(",
"uri_components",
".",
"hostname",
")",
",",
"port",
"=",
"adapter_fn",
"(",
"uri_components",
".",
"port",
")",
",",
"path",
"=",
"adapter_fn",
"(",
"uri_components",
".",
"path",
")",
",",
"query",
"=",
"adapter_fn",
"(",
"uri_components",
".",
"query",
")",
",",
"fragment",
"=",
"adapter_fn",
"(",
"uri_components",
".",
"fragment",
")",
",",
")"
] | Parse URI-string and return WURI object
:param uri: string to parse
:return: WURI | [
"Parse",
"URI",
"-",
"string",
"and",
"return",
"WURI",
"object"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/uri.py#L150-L168 | train |
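
A hedged usage sketch for the parser above. The import path is assumed from the repository layout, and component() is assumed to accept a component name as a getter the same way reset_component() does; scheme() is the accessor used elsewhere in this module.

from wasp_general.uri import WURI   # import path assumed

uri = WURI.parse('https://user:secret@host.example:8443/some/path?verbose=&x=1#top')
print(uri.scheme())                 # 'https'
print(uri.component('hostname'))    # 'host.example'
print(uri.component('port'))        # 8443
uri.reset_component('fragment')     # drop the '#top' part again
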
a1ezzz/wasp-general | wasp_general/uri.py | WURIQuery.add_parameter | def add_parameter(self, name, value=None):
""" Add new parameter value to this query. New value will be appended to previously added values.
:param name: parameter name
:param value: value to add (None to set null-value)
:return: None
"""
if name not in self.__query:
self.__query[name] = [value]
else:
self.__query[name].append(value) | python | def add_parameter(self, name, value=None):
""" Add new parameter value to this query. New value will be appended to previously added values.
:param name: parameter name
:param value: value to add (None to set null-value)
:return: None
"""
if name not in self.__query:
self.__query[name] = [value]
else:
self.__query[name].append(value) | [
"def",
"add_parameter",
"(",
"self",
",",
"name",
",",
"value",
"=",
"None",
")",
":",
"if",
"name",
"not",
"in",
"self",
".",
"__query",
":",
"self",
".",
"__query",
"[",
"name",
"]",
"=",
"[",
"value",
"]",
"else",
":",
"self",
".",
"__query",
"[",
"name",
"]",
".",
"append",
"(",
"value",
")"
] | Add new parameter value to this query. New value will be appended to previously added values.
:param name: parameter name
:param value: value to add (None to set null-value)
:return: None | [
"Add",
"new",
"parameter",
"value",
"to",
"this",
"query",
".",
"New",
"value",
"will",
"be",
"appended",
"to",
"previously",
"added",
"values",
"."
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/uri.py#L202-L212 | train |
a1ezzz/wasp-general | wasp_general/uri.py | WURIQuery.remove_parameter | def remove_parameter(self, name):
""" Remove the specified parameter from this query
:param name: name of a parameter to remove
:return: None
"""
if name in self.__query:
self.__query.pop(name) | python | def remove_parameter(self, name):
""" Remove the specified parameter from this query
:param name: name of a parameter to remove
:return: None
"""
if name in self.__query:
self.__query.pop(name) | [
"def",
"remove_parameter",
"(",
"self",
",",
"name",
")",
":",
"if",
"name",
"in",
"self",
".",
"__query",
":",
"self",
".",
"__query",
".",
"pop",
"(",
"name",
")"
] | Remove the specified parameter from this query
:param name: name of a parameter to remove
:return: None | [
"Remove",
"the",
"specified",
"parameter",
"from",
"this",
"query"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/uri.py#L215-L222 | train |
a1ezzz/wasp-general | wasp_general/uri.py | WURIQuery.parse | def parse(cls, query_str):
""" Parse string that represent query component from URI
:param query_str: string without '?'-sign
:return: WURIQuery
"""
parsed_query = parse_qs(query_str, keep_blank_values=True, strict_parsing=True)
result = cls()
for parameter_name in parsed_query.keys():
for parameter_value in parsed_query[parameter_name]:
result.add_parameter(
parameter_name,
parameter_value if len(parameter_value) > 0 else None
)
return result | python | def parse(cls, query_str):
""" Parse string that represent query component from URI
:param query_str: string without '?'-sign
:return: WURIQuery
"""
parsed_query = parse_qs(query_str, keep_blank_values=True, strict_parsing=True)
result = cls()
for parameter_name in parsed_query.keys():
for parameter_value in parsed_query[parameter_name]:
result.add_parameter(
parameter_name,
parameter_value if len(parameter_value) > 0 else None
)
return result | [
"def",
"parse",
"(",
"cls",
",",
"query_str",
")",
":",
"parsed_query",
"=",
"parse_qs",
"(",
"query_str",
",",
"keep_blank_values",
"=",
"True",
",",
"strict_parsing",
"=",
"True",
")",
"result",
"=",
"cls",
"(",
")",
"for",
"parameter_name",
"in",
"parsed_query",
".",
"keys",
"(",
")",
":",
"for",
"parameter_value",
"in",
"parsed_query",
"[",
"parameter_name",
"]",
":",
"result",
".",
"add_parameter",
"(",
"parameter_name",
",",
"parameter_value",
"if",
"len",
"(",
"parameter_value",
")",
">",
"0",
"else",
"None",
")",
"return",
"result"
] | Parse string that represent query component from URI
:param query_str: string without '?'-sign
:return: WURIQuery | [
"Parse",
"string",
"that",
"represent",
"query",
"component",
"from",
"URI"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/uri.py#L261-L275 | train |
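
A hedged sketch of the query API above. Note that parse() uses strict parsing, so every parameter needs an '=' sign, and an empty value is stored as None; only the mutation methods shown in this excerpt are used here.

from wasp_general.uri import WURIQuery   # import path assumed from the repo layout

query = WURIQuery.parse('tag=a&tag=b&verbose=')   # 'verbose=' becomes a null-valued parameter
query.add_parameter('tag', 'c')                    # 'tag' now accumulates ['a', 'b', 'c']
query.add_parameter('dry-run')                     # another null-valued parameter
query.remove_parameter('verbose')                  # silently ignored if the name is absent
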
a1ezzz/wasp-general | wasp_general/uri.py | WStrictURIQuery.add_specification | def add_specification(self, specification):
""" Add a new query parameter specification. If this object already has a specification for the
specified parameter - exception is raised. No checks for the specified or any parameter are made
regarding specification appending
:param specification: new specification that will be added
:return: None
"""
name = specification.name()
if name in self.__specs:
raise ValueError('WStrictURIQuery object already has specification for parameter "%s" ' % name)
self.__specs[name] = specification | python | def add_specification(self, specification):
""" Add a new query parameter specification. If this object already has a specification for the
specified parameter - exception is raised. No checks for the specified or any parameter are made
regarding specification appending
:param specification: new specification that will be added
:return: None
"""
name = specification.name()
if name in self.__specs:
raise ValueError('WStrictURIQuery object already has specification for parameter "%s" ' % name)
self.__specs[name] = specification | [
"def",
"add_specification",
"(",
"self",
",",
"specification",
")",
":",
"name",
"=",
"specification",
".",
"name",
"(",
")",
"if",
"name",
"in",
"self",
".",
"__specs",
":",
"raise",
"ValueError",
"(",
"'WStrictURIQuery object already has specification for parameter \"%s\" '",
"%",
"name",
")",
"self",
".",
"__specs",
"[",
"name",
"]",
"=",
"specification"
] | Add a new query parameter specification. If this object already has a specification for the
specified parameter - exception is raised. No checks for the specified or any parameter are made
regarding specification appending
:param specification: new specification that will be added
:return: None | [
"Add",
"a",
"new",
"query",
"parameter",
"specification",
".",
"If",
"this",
"object",
"already",
"has",
"a",
"specification",
"for",
"the",
"specified",
"parameter",
"-",
"exception",
"is",
"raised",
".",
"No",
"checks",
"for",
"the",
"specified",
"or",
"any",
"parameter",
"are",
"made",
"regarding",
"specification",
"appending"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/uri.py#L380-L391 | train |
a1ezzz/wasp-general | wasp_general/uri.py | WStrictURIQuery.remove_specification | def remove_specification(self, name):
""" Remove a specification that matches a query parameter. No checks for the specified or any parameter
are made regarding specification removing
:param name: parameter name to remove
:return: None
"""
if name in self.__specs:
self.__specs.pop(name) | python | def remove_specification(self, name):
""" Remove a specification that matches a query parameter. No checks for the specified or any parameter
are made regarding specification removing
:param name: parameter name to remove
:return: None
"""
if name in self.__specs:
self.__specs.pop(name) | [
"def",
"remove_specification",
"(",
"self",
",",
"name",
")",
":",
"if",
"name",
"in",
"self",
".",
"__specs",
":",
"self",
".",
"__specs",
".",
"pop",
"(",
"name",
")"
] | Remove a specification that matches a query parameter. No checks for the specified or any parameter
are made regarding specification removing
:param name: parameter name to remove
:return: None | [
"Remove",
"a",
"specification",
"that",
"matches",
"a",
"query",
"parameter",
".",
"No",
"checks",
"for",
"the",
"specified",
"or",
"any",
"parameter",
"are",
"made",
"regarding",
"specification",
"removing"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/uri.py#L394-L402 | train |
a1ezzz/wasp-general | wasp_general/uri.py | WStrictURIQuery.replace_parameter | def replace_parameter(self, name, value=None):
""" Replace a query parameter values with a new value. If a new value does not match current
specifications, then exception is raised
:param name: parameter name to replace
:param value: new parameter value. None is for empty (null) value
:return: None
"""
spec = self.__specs[name] if name in self.__specs else None
if self.extra_parameters() is False and spec is None:
raise ValueError('Extra parameters are forbidden for this WStrictURIQuery object')
if spec is not None and spec.nullable() is False and value is None:
raise ValueError('Nullable values are forbidden for parameter "%s"' % name)
if spec is not None and value is not None:
re_obj = spec.re_obj()
if re_obj is not None and re_obj.match(value) is None:
raise ValueError('Value does not match regular expression')
WURIQuery.replace_parameter(self, name, value=value) | python | def replace_parameter(self, name, value=None):
""" Replace a query parameter values with a new value. If a new value does not match current
specifications, then exception is raised
:param name: parameter name to replace
:param value: new parameter value. None is for empty (null) value
:return: None
"""
spec = self.__specs[name] if name in self.__specs else None
if self.extra_parameters() is False and spec is None:
raise ValueError('Extra parameters are forbidden for this WStrictURIQuery object')
if spec is not None and spec.nullable() is False and value is None:
raise ValueError('Nullable values are forbidden for parameter "%s"' % name)
if spec is not None and value is not None:
re_obj = spec.re_obj()
if re_obj is not None and re_obj.match(value) is None:
raise ValueError('Value does not match regular expression')
WURIQuery.replace_parameter(self, name, value=value) | [
"def",
"replace_parameter",
"(",
"self",
",",
"name",
",",
"value",
"=",
"None",
")",
":",
"spec",
"=",
"self",
".",
"__specs",
"[",
"name",
"]",
"if",
"name",
"in",
"self",
".",
"__specs",
"else",
"None",
"if",
"self",
".",
"extra_parameters",
"(",
")",
"is",
"False",
"and",
"spec",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'Extra parameters are forbidden for this WStrictURIQuery object'",
")",
"if",
"spec",
"is",
"not",
"None",
"and",
"spec",
".",
"nullable",
"(",
")",
"is",
"False",
"and",
"value",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'Nullable values is forbidden for parameter \"%s\"'",
"%",
"name",
")",
"if",
"spec",
"is",
"not",
"None",
"and",
"value",
"is",
"not",
"None",
":",
"re_obj",
"=",
"spec",
".",
"re_obj",
"(",
")",
"if",
"re_obj",
"is",
"not",
"None",
"and",
"re_obj",
".",
"match",
"(",
"value",
")",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'Value does not match regular expression'",
")",
"WURIQuery",
".",
"replace_parameter",
"(",
"self",
",",
"name",
",",
"value",
"=",
"value",
")"
] | Replace a query parameter values with a new value. If a new value does not match current
specifications, then exception is raised
:param name: parameter name to replace
:param value: new parameter value. None is for empty (null) value
:return: None | [
"Replace",
"a",
"query",
"parameter",
"values",
"with",
"a",
"new",
"value",
".",
"If",
"a",
"new",
"value",
"does",
"not",
"match",
"current",
"specifications",
"then",
"exception",
"is",
"raised"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/uri.py#L405-L425 | train |
a1ezzz/wasp-general | wasp_general/uri.py | WStrictURIQuery.remove_parameter | def remove_parameter(self, name):
""" Remove parameter from this query. If a parameter is mandatory, then exception is raised
:param name: parameter name to remove
:return: None
"""
spec = self.__specs[name] if name in self.__specs else None
if spec is not None and spec.optional() is False:
raise ValueError('Unable to remove a required parameter "%s"' % name)
WURIQuery.remove_parameter(self, name) | python | def remove_parameter(self, name):
""" Remove parameter from this query. If a parameter is mandatory, then exception is raised
:param name: parameter name to remove
:return: None
"""
spec = self.__specs[name] if name in self.__specs else None
if spec is not None and spec.optional() is False:
raise ValueError('Unable to remove a required parameter "%s"' % name)
WURIQuery.remove_parameter(self, name) | [
"def",
"remove_parameter",
"(",
"self",
",",
"name",
")",
":",
"spec",
"=",
"self",
".",
"__specs",
"[",
"name",
"]",
"if",
"name",
"in",
"self",
".",
"__specs",
"else",
"None",
"if",
"spec",
"is",
"not",
"None",
"and",
"spec",
".",
"optional",
"(",
")",
"is",
"False",
":",
"raise",
"ValueError",
"(",
"'Unable to remove a required parameter \"%s\"'",
"%",
"name",
")",
"WURIQuery",
".",
"remove_parameter",
"(",
"self",
",",
"name",
")"
] | Remove parameter from this query. If a parameter is mandatory, then exception is raised
:param name: parameter name to remove
:return: None | [
"Remove",
"parameter",
"from",
"this",
"query",
".",
"If",
"a",
"parameter",
"is",
"mandatory",
"then",
"exception",
"is",
"raised"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/uri.py#L454-L464 | train |
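
A hedged sketch pulling the WStrictURIQuery pieces above together. The concrete specification class is not part of this excerpt, so a minimal stand-in exposing the four methods the checks rely on (name, nullable, optional, re_obj) is defined here; the real project presumably ships its own.

import re
from wasp_general.uri import WURIQuery, WStrictURIQuery   # import path assumed

class ParamSpec:
    """Stand-in specification object; only the methods used by the checks above."""
    def __init__(self, name, nullable=False, optional=True, pattern=None):
        self._name = name
        self._nullable = nullable
        self._optional = optional
        self._re = re.compile(pattern) if pattern is not None else None
    def name(self):
        return self._name
    def nullable(self):
        return self._nullable
    def optional(self):
        return self._optional
    def re_obj(self):
        return self._re

query = WStrictURIQuery(
    WURIQuery.parse('page=1'),
    ParamSpec('page', optional=False, pattern=r'^\d+$'),
    extra_parameters=False
)
query.replace_parameter('page', '2')       # accepted: the value matches the pattern
try:
    query.remove_parameter('page')         # rejected: the parameter is required
except ValueError as exc:
    print(exc)
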
a1ezzz/wasp-general | wasp_general/uri.py | WURIComponentVerifier.validate | def validate(self, uri):
""" Check an URI for compatibility with this specification. Return True if the URI is compatible.
:param uri: an URI to check
:return: bool
"""
requirement = self.requirement()
uri_component = uri.component(self.component())
if uri_component is None:
return requirement != WURIComponentVerifier.Requirement.required
if requirement == WURIComponentVerifier.Requirement.unsupported:
return False
re_obj = self.re_obj()
if re_obj is not None:
return re_obj.match(uri_component) is not None
return True | python | def validate(self, uri):
""" Check an URI for compatibility with this specification. Return True if the URI is compatible.
:param uri: an URI to check
:return: bool
"""
requirement = self.requirement()
uri_component = uri.component(self.component())
if uri_component is None:
return requirement != WURIComponentVerifier.Requirement.required
if requirement == WURIComponentVerifier.Requirement.unsupported:
return False
re_obj = self.re_obj()
if re_obj is not None:
return re_obj.match(uri_component) is not None
return True | [
"def",
"validate",
"(",
"self",
",",
"uri",
")",
":",
"requirement",
"=",
"self",
".",
"requirement",
"(",
")",
"uri_component",
"=",
"uri",
".",
"component",
"(",
"self",
".",
"component",
"(",
")",
")",
"if",
"uri_component",
"is",
"None",
":",
"return",
"requirement",
"!=",
"WURIComponentVerifier",
".",
"Requirement",
".",
"required",
"if",
"requirement",
"==",
"WURIComponentVerifier",
".",
"Requirement",
".",
"unsupported",
":",
"return",
"False",
"re_obj",
"=",
"self",
".",
"re_obj",
"(",
")",
"if",
"re_obj",
"is",
"not",
"None",
":",
"return",
"re_obj",
".",
"match",
"(",
"uri_component",
")",
"is",
"not",
"None",
"return",
"True"
] | Check an URI for compatibility with this specification. Return True if the URI is compatible.
:param uri: an URI to check
:return: bool | [
"Check",
"an",
"URI",
"for",
"compatibility",
"with",
"this",
"specification",
".",
"Return",
"True",
"if",
"the",
"URI",
"is",
"compatible",
"."
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/uri.py#L537-L555 | train |
a1ezzz/wasp-general | wasp_general/uri.py | WURIQueryVerifier.validate | def validate(self, uri):
""" Check that an query part of an URI is compatible with this descriptor. Return True if the URI is
compatible.
:param uri: an URI to check
:return: bool
"""
if WURIComponentVerifier.validate(self, uri) is False:
return False
try:
WStrictURIQuery(
WURIQuery.parse(uri.component(self.component())),
*self.__specs,
extra_parameters=self.__extra_parameters
)
except ValueError:
return False
return True | python | def validate(self, uri):
""" Check that an query part of an URI is compatible with this descriptor. Return True if the URI is
compatible.
:param uri: an URI to check
:return: bool
"""
if WURIComponentVerifier.validate(self, uri) is False:
return False
try:
WStrictURIQuery(
WURIQuery.parse(uri.component(self.component())),
*self.__specs,
extra_parameters=self.__extra_parameters
)
except ValueError:
return False
return True | [
"def",
"validate",
"(",
"self",
",",
"uri",
")",
":",
"if",
"WURIComponentVerifier",
".",
"validate",
"(",
"self",
",",
"uri",
")",
"is",
"False",
":",
"return",
"False",
"try",
":",
"WStrictURIQuery",
"(",
"WURIQuery",
".",
"parse",
"(",
"uri",
".",
"component",
"(",
"self",
".",
"component",
"(",
")",
")",
")",
",",
"*",
"self",
".",
"__specs",
",",
"extra_parameters",
"=",
"self",
".",
"__extra_parameters",
")",
"except",
"ValueError",
":",
"return",
"False",
"return",
"True"
] | Check that an query part of an URI is compatible with this descriptor. Return True if the URI is
compatible.
:param uri: an URI to check
:return: bool | [
"Check",
"that",
"an",
"query",
"part",
"of",
"an",
"URI",
"is",
"compatible",
"with",
"this",
"descriptor",
".",
"Return",
"True",
"if",
"the",
"URI",
"is",
"compatible",
"."
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/uri.py#L576-L594 | train |
a1ezzz/wasp-general | wasp_general/uri.py | WSchemeSpecification.is_compatible | def is_compatible(self, uri):
""" Check if URI is compatible with this specification. Compatible URI has scheme name that matches
specification scheme name, has all of the required components, does not have unsupported components
and may have optional components
:param uri: URI to check
:return: bool
"""
for component, component_value in uri:
if self.verifier(component).validate(uri) is False:
return False
return True | python | def is_compatible(self, uri):
""" Check if URI is compatible with this specification. Compatible URI has scheme name that matches
specification scheme name, has all of the required components, does not have unsupported components
and may have optional components
:param uri: URI to check
:return: bool
"""
for component, component_value in uri:
if self.verifier(component).validate(uri) is False:
return False
return True | [
"def",
"is_compatible",
"(",
"self",
",",
"uri",
")",
":",
"for",
"component",
",",
"component_value",
"in",
"uri",
":",
"if",
"self",
".",
"verifier",
"(",
"component",
")",
".",
"validate",
"(",
"uri",
")",
"is",
"False",
":",
"return",
"False",
"return",
"True"
] | Check if URI is compatible with this specification. Compatible URI has scheme name that matches
specification scheme name, has all of the required components, does not have unsupported components
and may have optional components
:param uri: URI to check
:return: bool | [
"Check",
"if",
"URI",
"is",
"compatible",
"with",
"this",
"specification",
".",
"Compatible",
"URI",
"has",
"scheme",
"name",
"that",
"matches",
"specification",
"scheme",
"name",
"has",
"all",
"of",
"the",
"required",
"components",
"does",
"not",
"have",
"unsupported",
"components",
"and",
"may",
"have",
"optional",
"components"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/uri.py#L658-L670 | train |
a1ezzz/wasp-general | wasp_general/uri.py | WSchemeCollection.handler | def handler(self, scheme_name=None):
""" Return handler which scheme name matches the specified one
:param scheme_name: scheme name to search for
:return: WSchemeHandler class or None (if matching handler was not found)
"""
if scheme_name is None:
return self.__default_handler_cls
for handler in self.__handlers_cls:
if handler.scheme_specification().scheme_name() == scheme_name:
return handler | python | def handler(self, scheme_name=None):
""" Return handler which scheme name matches the specified one
:param scheme_name: scheme name to search for
:return: WSchemeHandler class or None (if matching handler was not found)
"""
if scheme_name is None:
return self.__default_handler_cls
for handler in self.__handlers_cls:
if handler.scheme_specification().scheme_name() == scheme_name:
return handler | [
"def",
"handler",
"(",
"self",
",",
"scheme_name",
"=",
"None",
")",
":",
"if",
"scheme_name",
"is",
"None",
":",
"return",
"self",
".",
"__default_handler_cls",
"for",
"handler",
"in",
"self",
".",
"__handlers_cls",
":",
"if",
"handler",
".",
"scheme_specification",
"(",
")",
".",
"scheme_name",
"(",
")",
"==",
"scheme_name",
":",
"return",
"handler"
] | Return handler whose scheme name matches the specified one
:param scheme_name: scheme name to search for
:return: WSchemeHandler class or None (if matching handler was not found) | [
"Return",
"handler",
"which",
"scheme",
"name",
"matches",
"the",
"specified",
"one"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/uri.py#L752-L762 | train |
a1ezzz/wasp-general | wasp_general/uri.py | WSchemeCollection.open | def open(self, uri, **kwargs):
""" Return handler instance that matches the specified URI. WSchemeCollection.NoHandlerFound and
WSchemeCollection.SchemeIncompatible may be raised.
:param uri: URI to search handler for
:param kwargs: additional arguments that may be used by a handler specialization
:return: WSchemeHandler
"""
handler = self.handler(uri.scheme())
if handler is None:
raise WSchemeCollection.NoHandlerFound(uri)
if uri.scheme() is None:
uri.component('scheme', handler.scheme_specification().scheme_name())
if handler.scheme_specification().is_compatible(uri) is False:
raise WSchemeCollection.SchemeIncompatible(uri)
return handler.create_handler(uri, **kwargs) | python | def open(self, uri, **kwargs):
""" Return handler instance that matches the specified URI. WSchemeCollection.NoHandlerFound and
WSchemeCollection.SchemeIncompatible may be raised.
:param uri: URI to search handler for
:param kwargs: additional arguments that may be used by a handler specialization
:return: WSchemeHandler
"""
handler = self.handler(uri.scheme())
if handler is None:
raise WSchemeCollection.NoHandlerFound(uri)
if uri.scheme() is None:
uri.component('scheme', handler.scheme_specification().scheme_name())
if handler.scheme_specification().is_compatible(uri) is False:
raise WSchemeCollection.SchemeIncompatible(uri)
return handler.create_handler(uri, **kwargs) | [
"def",
"open",
"(",
"self",
",",
"uri",
",",
"*",
"*",
"kwargs",
")",
":",
"handler",
"=",
"self",
".",
"handler",
"(",
"uri",
".",
"scheme",
"(",
")",
")",
"if",
"handler",
"is",
"None",
":",
"raise",
"WSchemeCollection",
".",
"NoHandlerFound",
"(",
"uri",
")",
"if",
"uri",
".",
"scheme",
"(",
")",
"is",
"None",
":",
"uri",
".",
"component",
"(",
"'scheme'",
",",
"handler",
".",
"scheme_specification",
"(",
")",
".",
"scheme_name",
"(",
")",
")",
"if",
"handler",
".",
"scheme_specification",
"(",
")",
".",
"is_compatible",
"(",
"uri",
")",
"is",
"False",
":",
"raise",
"WSchemeCollection",
".",
"SchemeIncompatible",
"(",
"uri",
")",
"return",
"handler",
".",
"create_handler",
"(",
"uri",
",",
"*",
"*",
"kwargs",
")"
] | Return handler instance that matches the specified URI. WSchemeCollection.NoHandlerFound and
WSchemeCollection.SchemeIncompatible may be raised.
:param uri: URI to search handler for
:param kwargs: additional arguments that may be used by a handler specialization
:return: WSchemeHandler | [
"Return",
"handler",
"instance",
"that",
"matches",
"the",
"specified",
"URI",
".",
"WSchemeCollection",
".",
"NoHandlerFound",
"and",
"WSchemeCollection",
".",
"SchemeIncompatible",
"may",
"be",
"raised",
"."
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/uri.py#L765-L783 | train |
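
A hedged sketch of the lookup flow above. Building the collection and its WSchemeHandler classes is outside this excerpt, so an already-constructed collection is assumed; only the calls and exception types shown above are used.

from wasp_general.uri import WURI, WSchemeCollection   # import path assumed from the repo layout

def open_uri(collection, raw_uri, **handler_kwargs):
    """Resolve a raw URI string through an existing WSchemeCollection (illustrative sketch)."""
    uri = WURI.parse(raw_uri)
    try:
        return collection.open(uri, **handler_kwargs)   # a WSchemeHandler instance
    except WSchemeCollection.NoHandlerFound:
        print('no handler is registered for scheme: %s' % uri.scheme())
    except WSchemeCollection.SchemeIncompatible:
        print('the URI does not satisfy the scheme specification: %s' % raw_uri)
    return None
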
olitheolix/qtmacs | qtmacs/applets/webbrowser.py | WebBrowser.loadFile | def loadFile(self, fileName):
"""
Load the URL ``fileName``.
"""
self.fileName = fileName
self.qteWeb.load(QtCore.QUrl(fileName)) | python | def loadFile(self, fileName):
"""
Load the URL ``fileName``.
"""
self.fileName = fileName
self.qteWeb.load(QtCore.QUrl(fileName)) | [
"def",
"loadFile",
"(",
"self",
",",
"fileName",
")",
":",
"self",
".",
"fileName",
"=",
"fileName",
"self",
".",
"qteWeb",
".",
"load",
"(",
"QtCore",
".",
"QUrl",
"(",
"fileName",
")",
")"
] | Load the URL ``fileName``. | [
"Load",
"the",
"URL",
"fileName",
"."
] | 36253b082b82590f183fe154b053eb3a1e741be2 | https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/applets/webbrowser.py#L85-L90 | train |
weijia/djangoautoconf | djangoautoconf/class_based_views/ajax_views.py | AjaxableViewMixin.render_to_response | def render_to_response(self, context, **response_kwargs):
"""
Returns a response with a template rendered with the given context.
"""
context["ajax_form_id"] = self.ajax_form_id
# context["base_template"] = "towel_bootstrap/modal.html"
return self.response_class(
request=self.request,
template=self.get_template_names(),
context=context,
**response_kwargs
) | python | def render_to_response(self, context, **response_kwargs):
"""
Returns a response with a template rendered with the given context.
"""
context["ajax_form_id"] = self.ajax_form_id
# context["base_template"] = "towel_bootstrap/modal.html"
return self.response_class(
request=self.request,
template=self.get_template_names(),
context=context,
**response_kwargs
) | [
"def",
"render_to_response",
"(",
"self",
",",
"context",
",",
"*",
"*",
"response_kwargs",
")",
":",
"context",
"[",
"\"ajax_form_id\"",
"]",
"=",
"self",
".",
"ajax_form_id",
"# context[\"base_template\"] = \"towel_bootstrap/modal.html\"",
"return",
"self",
".",
"response_class",
"(",
"request",
"=",
"self",
".",
"request",
",",
"template",
"=",
"self",
".",
"get_template_names",
"(",
")",
",",
"context",
"=",
"context",
",",
"*",
"*",
"response_kwargs",
")"
] | Returns a response with a template rendered with the given context. | [
"Returns",
"a",
"response",
"with",
"a",
"template",
"rendered",
"with",
"the",
"given",
"context",
"."
] | b7dbda2287ed8cb9de6d02cb3abaaa1c36b1ced0 | https://github.com/weijia/djangoautoconf/blob/b7dbda2287ed8cb9de6d02cb3abaaa1c36b1ced0/djangoautoconf/class_based_views/ajax_views.py#L47-L58 | train |
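
A hedged sketch of plugging the mixin above into a Django class-based view; the template name and form id are placeholders, and the import path is assumed from the repository layout.

from django.views.generic import TemplateView
from djangoautoconf.class_based_views.ajax_views import AjaxableViewMixin   # path assumed

class AjaxPanelView(AjaxableViewMixin, TemplateView):
    template_name = 'panel.html'    # placeholder template
    ajax_form_id = 'panel-form'     # exposed to the template context as {{ ajax_form_id }}
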
consbio/restle | restle/fields.py | TextField.to_python | def to_python(self, value, resource):
"""Converts to unicode if `self.encoding != None`, otherwise returns input without attempting to decode"""
if value is None:
return self._transform(value)
if isinstance(value, six.text_type):
return self._transform(value)
if self.encoding is None and isinstance(value, (six.text_type, six.binary_type)):
return self._transform(value)
if self.encoding is not None and isinstance(value, six.binary_type):
return self._transform(value.decode(self.encoding))
return self._transform(six.text_type(value)) | python | def to_python(self, value, resource):
"""Converts to unicode if `self.encoding != None`, otherwise returns input without attempting to decode"""
if value is None:
return self._transform(value)
if isinstance(value, six.text_type):
return self._transform(value)
if self.encoding is None and isinstance(value, (six.text_type, six.binary_type)):
return self._transform(value)
if self.encoding is not None and isinstance(value, six.binary_type):
return self._transform(value.decode(self.encoding))
return self._transform(six.text_type(value)) | [
"def",
"to_python",
"(",
"self",
",",
"value",
",",
"resource",
")",
":",
"if",
"value",
"is",
"None",
":",
"return",
"self",
".",
"_transform",
"(",
"value",
")",
"if",
"isinstance",
"(",
"value",
",",
"six",
".",
"text_type",
")",
":",
"return",
"self",
".",
"_transform",
"(",
"value",
")",
"if",
"self",
".",
"encoding",
"is",
"None",
"and",
"isinstance",
"(",
"value",
",",
"(",
"six",
".",
"text_type",
",",
"six",
".",
"binary_type",
")",
")",
":",
"return",
"self",
".",
"_transform",
"(",
"value",
")",
"if",
"self",
".",
"encoding",
"is",
"not",
"None",
"and",
"isinstance",
"(",
"value",
",",
"six",
".",
"binary_type",
")",
":",
"return",
"self",
".",
"_transform",
"(",
"value",
".",
"decode",
"(",
"self",
".",
"encoding",
")",
")",
"return",
"self",
".",
"_transform",
"(",
"six",
".",
"text_type",
"(",
"value",
")",
")"
] | Converts to unicode if `self.encoding != None`, otherwise returns input without attempting to decode | [
"Converts",
"to",
"unicode",
"if",
"self",
".",
"encoding",
"!",
"=",
"None",
"otherwise",
"returns",
"input",
"without",
"attempting",
"to",
"decode"
] | 60d100da034c612d4910f4f79eaa57a76eb3dcc6 | https://github.com/consbio/restle/blob/60d100da034c612d4910f4f79eaa57a76eb3dcc6/restle/fields.py#L72-L87 | train |
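
A hedged sketch of the decoding rules above. The TextField constructor is not shown in this excerpt, so the encoding keyword is an assumption; the expected outputs assume the default _transform is the identity, and the resource argument (unused by to_python) is passed as None.

from restle.fields import TextField   # import path assumed from the repo layout

utf8_field = TextField(encoding='utf-8')             # constructor keyword is an assumption
print(utf8_field.to_python(b'caf\xc3\xa9', None))    # bytes are decoded -> 'café'
print(utf8_field.to_python(42, None))                # non-text values are stringified -> '42'

raw_field = TextField(encoding=None)                 # no encoding: bytes pass through untouched
print(raw_field.to_python(b'caf\xc3\xa9', None))     # still b'caf\xc3\xa9'
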
consbio/restle | restle/fields.py | ObjectField.to_python | def to_python(self, value, resource):
"""Dictionary to Python object"""
if isinstance(value, dict):
d = {
self.aliases.get(k, k): self.to_python(v, resource) if isinstance(v, (dict, list)) else v
for k, v in six.iteritems(value)
}
return type(self.class_name, (), d)
elif isinstance(value, list):
return [self.to_python(x, resource) if isinstance(x, (dict, list)) else x for x in value]
else:
return value | python | def to_python(self, value, resource):
"""Dictionary to Python object"""
if isinstance(value, dict):
d = {
self.aliases.get(k, k): self.to_python(v, resource) if isinstance(v, (dict, list)) else v
for k, v in six.iteritems(value)
}
return type(self.class_name, (), d)
elif isinstance(value, list):
return [self.to_python(x, resource) if isinstance(x, (dict, list)) else x for x in value]
else:
return value | [
"def",
"to_python",
"(",
"self",
",",
"value",
",",
"resource",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"dict",
")",
":",
"d",
"=",
"{",
"self",
".",
"aliases",
".",
"get",
"(",
"k",
",",
"k",
")",
":",
"self",
".",
"to_python",
"(",
"v",
",",
"resource",
")",
"if",
"isinstance",
"(",
"v",
",",
"(",
"dict",
",",
"list",
")",
")",
"else",
"v",
"for",
"k",
",",
"v",
"in",
"six",
".",
"iteritems",
"(",
"value",
")",
"}",
"return",
"type",
"(",
"self",
".",
"class_name",
",",
"(",
")",
",",
"d",
")",
"elif",
"isinstance",
"(",
"value",
",",
"list",
")",
":",
"return",
"[",
"self",
".",
"to_python",
"(",
"x",
",",
"resource",
")",
"if",
"isinstance",
"(",
"x",
",",
"(",
"dict",
",",
"list",
")",
")",
"else",
"x",
"for",
"x",
"in",
"value",
"]",
"else",
":",
"return",
"value"
] | Dictionary to Python object | [
"Dictionary",
"to",
"Python",
"object"
] | 60d100da034c612d4910f4f79eaa57a76eb3dcc6 | https://github.com/consbio/restle/blob/60d100da034c612d4910f4f79eaa57a76eb3dcc6/restle/fields.py#L145-L157 | train |
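
The conversion above can be illustrated with a small dependency-free sketch of the same idea (this is not the restle API, just the behaviour to_python implements): aliased keys are renamed and nested dictionaries become attribute-style objects.

def dict_to_object(class_name, value, aliases):
    """Recursively turn dicts into attribute-style objects, renaming aliased keys."""
    if isinstance(value, dict):
        attrs = {
            aliases.get(k, k): dict_to_object(class_name, v, aliases) if isinstance(v, (dict, list)) else v
            for k, v in value.items()
        }
        return type(class_name, (), attrs)
    if isinstance(value, list):
        return [dict_to_object(class_name, v, aliases) if isinstance(v, (dict, list)) else v for v in value]
    return value

obj = dict_to_object('Extent', {'xmin': 1.5, 'units': {'name': 'meters'}}, {'xmin': 'x_min'})
print(obj.x_min)        # 1.5      -- the aliased key was renamed
print(obj.units.name)   # 'meters' -- nested dicts become nested objects
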
consbio/restle | restle/fields.py | ObjectField.to_value | def to_value(self, obj, resource, visited=set()):
"""Python object to dictionary"""
if id(obj) in visited:
raise ValueError('Circular reference detected when attempting to serialize object')
if isinstance(obj, (list, tuple, set)):
return [self.to_value(x, resource) if hasattr(x, '__dict__') else x for x in obj]
elif hasattr(obj, '__dict__'):
attrs = obj.__dict__.copy()
for key in six.iterkeys(obj.__dict__):
if key.startswith('_'):
del attrs[key]
return {
self.reverse_aliases.get(k, k):
self.to_value(v, resource) if hasattr(v, '__dict__') or isinstance(v, (list, tuple, set)) else v
for k, v in six.iteritems(attrs)
}
else:
return obj | python | def to_value(self, obj, resource, visited=set()):
"""Python object to dictionary"""
if id(obj) in visited:
raise ValueError('Circular reference detected when attempting to serialize object')
if isinstance(obj, (list, tuple, set)):
return [self.to_value(x, resource) if hasattr(x, '__dict__') else x for x in obj]
elif hasattr(obj, '__dict__'):
attrs = obj.__dict__.copy()
for key in six.iterkeys(obj.__dict__):
if key.startswith('_'):
del attrs[key]
return {
self.reverse_aliases.get(k, k):
self.to_value(v, resource) if hasattr(v, '__dict__') or isinstance(v, (list, tuple, set)) else v
for k, v in six.iteritems(attrs)
}
else:
return obj | [
"def",
"to_value",
"(",
"self",
",",
"obj",
",",
"resource",
",",
"visited",
"=",
"set",
"(",
")",
")",
":",
"if",
"id",
"(",
"obj",
")",
"in",
"visited",
":",
"raise",
"ValueError",
"(",
"'Circular reference detected when attempting to serialize object'",
")",
"if",
"isinstance",
"(",
"obj",
",",
"(",
"list",
",",
"tuple",
",",
"set",
")",
")",
":",
"return",
"[",
"self",
".",
"to_value",
"(",
"x",
",",
"resource",
")",
"if",
"hasattr",
"(",
"x",
",",
"'__dict__'",
")",
"else",
"x",
"for",
"x",
"in",
"obj",
"]",
"elif",
"hasattr",
"(",
"obj",
",",
"'__dict__'",
")",
":",
"attrs",
"=",
"obj",
".",
"__dict__",
".",
"copy",
"(",
")",
"for",
"key",
"in",
"six",
".",
"iterkeys",
"(",
"obj",
".",
"__dict__",
")",
":",
"if",
"key",
".",
"startswith",
"(",
"'_'",
")",
":",
"del",
"attrs",
"[",
"key",
"]",
"return",
"{",
"self",
".",
"reverse_aliases",
".",
"get",
"(",
"k",
",",
"k",
")",
":",
"self",
".",
"to_value",
"(",
"v",
",",
"resource",
")",
"if",
"hasattr",
"(",
"v",
",",
"'__dict__'",
")",
"or",
"isinstance",
"(",
"v",
",",
"(",
"list",
",",
"tuple",
",",
"set",
")",
")",
"else",
"v",
"for",
"k",
",",
"v",
"in",
"six",
".",
"iteritems",
"(",
"attrs",
")",
"}",
"else",
":",
"return",
"obj"
] | Python object to dictionary | [
"Python",
"object",
"to",
"dictionary"
] | 60d100da034c612d4910f4f79eaa57a76eb3dcc6 | https://github.com/consbio/restle/blob/60d100da034c612d4910f4f79eaa57a76eb3dcc6/restle/fields.py#L159-L179 | train |
a1ezzz/wasp-general | wasp_general/network/beacon/messenger.py | WBeaconGouverneurMessenger.hello_message | def hello_message(self, invert_hello=False):
""" Return message header.
:param invert_hello: whether to return the original header (in case of False value) or reversed \
one (in case of True value).
:return: bytes
"""
if invert_hello is False:
return self.__gouverneur_message
hello_message = []
for i in range(len(self.__gouverneur_message) - 1, -1, -1):
hello_message.append(self.__gouverneur_message[i])
return bytes(hello_message) | python | def hello_message(self, invert_hello=False):
""" Return message header.
:param invert_hello: whether to return the original header (in case of False value) or reversed \
one (in case of True value).
:return: bytes
"""
if invert_hello is False:
return self.__gouverneur_message
hello_message = []
for i in range(len(self.__gouverneur_message) - 1, -1, -1):
hello_message.append(self.__gouverneur_message[i])
return bytes(hello_message) | [
"def",
"hello_message",
"(",
"self",
",",
"invert_hello",
"=",
"False",
")",
":",
"if",
"invert_hello",
"is",
"False",
":",
"return",
"self",
".",
"__gouverneur_message",
"hello_message",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"self",
".",
"__gouverneur_message",
")",
"-",
"1",
",",
"-",
"1",
",",
"-",
"1",
")",
":",
"hello_message",
".",
"append",
"(",
"self",
".",
"__gouverneur_message",
"[",
"i",
"]",
")",
"return",
"bytes",
"(",
"hello_message",
")"
] | Return message header.
:param invert_hello: whether to return the original header (in case of False value) or reversed \
one (in case of True value).
:return: bytes | [
"Return",
"message",
"header",
"."
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/beacon/messenger.py#L200-L213 | train |
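
The inversion branch above simply emits the hello-message bytes back-to-front; a tiny standalone check of that behaviour (independent of the messenger class, with a made-up hello message):

hello = b'HELLO-BEACON'                                              # made-up hello message
inverted = bytes(hello[i] for i in range(len(hello) - 1, -1, -1))    # same loop as above
assert inverted == bytes(reversed(hello)) == b'NOCAEB-OLLEH'
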
a1ezzz/wasp-general | wasp_general/network/beacon/messenger.py | WBeaconGouverneurMessenger._message_address_parse | def _message_address_parse(self, message, invert_hello=False):
""" Read address from beacon message. If no address is specified then "nullable" WIPV4SocketInfo returns
:param message: message to parse
:param invert_hello: defines whether message header is the original one or reversed.
:return: WIPV4SocketInfo
"""
message_header = self.hello_message(invert_hello=invert_hello)
if message[:len(message_header)] != message_header:
raise ValueError('Invalid message header')
message = message[len(message_header):]
message_parts = message.split(WBeaconGouverneurMessenger.__message_splitter__)
address = None
port = None
if len(message_parts) > 3:
raise ValueError('Invalid message. Too many separators')
elif len(message_parts) == 3:
address = WIPV4SocketInfo.parse_address(message_parts[1].decode('ascii'))
port = WIPPort(int(message_parts[2]))
elif len(message_parts) == 2 and len(message_parts[1]) > 0:
address = WIPV4SocketInfo.parse_address(message_parts[1].decode('ascii'))
return WIPV4SocketInfo(address, port) | python | def _message_address_parse(self, message, invert_hello=False):
""" Read address from beacon message. If no address is specified then "nullable" WIPV4SocketInfo returns
:param message: message to parse
:param invert_hello: defines whether message header is the original one or reversed.
:return: WIPV4SocketInfo
"""
message_header = self.hello_message(invert_hello=invert_hello)
if message[:len(message_header)] != message_header:
raise ValueError('Invalid message header')
message = message[len(message_header):]
message_parts = message.split(WBeaconGouverneurMessenger.__message_splitter__)
address = None
port = None
if len(message_parts) > 3:
raise ValueError('Invalid message. Too many separators')
elif len(message_parts) == 3:
address = WIPV4SocketInfo.parse_address(message_parts[1].decode('ascii'))
port = WIPPort(int(message_parts[2]))
elif len(message_parts) == 2 and len(message_parts[1]) > 0:
address = WIPV4SocketInfo.parse_address(message_parts[1].decode('ascii'))
return WIPV4SocketInfo(address, port) | [
"def",
"_message_address_parse",
"(",
"self",
",",
"message",
",",
"invert_hello",
"=",
"False",
")",
":",
"message_header",
"=",
"self",
".",
"hello_message",
"(",
"invert_hello",
"=",
"invert_hello",
")",
"if",
"message",
"[",
":",
"len",
"(",
"message_header",
")",
"]",
"!=",
"message_header",
":",
"raise",
"ValueError",
"(",
"'Invalid message header'",
")",
"message",
"=",
"message",
"[",
"len",
"(",
"message_header",
")",
":",
"]",
"message_parts",
"=",
"message",
".",
"split",
"(",
"WBeaconGouverneurMessenger",
".",
"__message_splitter__",
")",
"address",
"=",
"None",
"port",
"=",
"None",
"if",
"len",
"(",
"message_parts",
")",
">",
"3",
":",
"raise",
"ValueError",
"(",
"'Invalid message. Too many separators'",
")",
"elif",
"len",
"(",
"message_parts",
")",
"==",
"3",
":",
"address",
"=",
"WIPV4SocketInfo",
".",
"parse_address",
"(",
"message_parts",
"[",
"1",
"]",
".",
"decode",
"(",
"'ascii'",
")",
")",
"port",
"=",
"WIPPort",
"(",
"int",
"(",
"message_parts",
"[",
"2",
"]",
")",
")",
"elif",
"len",
"(",
"message_parts",
")",
"==",
"2",
"and",
"len",
"(",
"message_parts",
"[",
"1",
"]",
")",
">",
"0",
":",
"address",
"=",
"WIPV4SocketInfo",
".",
"parse_address",
"(",
"message_parts",
"[",
"1",
"]",
".",
"decode",
"(",
"'ascii'",
")",
")",
"return",
"WIPV4SocketInfo",
"(",
"address",
",",
"port",
")"
] | Read address from beacon message. If no address is specified then "nullable" WIPV4SocketInfo returns
:param message: message to parse
:param invert_hello: defines whether message header is the original one or reversed.
:return: WIPV4SocketInfo | [
"Read",
"address",
"from",
"beacon",
"message",
".",
"If",
"no",
"address",
"is",
"specified",
"then",
"nullable",
"WIPV4SocketInfo",
"returns"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/beacon/messenger.py#L254-L280 | train |
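The parser above implies a simple wire format: header, then an optional ASCII address and an optional port, joined by a splitter byte. A hedged standalone sketch of that layout (the header and splitter values below are illustrative assumptions, not the library's actual constants):

HEADER = b'beacon'   # stand-in for the messenger's hello message
SPLITTER = b':'      # stand-in for __message_splitter__

def parse(message: bytes):
    if not message.startswith(HEADER):
        raise ValueError('Invalid message header')
    parts = message[len(HEADER):].split(SPLITTER)
    if len(parts) > 3:
        raise ValueError('Invalid message. Too many separators')
    address = parts[1].decode('ascii') if len(parts) >= 2 and parts[1] else None
    port = int(parts[2]) if len(parts) == 3 else None
    return address, port

assert parse(b'beacon') == (None, None)
assert parse(b'beacon:192.168.0.1:4040') == ('192.168.0.1', 4040)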
a1ezzz/wasp-general | wasp_general/crypto/aes.py | WAES.cipher | def cipher(self):
""" Generate AES-cipher
:return: Crypto.Cipher.AES.AESCipher
"""
#cipher = pyAES.new(*self.mode().aes_args(), **self.mode().aes_kwargs())
cipher = Cipher(*self.mode().aes_args(), **self.mode().aes_kwargs())
return WAES.WAESCipher(cipher) | python | def cipher(self):
""" Generate AES-cipher
:return: Crypto.Cipher.AES.AESCipher
"""
#cipher = pyAES.new(*self.mode().aes_args(), **self.mode().aes_kwargs())
cipher = Cipher(*self.mode().aes_args(), **self.mode().aes_kwargs())
return WAES.WAESCipher(cipher) | [
"def",
"cipher",
"(",
"self",
")",
":",
"#cipher = pyAES.new(*self.mode().aes_args(), **self.mode().aes_kwargs())",
"cipher",
"=",
"Cipher",
"(",
"*",
"self",
".",
"mode",
"(",
")",
".",
"aes_args",
"(",
")",
",",
"*",
"*",
"self",
".",
"mode",
"(",
")",
".",
"aes_kwargs",
"(",
")",
")",
"return",
"WAES",
".",
"WAESCipher",
"(",
"cipher",
")"
] | Generate AES-cipher
:return: Crypto.Cipher.AES.AESCipher | [
"Generate",
"AES",
"-",
"cipher"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/crypto/aes.py#L491-L498 | train |
a1ezzz/wasp-general | wasp_general/crypto/aes.py | WAES.encrypt | def encrypt(self, data):
""" Encrypt the given data with cipher that is got from AES.cipher call.
:param data: data to encrypt
:return: bytes
"""
padding = self.mode().padding()
if padding is not None:
data = padding.pad(data, WAESMode.__data_padding_length__)
return self.cipher().encrypt_block(data) | python | def encrypt(self, data):
""" Encrypt the given data with cipher that is got from AES.cipher call.
:param data: data to encrypt
:return: bytes
"""
padding = self.mode().padding()
if padding is not None:
data = padding.pad(data, WAESMode.__data_padding_length__)
return self.cipher().encrypt_block(data) | [
"def",
"encrypt",
"(",
"self",
",",
"data",
")",
":",
"padding",
"=",
"self",
".",
"mode",
"(",
")",
".",
"padding",
"(",
")",
"if",
"padding",
"is",
"not",
"None",
":",
"data",
"=",
"padding",
".",
"pad",
"(",
"data",
",",
"WAESMode",
".",
"__data_padding_length__",
")",
"return",
"self",
".",
"cipher",
"(",
")",
".",
"encrypt_block",
"(",
"data",
")"
] | Encrypt the given data with cipher that is got from AES.cipher call.
:param data: data to encrypt
:return: bytes | [
"Encrypt",
"the",
"given",
"data",
"with",
"cipher",
"that",
"is",
"got",
"from",
"AES",
".",
"cipher",
"call",
"."
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/crypto/aes.py#L501-L511 | train |
a1ezzz/wasp-general | wasp_general/crypto/aes.py | WAES.decrypt | def decrypt(self, data, decode=False):
""" Decrypt the given data with cipher that is got from AES.cipher call.
:param data: data to decrypt
:param decode: whether to decode bytes to str or not
:return: bytes or str (depends on decode flag)
"""
#result = self.cipher().decrypt(data)
result = self.cipher().decrypt_block(data)
padding = self.mode().padding()
if padding is not None:
result = padding.reverse_pad(result, WAESMode.__data_padding_length__)
return result.decode() if decode else result | python | def decrypt(self, data, decode=False):
""" Decrypt the given data with cipher that is got from AES.cipher call.
:param data: data to decrypt
:param decode: whether to decode bytes to str or not
:return: bytes or str (depends on decode flag)
"""
#result = self.cipher().decrypt(data)
result = self.cipher().decrypt_block(data)
padding = self.mode().padding()
if padding is not None:
result = padding.reverse_pad(result, WAESMode.__data_padding_length__)
return result.decode() if decode else result | [
"def",
"decrypt",
"(",
"self",
",",
"data",
",",
"decode",
"=",
"False",
")",
":",
"#result = self.cipher().decrypt(data)",
"result",
"=",
"self",
".",
"cipher",
"(",
")",
".",
"decrypt_block",
"(",
"data",
")",
"padding",
"=",
"self",
".",
"mode",
"(",
")",
".",
"padding",
"(",
")",
"if",
"padding",
"is",
"not",
"None",
":",
"result",
"=",
"padding",
".",
"reverse_pad",
"(",
"result",
",",
"WAESMode",
".",
"__data_padding_length__",
")",
"return",
"result",
".",
"decode",
"(",
")",
"if",
"decode",
"else",
"result"
] | Decrypt the given data with cipher that is got from AES.cipher call.
:param data: data to decrypt
:param decode: whether to decode bytes to str or not
:return: bytes or str (depends on decode flag) | [
"Decrypt",
"the",
"given",
"data",
"with",
"cipher",
"that",
"is",
"got",
"from",
"AES",
".",
"cipher",
"call",
"."
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/crypto/aes.py#L515-L530 | train |
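The three WAES records above (cipher, encrypt, decrypt) delegate padding and the block cipher to the configured WAESMode. For orientation, a minimal round-trip with the cryptography package used directly — AES-CBC with PKCS7 padding is an assumption for illustration, not necessarily what WAESMode configures:

import os
from cryptography.hazmat.primitives import padding
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

key, iv = os.urandom(32), os.urandom(16)

padder = padding.PKCS7(128).padder()
padded = padder.update(b'secret message') + padder.finalize()

encryptor = Cipher(algorithms.AES(key), modes.CBC(iv)).encryptor()
token = encryptor.update(padded) + encryptor.finalize()

decryptor = Cipher(algorithms.AES(key), modes.CBC(iv)).decryptor()
plain = decryptor.update(token) + decryptor.finalize()

unpadder = padding.PKCS7(128).unpadder()
assert unpadder.update(plain) + unpadder.finalize() == b'secret message'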
a1ezzz/wasp-general | wasp_general/task/thread_tracker.py | WThreadTracker.thread_tracker_exception | def thread_tracker_exception(self, raised_exception):
""" Method is called whenever an exception is raised during registering a event
:param raised_exception: raised exception
:return: None
"""
print('Thread tracker execution was stopped by the exception. Exception: %s' % str(raised_exception))
print('Traceback:')
print(traceback.format_exc()) | python | def thread_tracker_exception(self, raised_exception):
""" Method is called whenever an exception is raised during registering a event
:param raised_exception: raised exception
:return: None
"""
print('Thread tracker execution was stopped by the exception. Exception: %s' % str(raised_exception))
print('Traceback:')
print(traceback.format_exc()) | [
"def",
"thread_tracker_exception",
"(",
"self",
",",
"raised_exception",
")",
":",
"print",
"(",
"'Thread tracker execution was stopped by the exception. Exception: %s'",
"%",
"str",
"(",
"raised_exception",
")",
")",
"print",
"(",
"'Traceback:'",
")",
"print",
"(",
"traceback",
".",
"format_exc",
"(",
")",
")"
] | Method is called whenever an exception is raised during registering a event
:param raised_exception: raised exception
:return: None | [
"Method",
"is",
"called",
"whenever",
"an",
"exception",
"is",
"raised",
"during",
"registering",
"a",
"event"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/task/thread_tracker.py#L278-L287 | train |
a1ezzz/wasp-general | wasp_general/task/thread_tracker.py | WSimpleTrackerStorage.__store_record | def __store_record(self, record):
""" Save record in a internal storage
:param record: record to save
:return: None
"""
if isinstance(record, WSimpleTrackerStorage.Record) is False:
raise TypeError('Invalid record type was')
limit = self.record_limit()
if limit is not None and len(self.__registry) >= limit:
self.__registry.pop(0)
self.__registry.append(record) | python | def __store_record(self, record):
""" Save record in a internal storage
:param record: record to save
:return: None
"""
if isinstance(record, WSimpleTrackerStorage.Record) is False:
raise TypeError('Invalid record type was')
limit = self.record_limit()
if limit is not None and len(self.__registry) >= limit:
self.__registry.pop(0)
self.__registry.append(record) | [
"def",
"__store_record",
"(",
"self",
",",
"record",
")",
":",
"if",
"isinstance",
"(",
"record",
",",
"WSimpleTrackerStorage",
".",
"Record",
")",
"is",
"False",
":",
"raise",
"TypeError",
"(",
"'Invalid record type was'",
")",
"limit",
"=",
"self",
".",
"record_limit",
"(",
")",
"if",
"limit",
"is",
"not",
"None",
"and",
"len",
"(",
"self",
".",
"__registry",
")",
">=",
"limit",
":",
"self",
".",
"__registry",
".",
"pop",
"(",
"0",
")",
"self",
".",
"__registry",
".",
"append",
"(",
"record",
")"
] | Save record in a internal storage
:param record: record to save
:return: None | [
"Save",
"record",
"in",
"a",
"internal",
"storage"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/task/thread_tracker.py#L461-L473 | train |
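A design note on the record limit above: evicting the oldest entry once the cap is reached is what collections.deque(maxlen=...) provides out of the box, so a bounded registry can also be sketched as:

from collections import deque

registry = deque(maxlen=3)        # keep only the last 3 records
for record in ('r1', 'r2', 'r3', 'r4'):
    registry.append(record)       # oldest entry is dropped automatically
assert list(registry) == ['r2', 'r3', 'r4']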
shaypal5/strct | strct/dicts/_dict.py | put_nested_val | def put_nested_val(dict_obj, key_tuple, value):
"""Put a value into nested dicts by the order of the given keys tuple.
Any missing intermediate dicts are created.
Parameters
----------
dict_obj : dict
The outer-most dict to put in.
key_tuple : tuple
The keys to use for putting, in order.
value : object
The value to put.
Example
-------
>>> dict_obj = {'a': {'h': 3}}
>>> put_nested_val(dict_obj, ('a', 'b'), 7)
>>> dict_obj['a']['b']
7
>>> put_nested_val(dict_obj, ('a', 'b'), 12)
>>> dict_obj['a']['b']
12
>>> put_nested_val(dict_obj, ('a', 'g', 'z'), 14)
>>> dict_obj['a']['g']['z']
14
>>> put_nested_val(dict_obj, ['base'], 88)
>>> dict_obj['base']
88
"""
current_dict = dict_obj
for key in key_tuple[:-1]:
try:
current_dict = current_dict[key]
except KeyError:
current_dict[key] = {}
current_dict = current_dict[key]
current_dict[key_tuple[-1]] = value | python | def put_nested_val(dict_obj, key_tuple, value):
"""Put a value into nested dicts by the order of the given keys tuple.
Any missing intermediate dicts are created.
Parameters
----------
dict_obj : dict
The outer-most dict to put in.
key_tuple : tuple
The keys to use for putting, in order.
value : object
The value to put.
Example
-------
>>> dict_obj = {'a': {'h': 3}}
>>> put_nested_val(dict_obj, ('a', 'b'), 7)
>>> dict_obj['a']['b']
7
>>> put_nested_val(dict_obj, ('a', 'b'), 12)
>>> dict_obj['a']['b']
12
>>> put_nested_val(dict_obj, ('a', 'g', 'z'), 14)
>>> dict_obj['a']['g']['z']
14
>>> put_nested_val(dict_obj, ['base'], 88)
>>> dict_obj['base']
88
"""
current_dict = dict_obj
for key in key_tuple[:-1]:
try:
current_dict = current_dict[key]
except KeyError:
current_dict[key] = {}
current_dict = current_dict[key]
current_dict[key_tuple[-1]] = value | [
"def",
"put_nested_val",
"(",
"dict_obj",
",",
"key_tuple",
",",
"value",
")",
":",
"current_dict",
"=",
"dict_obj",
"for",
"key",
"in",
"key_tuple",
"[",
":",
"-",
"1",
"]",
":",
"try",
":",
"current_dict",
"=",
"current_dict",
"[",
"key",
"]",
"except",
"KeyError",
":",
"current_dict",
"[",
"key",
"]",
"=",
"{",
"}",
"current_dict",
"=",
"current_dict",
"[",
"key",
"]",
"current_dict",
"[",
"key_tuple",
"[",
"-",
"1",
"]",
"]",
"=",
"value"
] | Put a value into nested dicts by the order of the given keys tuple.
Any missing intermediate dicts are created.
Parameters
----------
dict_obj : dict
The outer-most dict to put in.
key_tuple : tuple
The keys to use for putting, in order.
value : object
The value to put.
Example
-------
>>> dict_obj = {'a': {'h': 3}}
>>> put_nested_val(dict_obj, ('a', 'b'), 7)
>>> dict_obj['a']['b']
7
>>> put_nested_val(dict_obj, ('a', 'b'), 12)
>>> dict_obj['a']['b']
12
>>> put_nested_val(dict_obj, ('a', 'g', 'z'), 14)
>>> dict_obj['a']['g']['z']
14
>>> put_nested_val(dict_obj, ['base'], 88)
>>> dict_obj['base']
88 | [
"Put",
"a",
"value",
"into",
"nested",
"dicts",
"by",
"the",
"order",
"of",
"the",
"given",
"keys",
"tuple",
"."
] | f3a301692d052ddb79331230b3c00625db1d83fc | https://github.com/shaypal5/strct/blob/f3a301692d052ddb79331230b3c00625db1d83fc/strct/dicts/_dict.py#L131-L168 | train |
shaypal5/strct | strct/dicts/_dict.py | get_alternative_nested_val | def get_alternative_nested_val(key_tuple, dict_obj):
"""Return a value from nested dicts by any path in the given keys tuple.
Parameters
---------
key_tuple : tuple
Describe all possible paths for extraction.
dict_obj : dict
The outer-most dict to extract from.
Returns
-------
value : object
The extracted value, if exists. Otherwise, raises KeyError.
Example:
--------
>>> dict_obj = {'a': {'b': 7}}
>>> get_alternative_nested_val(('a', ('b', 'c')), dict_obj)
7
"""
# print('key_tuple: {}'.format(key_tuple))
# print('dict_obj: {}'.format(dict_obj))
top_keys = key_tuple[0] if isinstance(key_tuple[0], (list, tuple)) else [
key_tuple[0]]
for key in top_keys:
try:
if len(key_tuple) < 2:
return dict_obj[key]
return get_alternative_nested_val(key_tuple[1:], dict_obj[key])
except (KeyError, TypeError, IndexError):
pass
raise KeyError | python | def get_alternative_nested_val(key_tuple, dict_obj):
"""Return a value from nested dicts by any path in the given keys tuple.
Parameters
---------
key_tuple : tuple
Describe all possible paths for extraction.
dict_obj : dict
The outer-most dict to extract from.
Returns
-------
value : object
The extracted value, if exists. Otherwise, raises KeyError.
Example:
--------
>>> dict_obj = {'a': {'b': 7}}
>>> get_alternative_nested_val(('a', ('b', 'c')), dict_obj)
7
"""
# print('key_tuple: {}'.format(key_tuple))
# print('dict_obj: {}'.format(dict_obj))
top_keys = key_tuple[0] if isinstance(key_tuple[0], (list, tuple)) else [
key_tuple[0]]
for key in top_keys:
try:
if len(key_tuple) < 2:
return dict_obj[key]
return get_alternative_nested_val(key_tuple[1:], dict_obj[key])
except (KeyError, TypeError, IndexError):
pass
raise KeyError | [
"def",
"get_alternative_nested_val",
"(",
"key_tuple",
",",
"dict_obj",
")",
":",
"# print('key_tuple: {}'.format(key_tuple))",
"# print('dict_obj: {}'.format(dict_obj))",
"top_keys",
"=",
"key_tuple",
"[",
"0",
"]",
"if",
"isinstance",
"(",
"key_tuple",
"[",
"0",
"]",
",",
"(",
"list",
",",
"tuple",
")",
")",
"else",
"[",
"key_tuple",
"[",
"0",
"]",
"]",
"for",
"key",
"in",
"top_keys",
":",
"try",
":",
"if",
"len",
"(",
"key_tuple",
")",
"<",
"2",
":",
"return",
"dict_obj",
"[",
"key",
"]",
"return",
"get_alternative_nested_val",
"(",
"key_tuple",
"[",
"1",
":",
"]",
",",
"dict_obj",
"[",
"key",
"]",
")",
"except",
"(",
"KeyError",
",",
"TypeError",
",",
"IndexError",
")",
":",
"pass",
"raise",
"KeyError"
] | Return a value from nested dicts by any path in the given keys tuple.
Parameters
---------
key_tuple : tuple
Describe all possible paths for extraction.
dict_obj : dict
The outer-most dict to extract from.
Returns
-------
value : object
The extracted value, if exists. Otherwise, raises KeyError.
Example:
--------
>>> dict_obj = {'a': {'b': 7}}
>>> get_alternative_nested_val(('a', ('b', 'c')), dict_obj)
7 | [
"Return",
"a",
"value",
"from",
"nested",
"dicts",
"by",
"any",
"path",
"in",
"the",
"given",
"keys",
"tuple",
"."
] | f3a301692d052ddb79331230b3c00625db1d83fc | https://github.com/shaypal5/strct/blob/f3a301692d052ddb79331230b3c00625db1d83fc/strct/dicts/_dict.py#L198-L230 | train |
shaypal5/strct | strct/dicts/_dict.py | subdict_by_keys | def subdict_by_keys(dict_obj, keys):
"""Returns a sub-dict composed solely of the given keys.
Parameters
----------
dict_obj : dict
The dict to create a sub-dict from.
keys : list of str
The keys to keep in the sub-dict. Keys not present in the given dict
will be ignored.
Returns
-------
dict
A sub-dict of the given dict composed solely of the given keys.
Example:
--------
>>> dict_obj = {'a': 1, 'b': 2, 'c': 3, 'd': 4}
>>> subdict = subdict_by_keys(dict_obj, ['b', 'd', 'e'])
>>> print(sorted(subdict.items()))
[('b', 2), ('d', 4)]
"""
return {k: dict_obj[k] for k in set(keys).intersection(dict_obj.keys())} | python | def subdict_by_keys(dict_obj, keys):
"""Returns a sub-dict composed solely of the given keys.
Parameters
----------
dict_obj : dict
The dict to create a sub-dict from.
keys : list of str
The keys to keep in the sub-dict. Keys not present in the given dict
will be ignored.
Returns
-------
dict
A sub-dict of the given dict composed solely of the given keys.
Example:
--------
>>> dict_obj = {'a': 1, 'b': 2, 'c': 3, 'd': 4}
>>> subdict = subdict_by_keys(dict_obj, ['b', 'd', 'e'])
>>> print(sorted(subdict.items()))
[('b', 2), ('d', 4)]
"""
return {k: dict_obj[k] for k in set(keys).intersection(dict_obj.keys())} | [
"def",
"subdict_by_keys",
"(",
"dict_obj",
",",
"keys",
")",
":",
"return",
"{",
"k",
":",
"dict_obj",
"[",
"k",
"]",
"for",
"k",
"in",
"set",
"(",
"keys",
")",
".",
"intersection",
"(",
"dict_obj",
".",
"keys",
"(",
")",
")",
"}"
] | Returns a sub-dict composed solely of the given keys.
Parameters
----------
dict_obj : dict
The dict to create a sub-dict from.
keys : list of str
The keys to keep in the sub-dict. Keys not present in the given dict
will be ignored.
Returns
-------
dict
A sub-dict of the given dict composed solely of the given keys.
Example:
--------
>>> dict_obj = {'a': 1, 'b': 2, 'c': 3, 'd': 4}
>>> subdict = subdict_by_keys(dict_obj, ['b', 'd', 'e'])
>>> print(sorted(subdict.items()))
[('b', 2), ('d', 4)] | [
"Returns",
"a",
"sub",
"-",
"dict",
"composed",
"solely",
"of",
"the",
"given",
"keys",
"."
] | f3a301692d052ddb79331230b3c00625db1d83fc | https://github.com/shaypal5/strct/blob/f3a301692d052ddb79331230b3c00625db1d83fc/strct/dicts/_dict.py#L289-L312 | train |
shaypal5/strct | strct/dicts/_dict.py | add_to_dict_val_set | def add_to_dict_val_set(dict_obj, key, val):
"""Adds the given val to the set mapped by the given key.
If the key is missing from the dict, the given mapping is added.
Example
-------
>>> dict_obj = {'a': set([1, 2])}
>>> add_to_dict_val_set(dict_obj, 'a', 2)
>>> print(dict_obj['a'])
{1, 2}
>>> add_to_dict_val_set(dict_obj, 'a', 3)
>>> print(dict_obj['a'])
{1, 2, 3}
"""
try:
dict_obj[key].add(val)
except KeyError:
dict_obj[key] = set([val]) | python | def add_to_dict_val_set(dict_obj, key, val):
"""Adds the given val to the set mapped by the given key.
If the key is missing from the dict, the given mapping is added.
Example
-------
>>> dict_obj = {'a': set([1, 2])}
>>> add_to_dict_val_set(dict_obj, 'a', 2)
>>> print(dict_obj['a'])
{1, 2}
>>> add_to_dict_val_set(dict_obj, 'a', 3)
>>> print(dict_obj['a'])
{1, 2, 3}
"""
try:
dict_obj[key].add(val)
except KeyError:
dict_obj[key] = set([val]) | [
"def",
"add_to_dict_val_set",
"(",
"dict_obj",
",",
"key",
",",
"val",
")",
":",
"try",
":",
"dict_obj",
"[",
"key",
"]",
".",
"add",
"(",
"val",
")",
"except",
"KeyError",
":",
"dict_obj",
"[",
"key",
"]",
"=",
"set",
"(",
"[",
"val",
"]",
")"
] | Adds the given val to the set mapped by the given key.
If the key is missing from the dict, the given mapping is added.
Example
-------
>>> dict_obj = {'a': set([1, 2])}
>>> add_to_dict_val_set(dict_obj, 'a', 2)
>>> print(dict_obj['a'])
{1, 2}
>>> add_to_dict_val_set(dict_obj, 'a', 3)
>>> print(dict_obj['a'])
{1, 2, 3} | [
"Adds",
"the",
"given",
"val",
"to",
"the",
"set",
"mapped",
"by",
"the",
"given",
"key",
".",
"If",
"the",
"key",
"is",
"missing",
"from",
"the",
"dict",
"the",
"given",
"mapping",
"is",
"added",
"."
] | f3a301692d052ddb79331230b3c00625db1d83fc | https://github.com/shaypal5/strct/blob/f3a301692d052ddb79331230b3c00625db1d83fc/strct/dicts/_dict.py#L386-L403 | train |
shaypal5/strct | strct/dicts/_dict.py | add_many_to_dict_val_set | def add_many_to_dict_val_set(dict_obj, key, val_list):
"""Adds the given value list to the set mapped by the given key.
If the key is missing from the dict, the given mapping is added.
Example
-------
>>> dict_obj = {'a': set([1, 2])}
>>> add_many_to_dict_val_set(dict_obj, 'a', [2, 3])
>>> print(dict_obj['a'])
{1, 2, 3}
>>> add_many_to_dict_val_set(dict_obj, 'b', [2, 3])
>>> print(dict_obj['b'])
{2, 3}
"""
try:
dict_obj[key].update(val_list)
except KeyError:
dict_obj[key] = set(val_list) | python | def add_many_to_dict_val_set(dict_obj, key, val_list):
"""Adds the given value list to the set mapped by the given key.
If the key is missing from the dict, the given mapping is added.
Example
-------
>>> dict_obj = {'a': set([1, 2])}
>>> add_many_to_dict_val_set(dict_obj, 'a', [2, 3])
>>> print(dict_obj['a'])
{1, 2, 3}
>>> add_many_to_dict_val_set(dict_obj, 'b', [2, 3])
>>> print(dict_obj['b'])
{2, 3}
"""
try:
dict_obj[key].update(val_list)
except KeyError:
dict_obj[key] = set(val_list) | [
"def",
"add_many_to_dict_val_set",
"(",
"dict_obj",
",",
"key",
",",
"val_list",
")",
":",
"try",
":",
"dict_obj",
"[",
"key",
"]",
".",
"update",
"(",
"val_list",
")",
"except",
"KeyError",
":",
"dict_obj",
"[",
"key",
"]",
"=",
"set",
"(",
"val_list",
")"
] | Adds the given value list to the set mapped by the given key.
If the key is missing from the dict, the given mapping is added.
Example
-------
>>> dict_obj = {'a': set([1, 2])}
>>> add_many_to_dict_val_set(dict_obj, 'a', [2, 3])
>>> print(dict_obj['a'])
{1, 2, 3}
>>> add_many_to_dict_val_set(dict_obj, 'b', [2, 3])
>>> print(dict_obj['b'])
{2, 3} | [
"Adds",
"the",
"given",
"value",
"list",
"to",
"the",
"set",
"mapped",
"by",
"the",
"given",
"key",
".",
"If",
"the",
"key",
"is",
"missing",
"from",
"the",
"dict",
"the",
"given",
"mapping",
"is",
"added",
"."
] | f3a301692d052ddb79331230b3c00625db1d83fc | https://github.com/shaypal5/strct/blob/f3a301692d052ddb79331230b3c00625db1d83fc/strct/dicts/_dict.py#L406-L423 | train |
shaypal5/strct | strct/dicts/_dict.py | add_many_to_dict_val_list | def add_many_to_dict_val_list(dict_obj, key, val_list):
"""Adds the given value list to the list mapped by the given key.
If the key is missing from the dict, the given mapping is added.
Example
-------
>>> dict_obj = {'a': [1, 2]}
>>> add_many_to_dict_val_list(dict_obj, 'a', [2, 3])
>>> print(dict_obj['a'])
[1, 2, 2, 3]
>>> add_many_to_dict_val_list(dict_obj, 'b', [2, 3])
>>> print(dict_obj['b'])
[2, 3]
"""
try:
dict_obj[key].extend(val_list)
except KeyError:
dict_obj[key] = list(val_list) | python | def add_many_to_dict_val_list(dict_obj, key, val_list):
"""Adds the given value list to the list mapped by the given key.
If the key is missing from the dict, the given mapping is added.
Example
-------
>>> dict_obj = {'a': [1, 2]}
>>> add_many_to_dict_val_list(dict_obj, 'a', [2, 3])
>>> print(dict_obj['a'])
[1, 2, 2, 3]
>>> add_many_to_dict_val_list(dict_obj, 'b', [2, 3])
>>> print(dict_obj['b'])
[2, 3]
"""
try:
dict_obj[key].extend(val_list)
except KeyError:
dict_obj[key] = list(val_list) | [
"def",
"add_many_to_dict_val_list",
"(",
"dict_obj",
",",
"key",
",",
"val_list",
")",
":",
"try",
":",
"dict_obj",
"[",
"key",
"]",
".",
"extend",
"(",
"val_list",
")",
"except",
"KeyError",
":",
"dict_obj",
"[",
"key",
"]",
"=",
"list",
"(",
"val_list",
")"
] | Adds the given value list to the list mapped by the given key.
If the key is missing from the dict, the given mapping is added.
Example
-------
>>> dict_obj = {'a': [1, 2]}
>>> add_many_to_dict_val_list(dict_obj, 'a', [2, 3])
>>> print(dict_obj['a'])
[1, 2, 2, 3]
>>> add_many_to_dict_val_list(dict_obj, 'b', [2, 3])
>>> print(dict_obj['b'])
[2, 3] | [
"Adds",
"the",
"given",
"value",
"list",
"to",
"the",
"list",
"mapped",
"by",
"the",
"given",
"key",
".",
"If",
"the",
"key",
"is",
"missing",
"from",
"the",
"dict",
"the",
"given",
"mapping",
"is",
"added",
"."
] | f3a301692d052ddb79331230b3c00625db1d83fc | https://github.com/shaypal5/strct/blob/f3a301692d052ddb79331230b3c00625db1d83fc/strct/dicts/_dict.py#L426-L443 | train |
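The add_to_dict_val_set / add_many_to_dict_val_set / add_many_to_dict_val_list helpers above mirror the collections.defaultdict idiom; a short sketch of the equivalent calls (same results as the doctests, though defaultdict changes the mapping type, which the helpers deliberately avoid):

from collections import defaultdict

sets = defaultdict(set)
sets['a'].update([1, 2])
sets['a'].add(3)
assert sets['a'] == {1, 2, 3}

lists = defaultdict(list)
lists['a'].extend([1, 2])
lists['a'].extend([2, 3])
assert lists['a'] == [1, 2, 2, 3]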
shaypal5/strct | strct/dicts/_dict.py | get_keys_of_max_n | def get_keys_of_max_n(dict_obj, n):
"""Returns the keys that maps to the top n max values in the given dict.
Example:
--------
>>> dict_obj = {'a':2, 'b':1, 'c':5}
>>> get_keys_of_max_n(dict_obj, 2)
['a', 'c']
"""
return sorted([
item[0]
for item in sorted(
dict_obj.items(), key=lambda item: item[1], reverse=True
)[:n]
]) | python | def get_keys_of_max_n(dict_obj, n):
"""Returns the keys that maps to the top n max values in the given dict.
Example:
--------
>>> dict_obj = {'a':2, 'b':1, 'c':5}
>>> get_keys_of_max_n(dict_obj, 2)
['a', 'c']
"""
return sorted([
item[0]
for item in sorted(
dict_obj.items(), key=lambda item: item[1], reverse=True
)[:n]
]) | [
"def",
"get_keys_of_max_n",
"(",
"dict_obj",
",",
"n",
")",
":",
"return",
"sorted",
"(",
"[",
"item",
"[",
"0",
"]",
"for",
"item",
"in",
"sorted",
"(",
"dict_obj",
".",
"items",
"(",
")",
",",
"key",
"=",
"lambda",
"item",
":",
"item",
"[",
"1",
"]",
",",
"reverse",
"=",
"True",
")",
"[",
":",
"n",
"]",
"]",
")"
] | Returns the keys that maps to the top n max values in the given dict.
Example:
--------
>>> dict_obj = {'a':2, 'b':1, 'c':5}
>>> get_keys_of_max_n(dict_obj, 2)
['a', 'c'] | [
"Returns",
"the",
"keys",
"that",
"maps",
"to",
"the",
"top",
"n",
"max",
"values",
"in",
"the",
"given",
"dict",
"."
] | f3a301692d052ddb79331230b3c00625db1d83fc | https://github.com/shaypal5/strct/blob/f3a301692d052ddb79331230b3c00625db1d83fc/strct/dicts/_dict.py#L459-L473 | train |
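A side note on get_keys_of_max_n: heapq.nlargest selects the same top-n keys without sorting the whole dict, returning them in descending-value order, so a final sorted() reproduces the doctest output above:

import heapq

dict_obj = {'a': 2, 'b': 1, 'c': 5}
top_keys = heapq.nlargest(2, dict_obj, key=dict_obj.get)
assert sorted(top_keys) == ['a', 'c']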
shaypal5/strct | strct/dicts/_dict.py | deep_merge_dict | def deep_merge_dict(base, priority):
"""Recursively merges the two given dicts into a single dict.
Treating base as the the initial point of the resulting merged dict,
and considering the nested dictionaries as trees, they are merged os:
1. Every path to every leaf in priority would be represented in the result.
2. Subtrees of base are overwritten if a leaf is found in the
corresponding path in priority.
3. The invariant that all priority leaf nodes remain leafs is maintained.
Parameters
----------
base : dict
The first, lower-priority, dict to merge.
priority : dict
The second, higher-priority, dict to merge.
Returns
-------
dict
A recursive merge of the two given dicts.
Example:
--------
>>> base = {'a': 1, 'b': 2, 'c': {'d': 4}, 'e': 5}
>>> priority = {'a': {'g': 7}, 'c': 3, 'e': 5, 'f': 6}
>>> result = deep_merge_dict(base, priority)
>>> print(sorted(result.items()))
[('a', {'g': 7}), ('b', 2), ('c', 3), ('e', 5), ('f', 6)]
"""
if not isinstance(base, dict) or not isinstance(priority, dict):
return priority
result = copy.deepcopy(base)
for key in priority.keys():
if key in base:
result[key] = deep_merge_dict(base[key], priority[key])
else:
result[key] = priority[key]
return result | python | def deep_merge_dict(base, priority):
"""Recursively merges the two given dicts into a single dict.
Treating base as the the initial point of the resulting merged dict,
and considering the nested dictionaries as trees, they are merged os:
1. Every path to every leaf in priority would be represented in the result.
2. Subtrees of base are overwritten if a leaf is found in the
corresponding path in priority.
3. The invariant that all priority leaf nodes remain leafs is maintained.
Parameters
----------
base : dict
The first, lower-priority, dict to merge.
priority : dict
The second, higher-priority, dict to merge.
Returns
-------
dict
A recursive merge of the two given dicts.
Example:
--------
>>> base = {'a': 1, 'b': 2, 'c': {'d': 4}, 'e': 5}
>>> priority = {'a': {'g': 7}, 'c': 3, 'e': 5, 'f': 6}
>>> result = deep_merge_dict(base, priority)
>>> print(sorted(result.items()))
[('a', {'g': 7}), ('b', 2), ('c', 3), ('e', 5), ('f', 6)]
"""
if not isinstance(base, dict) or not isinstance(priority, dict):
return priority
result = copy.deepcopy(base)
for key in priority.keys():
if key in base:
result[key] = deep_merge_dict(base[key], priority[key])
else:
result[key] = priority[key]
return result | [
"def",
"deep_merge_dict",
"(",
"base",
",",
"priority",
")",
":",
"if",
"not",
"isinstance",
"(",
"base",
",",
"dict",
")",
"or",
"not",
"isinstance",
"(",
"priority",
",",
"dict",
")",
":",
"return",
"priority",
"result",
"=",
"copy",
".",
"deepcopy",
"(",
"base",
")",
"for",
"key",
"in",
"priority",
".",
"keys",
"(",
")",
":",
"if",
"key",
"in",
"base",
":",
"result",
"[",
"key",
"]",
"=",
"deep_merge_dict",
"(",
"base",
"[",
"key",
"]",
",",
"priority",
"[",
"key",
"]",
")",
"else",
":",
"result",
"[",
"key",
"]",
"=",
"priority",
"[",
"key",
"]",
"return",
"result"
] | Recursively merges the two given dicts into a single dict.
Treating base as the the initial point of the resulting merged dict,
and considering the nested dictionaries as trees, they are merged os:
1. Every path to every leaf in priority would be represented in the result.
2. Subtrees of base are overwritten if a leaf is found in the
corresponding path in priority.
3. The invariant that all priority leaf nodes remain leafs is maintained.
Parameters
----------
base : dict
The first, lower-priority, dict to merge.
priority : dict
The second, higher-priority, dict to merge.
Returns
-------
dict
A recursive merge of the two given dicts.
Example:
--------
>>> base = {'a': 1, 'b': 2, 'c': {'d': 4}, 'e': 5}
>>> priority = {'a': {'g': 7}, 'c': 3, 'e': 5, 'f': 6}
>>> result = deep_merge_dict(base, priority)
>>> print(sorted(result.items()))
[('a', {'g': 7}), ('b', 2), ('c', 3), ('e', 5), ('f', 6)] | [
"Recursively",
"merges",
"the",
"two",
"given",
"dicts",
"into",
"a",
"single",
"dict",
"."
] | f3a301692d052ddb79331230b3c00625db1d83fc | https://github.com/shaypal5/strct/blob/f3a301692d052ddb79331230b3c00625db1d83fc/strct/dicts/_dict.py#L539-L578 | train |
shaypal5/strct | strct/dicts/_dict.py | norm_int_dict | def norm_int_dict(int_dict):
"""Normalizes values in the given dict with int values.
Parameters
----------
int_dict : list
A dict object mapping each key to an int value.
Returns
-------
dict
A dict where each key is mapped to its relative part in the sum of
all dict values.
Example
-------
>>> dict_obj = {'a': 3, 'b': 5, 'c': 2}
>>> result = norm_int_dict(dict_obj)
>>> print(sorted(result.items()))
[('a', 0.3), ('b', 0.5), ('c', 0.2)]
"""
norm_dict = int_dict.copy()
val_sum = sum(norm_dict.values())
for key in norm_dict:
norm_dict[key] = norm_dict[key] / val_sum
return norm_dict | python | def norm_int_dict(int_dict):
"""Normalizes values in the given dict with int values.
Parameters
----------
int_dict : list
A dict object mapping each key to an int value.
Returns
-------
dict
A dict where each key is mapped to its relative part in the sum of
all dict values.
Example
-------
>>> dict_obj = {'a': 3, 'b': 5, 'c': 2}
>>> result = norm_int_dict(dict_obj)
>>> print(sorted(result.items()))
[('a', 0.3), ('b', 0.5), ('c', 0.2)]
"""
norm_dict = int_dict.copy()
val_sum = sum(norm_dict.values())
for key in norm_dict:
norm_dict[key] = norm_dict[key] / val_sum
return norm_dict | [
"def",
"norm_int_dict",
"(",
"int_dict",
")",
":",
"norm_dict",
"=",
"int_dict",
".",
"copy",
"(",
")",
"val_sum",
"=",
"sum",
"(",
"norm_dict",
".",
"values",
"(",
")",
")",
"for",
"key",
"in",
"norm_dict",
":",
"norm_dict",
"[",
"key",
"]",
"=",
"norm_dict",
"[",
"key",
"]",
"/",
"val_sum",
"return",
"norm_dict"
] | Normalizes values in the given dict with int values.
Parameters
----------
int_dict : list
A dict object mapping each key to an int value.
Returns
-------
dict
A dict where each key is mapped to its relative part in the sum of
all dict values.
Example
-------
>>> dict_obj = {'a': 3, 'b': 5, 'c': 2}
>>> result = norm_int_dict(dict_obj)
>>> print(sorted(result.items()))
[('a', 0.3), ('b', 0.5), ('c', 0.2)] | [
"Normalizes",
"values",
"in",
"the",
"given",
"dict",
"with",
"int",
"values",
"."
] | f3a301692d052ddb79331230b3c00625db1d83fc | https://github.com/shaypal5/strct/blob/f3a301692d052ddb79331230b3c00625db1d83fc/strct/dicts/_dict.py#L581-L606 | train |