column             dtype            values
repo               stringlengths    7 - 55
path               stringlengths    4 - 127
func_name          stringlengths    1 - 88
original_string    stringlengths    75 - 19.8k
language           stringclasses    1 value
code               stringlengths    75 - 19.8k
code_tokens        sequence
docstring          stringlengths    3 - 17.3k
docstring_tokens   sequence
sha                stringlengths    40 - 40
url                stringlengths    87 - 242
partition          stringclasses    1 value
KE-works/pykechain
pykechain/client.py
Client.user
def user(self, username=None, pk=None, **kwargs): """ User of KE-chain. Provides single user of :class:`User` of KE-chain. You can filter on username or id or an advanced filter. :param username: (optional) username to filter :type username: basestring or None :param pk: (optional) id of the user to filter :type pk: basestring or None :param kwargs: Additional filtering keyword=value arguments :type kwargs: dict or None :return: List of :class:`User` :raises NotFoundError: when a user could not be found :raises MultipleFoundError: when more than a single user can be found """ _users = self.users(username=username, pk=pk, **kwargs) if len(_users) == 0: raise NotFoundError("No user criteria matches") if len(_users) != 1: raise MultipleFoundError("Multiple users fit criteria") return _users[0]
python
def user(self, username=None, pk=None, **kwargs):
    """
    User of KE-chain.

    Provides single user of :class:`User` of KE-chain. You can filter on username or id or an advanced filter.

    :param username: (optional) username to filter
    :type username: basestring or None
    :param pk: (optional) id of the user to filter
    :type pk: basestring or None
    :param kwargs: Additional filtering keyword=value arguments
    :type kwargs: dict or None
    :return: List of :class:`User`
    :raises NotFoundError: when a user could not be found
    :raises MultipleFoundError: when more than a single user can be found
    """
    _users = self.users(username=username, pk=pk, **kwargs)

    if len(_users) == 0:
        raise NotFoundError("No user criteria matches")
    if len(_users) != 1:
        raise MultipleFoundError("Multiple users fit criteria")

    return _users[0]
[ "def", "user", "(", "self", ",", "username", "=", "None", ",", "pk", "=", "None", ",", "*", "*", "kwargs", ")", ":", "_users", "=", "self", ".", "users", "(", "username", "=", "username", ",", "pk", "=", "pk", ",", "*", "*", "kwargs", ")", "if", "len", "(", "_users", ")", "==", "0", ":", "raise", "NotFoundError", "(", "\"No user criteria matches\"", ")", "if", "len", "(", "_users", ")", "!=", "1", ":", "raise", "MultipleFoundError", "(", "\"Multiple users fit criteria\"", ")", "return", "_users", "[", "0", "]" ]
User of KE-chain. Provides single user of :class:`User` of KE-chain. You can filter on username or id or an advanced filter. :param username: (optional) username to filter :type username: basestring or None :param pk: (optional) id of the user to filter :type pk: basestring or None :param kwargs: Additional filtering keyword=value arguments :type kwargs: dict or None :return: List of :class:`User` :raises NotFoundError: when a user could not be found :raises MultipleFoundError: when more than a single user can be found
[ "User", "of", "KE", "-", "chain", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/client.py#L818-L841
train
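A minimal usage sketch for the record above. The user() call and its keyword arguments come from the code itself; the Client import path follows this record's file path, while the server URL, the login(token=...) call and the exceptions module are assumptions about pykechain setup that the record does not show.

from pykechain.client import Client
from pykechain.exceptions import NotFoundError, MultipleFoundError  # assumed location of the errors

client = Client(url='https://kec.example.com')   # hypothetical KE-chain server
client.login(token='<api-token>')                # assumed authentication call

try:
    me = client.user(username='jdoe')            # exactly one user must match
    print(me)
except (NotFoundError, MultipleFoundError) as exc:
    print('user lookup failed:', exc)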
KE-works/pykechain
pykechain/client.py
Client.team
def team(self, name=None, id=None, is_hidden=False, **kwargs): """ Team of KE-chain. Provides a team of :class:`Team` of KE-chain. You can filter on team name or provide id. :param name: (optional) team name to filter :type name: basestring or None :param id: (optional) id of the user to filter :type id: basestring or None :param is_hidden: (optional) boolean to show non-hidden or hidden teams or both (None) (default is non-hidden) :type is_hidden: bool or None :param kwargs: Additional filtering keyword=value arguments :type kwargs: dict or None :return: List of :class:`Team` :raises NotFoundError: when a user could not be found :raises MultipleFoundError: when more than a single user can be found """ _teams = self.teams(name=name, id=id, **kwargs) if len(_teams) == 0: raise NotFoundError("No team criteria matches") if len(_teams) != 1: raise MultipleFoundError("Multiple teams fit criteria") return _teams[0]
python
def team(self, name=None, id=None, is_hidden=False, **kwargs):
    """
    Team of KE-chain.

    Provides a team of :class:`Team` of KE-chain. You can filter on team name or provide id.

    :param name: (optional) team name to filter
    :type name: basestring or None
    :param id: (optional) id of the user to filter
    :type id: basestring or None
    :param is_hidden: (optional) boolean to show non-hidden or hidden teams or both (None) (default is non-hidden)
    :type is_hidden: bool or None
    :param kwargs: Additional filtering keyword=value arguments
    :type kwargs: dict or None
    :return: List of :class:`Team`
    :raises NotFoundError: when a user could not be found
    :raises MultipleFoundError: when more than a single user can be found
    """
    _teams = self.teams(name=name, id=id, **kwargs)

    if len(_teams) == 0:
        raise NotFoundError("No team criteria matches")
    if len(_teams) != 1:
        raise MultipleFoundError("Multiple teams fit criteria")

    return _teams[0]
[ "def", "team", "(", "self", ",", "name", "=", "None", ",", "id", "=", "None", ",", "is_hidden", "=", "False", ",", "*", "*", "kwargs", ")", ":", "_teams", "=", "self", ".", "teams", "(", "name", "=", "name", ",", "id", "=", "id", ",", "*", "*", "kwargs", ")", "if", "len", "(", "_teams", ")", "==", "0", ":", "raise", "NotFoundError", "(", "\"No team criteria matches\"", ")", "if", "len", "(", "_teams", ")", "!=", "1", ":", "raise", "MultipleFoundError", "(", "\"Multiple teams fit criteria\"", ")", "return", "_teams", "[", "0", "]" ]
Team of KE-chain. Provides a team of :class:`Team` of KE-chain. You can filter on team name or provide id. :param name: (optional) team name to filter :type name: basestring or None :param id: (optional) id of the user to filter :type id: basestring or None :param is_hidden: (optional) boolean to show non-hidden or hidden teams or both (None) (default is non-hidden) :type is_hidden: bool or None :param kwargs: Additional filtering keyword=value arguments :type kwargs: dict or None :return: List of :class:`Team` :raises NotFoundError: when a user could not be found :raises MultipleFoundError: when more than a single user can be found
[ "Team", "of", "KE", "-", "chain", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/client.py#L843-L868
train
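The same lookup pattern applies to teams; a sketch under the same setup assumptions as the previous example.

from pykechain.client import Client
from pykechain.exceptions import NotFoundError, MultipleFoundError  # assumed location of the errors

client = Client(url='https://kec.example.com')   # hypothetical server
client.login(token='<api-token>')                # assumed authentication call

try:
    team = client.team(name='Engineering')       # name or id must resolve to exactly one team
except (NotFoundError, MultipleFoundError) as exc:
    print('team lookup failed:', exc)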
KE-works/pykechain
pykechain/client.py
Client.teams
def teams(self, name=None, id=None, is_hidden=False, **kwargs): """ Teams of KE-chain. Provide a list of :class:`Team`s of KE-chain. You can filter on teamname or id or any other advanced filter. :param name: (optional) teamname to filter :type name: basestring or None :param id: (optional) id of the team to filter :type id: basestring or None :param is_hidden: (optional) boolean to show non-hidden or hidden teams or both (None) (default is non-hidden) :type is_hidden: bool or None :param kwargs: Additional filtering keyword=value arguments :type kwargs: dict or None :return: List of :class:`Teams` :raises NotFoundError: when a team could not be found """ request_params = { 'name': name, 'id': id, 'is_hidden': is_hidden } if kwargs: request_params.update(**kwargs) r = self._request('GET', self._build_url('teams'), params=request_params) if r.status_code != requests.codes.ok: # pragma: no cover raise NotFoundError("Could not find teams: '{}'".format(r.json())) data = r.json() return [Team(team, client=self) for team in data['results']]
python
def teams(self, name=None, id=None, is_hidden=False, **kwargs):
    """
    Teams of KE-chain.

    Provide a list of :class:`Team`s of KE-chain. You can filter on teamname or id or any other advanced filter.

    :param name: (optional) teamname to filter
    :type name: basestring or None
    :param id: (optional) id of the team to filter
    :type id: basestring or None
    :param is_hidden: (optional) boolean to show non-hidden or hidden teams or both (None) (default is non-hidden)
    :type is_hidden: bool or None
    :param kwargs: Additional filtering keyword=value arguments
    :type kwargs: dict or None
    :return: List of :class:`Teams`
    :raises NotFoundError: when a team could not be found
    """
    request_params = {
        'name': name,
        'id': id,
        'is_hidden': is_hidden
    }
    if kwargs:
        request_params.update(**kwargs)

    r = self._request('GET', self._build_url('teams'), params=request_params)

    if r.status_code != requests.codes.ok:  # pragma: no cover
        raise NotFoundError("Could not find teams: '{}'".format(r.json()))

    data = r.json()
    return [Team(team, client=self) for team in data['results']]
[ "def", "teams", "(", "self", ",", "name", "=", "None", ",", "id", "=", "None", ",", "is_hidden", "=", "False", ",", "*", "*", "kwargs", ")", ":", "request_params", "=", "{", "'name'", ":", "name", ",", "'id'", ":", "id", ",", "'is_hidden'", ":", "is_hidden", "}", "if", "kwargs", ":", "request_params", ".", "update", "(", "*", "*", "kwargs", ")", "r", "=", "self", ".", "_request", "(", "'GET'", ",", "self", ".", "_build_url", "(", "'teams'", ")", ",", "params", "=", "request_params", ")", "if", "r", ".", "status_code", "!=", "requests", ".", "codes", ".", "ok", ":", "# pragma: no cover", "raise", "NotFoundError", "(", "\"Could not find teams: '{}'\"", ".", "format", "(", "r", ".", "json", "(", ")", ")", ")", "data", "=", "r", ".", "json", "(", ")", "return", "[", "Team", "(", "team", ",", "client", "=", "self", ")", "for", "team", "in", "data", "[", "'results'", "]", "]" ]
Teams of KE-chain. Provide a list of :class:`Team`s of KE-chain. You can filter on teamname or id or any other advanced filter. :param name: (optional) teamname to filter :type name: basestring or None :param id: (optional) id of the team to filter :type id: basestring or None :param is_hidden: (optional) boolean to show non-hidden or hidden teams or both (None) (default is non-hidden) :type is_hidden: bool or None :param kwargs: Additional filtering keyword=value arguments :type kwargs: dict or None :return: List of :class:`Teams` :raises NotFoundError: when a team could not be found
[ "Teams", "of", "KE", "-", "chain", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/client.py#L870-L901
train
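Unlike user() and team(), teams() returns a list, so no uniqueness check applies. A sketch under the same client setup assumptions as above.

from pykechain.client import Client

client = Client(url='https://kec.example.com')   # hypothetical server
client.login(token='<api-token>')                # assumed authentication call

visible = client.teams(is_hidden=False)          # default: non-hidden teams only
everything = client.teams(is_hidden=None)        # per the docstring, None returns both
print(len(visible), len(everything))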
KE-works/pykechain
pykechain/client.py
Client._create_part
def _create_part(self, action, data, **kwargs): """Create a part internal core function.""" # suppress_kevents should be in the data (not the query_params) if 'suppress_kevents' in kwargs: data['suppress_kevents'] = kwargs.pop('suppress_kevents') # prepare url query parameters query_params = kwargs query_params['select_action'] = action response = self._request('POST', self._build_url('parts'), params=query_params, # {"select_action": action}, data=data) if response.status_code != requests.codes.created: raise APIError("Could not create part, {}: {}".format(str(response), response.content)) return Part(response.json()['results'][0], client=self)
python
def _create_part(self, action, data, **kwargs):
    """Create a part internal core function."""
    # suppress_kevents should be in the data (not the query_params)
    if 'suppress_kevents' in kwargs:
        data['suppress_kevents'] = kwargs.pop('suppress_kevents')

    # prepare url query parameters
    query_params = kwargs
    query_params['select_action'] = action

    response = self._request('POST', self._build_url('parts'),
                             params=query_params,  # {"select_action": action},
                             data=data)

    if response.status_code != requests.codes.created:
        raise APIError("Could not create part, {}: {}".format(str(response), response.content))

    return Part(response.json()['results'][0], client=self)
[ "def", "_create_part", "(", "self", ",", "action", ",", "data", ",", "*", "*", "kwargs", ")", ":", "# suppress_kevents should be in the data (not the query_params)", "if", "'suppress_kevents'", "in", "kwargs", ":", "data", "[", "'suppress_kevents'", "]", "=", "kwargs", ".", "pop", "(", "'suppress_kevents'", ")", "# prepare url query parameters", "query_params", "=", "kwargs", "query_params", "[", "'select_action'", "]", "=", "action", "response", "=", "self", ".", "_request", "(", "'POST'", ",", "self", ".", "_build_url", "(", "'parts'", ")", ",", "params", "=", "query_params", ",", "# {\"select_action\": action},", "data", "=", "data", ")", "if", "response", ".", "status_code", "!=", "requests", ".", "codes", ".", "created", ":", "raise", "APIError", "(", "\"Could not create part, {}: {}\"", ".", "format", "(", "str", "(", "response", ")", ",", "response", ".", "content", ")", ")", "return", "Part", "(", "response", ".", "json", "(", ")", "[", "'results'", "]", "[", "0", "]", ",", "client", "=", "self", ")" ]
Create a part internal core function.
[ "Create", "a", "part", "internal", "core", "function", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/client.py#L1043-L1060
train
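This is a private helper that the public create_* methods delegate to; the sketch below only illustrates how its parameters map onto the request. The client setup, the client.part()/client.model() lookups and the part names are hypothetical and not shown in this record.

from pykechain.client import Client

client = Client(url='https://kec.example.com')   # hypothetical server
client.login(token='<api-token>')                # assumed authentication call
parent = client.part(name='Bike')                # hypothetical existing instance; client.part() assumed
model = client.model(name='Wheel')               # hypothetical existing model; client.model() assumed

part = client._create_part(                      # private helper; prefer create_part() in real code
    action='new_instance',                       # becomes the select_action query parameter
    data={'name': 'Front Wheel', 'parent': parent.id, 'model': model.id},
    suppress_kevents=True,                       # popped from kwargs and moved into the POST body
)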
KE-works/pykechain
pykechain/client.py
Client.create_part
def create_part(self, parent, model, name=None, **kwargs): """Create a new part instance from a given model under a given parent. In order to prevent the backend from updating the frontend you may add `suppress_kevents=True` as additional keyword=value argument to this method. This will improve performance of the backend against a trade-off that someone looking at the frontend won't notice any changes unless the page is refreshed. :param parent: parent part instance of the new instance :type parent: :class:`models.Part` :param model: target part model on which the new instance is based :type model: :class:`models.Part` :param name: new part name :type name: basestring :param kwargs: (optional) additional keyword=value arguments :return: Part (category = instance) :return: :class:`models.Part` with category `INSTANCE` :raises IllegalArgumentError: When the provided arguments are incorrect :raises APIError: if the `Part` could not be created """ if parent.category != Category.INSTANCE: raise IllegalArgumentError("The parent should be an category 'INSTANCE'") if model.category != Category.MODEL: raise IllegalArgumentError("The models should be of category 'MODEL'") if not name: name = model.name data = { "name": name, "parent": parent.id, "model": model.id } return self._create_part(action="new_instance", data=data, **kwargs)
python
def create_part(self, parent, model, name=None, **kwargs):
    """Create a new part instance from a given model under a given parent.

    In order to prevent the backend from updating the frontend you may add `suppress_kevents=True` as
    additional keyword=value argument to this method. This will improve performance of the backend
    against a trade-off that someone looking at the frontend won't notice any changes unless the page
    is refreshed.

    :param parent: parent part instance of the new instance
    :type parent: :class:`models.Part`
    :param model: target part model on which the new instance is based
    :type model: :class:`models.Part`
    :param name: new part name
    :type name: basestring
    :param kwargs: (optional) additional keyword=value arguments
    :return: Part (category = instance)
    :return: :class:`models.Part` with category `INSTANCE`
    :raises IllegalArgumentError: When the provided arguments are incorrect
    :raises APIError: if the `Part` could not be created
    """
    if parent.category != Category.INSTANCE:
        raise IllegalArgumentError("The parent should be an category 'INSTANCE'")
    if model.category != Category.MODEL:
        raise IllegalArgumentError("The models should be of category 'MODEL'")

    if not name:
        name = model.name

    data = {
        "name": name,
        "parent": parent.id,
        "model": model.id
    }

    return self._create_part(action="new_instance", data=data, **kwargs)
[ "def", "create_part", "(", "self", ",", "parent", ",", "model", ",", "name", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "parent", ".", "category", "!=", "Category", ".", "INSTANCE", ":", "raise", "IllegalArgumentError", "(", "\"The parent should be an category 'INSTANCE'\"", ")", "if", "model", ".", "category", "!=", "Category", ".", "MODEL", ":", "raise", "IllegalArgumentError", "(", "\"The models should be of category 'MODEL'\"", ")", "if", "not", "name", ":", "name", "=", "model", ".", "name", "data", "=", "{", "\"name\"", ":", "name", ",", "\"parent\"", ":", "parent", ".", "id", ",", "\"model\"", ":", "model", ".", "id", "}", "return", "self", ".", "_create_part", "(", "action", "=", "\"new_instance\"", ",", "data", "=", "data", ",", "*", "*", "kwargs", ")" ]
Create a new part instance from a given model under a given parent. In order to prevent the backend from updating the frontend you may add `suppress_kevents=True` as additional keyword=value argument to this method. This will improve performance of the backend against a trade-off that someone looking at the frontend won't notice any changes unless the page is refreshed. :param parent: parent part instance of the new instance :type parent: :class:`models.Part` :param model: target part model on which the new instance is based :type model: :class:`models.Part` :param name: new part name :type name: basestring :param kwargs: (optional) additional keyword=value arguments :return: Part (category = instance) :return: :class:`models.Part` with category `INSTANCE` :raises IllegalArgumentError: When the provided arguments are incorrect :raises APIError: if the `Part` could not be created
[ "Create", "a", "new", "part", "instance", "from", "a", "given", "model", "under", "a", "given", "parent", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/client.py#L1062-L1096
train
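A usage sketch of the public entry point above. The parent/model category requirements and the suppress_kevents behaviour come from the record; the server, the lookups and the part names are assumptions.

from pykechain.client import Client

client = Client(url='https://kec.example.com')       # hypothetical server
client.login(token='<api-token>')                    # assumed authentication call

bike = client.part(name='Bike')                      # parent, category INSTANCE; client.part() assumed
wheel_model = client.model(name='Wheel')             # model, category MODEL; client.model() assumed

# name defaults to the model name when omitted; suppress_kevents skips live frontend updates.
front_wheel = client.create_part(parent=bike, model=wheel_model,
                                 name='Front Wheel', suppress_kevents=True)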
KE-works/pykechain
pykechain/client.py
Client.create_model
def create_model(self, parent, name, multiplicity='ZERO_MANY', **kwargs): """Create a new child model under a given parent. In order to prevent the backend from updating the frontend you may add `suppress_kevents=True` as additional keyword=value argument to this method. This will improve performance of the backend against a trade-off that someone looking at the frontend won't notice any changes unless the page is refreshed. :param parent: parent model :param name: new model name :param parent: parent part instance :type parent: :class:`models.Part` :param name: new part name :type name: basestring :param multiplicity: choose between ZERO_ONE, ONE, ZERO_MANY, ONE_MANY or M_N :type multiplicity: basestring :param kwargs: (optional) additional keyword=value arguments :type kwargs: dict :return: :class:`models.Part` with category `MODEL` :raises IllegalArgumentError: When the provided arguments are incorrect :raises APIError: if the `Part` could not be created """ if parent.category != Category.MODEL: raise IllegalArgumentError("The parent should be of category 'MODEL'") data = { "name": name, "parent": parent.id, "multiplicity": multiplicity } return self._create_part(action="create_child_model", data=data, **kwargs)
python
def create_model(self, parent, name, multiplicity='ZERO_MANY', **kwargs):
    """Create a new child model under a given parent.

    In order to prevent the backend from updating the frontend you may add `suppress_kevents=True` as
    additional keyword=value argument to this method. This will improve performance of the backend
    against a trade-off that someone looking at the frontend won't notice any changes unless the page
    is refreshed.

    :param parent: parent model
    :param name: new model name
    :param parent: parent part instance
    :type parent: :class:`models.Part`
    :param name: new part name
    :type name: basestring
    :param multiplicity: choose between ZERO_ONE, ONE, ZERO_MANY, ONE_MANY or M_N
    :type multiplicity: basestring
    :param kwargs: (optional) additional keyword=value arguments
    :type kwargs: dict
    :return: :class:`models.Part` with category `MODEL`
    :raises IllegalArgumentError: When the provided arguments are incorrect
    :raises APIError: if the `Part` could not be created
    """
    if parent.category != Category.MODEL:
        raise IllegalArgumentError("The parent should be of category 'MODEL'")

    data = {
        "name": name,
        "parent": parent.id,
        "multiplicity": multiplicity
    }

    return self._create_part(action="create_child_model", data=data, **kwargs)
[ "def", "create_model", "(", "self", ",", "parent", ",", "name", ",", "multiplicity", "=", "'ZERO_MANY'", ",", "*", "*", "kwargs", ")", ":", "if", "parent", ".", "category", "!=", "Category", ".", "MODEL", ":", "raise", "IllegalArgumentError", "(", "\"The parent should be of category 'MODEL'\"", ")", "data", "=", "{", "\"name\"", ":", "name", ",", "\"parent\"", ":", "parent", ".", "id", ",", "\"multiplicity\"", ":", "multiplicity", "}", "return", "self", ".", "_create_part", "(", "action", "=", "\"create_child_model\"", ",", "data", "=", "data", ",", "*", "*", "kwargs", ")" ]
Create a new child model under a given parent. In order to prevent the backend from updating the frontend you may add `suppress_kevents=True` as additional keyword=value argument to this method. This will improve performance of the backend against a trade-off that someone looking at the frontend won't notice any changes unless the page is refreshed. :param parent: parent model :param name: new model name :param parent: parent part instance :type parent: :class:`models.Part` :param name: new part name :type name: basestring :param multiplicity: choose between ZERO_ONE, ONE, ZERO_MANY, ONE_MANY or M_N :type multiplicity: basestring :param kwargs: (optional) additional keyword=value arguments :type kwargs: dict :return: :class:`models.Part` with category `MODEL` :raises IllegalArgumentError: When the provided arguments are incorrect :raises APIError: if the `Part` could not be created
[ "Create", "a", "new", "child", "model", "under", "a", "given", "parent", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/client.py#L1098-L1129
train
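A sketch for creating a child model. The multiplicity strings come from the docstring above; everything about the client setup and the model names is assumed.

from pykechain.client import Client

client = Client(url='https://kec.example.com')       # hypothetical server
client.login(token='<api-token>')                    # assumed authentication call

bike_model = client.model(name='Bike')               # parent must be of category MODEL; client.model() assumed
wheel_model = client.create_model(parent=bike_model, name='Wheel',
                                  multiplicity='ONE_MANY')   # one of the documented multiplicity values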
KE-works/pykechain
pykechain/client.py
Client._create_clone
def _create_clone(self, parent, part, **kwargs): """Create a new `Part` clone under the `Parent`. .. versionadded:: 2.3 :param parent: parent part :type parent: :class:`models.Part` :param part: part to be cloned :type part: :class:`models.Part` :param kwargs: (optional) additional keyword=value arguments :type kwargs: dict :return: cloned :class:`models.Part` :raises APIError: if the `Part` could not be cloned """ if part.category == Category.MODEL: select_action = 'clone_model' else: select_action = 'clone_instance' data = { "part": part.id, "parent": parent.id, "suppress_kevents": kwargs.pop('suppress_kevents', None) } # prepare url query parameters query_params = kwargs query_params['select_action'] = select_action response = self._request('POST', self._build_url('parts'), params=query_params, data=data) if response.status_code != requests.codes.created: raise APIError("Could not clone part, {}: {}".format(str(response), response.content)) return Part(response.json()['results'][0], client=self)
python
def _create_clone(self, parent, part, **kwargs):
    """Create a new `Part` clone under the `Parent`.

    .. versionadded:: 2.3

    :param parent: parent part
    :type parent: :class:`models.Part`
    :param part: part to be cloned
    :type part: :class:`models.Part`
    :param kwargs: (optional) additional keyword=value arguments
    :type kwargs: dict
    :return: cloned :class:`models.Part`
    :raises APIError: if the `Part` could not be cloned
    """
    if part.category == Category.MODEL:
        select_action = 'clone_model'
    else:
        select_action = 'clone_instance'

    data = {
        "part": part.id,
        "parent": parent.id,
        "suppress_kevents": kwargs.pop('suppress_kevents', None)
    }

    # prepare url query parameters
    query_params = kwargs
    query_params['select_action'] = select_action

    response = self._request('POST', self._build_url('parts'),
                             params=query_params,
                             data=data)

    if response.status_code != requests.codes.created:
        raise APIError("Could not clone part, {}: {}".format(str(response), response.content))

    return Part(response.json()['results'][0], client=self)
[ "def", "_create_clone", "(", "self", ",", "parent", ",", "part", ",", "*", "*", "kwargs", ")", ":", "if", "part", ".", "category", "==", "Category", ".", "MODEL", ":", "select_action", "=", "'clone_model'", "else", ":", "select_action", "=", "'clone_instance'", "data", "=", "{", "\"part\"", ":", "part", ".", "id", ",", "\"parent\"", ":", "parent", ".", "id", ",", "\"suppress_kevents\"", ":", "kwargs", ".", "pop", "(", "'suppress_kevents'", ",", "None", ")", "}", "# prepare url query parameters", "query_params", "=", "kwargs", "query_params", "[", "'select_action'", "]", "=", "select_action", "response", "=", "self", ".", "_request", "(", "'POST'", ",", "self", ".", "_build_url", "(", "'parts'", ")", ",", "params", "=", "query_params", ",", "data", "=", "data", ")", "if", "response", ".", "status_code", "!=", "requests", ".", "codes", ".", "created", ":", "raise", "APIError", "(", "\"Could not clone part, {}: {}\"", ".", "format", "(", "str", "(", "response", ")", ",", "response", ".", "content", ")", ")", "return", "Part", "(", "response", ".", "json", "(", ")", "[", "'results'", "]", "[", "0", "]", ",", "client", "=", "self", ")" ]
Create a new `Part` clone under the `Parent`. .. versionadded:: 2.3 :param parent: parent part :type parent: :class:`models.Part` :param part: part to be cloned :type part: :class:`models.Part` :param kwargs: (optional) additional keyword=value arguments :type kwargs: dict :return: cloned :class:`models.Part` :raises APIError: if the `Part` could not be cloned
[ "Create", "a", "new", "Part", "clone", "under", "the", "Parent", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/client.py#L1131-L1167
train
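Again a private helper; the sketch only illustrates the call shape. The choice of clone_model versus clone_instance is made from the part's category as shown above; the client setup and the part names are hypothetical.

from pykechain.client import Client

client = Client(url='https://kec.example.com')       # hypothetical server
client.login(token='<api-token>')                    # assumed authentication call

bike = client.part(name='Bike')                      # hypothetical parent instance; client.part() assumed
wheel = client.part(name='Front Wheel')              # hypothetical part to duplicate

# Private helper: select_action is derived from the part's category (clone_model vs clone_instance).
copy = client._create_clone(parent=bike, part=wheel, suppress_kevents=True)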
KE-works/pykechain
pykechain/client.py
Client.create_property
def create_property(self, model, name, description=None, property_type=PropertyType.CHAR_VALUE, default_value=None, unit=None, options=None): """Create a new property model under a given model. Use the :class:`enums.PropertyType` to select which property type to create to ensure that you provide the correct values to the KE-chain backend. The default is a `PropertyType.CHAR_VALUE` which is a single line text in KE-chain. :param model: parent model :type model: :class:`models.Part` :param name: property model name :type name: basestring :param description: property model description (optional) :type description: basestring or None :param property_type: choose one of the :class:`enums.PropertyType`, defaults to `PropertyType.CHAR_VALUE`. :type property_type: basestring or None :param default_value: (optional) default value used for part instances when creating a model. :type default_value: any :param unit: (optional) unit of the property :type unit: basestring or None :param options: (optional) property options (eg. validators or 'single selectlist choices') :type options: basestring or None :return: a :class:`models.Property` with category `MODEL` :raises IllegalArgumentError: When the provided arguments are incorrect :raises APIError: if the `Property` model could not be created """ if model.category != Category.MODEL: raise IllegalArgumentError("The model should be of category MODEL") if not property_type.endswith('_VALUE'): warnings.warn("Please use the `PropertyType` enumeration to ensure providing correct " "values to the backend.", UserWarning) property_type = '{}_VALUE'.format(property_type.upper()) if property_type not in PropertyType.values(): raise IllegalArgumentError("Please provide a valid propertytype, please use one of `enums.PropertyType`. " "Got: '{}'".format(property_type)) # because the references value only accepts a single 'model_id' in the default value, we need to convert this # to a single value from the list of values. if property_type in (PropertyType.REFERENCE_VALUE, PropertyType.REFERENCES_VALUE) and \ isinstance(default_value, (list, tuple)) and default_value: default_value = default_value[0] data = { "name": name, "part": model.id, "description": description or '', "property_type": property_type.upper(), "value": default_value, "unit": unit or '', "options": options or {} } # # We add options after the fact only if they are available, otherwise the options will be set to null in the # # request and that can't be handled by KE-chain. # if options: # data['options'] = options response = self._request('POST', self._build_url('properties'), json=data) if response.status_code != requests.codes.created: raise APIError("Could not create property") prop = Property.create(response.json()['results'][0], client=self) model.properties.append(prop) return prop
python
def create_property(self, model, name, description=None, property_type=PropertyType.CHAR_VALUE,
                    default_value=None, unit=None, options=None):
    """Create a new property model under a given model.

    Use the :class:`enums.PropertyType` to select which property type to create to ensure that you
    provide the correct values to the KE-chain backend. The default is a `PropertyType.CHAR_VALUE` which is a
    single line text in KE-chain.

    :param model: parent model
    :type model: :class:`models.Part`
    :param name: property model name
    :type name: basestring
    :param description: property model description (optional)
    :type description: basestring or None
    :param property_type: choose one of the :class:`enums.PropertyType`, defaults to `PropertyType.CHAR_VALUE`.
    :type property_type: basestring or None
    :param default_value: (optional) default value used for part instances when creating a model.
    :type default_value: any
    :param unit: (optional) unit of the property
    :type unit: basestring or None
    :param options: (optional) property options (eg. validators or 'single selectlist choices')
    :type options: basestring or None
    :return: a :class:`models.Property` with category `MODEL`
    :raises IllegalArgumentError: When the provided arguments are incorrect
    :raises APIError: if the `Property` model could not be created
    """
    if model.category != Category.MODEL:
        raise IllegalArgumentError("The model should be of category MODEL")

    if not property_type.endswith('_VALUE'):
        warnings.warn("Please use the `PropertyType` enumeration to ensure providing correct "
                      "values to the backend.", UserWarning)
        property_type = '{}_VALUE'.format(property_type.upper())

    if property_type not in PropertyType.values():
        raise IllegalArgumentError("Please provide a valid propertytype, please use one of `enums.PropertyType`. "
                                   "Got: '{}'".format(property_type))

    # because the references value only accepts a single 'model_id' in the default value, we need to convert this
    # to a single value from the list of values.
    if property_type in (PropertyType.REFERENCE_VALUE, PropertyType.REFERENCES_VALUE) and \
            isinstance(default_value, (list, tuple)) and default_value:
        default_value = default_value[0]

    data = {
        "name": name,
        "part": model.id,
        "description": description or '',
        "property_type": property_type.upper(),
        "value": default_value,
        "unit": unit or '',
        "options": options or {}
    }

    # # We add options after the fact only if they are available, otherwise the options will be set to null in the
    # # request and that can't be handled by KE-chain.
    # if options:
    #     data['options'] = options

    response = self._request('POST', self._build_url('properties'), json=data)

    if response.status_code != requests.codes.created:
        raise APIError("Could not create property")

    prop = Property.create(response.json()['results'][0], client=self)

    model.properties.append(prop)

    return prop
[ "def", "create_property", "(", "self", ",", "model", ",", "name", ",", "description", "=", "None", ",", "property_type", "=", "PropertyType", ".", "CHAR_VALUE", ",", "default_value", "=", "None", ",", "unit", "=", "None", ",", "options", "=", "None", ")", ":", "if", "model", ".", "category", "!=", "Category", ".", "MODEL", ":", "raise", "IllegalArgumentError", "(", "\"The model should be of category MODEL\"", ")", "if", "not", "property_type", ".", "endswith", "(", "'_VALUE'", ")", ":", "warnings", ".", "warn", "(", "\"Please use the `PropertyType` enumeration to ensure providing correct \"", "\"values to the backend.\"", ",", "UserWarning", ")", "property_type", "=", "'{}_VALUE'", ".", "format", "(", "property_type", ".", "upper", "(", ")", ")", "if", "property_type", "not", "in", "PropertyType", ".", "values", "(", ")", ":", "raise", "IllegalArgumentError", "(", "\"Please provide a valid propertytype, please use one of `enums.PropertyType`. \"", "\"Got: '{}'\"", ".", "format", "(", "property_type", ")", ")", "# because the references value only accepts a single 'model_id' in the default value, we need to convert this", "# to a single value from the list of values.", "if", "property_type", "in", "(", "PropertyType", ".", "REFERENCE_VALUE", ",", "PropertyType", ".", "REFERENCES_VALUE", ")", "and", "isinstance", "(", "default_value", ",", "(", "list", ",", "tuple", ")", ")", "and", "default_value", ":", "default_value", "=", "default_value", "[", "0", "]", "data", "=", "{", "\"name\"", ":", "name", ",", "\"part\"", ":", "model", ".", "id", ",", "\"description\"", ":", "description", "or", "''", ",", "\"property_type\"", ":", "property_type", ".", "upper", "(", ")", ",", "\"value\"", ":", "default_value", ",", "\"unit\"", ":", "unit", "or", "''", ",", "\"options\"", ":", "options", "or", "{", "}", "}", "# # We add options after the fact only if they are available, otherwise the options will be set to null in the", "# # request and that can't be handled by KE-chain.", "# if options:", "# data['options'] = options", "response", "=", "self", ".", "_request", "(", "'POST'", ",", "self", ".", "_build_url", "(", "'properties'", ")", ",", "json", "=", "data", ")", "if", "response", ".", "status_code", "!=", "requests", ".", "codes", ".", "created", ":", "raise", "APIError", "(", "\"Could not create property\"", ")", "prop", "=", "Property", ".", "create", "(", "response", ".", "json", "(", ")", "[", "'results'", "]", "[", "0", "]", ",", "client", "=", "self", ")", "model", ".", "properties", ".", "append", "(", "prop", ")", "return", "prop" ]
Create a new property model under a given model. Use the :class:`enums.PropertyType` to select which property type to create to ensure that you provide the correct values to the KE-chain backend. The default is a `PropertyType.CHAR_VALUE` which is a single line text in KE-chain. :param model: parent model :type model: :class:`models.Part` :param name: property model name :type name: basestring :param description: property model description (optional) :type description: basestring or None :param property_type: choose one of the :class:`enums.PropertyType`, defaults to `PropertyType.CHAR_VALUE`. :type property_type: basestring or None :param default_value: (optional) default value used for part instances when creating a model. :type default_value: any :param unit: (optional) unit of the property :type unit: basestring or None :param options: (optional) property options (eg. validators or 'single selectlist choices') :type options: basestring or None :return: a :class:`models.Property` with category `MODEL` :raises IllegalArgumentError: When the provided arguments are incorrect :raises APIError: if the `Property` model could not be created
[ "Create", "a", "new", "property", "model", "under", "a", "given", "model", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/client.py#L1208-L1277
train
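A sketch for adding a property model. PropertyType comes from pykechain.enums as referenced by the code; FLOAT_VALUE is assumed to be one of the enum members (the record itself only shows CHAR_VALUE, REFERENCE_VALUE and REFERENCES_VALUE). Client setup and model names are assumptions.

from pykechain.client import Client
from pykechain.enums import PropertyType              # enum referenced by the method above

client = Client(url='https://kec.example.com')        # hypothetical server
client.login(token='<api-token>')                     # assumed authentication call

wheel_model = client.model(name='Wheel')               # client.model() assumed; must be category MODEL
diameter = client.create_property(
    model=wheel_model,
    name='Diameter',
    property_type=PropertyType.FLOAT_VALUE,            # assumed enum member
    default_value=22.5,
    unit='inch',
)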
KE-works/pykechain
pykechain/client.py
Client.create_service
def create_service(self, name, scope, description=None, version=None, service_type=ServiceType.PYTHON_SCRIPT, environment_version=ServiceEnvironmentVersion.PYTHON_3_5, pkg_path=None): """ Create a Service. A service can be created only providing the name (and scope). Other information can be added later. If you provide a path to the `kecpkg` (or python script) to upload (`pkg_path`) on creation, this `kecpkg` will be uploaded in one go. If the later fails, the service is still there, and the package is not uploaded. Permission to upload a script is restricted to a superuser, a user in the `GG:Configurators` group and a Scope Manager of the scope to which you are uploading the script. :param name: Name of the service :type name: basestring :param scope: Scope where the create the Service under :type scope: :class:`models.Scope` :param description: (optional) description of the Service :type description: basestring or None :param version: (optional) version information of the Service :type version: basestring or None :param service_type: (optional) service type of the service (refer to :class:`pykechain.enums.ServiceType`), defaults to `PYTHON_SCRIPT` :type service_type: basestring or None :param environment_version: (optional) execution environment of the service (refer to :class:`pykechain.enums.ServiceEnvironmentVersion`), defaults to `PYTHON_3_5` :type environment_version: basestring or None :param pkg_path: (optional) full path name to the `kecpkg` (or python script) to upload :type pkg_path: basestring or None :return: the created :class:`models.Service` :raises IllegalArgumentError: When the provided arguments are incorrect :raises APIError: In case of failure of the creation or failure to upload the pkg_path :raises OSError: In case of failure to locate the `pkg_path` """ if service_type not in ServiceType.values(): raise IllegalArgumentError("The type should be of one of {}".format(ServiceType.values())) if environment_version not in ServiceEnvironmentVersion.values(): raise IllegalArgumentError("The environment version should be of one of {}". format(ServiceEnvironmentVersion.values())) data = { "name": name, "scope": scope, "description": description, "script_type": service_type, "script_version": version, "env_version": environment_version, } response = self._request('POST', self._build_url('services'), data=data) if response.status_code != requests.codes.created: # pragma: no cover raise APIError("Could not create service ({})".format((response, response.json()))) service = Service(response.json().get('results')[0], client=self) if pkg_path: # upload the package service.upload(pkg_path) # refresh service contents in place service.refresh() return service
python
def create_service(self, name, scope, description=None, version=None,
                   service_type=ServiceType.PYTHON_SCRIPT,
                   environment_version=ServiceEnvironmentVersion.PYTHON_3_5,
                   pkg_path=None):
    """
    Create a Service.

    A service can be created only providing the name (and scope).
    Other information can be added later.

    If you provide a path to the `kecpkg` (or python script) to upload (`pkg_path`) on creation,
    this `kecpkg` will be uploaded in one go. If the later fails, the service is still there, and the
    package is not uploaded.

    Permission to upload a script is restricted to a superuser, a user in the `GG:Configurators` group and a Scope
    Manager of the scope to which you are uploading the script.

    :param name: Name of the service
    :type name: basestring
    :param scope: Scope where the create the Service under
    :type scope: :class:`models.Scope`
    :param description: (optional) description of the Service
    :type description: basestring or None
    :param version: (optional) version information of the Service
    :type version: basestring or None
    :param service_type: (optional) service type of the service (refer to :class:`pykechain.enums.ServiceType`),
                         defaults to `PYTHON_SCRIPT`
    :type service_type: basestring or None
    :param environment_version: (optional) execution environment of the service (refer to
                                :class:`pykechain.enums.ServiceEnvironmentVersion`), defaults to `PYTHON_3_5`
    :type environment_version: basestring or None
    :param pkg_path: (optional) full path name to the `kecpkg` (or python script) to upload
    :type pkg_path: basestring or None
    :return: the created :class:`models.Service`
    :raises IllegalArgumentError: When the provided arguments are incorrect
    :raises APIError: In case of failure of the creation or failure to upload the pkg_path
    :raises OSError: In case of failure to locate the `pkg_path`
    """
    if service_type not in ServiceType.values():
        raise IllegalArgumentError("The type should be of one of {}".format(ServiceType.values()))

    if environment_version not in ServiceEnvironmentVersion.values():
        raise IllegalArgumentError("The environment version should be of one of {}".
                                   format(ServiceEnvironmentVersion.values()))

    data = {
        "name": name,
        "scope": scope,
        "description": description,
        "script_type": service_type,
        "script_version": version,
        "env_version": environment_version,
    }

    response = self._request('POST', self._build_url('services'), data=data)

    if response.status_code != requests.codes.created:  # pragma: no cover
        raise APIError("Could not create service ({})".format((response, response.json())))

    service = Service(response.json().get('results')[0], client=self)

    if pkg_path:
        # upload the package
        service.upload(pkg_path)
        # refresh service contents in place
        service.refresh()

    return service
[ "def", "create_service", "(", "self", ",", "name", ",", "scope", ",", "description", "=", "None", ",", "version", "=", "None", ",", "service_type", "=", "ServiceType", ".", "PYTHON_SCRIPT", ",", "environment_version", "=", "ServiceEnvironmentVersion", ".", "PYTHON_3_5", ",", "pkg_path", "=", "None", ")", ":", "if", "service_type", "not", "in", "ServiceType", ".", "values", "(", ")", ":", "raise", "IllegalArgumentError", "(", "\"The type should be of one of {}\"", ".", "format", "(", "ServiceType", ".", "values", "(", ")", ")", ")", "if", "environment_version", "not", "in", "ServiceEnvironmentVersion", ".", "values", "(", ")", ":", "raise", "IllegalArgumentError", "(", "\"The environment version should be of one of {}\"", ".", "format", "(", "ServiceEnvironmentVersion", ".", "values", "(", ")", ")", ")", "data", "=", "{", "\"name\"", ":", "name", ",", "\"scope\"", ":", "scope", ",", "\"description\"", ":", "description", ",", "\"script_type\"", ":", "service_type", ",", "\"script_version\"", ":", "version", ",", "\"env_version\"", ":", "environment_version", ",", "}", "response", "=", "self", ".", "_request", "(", "'POST'", ",", "self", ".", "_build_url", "(", "'services'", ")", ",", "data", "=", "data", ")", "if", "response", ".", "status_code", "!=", "requests", ".", "codes", ".", "created", ":", "# pragma: no cover", "raise", "APIError", "(", "\"Could not create service ({})\"", ".", "format", "(", "(", "response", ",", "response", ".", "json", "(", ")", ")", ")", ")", "service", "=", "Service", "(", "response", ".", "json", "(", ")", ".", "get", "(", "'results'", ")", "[", "0", "]", ",", "client", "=", "self", ")", "if", "pkg_path", ":", "# upload the package", "service", ".", "upload", "(", "pkg_path", ")", "# refresh service contents in place", "service", ".", "refresh", "(", ")", "return", "service" ]
Create a Service. A service can be created only providing the name (and scope). Other information can be added later. If you provide a path to the `kecpkg` (or python script) to upload (`pkg_path`) on creation, this `kecpkg` will be uploaded in one go. If the later fails, the service is still there, and the package is not uploaded. Permission to upload a script is restricted to a superuser, a user in the `GG:Configurators` group and a Scope Manager of the scope to which you are uploading the script. :param name: Name of the service :type name: basestring :param scope: Scope where the create the Service under :type scope: :class:`models.Scope` :param description: (optional) description of the Service :type description: basestring or None :param version: (optional) version information of the Service :type version: basestring or None :param service_type: (optional) service type of the service (refer to :class:`pykechain.enums.ServiceType`), defaults to `PYTHON_SCRIPT` :type service_type: basestring or None :param environment_version: (optional) execution environment of the service (refer to :class:`pykechain.enums.ServiceEnvironmentVersion`), defaults to `PYTHON_3_5` :type environment_version: basestring or None :param pkg_path: (optional) full path name to the `kecpkg` (or python script) to upload :type pkg_path: basestring or None :return: the created :class:`models.Service` :raises IllegalArgumentError: When the provided arguments are incorrect :raises APIError: In case of failure of the creation or failure to upload the pkg_path :raises OSError: In case of failure to locate the `pkg_path`
[ "Create", "a", "Service", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/client.py#L1279-L1346
train
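A sketch for creating a service. The docstring types scope as a models.Scope while the code posts the value as-is, so whether the object or its id is expected is not fully clear from the record; the sketch passes the id as an assumption. The server, scope lookup and package path are hypothetical.

from pykechain.client import Client
from pykechain.enums import ServiceType, ServiceEnvironmentVersion

client = Client(url='https://kec.example.com')        # hypothetical server
client.login(token='<api-token>')                     # assumed authentication call

scope = client.scope(name='Bike Project')             # client.scope() assumed
service = client.create_service(
    name='Nightly report',
    scope=scope.id,                                    # posted as-is; passing the id is an assumption
    description='Generates the nightly report',
    service_type=ServiceType.PYTHON_SCRIPT,
    environment_version=ServiceEnvironmentVersion.PYTHON_3_5,
    pkg_path='/tmp/report.kecpkg',                     # hypothetical package; uploaded after creation
)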
KE-works/pykechain
pykechain/client.py
Client.delete_scope
def delete_scope(self, scope): """ Delete a scope. This will delete a scope if the client has the right to do so. Sufficient permissions to delete a scope are a superuser, a user in the `GG:Configurators` group or a user that is the Scope manager of the scope to be deleted. :param scope: Scope object to be deleted :type scope: :class: `models.Scope` :return: None :raises APIError: in case of failure in the deletion of the scope """ assert isinstance(scope, Scope), 'Scope "{}" is not a scope!'.format(scope.name) response = self._request('DELETE', self._build_url('scope', scope_id=str(scope.id))) if response.status_code != requests.codes.no_content: # pragma: no cover raise APIError("Could not delete scope, {}: {}".format(str(response), response.content))
python
def delete_scope(self, scope):
    """
    Delete a scope.

    This will delete a scope if the client has the right to do so. Sufficient permissions to delete a scope are a
    superuser, a user in the `GG:Configurators` group or a user that is the Scope manager of the scope to be
    deleted.

    :param scope: Scope object to be deleted
    :type scope: :class: `models.Scope`
    :return: None
    :raises APIError: in case of failure in the deletion of the scope
    """
    assert isinstance(scope, Scope), 'Scope "{}" is not a scope!'.format(scope.name)

    response = self._request('DELETE', self._build_url('scope', scope_id=str(scope.id)))

    if response.status_code != requests.codes.no_content:  # pragma: no cover
        raise APIError("Could not delete scope, {}: {}".format(str(response), response.content))
[ "def", "delete_scope", "(", "self", ",", "scope", ")", ":", "assert", "isinstance", "(", "scope", ",", "Scope", ")", ",", "'Scope \"{}\" is not a scope!'", ".", "format", "(", "scope", ".", "name", ")", "response", "=", "self", ".", "_request", "(", "'DELETE'", ",", "self", ".", "_build_url", "(", "'scope'", ",", "scope_id", "=", "str", "(", "scope", ".", "id", ")", ")", ")", "if", "response", ".", "status_code", "!=", "requests", ".", "codes", ".", "no_content", ":", "# pragma: no cover", "raise", "APIError", "(", "\"Could not delete scope, {}: {}\"", ".", "format", "(", "str", "(", "response", ")", ",", "response", ".", "content", ")", ")" ]
Delete a scope. This will delete a scope if the client has the right to do so. Sufficient permissions to delete a scope are a superuser, a user in the `GG:Configurators` group or a user that is the Scope manager of the scope to be deleted. :param scope: Scope object to be deleted :type scope: :class: `models.Scope` :return: None :raises APIError: in case of failure in the deletion of the scope
[ "Delete", "a", "scope", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/client.py#L1447-L1466
train
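A sketch for deleting a scope. The assert in the code requires a Scope object rather than an id; the lookup helper, the client setup and the exceptions module path are assumptions.

from pykechain.client import Client
from pykechain.exceptions import APIError               # assumed location of the error

client = Client(url='https://kec.example.com')          # hypothetical server
client.login(token='<api-token>')                       # assumed authentication call

scope = client.scope(name='Old Project')                # client.scope() assumed
try:
    client.delete_scope(scope)                          # requires sufficient permissions on the server side
except APIError as exc:
    print('could not delete scope:', exc)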
aptivate/django-sortable-listview
sortable_listview/views.py
SortableListView.set_sort
def set_sort(self, request): """ Take the sort parameter from the get parameters and split it into the field and the prefix """ # Look for 'sort' in get request. If not available use default. sort_request = request.GET.get(self.sort_parameter, self.default_sort) if sort_request.startswith('-'): sort_order = '-' sort_field = sort_request.split('-')[1] else: sort_order = '' sort_field = sort_request # Invalid sort requests fail silently if not sort_field in self._allowed_sort_fields: sort_order = self.default_sort_order sort_field = self.default_sort_field return (sort_order, sort_field)
python
def set_sort(self, request):
    """
    Take the sort parameter from the get parameters and split it into
    the field and the prefix
    """
    # Look for 'sort' in get request. If not available use default.
    sort_request = request.GET.get(self.sort_parameter, self.default_sort)
    if sort_request.startswith('-'):
        sort_order = '-'
        sort_field = sort_request.split('-')[1]
    else:
        sort_order = ''
        sort_field = sort_request
    # Invalid sort requests fail silently
    if not sort_field in self._allowed_sort_fields:
        sort_order = self.default_sort_order
        sort_field = self.default_sort_field
    return (sort_order, sort_field)
[ "def", "set_sort", "(", "self", ",", "request", ")", ":", "# Look for 'sort' in get request. If not available use default.", "sort_request", "=", "request", ".", "GET", ".", "get", "(", "self", ".", "sort_parameter", ",", "self", ".", "default_sort", ")", "if", "sort_request", ".", "startswith", "(", "'-'", ")", ":", "sort_order", "=", "'-'", "sort_field", "=", "sort_request", ".", "split", "(", "'-'", ")", "[", "1", "]", "else", ":", "sort_order", "=", "''", "sort_field", "=", "sort_request", "# Invalid sort requests fail silently", "if", "not", "sort_field", "in", "self", ".", "_allowed_sort_fields", ":", "sort_order", "=", "self", ".", "default_sort_order", "sort_field", "=", "self", ".", "default_sort_field", "return", "(", "sort_order", ",", "sort_field", ")" ]
Take the sort parameter from the get parameters and split it into the field and the prefix
[ "Take", "the", "sort", "parameter", "from", "the", "get", "parameters", "and", "split", "it", "into", "the", "field", "and", "the", "prefix" ]
9d5fa5847f0c3e80893780c6540e5098635ace9f
https://github.com/aptivate/django-sortable-listview/blob/9d5fa5847f0c3e80893780c6540e5098635ace9f/sortable_listview/views.py#L108-L125
train
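A sketch of how such a view is typically configured. The allowed_sort_fields / default_sort_field attribute names appear in this and the following records and the default_direction key is used by get_next_sort_string below; the verbose_name key, the Django User model and the pagination setting are illustrative assumptions.

from django.contrib.auth.models import User
from sortable_listview.views import SortableListView    # import path taken from this record

class UserListView(SortableListView):
    model = User                                         # any Django model; illustrative choice
    paginate_by = 25
    allowed_sort_fields = {
        'username': {'default_direction': '', 'verbose_name': 'Username'},
        'date_joined': {'default_direction': '-', 'verbose_name': 'Joined'},
    }
    default_sort_field = 'username'                      # fallback when the requested field is not allowed

# A request for /users/?sort=-date_joined makes set_sort() return ('-', 'date_joined');
# an unknown field such as ?sort=password falls back silently to the defaults.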
aptivate/django-sortable-listview
sortable_listview/views.py
SortableListView.get_next_sort_string
def get_next_sort_string(self, field): """ If we're already sorted by the field then the sort query returned reverses the sort order. """ # self.sort_field is the currect sort field if field == self.sort_field: next_sort = self.toggle_sort_order() + field else: default_order_for_field = \ self._allowed_sort_fields[field]['default_direction'] next_sort = default_order_for_field + field return self.get_sort_string(next_sort)
python
def get_next_sort_string(self, field):
    """
    If we're already sorted by the field then the sort query
    returned reverses the sort order.
    """
    # self.sort_field is the currect sort field
    if field == self.sort_field:
        next_sort = self.toggle_sort_order() + field
    else:
        default_order_for_field = \
            self._allowed_sort_fields[field]['default_direction']
        next_sort = default_order_for_field + field
    return self.get_sort_string(next_sort)
[ "def", "get_next_sort_string", "(", "self", ",", "field", ")", ":", "# self.sort_field is the currect sort field", "if", "field", "==", "self", ".", "sort_field", ":", "next_sort", "=", "self", ".", "toggle_sort_order", "(", ")", "+", "field", "else", ":", "default_order_for_field", "=", "self", ".", "_allowed_sort_fields", "[", "field", "]", "[", "'default_direction'", "]", "next_sort", "=", "default_order_for_field", "+", "field", "return", "self", ".", "get_sort_string", "(", "next_sort", ")" ]
If we're already sorted by the field then the sort query returned reverses the sort order.
[ "If", "we", "re", "already", "sorted", "by", "the", "field", "then", "the", "sort", "query", "returned", "reverses", "the", "sort", "order", "." ]
9d5fa5847f0c3e80893780c6540e5098635ace9f
https://github.com/aptivate/django-sortable-listview/blob/9d5fa5847f0c3e80893780c6540e5098635ace9f/sortable_listview/views.py#L135-L147
train
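A small sketch exercising the method directly, assuming the hypothetical UserListView from the earlier example and a configured Django project. Setting sort_order and sort_field by hand mimics what the view normally does during request handling, which is an assumption about its internals rather than something shown in this record.

from django.test import RequestFactory
from myapp.views import UserListView                     # hypothetical subclass from the earlier sketch

view = UserListView()
view.request = RequestFactory().get('/users/', {'sort': 'username'})
view.sort_order, view.sort_field = view.set_sort(view.request)   # normally done by the view itself

print(view.get_next_sort_string('username'))       # active field: its direction is toggled
print(view.get_next_sort_string('date_joined'))    # other field: its default_direction is used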
aptivate/django-sortable-listview
sortable_listview/views.py
SortableListView.get_sort_indicator
def get_sort_indicator(self, field): """ Returns a sort class for the active sort only. That is, if field is not sort_field, then nothing will be returned becaues the sort is not active. """ indicator = '' if field == self.sort_field: indicator = 'sort-asc' if self.sort_order == '-': indicator = 'sort-desc' return indicator
python
def get_sort_indicator(self, field):
    """
    Returns a sort class for the active sort only. That is, if field is not
    sort_field, then nothing will be returned becaues the sort is not active.
    """
    indicator = ''
    if field == self.sort_field:
        indicator = 'sort-asc'
        if self.sort_order == '-':
            indicator = 'sort-desc'
    return indicator
[ "def", "get_sort_indicator", "(", "self", ",", "field", ")", ":", "indicator", "=", "''", "if", "field", "==", "self", ".", "sort_field", ":", "indicator", "=", "'sort-asc'", "if", "self", ".", "sort_order", "==", "'-'", ":", "indicator", "=", "'sort-desc'", "return", "indicator" ]
Returns a sort class for the active sort only. That is, if field is not sort_field, then nothing will be returned becaues the sort is not active.
[ "Returns", "a", "sort", "class", "for", "the", "active", "sort", "only", ".", "That", "is", "if", "field", "is", "not", "sort_field", "then", "nothing", "will", "be", "returned", "becaues", "the", "sort", "is", "not", "active", "." ]
9d5fa5847f0c3e80893780c6540e5098635ace9f
https://github.com/aptivate/django-sortable-listview/blob/9d5fa5847f0c3e80893780c6540e5098635ace9f/sortable_listview/views.py#L149-L160
train
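A companion sketch under the same assumptions as the previous one: only the currently active sort field receives a CSS class, and the class reflects the direction.

from django.test import RequestFactory
from myapp.views import UserListView                     # hypothetical subclass from the earlier sketch

view = UserListView()
view.request = RequestFactory().get('/users/', {'sort': '-username'})
view.sort_order, view.sort_field = view.set_sort(view.request)

print(view.get_sort_indicator('username'))         # 'sort-desc': active field, descending order
print(view.get_sort_indicator('date_joined'))      # '': not the active sort, so no class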
aptivate/django-sortable-listview
sortable_listview/views.py
SortableListView.get_basic_sort_link
def get_basic_sort_link(self, request, field): """ Thanks to del_query_parameters and get_querystring, we build the link with preserving interesting get parameters and removing the others """ query_string = self.get_querystring() sort_string = self.get_next_sort_string(field) if sort_string: sort_link = request.path + '?' + sort_string if query_string: sort_link += '&' + query_string else: sort_link = request.path if query_string: sort_link += '?' + query_string return sort_link
python
def get_basic_sort_link(self, request, field):
    """
    Thanks to del_query_parameters and get_querystring, we build the link
    with preserving interesting get parameters and removing the others
    """
    query_string = self.get_querystring()
    sort_string = self.get_next_sort_string(field)
    if sort_string:
        sort_link = request.path + '?' + sort_string
        if query_string:
            sort_link += '&' + query_string
    else:
        sort_link = request.path
        if query_string:
            sort_link += '?' + query_string
    return sort_link
[ "def", "get_basic_sort_link", "(", "self", ",", "request", ",", "field", ")", ":", "query_string", "=", "self", ".", "get_querystring", "(", ")", "sort_string", "=", "self", ".", "get_next_sort_string", "(", "field", ")", "if", "sort_string", ":", "sort_link", "=", "request", ".", "path", "+", "'?'", "+", "sort_string", "if", "query_string", ":", "sort_link", "+=", "'&'", "+", "query_string", "else", ":", "sort_link", "=", "request", ".", "path", "if", "query_string", ":", "sort_link", "+=", "'?'", "+", "query_string", "return", "sort_link" ]
Thanks to del_query_parameters and get_querystring, we build the link with preserving interesting get parameters and removing the others
[ "Thanks", "to", "del_query_parameters", "and", "get_querystring", "we", "build", "the", "link", "with", "preserving", "interesting", "get", "parameters", "and", "removing", "the", "others" ]
9d5fa5847f0c3e80893780c6540e5098635ace9f
https://github.com/aptivate/django-sortable-listview/blob/9d5fa5847f0c3e80893780c6540e5098635ace9f/sortable_listview/views.py#L180-L195
train
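A sketch, under the same assumptions as the previous examples, showing that the sort link keeps the other query parameters while replacing the sort parameter itself; the printed value is illustrative, not a guaranteed output.

from django.test import RequestFactory
from myapp.views import UserListView                     # hypothetical subclass from the earlier sketch

view = UserListView()
view.request = RequestFactory().get('/users/', {'sort': 'username', 'q': 'smith'})
view.sort_order, view.sort_field = view.set_sort(view.request)

# The old sort parameter is dropped and the remaining query string is re-appended,
# so the link might look like '/users/?sort=-username&q=smith'.
print(view.get_basic_sort_link(view.request, 'username'))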
manikos/django-progressiveimagefield
progressiveimagefield/fields.py
ProgressiveImageField.build_thumb_path
def build_thumb_path(self, image): """ Build the absolute path of the to-be-saved thumbnail. """ image_file = image.file image_name_w_ext = split(image.name)[-1] image_name, ext = splitext(image_name_w_ext) if not self.in_memory(image_file): # `image_file` is already in disk (not in memory). # `image_name` is the full path, not just the name image_name = image_name.split('/')[-1] upload_to = image.field.upload_to if not upload_to.endswith('/'): upload_to = f'{upload_to}/' path_upload_to = f'{upload_to}{image_name}' return f'{self.storage.location}/{path_upload_to}{THUMB_EXT}{ext}'
python
def build_thumb_path(self, image):
    """ Build the absolute path of the to-be-saved thumbnail. """
    image_file = image.file
    image_name_w_ext = split(image.name)[-1]
    image_name, ext = splitext(image_name_w_ext)
    if not self.in_memory(image_file):
        # `image_file` is already in disk (not in memory).
        # `image_name` is the full path, not just the name
        image_name = image_name.split('/')[-1]
    upload_to = image.field.upload_to
    if not upload_to.endswith('/'):
        upload_to = f'{upload_to}/'
    path_upload_to = f'{upload_to}{image_name}'
    return f'{self.storage.location}/{path_upload_to}{THUMB_EXT}{ext}'
[ "def", "build_thumb_path", "(", "self", ",", "image", ")", ":", "image_file", "=", "image", ".", "file", "image_name_w_ext", "=", "split", "(", "image", ".", "name", ")", "[", "-", "1", "]", "image_name", ",", "ext", "=", "splitext", "(", "image_name_w_ext", ")", "if", "not", "self", ".", "in_memory", "(", "image_file", ")", ":", "# `image_file` is already in disk (not in memory).", "# `image_name` is the full path, not just the name", "image_name", "=", "image_name", ".", "split", "(", "'/'", ")", "[", "-", "1", "]", "upload_to", "=", "image", ".", "field", ".", "upload_to", "if", "not", "upload_to", ".", "endswith", "(", "'/'", ")", ":", "upload_to", "=", "f'{upload_to}/'", "path_upload_to", "=", "f'{upload_to}{image_name}'", "return", "f'{self.storage.location}/{path_upload_to}{THUMB_EXT}{ext}'" ]
Build the absolute path of the to-be-saved thumbnail.
[ "Build", "the", "absolute", "path", "of", "the", "to", "-", "be", "-", "saved", "thumbnail", "." ]
a432c79d23d87ea8944ac252ae7d15df1e4f3072
https://github.com/manikos/django-progressiveimagefield/blob/a432c79d23d87ea8944ac252ae7d15df1e4f3072/progressiveimagefield/fields.py#L22-L37
train
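The thumbnail path above is plain path arithmetic: strip the directory, split off the extension, normalise `upload_to`, then re-assemble under the storage root. A hedged sketch with assumed values for `THUMB_EXT` and the storage location (the real constants live in the package):

from os.path import split, splitext

THUMB_EXT = '_thumb'             # assumed marker, not the package's actual constant
STORAGE_LOCATION = '/srv/media'  # assumed storage root

def thumb_path(image_name, upload_to):
    name, ext = splitext(split(image_name)[-1])
    if not upload_to.endswith('/'):
        upload_to += '/'
    return f'{STORAGE_LOCATION}/{upload_to}{name}{THUMB_EXT}{ext}'

print(thumb_path('photos/cat.jpg', 'uploads'))  # /srv/media/uploads/cat_thumb.jpg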
uranusjr/django-gunicorn
djgunicorn/management/commands/gunserver.py
Command.run
def run(self, **options):
    """Override runserver's entry point to bring Gunicorn on.

    A large portion of code in this method is copied from
    `django.core.management.commands.runserver`.
    """
    shutdown_message = options.get('shutdown_message', '')

    self.stdout.write("Performing system checks...\n\n")
    self.check(display_num_errors=True)
    self.check_migrations()

    now = datetime.datetime.now().strftime(r'%B %d, %Y - %X')
    if six.PY2:
        now = now.decode(get_system_encoding())
    self.stdout.write(now)

    addr, port = self.addr, self.port
    addr = '[{}]'.format(addr) if self._raw_ipv6 else addr
    runner = GunicornRunner(addr, port, options)
    try:
        runner.run()
    except KeyboardInterrupt:
        runner.shutdown()
        if shutdown_message:
            self.stdout.write(shutdown_message)
        sys.exit(0)
    except:
        runner.shutdown()
        raise
python
def run(self, **options):
    """Override runserver's entry point to bring Gunicorn on.

    A large portion of code in this method is copied from
    `django.core.management.commands.runserver`.
    """
    shutdown_message = options.get('shutdown_message', '')

    self.stdout.write("Performing system checks...\n\n")
    self.check(display_num_errors=True)
    self.check_migrations()

    now = datetime.datetime.now().strftime(r'%B %d, %Y - %X')
    if six.PY2:
        now = now.decode(get_system_encoding())
    self.stdout.write(now)

    addr, port = self.addr, self.port
    addr = '[{}]'.format(addr) if self._raw_ipv6 else addr
    runner = GunicornRunner(addr, port, options)
    try:
        runner.run()
    except KeyboardInterrupt:
        runner.shutdown()
        if shutdown_message:
            self.stdout.write(shutdown_message)
        sys.exit(0)
    except:
        runner.shutdown()
        raise
[ "def", "run", "(", "self", ",", "*", "*", "options", ")", ":", "shutdown_message", "=", "options", ".", "get", "(", "'shutdown_message'", ",", "''", ")", "self", ".", "stdout", ".", "write", "(", "\"Performing system checks...\\n\\n\"", ")", "self", ".", "check", "(", "display_num_errors", "=", "True", ")", "self", ".", "check_migrations", "(", ")", "now", "=", "datetime", ".", "datetime", ".", "now", "(", ")", ".", "strftime", "(", "r'%B %d, %Y - %X'", ")", "if", "six", ".", "PY2", ":", "now", "=", "now", ".", "decode", "(", "get_system_encoding", "(", ")", ")", "self", ".", "stdout", ".", "write", "(", "now", ")", "addr", ",", "port", "=", "self", ".", "addr", ",", "self", ".", "port", "addr", "=", "'[{}]'", ".", "format", "(", "addr", ")", "if", "self", ".", "_raw_ipv6", "else", "addr", "runner", "=", "GunicornRunner", "(", "addr", ",", "port", ",", "options", ")", "try", ":", "runner", ".", "run", "(", ")", "except", "KeyboardInterrupt", ":", "runner", ".", "shutdown", "(", ")", "if", "shutdown_message", ":", "self", ".", "stdout", ".", "write", "(", "shutdown_message", ")", "sys", ".", "exit", "(", "0", ")", "except", ":", "runner", ".", "shutdown", "(", ")", "raise" ]
Override runserver's entry point to bring Gunicorn on. A large portion of code in this method is copied from `django.core.management.commands.runserver`.
[ "Override", "runserver", "s", "entry", "point", "to", "bring", "Gunicorn", "on", "." ]
4fb16f48048ff5fff8f889a007f376236646497b
https://github.com/uranusjr/django-gunicorn/blob/4fb16f48048ff5fff8f889a007f376236646497b/djgunicorn/management/commands/gunserver.py#L34-L63
train
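Beyond the Django bookkeeping, the core of the command is the run/shutdown pattern: start the runner, and on Ctrl-C shut it down cleanly before exiting. A sketch with a dummy runner standing in for Gunicorn:

import sys

class DummyRunner:
    def run(self):
        raise KeyboardInterrupt   # simulate Ctrl-C for the example
    def shutdown(self):
        print('runner stopped')

runner = DummyRunner()
try:
    runner.run()
except KeyboardInterrupt:
    runner.shutdown()
    sys.exit(0)
except Exception:
    runner.shutdown()
    raise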
biosignalsnotebooks/biosignalsnotebooks
biosignalsnotebooks/build/lib/biosignalsnotebooks/external_packages/novainstrumentation/panthomkins/detect_panthomkins_peaks.py
_plot
def _plot(x, mph, mpd, threshold, edge, valley, ax, ind):
    """Plot results of the detect_peaks function, see its help."""
    try:
        import matplotlib.pyplot as plt
    except ImportError:
        print('matplotlib is not available.')
    else:
        if ax is None:
            _, ax = plt.subplots(1, 1, figsize=(8, 4))

        ax.plot(x, 'b', lw=1)
        if ind.size:
            label = 'valley' if valley else 'peak'
            label = label + 's' if ind.size > 1 else label
            ax.plot(ind, x[ind], '+', mfc=None, mec='r', mew=2, ms=8,
                    label='%d %s' % (ind.size, label))
            ax.legend(loc='best', framealpha=.5, numpoints=1)
        ax.set_xlim(-.02*x.size, x.size*1.02-1)
        ymin, ymax = x[np.isfinite(x)].min(), x[np.isfinite(x)].max()
        yrange = ymax - ymin if ymax > ymin else 1
        ax.set_ylim(ymin - 0.1*yrange, ymax + 0.1*yrange)
        ax.set_xlabel('Data #', fontsize=14)
        ax.set_ylabel('Amplitude', fontsize=14)
        mode = 'Valley detection' if valley else 'Peak detection'
        ax.set_title("%s (mph=%s, mpd=%d, threshold=%s, edge='%s')"
                     % (mode, str(mph), mpd, str(threshold), edge))
python
def _plot(x, mph, mpd, threshold, edge, valley, ax, ind):
    """Plot results of the detect_peaks function, see its help."""
    try:
        import matplotlib.pyplot as plt
    except ImportError:
        print('matplotlib is not available.')
    else:
        if ax is None:
            _, ax = plt.subplots(1, 1, figsize=(8, 4))

        ax.plot(x, 'b', lw=1)
        if ind.size:
            label = 'valley' if valley else 'peak'
            label = label + 's' if ind.size > 1 else label
            ax.plot(ind, x[ind], '+', mfc=None, mec='r', mew=2, ms=8,
                    label='%d %s' % (ind.size, label))
            ax.legend(loc='best', framealpha=.5, numpoints=1)
        ax.set_xlim(-.02*x.size, x.size*1.02-1)
        ymin, ymax = x[np.isfinite(x)].min(), x[np.isfinite(x)].max()
        yrange = ymax - ymin if ymax > ymin else 1
        ax.set_ylim(ymin - 0.1*yrange, ymax + 0.1*yrange)
        ax.set_xlabel('Data #', fontsize=14)
        ax.set_ylabel('Amplitude', fontsize=14)
        mode = 'Valley detection' if valley else 'Peak detection'
        ax.set_title("%s (mph=%s, mpd=%d, threshold=%s, edge='%s')"
                     % (mode, str(mph), mpd, str(threshold), edge))
[ "def", "_plot", "(", "x", ",", "mph", ",", "mpd", ",", "threshold", ",", "edge", ",", "valley", ",", "ax", ",", "ind", ")", ":", "try", ":", "import", "matplotlib", ".", "pyplot", "as", "plt", "except", "ImportError", ":", "print", "(", "'matplotlib is not available.'", ")", "else", ":", "if", "ax", "is", "None", ":", "_", ",", "ax", "=", "plt", ".", "subplots", "(", "1", ",", "1", ",", "figsize", "=", "(", "8", ",", "4", ")", ")", "ax", ".", "plot", "(", "x", ",", "'b'", ",", "lw", "=", "1", ")", "if", "ind", ".", "size", ":", "label", "=", "'valley'", "if", "valley", "else", "'peak'", "label", "=", "label", "+", "'s'", "if", "ind", ".", "size", ">", "1", "else", "label", "ax", ".", "plot", "(", "ind", ",", "x", "[", "ind", "]", ",", "'+'", ",", "mfc", "=", "None", ",", "mec", "=", "'r'", ",", "mew", "=", "2", ",", "ms", "=", "8", ",", "label", "=", "'%d %s'", "%", "(", "ind", ".", "size", ",", "label", ")", ")", "ax", ".", "legend", "(", "loc", "=", "'best'", ",", "framealpha", "=", ".5", ",", "numpoints", "=", "1", ")", "ax", ".", "set_xlim", "(", "-", ".02", "*", "x", ".", "size", ",", "x", ".", "size", "*", "1.02", "-", "1", ")", "ymin", ",", "ymax", "=", "x", "[", "np", ".", "isfinite", "(", "x", ")", "]", ".", "min", "(", ")", ",", "x", "[", "np", ".", "isfinite", "(", "x", ")", "]", ".", "max", "(", ")", "yrange", "=", "ymax", "-", "ymin", "if", "ymax", ">", "ymin", "else", "1", "ax", ".", "set_ylim", "(", "ymin", "-", "0.1", "*", "yrange", ",", "ymax", "+", "0.1", "*", "yrange", ")", "ax", ".", "set_xlabel", "(", "'Data #'", ",", "fontsize", "=", "14", ")", "ax", ".", "set_ylabel", "(", "'Amplitude'", ",", "fontsize", "=", "14", ")", "mode", "=", "'Valley detection'", "if", "valley", "else", "'Peak detection'", "ax", ".", "set_title", "(", "\"%s (mph=%s, mpd=%d, threshold=%s, edge='%s')\"", "%", "(", "mode", ",", "str", "(", "mph", ")", ",", "mpd", ",", "str", "(", "threshold", ")", ",", "edge", ")", ")" ]
Plot results of the detect_peaks function, see its help.
[ "Plot", "results", "of", "the", "detect_peaks", "function", "see", "its", "help", "." ]
aaa01d4125180b3a34f1e26e0d3ff08c23f666d3
https://github.com/biosignalsnotebooks/biosignalsnotebooks/blob/aaa01d4125180b3a34f1e26e0d3ff08c23f666d3/biosignalsnotebooks/build/lib/biosignalsnotebooks/external_packages/novainstrumentation/panthomkins/detect_panthomkins_peaks.py#L119-L144
train
KE-works/pykechain
pykechain/models/activity2.py
Activity2.assignees
def assignees(self):
    """List of assignees to the activity."""
    if 'assignees' in self._json_data and self._json_data.get('assignees_ids') == list():
        return []
    elif 'assignees' in self._json_data and self._json_data.get('assignees_ids'):
        assignees_ids_str = ','.join([str(id) for id in self._json_data.get('assignees_ids')])
        return self._client.users(id__in=assignees_ids_str, is_hidden=False)
    return None
python
def assignees(self):
    """List of assignees to the activity."""
    if 'assignees' in self._json_data and self._json_data.get('assignees_ids') == list():
        return []
    elif 'assignees' in self._json_data and self._json_data.get('assignees_ids'):
        assignees_ids_str = ','.join([str(id) for id in self._json_data.get('assignees_ids')])
        return self._client.users(id__in=assignees_ids_str, is_hidden=False)
    return None
[ "def", "assignees", "(", "self", ")", ":", "if", "'assignees'", "in", "self", ".", "_json_data", "and", "self", ".", "_json_data", ".", "get", "(", "'assignees_ids'", ")", "==", "list", "(", ")", ":", "return", "[", "]", "elif", "'assignees'", "in", "self", ".", "_json_data", "and", "self", ".", "_json_data", ".", "get", "(", "'assignees_ids'", ")", ":", "assignees_ids_str", "=", "','", ".", "join", "(", "[", "str", "(", "id", ")", "for", "id", "in", "self", ".", "_json_data", ".", "get", "(", "'assignees_ids'", ")", "]", ")", "return", "self", ".", "_client", ".", "users", "(", "id__in", "=", "assignees_ids_str", ",", "is_hidden", "=", "False", ")", "return", "None" ]
List of assignees to the activity.
[ "List", "of", "assignees", "to", "the", "activity", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/activity2.py#L45-L52
train
KE-works/pykechain
pykechain/models/activity2.py
Activity2.is_rootlevel
def is_rootlevel(self):
    """
    Determine if the Activity is at the root level of a project.

    It will look for the name of the parent which should be either ActivityRootNames.WORKFLOW_ROOT or
    ActivityRootNames.CATALOG_ROOT. If the name of the parent cannot be found an additional API call is made
    to retrieve the parent object (based on the `parent_id` in the json_data).

    :return: Return True if it is a root level activity, otherwise return False
    :rtype: bool
    """
    # when the activity itself is a root, than return False immediately
    if self.is_root():
        return False

    parent_name = None
    parent_dict = self._json_data.get('parent_id_name')

    if parent_dict and 'name' in parent_dict:
        parent_name = parent_dict.get('name')
    if not parent_dict:
        parent_name = self._client.activity(id=self._json_data.get('parent_id')).name

    if parent_name in ActivityRootNames.values():
        return True
    return False
python
def is_rootlevel(self):
    """
    Determine if the Activity is at the root level of a project.

    It will look for the name of the parent which should be either ActivityRootNames.WORKFLOW_ROOT or
    ActivityRootNames.CATALOG_ROOT. If the name of the parent cannot be found an additional API call is made
    to retrieve the parent object (based on the `parent_id` in the json_data).

    :return: Return True if it is a root level activity, otherwise return False
    :rtype: bool
    """
    # when the activity itself is a root, than return False immediately
    if self.is_root():
        return False

    parent_name = None
    parent_dict = self._json_data.get('parent_id_name')

    if parent_dict and 'name' in parent_dict:
        parent_name = parent_dict.get('name')
    if not parent_dict:
        parent_name = self._client.activity(id=self._json_data.get('parent_id')).name

    if parent_name in ActivityRootNames.values():
        return True
    return False
[ "def", "is_rootlevel", "(", "self", ")", ":", "# when the activity itself is a root, than return False immediately", "if", "self", ".", "is_root", "(", ")", ":", "return", "False", "parent_name", "=", "None", "parent_dict", "=", "self", ".", "_json_data", ".", "get", "(", "'parent_id_name'", ")", "if", "parent_dict", "and", "'name'", "in", "parent_dict", ":", "parent_name", "=", "parent_dict", ".", "get", "(", "'name'", ")", "if", "not", "parent_dict", ":", "parent_name", "=", "self", ".", "_client", ".", "activity", "(", "id", "=", "self", ".", "_json_data", ".", "get", "(", "'parent_id'", ")", ")", ".", "name", "if", "parent_name", "in", "ActivityRootNames", ".", "values", "(", ")", ":", "return", "True", "return", "False" ]
Determine if the Activity is at the root level of a project. It will look for the name of the parent which should be either ActivityRootNames.WORKFLOW_ROOT or ActivityRootNames.CATALOG_ROOT. If the name of the parent cannot be found an additional API call is made to retrieve the parent object (based on the `parent_id` in the json_data). :return: Return True if it is a root level activity, otherwise return False :rtype: bool
[ "Determine", "if", "the", "Activity", "is", "at", "the", "root", "level", "of", "a", "project", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/activity2.py#L58-L82
train
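The root-level test reduces to comparing the parent's name with the known root names, with an extra lookup when the name is not cached in the JSON payload. A pure-Python sketch (the root names and the lookup callback are assumptions for illustration, not pykechain API):

ROOT_NAMES = {'WORKFLOW_ROOT', 'CATALOG_ROOT'}  # assumed values

def is_rootlevel(json_data, fetch_parent_name):
    parent_dict = json_data.get('parent_id_name')
    if parent_dict and 'name' in parent_dict:
        parent_name = parent_dict['name']
    else:
        # fall back to an extra lookup when the cached name is missing
        parent_name = fetch_parent_name(json_data.get('parent_id'))
    return parent_name in ROOT_NAMES

print(is_rootlevel({'parent_id_name': {'name': 'WORKFLOW_ROOT'}}, lambda _id: None))  # True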
KE-works/pykechain
pykechain/models/activity2.py
Activity2.parent
def parent(self):
    """Retrieve the parent in which this activity is defined.

    If this is a task on top level, it raises NotFounderror.

    :return: a :class:`Activity2`
    :raises NotFoundError: when it is a task in the top level of a project
    :raises APIError: when other error occurs

    Example
    -------
    >>> task = project.activity('Subtask')
    >>> parent_of_task = task.parent()

    """
    parent_id = self._json_data.get('parent_id')
    if parent_id is None:
        raise NotFoundError("Cannot find subprocess for this task '{}', "
                            "as this task exist on top level.".format(self.name))
    return self._client.activity(pk=parent_id, scope=self.scope_id)
python
def parent(self):
    """Retrieve the parent in which this activity is defined.

    If this is a task on top level, it raises NotFounderror.

    :return: a :class:`Activity2`
    :raises NotFoundError: when it is a task in the top level of a project
    :raises APIError: when other error occurs

    Example
    -------
    >>> task = project.activity('Subtask')
    >>> parent_of_task = task.parent()

    """
    parent_id = self._json_data.get('parent_id')
    if parent_id is None:
        raise NotFoundError("Cannot find subprocess for this task '{}', "
                            "as this task exist on top level.".format(self.name))
    return self._client.activity(pk=parent_id, scope=self.scope_id)
[ "def", "parent", "(", "self", ")", ":", "parent_id", "=", "self", ".", "_json_data", ".", "get", "(", "'parent_id'", ")", "if", "parent_id", "is", "None", ":", "raise", "NotFoundError", "(", "\"Cannot find subprocess for this task '{}', \"", "\"as this task exist on top level.\"", ".", "format", "(", "self", ".", "name", ")", ")", "return", "self", ".", "_client", ".", "activity", "(", "pk", "=", "parent_id", ",", "scope", "=", "self", ".", "scope_id", ")" ]
Retrieve the parent in which this activity is defined. If this is a task on top level, it raises NotFoundError. :return: a :class:`Activity2` :raises NotFoundError: when it is a task in the top level of a project :raises APIError: when other error occurs Example ------- >>> task = project.activity('Subtask') >>> parent_of_task = task.parent()
[ "Retrieve", "the", "parent", "in", "which", "this", "activity", "is", "defined", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/activity2.py#L192-L211
train
KE-works/pykechain
pykechain/models/activity2.py
Activity2.siblings
def siblings(self, **kwargs):
    """Retrieve the other activities that also belong to the parent.

    It returns a combination of Tasks (a.o. UserTasks) and Subprocesses on the level of the current task,
    including itself. This also works if the activity is of type `ActivityType.PROCESS`.

    :param kwargs: Additional search arguments, check :func:`pykechain.Client.activities` for additional info
    :type kwargs: dict or None
    :return: list of :class:`Activity2`
    :raises NotFoundError: when it is a task in the top level of a project

    Example
    -------
    >>> task = project.activity('Some Task')
    >>> siblings = task.siblings()

    Example for siblings containing certain words in the task name
    >>> task = project.activity('Some Task')
    >>> siblings = task.siblings(name__contains='Another Task')

    """
    parent_id = self._json_data.get('parent_id')
    if parent_id is None:
        raise NotFoundError("Cannot find subprocess for this task '{}', "
                            "as this task exist on top level.".format(self.name))
    return self._client.activities(parent_id=parent_id, scope=self.scope_id, **kwargs)
python
def siblings(self, **kwargs):
    """Retrieve the other activities that also belong to the parent.

    It returns a combination of Tasks (a.o. UserTasks) and Subprocesses on the level of the current task,
    including itself. This also works if the activity is of type `ActivityType.PROCESS`.

    :param kwargs: Additional search arguments, check :func:`pykechain.Client.activities` for additional info
    :type kwargs: dict or None
    :return: list of :class:`Activity2`
    :raises NotFoundError: when it is a task in the top level of a project

    Example
    -------
    >>> task = project.activity('Some Task')
    >>> siblings = task.siblings()

    Example for siblings containing certain words in the task name
    >>> task = project.activity('Some Task')
    >>> siblings = task.siblings(name__contains='Another Task')

    """
    parent_id = self._json_data.get('parent_id')
    if parent_id is None:
        raise NotFoundError("Cannot find subprocess for this task '{}', "
                            "as this task exist on top level.".format(self.name))
    return self._client.activities(parent_id=parent_id, scope=self.scope_id, **kwargs)
[ "def", "siblings", "(", "self", ",", "*", "*", "kwargs", ")", ":", "parent_id", "=", "self", ".", "_json_data", ".", "get", "(", "'parent_id'", ")", "if", "parent_id", "is", "None", ":", "raise", "NotFoundError", "(", "\"Cannot find subprocess for this task '{}', \"", "\"as this task exist on top level.\"", ".", "format", "(", "self", ".", "name", ")", ")", "return", "self", ".", "_client", ".", "activities", "(", "parent_id", "=", "parent_id", ",", "scope", "=", "self", ".", "scope_id", ",", "*", "*", "kwargs", ")" ]
Retrieve the other activities that also belong to the parent. It returns a combination of Tasks (a.o. UserTasks) and Subprocesses on the level of the current task, including itself. This also works if the activity is of type `ActivityType.PROCESS`. :param kwargs: Additional search arguments, check :func:`pykechain.Client.activities` for additional info :type kwargs: dict or None :return: list of :class:`Activity2` :raises NotFoundError: when it is a task in the top level of a project Example ------- >>> task = project.activity('Some Task') >>> siblings = task.siblings() Example for siblings containing certain words in the task name >>> task = project.activity('Some Task') >>> siblings = task.siblings(name__contains='Another Task')
[ "Retrieve", "the", "other", "activities", "that", "also", "belong", "to", "the", "parent", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/activity2.py#L241-L266
train
KE-works/pykechain
pykechain/models/activity2.py
Activity2.download_as_pdf
def download_as_pdf(self, target_dir=None, pdf_filename=None, paper_size=PaperSize.A4,
                    paper_orientation=PaperOrientation.PORTRAIT, include_appendices=False):
    """
    Retrieve the PDF of the Activity.

    .. versionadded:: 2.1

    :param target_dir: (optional) directory path name where the store the log.txt to.
    :type target_dir: basestring or None
    :param pdf_filename: (optional) log filename to write the log to, defaults to `log.txt`.
    :type pdf_filename: basestring or None
    :param paper_size: The size of the paper to which the PDF is downloaded:
                       - a4paper (default): A4 paper size
                       - a3paper: A3 paper size
                       - a2paper: A2 paper size
                       - a1paper: A1 paper size
                       - a0paper: A0 paper size
    :type paper_size: basestring (see :class:`enums.PaperSize`)
    :param paper_orientation: The orientation of the paper to which the PDF is downloaded:
                              - portrait (default): portrait orientation
                              - landscape: landscape orientation
    :type paper_size: basestring (see :class:`enums.PaperOrientation`)
    :param include_appendices: True if the PDF should contain appendices, False (default) if otherwise.
    :type include_appendices: bool
    :raises APIError: if the pdf file could not be found.
    :raises OSError: if the file could not be written.
    """
    if not pdf_filename:
        pdf_filename = self.name + '.pdf'
    if not pdf_filename.endswith('.pdf'):
        pdf_filename += '.pdf'

    full_path = os.path.join(target_dir or os.getcwd(), pdf_filename)

    request_params = {
        'papersize': paper_size,
        'orientation': paper_orientation,
        'appendices': include_appendices
    }

    url = self._client._build_url('activity_export', activity_id=self.id)
    response = self._client._request('GET', url, params=request_params)

    if response.status_code != requests.codes.ok:  # pragma: no cover
        raise APIError("Could not download PDF of activity {}".format(self.name))

    # If appendices are included, the request becomes asynchronous
    if include_appendices:
        data = response.json()

        # Download the pdf async
        url = urljoin(self._client.api_root, data['download_url'])

        count = 0
        while count <= ASYNC_TIMEOUT_LIMIT:
            response = self._client._request('GET', url=url)

            if response.status_code == requests.codes.ok:  # pragma: no cover
                with open(full_path, 'wb') as f:
                    for chunk in response.iter_content(1024):
                        f.write(chunk)
                return

            count += ASYNC_REFRESH_INTERVAL
            time.sleep(ASYNC_REFRESH_INTERVAL)

        raise APIError("Could not download PDF of activity {} within the time-out limit of {} "
                       "seconds".format(self.name, ASYNC_TIMEOUT_LIMIT))

    with open(full_path, 'wb') as f:
        for chunk in response.iter_content(1024):
            f.write(chunk)
python
def download_as_pdf(self, target_dir=None, pdf_filename=None, paper_size=PaperSize.A4,
                    paper_orientation=PaperOrientation.PORTRAIT, include_appendices=False):
    """
    Retrieve the PDF of the Activity.

    .. versionadded:: 2.1

    :param target_dir: (optional) directory path name where the store the log.txt to.
    :type target_dir: basestring or None
    :param pdf_filename: (optional) log filename to write the log to, defaults to `log.txt`.
    :type pdf_filename: basestring or None
    :param paper_size: The size of the paper to which the PDF is downloaded:
                       - a4paper (default): A4 paper size
                       - a3paper: A3 paper size
                       - a2paper: A2 paper size
                       - a1paper: A1 paper size
                       - a0paper: A0 paper size
    :type paper_size: basestring (see :class:`enums.PaperSize`)
    :param paper_orientation: The orientation of the paper to which the PDF is downloaded:
                              - portrait (default): portrait orientation
                              - landscape: landscape orientation
    :type paper_size: basestring (see :class:`enums.PaperOrientation`)
    :param include_appendices: True if the PDF should contain appendices, False (default) if otherwise.
    :type include_appendices: bool
    :raises APIError: if the pdf file could not be found.
    :raises OSError: if the file could not be written.
    """
    if not pdf_filename:
        pdf_filename = self.name + '.pdf'
    if not pdf_filename.endswith('.pdf'):
        pdf_filename += '.pdf'

    full_path = os.path.join(target_dir or os.getcwd(), pdf_filename)

    request_params = {
        'papersize': paper_size,
        'orientation': paper_orientation,
        'appendices': include_appendices
    }

    url = self._client._build_url('activity_export', activity_id=self.id)
    response = self._client._request('GET', url, params=request_params)

    if response.status_code != requests.codes.ok:  # pragma: no cover
        raise APIError("Could not download PDF of activity {}".format(self.name))

    # If appendices are included, the request becomes asynchronous
    if include_appendices:
        data = response.json()

        # Download the pdf async
        url = urljoin(self._client.api_root, data['download_url'])

        count = 0
        while count <= ASYNC_TIMEOUT_LIMIT:
            response = self._client._request('GET', url=url)

            if response.status_code == requests.codes.ok:  # pragma: no cover
                with open(full_path, 'wb') as f:
                    for chunk in response.iter_content(1024):
                        f.write(chunk)
                return

            count += ASYNC_REFRESH_INTERVAL
            time.sleep(ASYNC_REFRESH_INTERVAL)

        raise APIError("Could not download PDF of activity {} within the time-out limit of {} "
                       "seconds".format(self.name, ASYNC_TIMEOUT_LIMIT))

    with open(full_path, 'wb') as f:
        for chunk in response.iter_content(1024):
            f.write(chunk)
[ "def", "download_as_pdf", "(", "self", ",", "target_dir", "=", "None", ",", "pdf_filename", "=", "None", ",", "paper_size", "=", "PaperSize", ".", "A4", ",", "paper_orientation", "=", "PaperOrientation", ".", "PORTRAIT", ",", "include_appendices", "=", "False", ")", ":", "if", "not", "pdf_filename", ":", "pdf_filename", "=", "self", ".", "name", "+", "'.pdf'", "if", "not", "pdf_filename", ".", "endswith", "(", "'.pdf'", ")", ":", "pdf_filename", "+=", "'.pdf'", "full_path", "=", "os", ".", "path", ".", "join", "(", "target_dir", "or", "os", ".", "getcwd", "(", ")", ",", "pdf_filename", ")", "request_params", "=", "{", "'papersize'", ":", "paper_size", ",", "'orientation'", ":", "paper_orientation", ",", "'appendices'", ":", "include_appendices", "}", "url", "=", "self", ".", "_client", ".", "_build_url", "(", "'activity_export'", ",", "activity_id", "=", "self", ".", "id", ")", "response", "=", "self", ".", "_client", ".", "_request", "(", "'GET'", ",", "url", ",", "params", "=", "request_params", ")", "if", "response", ".", "status_code", "!=", "requests", ".", "codes", ".", "ok", ":", "# pragma: no cover", "raise", "APIError", "(", "\"Could not download PDF of activity {}\"", ".", "format", "(", "self", ".", "name", ")", ")", "# If appendices are included, the request becomes asynchronous", "if", "include_appendices", ":", "data", "=", "response", ".", "json", "(", ")", "# Download the pdf async", "url", "=", "urljoin", "(", "self", ".", "_client", ".", "api_root", ",", "data", "[", "'download_url'", "]", ")", "count", "=", "0", "while", "count", "<=", "ASYNC_TIMEOUT_LIMIT", ":", "response", "=", "self", ".", "_client", ".", "_request", "(", "'GET'", ",", "url", "=", "url", ")", "if", "response", ".", "status_code", "==", "requests", ".", "codes", ".", "ok", ":", "# pragma: no cover", "with", "open", "(", "full_path", ",", "'wb'", ")", "as", "f", ":", "for", "chunk", "in", "response", ".", "iter_content", "(", "1024", ")", ":", "f", ".", "write", "(", "chunk", ")", "return", "count", "+=", "ASYNC_REFRESH_INTERVAL", "time", ".", "sleep", "(", "ASYNC_REFRESH_INTERVAL", ")", "raise", "APIError", "(", "\"Could not download PDF of activity {} within the time-out limit of {} \"", "\"seconds\"", ".", "format", "(", "self", ".", "name", ",", "ASYNC_TIMEOUT_LIMIT", ")", ")", "with", "open", "(", "full_path", ",", "'wb'", ")", "as", "f", ":", "for", "chunk", "in", "response", ".", "iter_content", "(", "1024", ")", ":", "f", ".", "write", "(", "chunk", ")" ]
Retrieve the PDF of the Activity. .. versionadded:: 2.1 :param target_dir: (optional) directory path name where the store the log.txt to. :type target_dir: basestring or None :param pdf_filename: (optional) log filename to write the log to, defaults to `log.txt`. :type pdf_filename: basestring or None :param paper_size: The size of the paper to which the PDF is downloaded: - a4paper (default): A4 paper size - a3paper: A3 paper size - a2paper: A2 paper size - a1paper: A1 paper size - a0paper: A0 paper size :type paper_size: basestring (see :class:`enums.PaperSize`) :param paper_orientation: The orientation of the paper to which the PDF is downloaded: - portrait (default): portrait orientation - landscape: landscape orientation :type paper_size: basestring (see :class:`enums.PaperOrientation`) :param include_appendices: True if the PDF should contain appendices, False (default) if otherwise. :type include_appendices: bool :raises APIError: if the pdf file could not be found. :raises OSError: if the file could not be written.
[ "Retrieve", "the", "PDF", "of", "the", "Activity", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/activity2.py#L437-L509
train
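When appendices are requested, the export becomes asynchronous and the client polls a download URL until it answers or a time-out is reached. A generic sketch of that poll-with-timeout pattern (the fetch callback and the limits are placeholders, not pykechain API):

import time

TIMEOUT_LIMIT = 30      # assumed seconds
REFRESH_INTERVAL = 2    # assumed seconds

def poll_until_ready(fetch, timeout=TIMEOUT_LIMIT, interval=REFRESH_INTERVAL):
    waited = 0
    while waited <= timeout:
        result = fetch()
        if result is not None:      # e.g. an HTTP 200 response carrying the PDF body
            return result
        waited += interval
        time.sleep(interval)
    raise TimeoutError('export not ready within {} seconds'.format(timeout))

# usage: poll_until_ready(lambda: try_download_or_none())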
kytos/kytos-utils
kytos/cli/commands/users/parser.py
parse
def parse(argv):
    """Parse cli args."""
    args = docopt(__doc__, argv=argv)
    try:
        call(sys.argv[2], args)
    except KytosException as exception:
        print("Error parsing args: {}".format(exception))
        exit()
python
def parse(argv):
    """Parse cli args."""
    args = docopt(__doc__, argv=argv)
    try:
        call(sys.argv[2], args)
    except KytosException as exception:
        print("Error parsing args: {}".format(exception))
        exit()
[ "def", "parse", "(", "argv", ")", ":", "args", "=", "docopt", "(", "__doc__", ",", "argv", "=", "argv", ")", "try", ":", "call", "(", "sys", ".", "argv", "[", "2", "]", ",", "args", ")", "except", "KytosException", "as", "exception", ":", "print", "(", "\"Error parsing args: {}\"", ".", "format", "(", "exception", ")", ")", "exit", "(", ")" ]
Parse cli args.
[ "Parse", "cli", "args", "." ]
b4750c618d15cff75970ea6124bda4d2b9a33578
https://github.com/kytos/kytos-utils/blob/b4750c618d15cff75970ea6124bda4d2b9a33578/kytos/cli/commands/users/parser.py#L26-L33
train
kytos/kytos-utils
setup.py
Cleaner.run
def run(self):
    """Clean build, dist, pyc and egg from package and docs."""
    super().run()
    call('rm -vrf ./build ./dist ./*.egg-info', shell=True)
    call('find . -name __pycache__ -type d | xargs rm -rf', shell=True)
    call('test -d docs && make -C docs/ clean', shell=True)
python
def run(self):
    """Clean build, dist, pyc and egg from package and docs."""
    super().run()
    call('rm -vrf ./build ./dist ./*.egg-info', shell=True)
    call('find . -name __pycache__ -type d | xargs rm -rf', shell=True)
    call('test -d docs && make -C docs/ clean', shell=True)
[ "def", "run", "(", "self", ")", ":", "super", "(", ")", ".", "run", "(", ")", "call", "(", "'rm -vrf ./build ./dist ./*.egg-info'", ",", "shell", "=", "True", ")", "call", "(", "'find . -name __pycache__ -type d | xargs rm -rf'", ",", "shell", "=", "True", ")", "call", "(", "'test -d docs && make -C docs/ clean'", ",", "shell", "=", "True", ")" ]
Clean build, dist, pyc and egg from package and docs.
[ "Clean", "build", "dist", "pyc", "and", "egg", "from", "package", "and", "docs", "." ]
b4750c618d15cff75970ea6124bda4d2b9a33578
https://github.com/kytos/kytos-utils/blob/b4750c618d15cff75970ea6124bda4d2b9a33578/setup.py#L71-L76
train
kytos/kytos-utils
setup.py
Linter.run
def run(self):
    """Run yala."""
    print('Yala is running. It may take several seconds...')
    try:
        check_call('yala setup.py tests kytos', shell=True)
        print('No linter error found.')
    except CalledProcessError:
        print('Linter check failed. Fix the error(s) above and try again.')
        sys.exit(-1)
python
def run(self):
    """Run yala."""
    print('Yala is running. It may take several seconds...')
    try:
        check_call('yala setup.py tests kytos', shell=True)
        print('No linter error found.')
    except CalledProcessError:
        print('Linter check failed. Fix the error(s) above and try again.')
        sys.exit(-1)
[ "def", "run", "(", "self", ")", ":", "print", "(", "'Yala is running. It may take several seconds...'", ")", "try", ":", "check_call", "(", "'yala setup.py tests kytos'", ",", "shell", "=", "True", ")", "print", "(", "'No linter error found.'", ")", "except", "CalledProcessError", ":", "print", "(", "'Linter check failed. Fix the error(s) above and try again.'", ")", "sys", ".", "exit", "(", "-", "1", ")" ]
Run yala.
[ "Run", "yala", "." ]
b4750c618d15cff75970ea6124bda4d2b9a33578
https://github.com/kytos/kytos-utils/blob/b4750c618d15cff75970ea6124bda4d2b9a33578/setup.py#L106-L114
train
mozilla/FoxPuppet
foxpuppet/windows/browser/notifications/addons.py
AddOnInstallBlocked.allow
def allow(self):
    """Allow the add-on to be installed."""
    with self.selenium.context(self.selenium.CONTEXT_CHROME):
        self.find_primary_button().click()
python
def allow(self):
    """Allow the add-on to be installed."""
    with self.selenium.context(self.selenium.CONTEXT_CHROME):
        self.find_primary_button().click()
[ "def", "allow", "(", "self", ")", ":", "with", "self", ".", "selenium", ".", "context", "(", "self", ".", "selenium", ".", "CONTEXT_CHROME", ")", ":", "self", ".", "find_primary_button", "(", ")", ".", "click", "(", ")" ]
Allow the add-on to be installed.
[ "Allow", "the", "add", "-", "on", "to", "be", "installed", "." ]
6575eb4c72fd024c986b254e198c8b4e6f68cddd
https://github.com/mozilla/FoxPuppet/blob/6575eb4c72fd024c986b254e198c8b4e6f68cddd/foxpuppet/windows/browser/notifications/addons.py#L14-L17
train
mozilla/FoxPuppet
foxpuppet/windows/browser/notifications/addons.py
AddOnInstallConfirmation.addon_name
def addon_name(self):
    """Provide access to the add-on name.

    Returns:
        str: Add-on name.

    """
    with self.selenium.context(self.selenium.CONTEXT_CHROME):
        el = self.find_description()
        return el.find_element(By.CSS_SELECTOR, "b").text
python
def addon_name(self):
    """Provide access to the add-on name.

    Returns:
        str: Add-on name.

    """
    with self.selenium.context(self.selenium.CONTEXT_CHROME):
        el = self.find_description()
        return el.find_element(By.CSS_SELECTOR, "b").text
[ "def", "addon_name", "(", "self", ")", ":", "with", "self", ".", "selenium", ".", "context", "(", "self", ".", "selenium", ".", "CONTEXT_CHROME", ")", ":", "el", "=", "self", ".", "find_description", "(", ")", "return", "el", ".", "find_element", "(", "By", ".", "CSS_SELECTOR", ",", "\"b\"", ")", ".", "text" ]
Provide access to the add-on name. Returns: str: Add-on name.
[ "Provide", "access", "to", "the", "add", "-", "on", "name", "." ]
6575eb4c72fd024c986b254e198c8b4e6f68cddd
https://github.com/mozilla/FoxPuppet/blob/6575eb4c72fd024c986b254e198c8b4e6f68cddd/foxpuppet/windows/browser/notifications/addons.py#L24-L33
train
mozilla/FoxPuppet
foxpuppet/windows/browser/notifications/addons.py
AddOnInstallConfirmation.cancel
def cancel(self):
    """Cancel add-on install."""
    with self.selenium.context(self.selenium.CONTEXT_CHROME):
        self.find_secondary_button().click()
python
def cancel(self):
    """Cancel add-on install."""
    with self.selenium.context(self.selenium.CONTEXT_CHROME):
        self.find_secondary_button().click()
[ "def", "cancel", "(", "self", ")", ":", "with", "self", ".", "selenium", ".", "context", "(", "self", ".", "selenium", ".", "CONTEXT_CHROME", ")", ":", "self", ".", "find_secondary_button", "(", ")", ".", "click", "(", ")" ]
Cancel add-on install.
[ "Cancel", "add", "-", "on", "install", "." ]
6575eb4c72fd024c986b254e198c8b4e6f68cddd
https://github.com/mozilla/FoxPuppet/blob/6575eb4c72fd024c986b254e198c8b4e6f68cddd/foxpuppet/windows/browser/notifications/addons.py#L35-L38
train
mozilla/FoxPuppet
foxpuppet/windows/browser/notifications/addons.py
AddOnInstallConfirmation.install
def install(self):
    """Confirm add-on install."""
    with self.selenium.context(self.selenium.CONTEXT_CHROME):
        self.find_primary_button().click()
python
def install(self):
    """Confirm add-on install."""
    with self.selenium.context(self.selenium.CONTEXT_CHROME):
        self.find_primary_button().click()
[ "def", "install", "(", "self", ")", ":", "with", "self", ".", "selenium", ".", "context", "(", "self", ".", "selenium", ".", "CONTEXT_CHROME", ")", ":", "self", ".", "find_primary_button", "(", ")", ".", "click", "(", ")" ]
Confirm add-on install.
[ "Confirm", "add", "-", "on", "install", "." ]
6575eb4c72fd024c986b254e198c8b4e6f68cddd
https://github.com/mozilla/FoxPuppet/blob/6575eb4c72fd024c986b254e198c8b4e6f68cddd/foxpuppet/windows/browser/notifications/addons.py#L40-L43
train
biosignalsnotebooks/biosignalsnotebooks
biosignalsnotebooks/build/lib/biosignalsnotebooks/load.py
_load_txt
def _load_txt(file, devices, channels, header, **kwargs):
    """
    Function used for reading .txt files generated by OpenSignals.

    ----------
    Parameters
    ----------
    file : file, str, or pathlib.Path
        File, filename, or generator to read. If the filename extension is ``.gz`` or ``.bz2``,
        the file is first decompressed. Note that generators should return byte strings for
        Python 3k.

    devices : list ["mac_address_1" <str>, "mac_address_2" <str>...]
        List of devices selected by the user.

    channels : list [[mac_address_1_channel_1 <int>, mac_address_1_channel_2 <int>...],
                     [mac_address_2_channel_1 <int>...]...]
        From which channels will the data be loaded.

    header : dict
        File header with relevant metadata for identifying which columns may be read.

    **kwargs : list of variable keyword arguments. The valid keywords are those used by
               numpy.loadtxt function.

    Returns
    -------
    out_dict : dict
        Data read from the text file.
    """
    # %%%%%%%%%%%%%%%%%%%%%%%%%%% Exclusion of invalid keywords %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
    kwargs_txt = _filter_keywords(numpy.loadtxt, kwargs)

    # %%%%%%%%%%%%%%%%%%%%%%%%%% Columns of the selected channels %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
    out_dict = {}
    for dev_nbr, device in enumerate(devices):
        out_dict[device] = {}
        columns = []
        for chn in channels[dev_nbr]:
            columns.append(header[device]["column labels"][chn])
            # header[device]["column labels"] contains the column of .txt file where the data of
            # channel "chn" is located.
            out_dict[device]["CH" + str(chn)] = numpy.loadtxt(fname=file,
                                                              usecols=header[device]["column labels"][chn],
                                                              **kwargs_txt)
    return out_dict
python
def _load_txt(file, devices, channels, header, **kwargs):
    """
    Function used for reading .txt files generated by OpenSignals.

    ----------
    Parameters
    ----------
    file : file, str, or pathlib.Path
        File, filename, or generator to read. If the filename extension is ``.gz`` or ``.bz2``,
        the file is first decompressed. Note that generators should return byte strings for
        Python 3k.

    devices : list ["mac_address_1" <str>, "mac_address_2" <str>...]
        List of devices selected by the user.

    channels : list [[mac_address_1_channel_1 <int>, mac_address_1_channel_2 <int>...],
                     [mac_address_2_channel_1 <int>...]...]
        From which channels will the data be loaded.

    header : dict
        File header with relevant metadata for identifying which columns may be read.

    **kwargs : list of variable keyword arguments. The valid keywords are those used by
               numpy.loadtxt function.

    Returns
    -------
    out_dict : dict
        Data read from the text file.
    """
    # %%%%%%%%%%%%%%%%%%%%%%%%%%% Exclusion of invalid keywords %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
    kwargs_txt = _filter_keywords(numpy.loadtxt, kwargs)

    # %%%%%%%%%%%%%%%%%%%%%%%%%% Columns of the selected channels %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
    out_dict = {}
    for dev_nbr, device in enumerate(devices):
        out_dict[device] = {}
        columns = []
        for chn in channels[dev_nbr]:
            columns.append(header[device]["column labels"][chn])
            # header[device]["column labels"] contains the column of .txt file where the data of
            # channel "chn" is located.
            out_dict[device]["CH" + str(chn)] = numpy.loadtxt(fname=file,
                                                              usecols=header[device]["column labels"][chn],
                                                              **kwargs_txt)
    return out_dict
[ "def", "_load_txt", "(", "file", ",", "devices", ",", "channels", ",", "header", ",", "*", "*", "kwargs", ")", ":", "# %%%%%%%%%%%%%%%%%%%%%%%%%%% Exclusion of invalid keywords %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%", "kwargs_txt", "=", "_filter_keywords", "(", "numpy", ".", "loadtxt", ",", "kwargs", ")", "# %%%%%%%%%%%%%%%%%%%%%%%%%% Columns of the selected channels %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%", "out_dict", "=", "{", "}", "for", "dev_nbr", ",", "device", "in", "enumerate", "(", "devices", ")", ":", "out_dict", "[", "device", "]", "=", "{", "}", "columns", "=", "[", "]", "for", "chn", "in", "channels", "[", "dev_nbr", "]", ":", "columns", ".", "append", "(", "header", "[", "device", "]", "[", "\"column labels\"", "]", "[", "chn", "]", ")", "# header[device][\"column labels\"] contains the column of .txt file where the data of", "# channel \"chn\" is located.", "out_dict", "[", "device", "]", "[", "\"CH\"", "+", "str", "(", "chn", ")", "]", "=", "numpy", ".", "loadtxt", "(", "fname", "=", "file", ",", "usecols", "=", "header", "[", "device", "]", "[", "\"column labels\"", "]", "[", "chn", "]", ",", "*", "*", "kwargs_txt", ")", "return", "out_dict" ]
Function used for reading .txt files generated by OpenSignals. ---------- Parameters ---------- file : file, str, or pathlib.Path File, filename, or generator to read. If the filename extension is ``.gz`` or ``.bz2``, the file is first decompressed. Note that generators should return byte strings for Python 3k. devices : list ["mac_address_1" <str>, "mac_address_2" <str>...] List of devices selected by the user. channels : list [[mac_address_1_channel_1 <int>, mac_address_1_channel_2 <int>...], [mac_address_2_channel_1 <int>...]...] From which channels will the data be loaded. header : dict File header with relevant metadata for identifying which columns may be read. **kwargs : list of variable keyword arguments. The valid keywords are those used by numpy.loadtxt function. Returns ------- out_dict : dict Data read from the text file.
[ "Function", "used", "for", "reading", ".", "txt", "files", "generated", "by", "OpenSignals", "." ]
aaa01d4125180b3a34f1e26e0d3ff08c23f666d3
https://github.com/biosignalsnotebooks/biosignalsnotebooks/blob/aaa01d4125180b3a34f1e26e0d3ff08c23f666d3/biosignalsnotebooks/build/lib/biosignalsnotebooks/load.py#L487-L533
train
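The loader above relies on a header that maps each channel to a column index and feeds that index to numpy.loadtxt via usecols. A small usage sketch with a made-up three-column acquisition:

import io
import numpy

# made-up acquisition: sample index, CH1, CH2
text = "0\t512\t300\n1\t515\t298\n2\t510\t305\n"
header = {"00:07:80:XX:XX:XX": {"column labels": {1: 1, 2: 2}}}  # channel -> column index (made up)

signals = {}
for device, meta in header.items():
    signals[device] = {}
    for chn, col in meta["column labels"].items():
        signals[device]["CH" + str(chn)] = numpy.loadtxt(io.StringIO(text), usecols=col)

print(signals["00:07:80:XX:XX:XX"]["CH1"])  # [512. 515. 510.]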
biosignalsnotebooks/biosignalsnotebooks
biosignalsnotebooks/build/lib/biosignalsnotebooks/load.py
_load_h5
def _load_h5(file, devices, channels):
    """
    Function used for reading .h5 files generated by OpenSignals.

    ----------
    Parameters
    ----------
    file : file path.
        File Path.

    devices : list ["mac_address_1" <str>, "mac_address_2" <str>...]
        List of devices selected by the user.

    channels : list [[mac_address_1_channel_1 <int>, mac_address_1_channel_2 <int>...],
                     [mac_address_2_channel_1 <int>...]...]
        From which channels will the data be loaded.

    Returns
    -------
    out_dict : dict
        Data read from the h5 file.
    """
    # %%%%%%%%%%%%%%%%%%%%%%%%%%%% Creation of h5py object %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
    h5_object = h5py.File(file)

    # %%%%%%%%%%%%%%%%%%%%%%%%% Data of the selected channels %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
    out_dict = {}
    for dev_nbr, device in enumerate(devices):
        out_dict[device] = {}
        for chn in channels[dev_nbr]:
            data_temp = list(h5_object.get(device).get("raw").get("channel_" + str(chn)))

            # Conversion of a nested list to a flatten list by list-comprehension
            # The following line is equivalent to:
            # for sublist in h5_data:
            #     for item in sublist:
            #         flat_list.append(item)
            #out_dict[device]["CH" + str(chn)] = [item for sublist in data_temp for item in sublist]
            out_dict[device]["CH" + str(chn)] = numpy.concatenate(data_temp)

    return out_dict
python
def _load_h5(file, devices, channels):
    """
    Function used for reading .h5 files generated by OpenSignals.

    ----------
    Parameters
    ----------
    file : file path.
        File Path.

    devices : list ["mac_address_1" <str>, "mac_address_2" <str>...]
        List of devices selected by the user.

    channels : list [[mac_address_1_channel_1 <int>, mac_address_1_channel_2 <int>...],
                     [mac_address_2_channel_1 <int>...]...]
        From which channels will the data be loaded.

    Returns
    -------
    out_dict : dict
        Data read from the h5 file.
    """
    # %%%%%%%%%%%%%%%%%%%%%%%%%%%% Creation of h5py object %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
    h5_object = h5py.File(file)

    # %%%%%%%%%%%%%%%%%%%%%%%%% Data of the selected channels %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
    out_dict = {}
    for dev_nbr, device in enumerate(devices):
        out_dict[device] = {}
        for chn in channels[dev_nbr]:
            data_temp = list(h5_object.get(device).get("raw").get("channel_" + str(chn)))

            # Conversion of a nested list to a flatten list by list-comprehension
            # The following line is equivalent to:
            # for sublist in h5_data:
            #     for item in sublist:
            #         flat_list.append(item)
            #out_dict[device]["CH" + str(chn)] = [item for sublist in data_temp for item in sublist]
            out_dict[device]["CH" + str(chn)] = numpy.concatenate(data_temp)

    return out_dict
[ "def", "_load_h5", "(", "file", ",", "devices", ",", "channels", ")", ":", "# %%%%%%%%%%%%%%%%%%%%%%%%%%%% Creation of h5py object %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%", "h5_object", "=", "h5py", ".", "File", "(", "file", ")", "# %%%%%%%%%%%%%%%%%%%%%%%%% Data of the selected channels %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%", "out_dict", "=", "{", "}", "for", "dev_nbr", ",", "device", "in", "enumerate", "(", "devices", ")", ":", "out_dict", "[", "device", "]", "=", "{", "}", "for", "chn", "in", "channels", "[", "dev_nbr", "]", ":", "data_temp", "=", "list", "(", "h5_object", ".", "get", "(", "device", ")", ".", "get", "(", "\"raw\"", ")", ".", "get", "(", "\"channel_\"", "+", "str", "(", "chn", ")", ")", ")", "# Conversion of a nested list to a flatten list by list-comprehension", "# The following line is equivalent to:", "# for sublist in h5_data:", "# for item in sublist:", "# flat_list.append(item)", "#out_dict[device][\"CH\" + str(chn)] = [item for sublist in data_temp for item in sublist]", "out_dict", "[", "device", "]", "[", "\"CH\"", "+", "str", "(", "chn", ")", "]", "=", "numpy", ".", "concatenate", "(", "data_temp", ")", "return", "out_dict" ]
Function used for reading .h5 files generated by OpenSignals. ---------- Parameters ---------- file : file path. File Path. devices : list ["mac_address_1" <str>, "mac_address_2" <str>...] List of devices selected by the user. channels : list [[mac_address_1_channel_1 <int>, mac_address_1_channel_2 <int>...], [mac_address_2_channel_1 <int>...]...] From which channels will the data be loaded. Returns ------- out_dict : dict Data read from the h5 file.
[ "Function", "used", "for", "reading", ".", "h5", "files", "generated", "by", "OpenSignals", "." ]
aaa01d4125180b3a34f1e26e0d3ff08c23f666d3
https://github.com/biosignalsnotebooks/biosignalsnotebooks/blob/aaa01d4125180b3a34f1e26e0d3ff08c23f666d3/biosignalsnotebooks/build/lib/biosignalsnotebooks/load.py#L536-L577
train
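The HDF5 layout assumed above is <device>/raw/channel_<n>, with each channel stored as nested (N, 1) samples that get flattened with numpy.concatenate. A minimal round-trip sketch (group names and data are illustrative, not the OpenSignals file format specification):

import h5py
import numpy

with h5py.File("example.h5", "w") as h5:
    grp = h5.create_group("00:07:80:XX:XX:XX/raw")
    grp.create_dataset("channel_1", data=numpy.arange(6).reshape(-1, 1))  # nested (N, 1) samples

with h5py.File("example.h5", "r") as h5:
    nested = list(h5["00:07:80:XX:XX:XX"]["raw"]["channel_1"])
    flat = numpy.concatenate(nested)   # same flattening step as in _load_h5

print(flat)  # [0 1 2 3 4 5]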
biosignalsnotebooks/biosignalsnotebooks
biosignalsnotebooks/build/lib/biosignalsnotebooks/load.py
_check_chn_type
def _check_chn_type(channels, available_channels):
    """
    Function used for checking weather the elements in "channels" input are coincident with the
    available channels.

    ----------
    Parameters
    ----------
    channels : list [[mac_address_1_channel_1 <int>, mac_address_1_channel_2 <int>...],
                     [mac_address_2_channel_1 <int>...]...]
        From which channels will the data be loaded.

    available_channels : dict
        Dictionary with the list of all the available channels per device.

    Returns
    -------
    out : list
        It is returned a list of the selected channels in a standardized format.
    """
    # ------------------------ Definition of constants and variables -------------------------------
    chn_list_standardized = []

    # %%%%%%%%%%%%%%%%%%%%%%%%%%% Fill of "chn_list_standardized" %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
    devices = list(available_channels.keys())
    for dev_nbr, device in enumerate(devices):
        if channels is not None:
            sub_unit = channels[dev_nbr]
            for channel in sub_unit:
                # Each sublist must be composed by integers.
                if channel in available_channels[devices[dev_nbr]]:
                    continue
                else:
                    raise RuntimeError("At least one of the specified channels is not available in "
                                       "the acquisition file.")
            chn_list_standardized.append(sub_unit)
        else:
            # By omission all the channels were selected.
            chn_list_standardized.append(available_channels[device])

    return chn_list_standardized
python
def _check_chn_type(channels, available_channels):
    """
    Function used for checking weather the elements in "channels" input are coincident with the
    available channels.

    ----------
    Parameters
    ----------
    channels : list [[mac_address_1_channel_1 <int>, mac_address_1_channel_2 <int>...],
                     [mac_address_2_channel_1 <int>...]...]
        From which channels will the data be loaded.

    available_channels : dict
        Dictionary with the list of all the available channels per device.

    Returns
    -------
    out : list
        It is returned a list of the selected channels in a standardized format.
    """
    # ------------------------ Definition of constants and variables -------------------------------
    chn_list_standardized = []

    # %%%%%%%%%%%%%%%%%%%%%%%%%%% Fill of "chn_list_standardized" %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
    devices = list(available_channels.keys())
    for dev_nbr, device in enumerate(devices):
        if channels is not None:
            sub_unit = channels[dev_nbr]
            for channel in sub_unit:
                # Each sublist must be composed by integers.
                if channel in available_channels[devices[dev_nbr]]:
                    continue
                else:
                    raise RuntimeError("At least one of the specified channels is not available in "
                                       "the acquisition file.")
            chn_list_standardized.append(sub_unit)
        else:
            # By omission all the channels were selected.
            chn_list_standardized.append(available_channels[device])

    return chn_list_standardized
[ "def", "_check_chn_type", "(", "channels", ",", "available_channels", ")", ":", "# ------------------------ Definition of constants and variables -------------------------------", "chn_list_standardized", "=", "[", "]", "# %%%%%%%%%%%%%%%%%%%%%%%%%%% Fill of \"chn_list_standardized\" %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%", "devices", "=", "list", "(", "available_channels", ".", "keys", "(", ")", ")", "for", "dev_nbr", ",", "device", "in", "enumerate", "(", "devices", ")", ":", "if", "channels", "is", "not", "None", ":", "sub_unit", "=", "channels", "[", "dev_nbr", "]", "for", "channel", "in", "sub_unit", ":", "# Each sublist must be composed by integers.", "if", "channel", "in", "available_channels", "[", "devices", "[", "dev_nbr", "]", "]", ":", "continue", "else", ":", "raise", "RuntimeError", "(", "\"At least one of the specified channels is not available in \"", "\"the acquisition file.\"", ")", "chn_list_standardized", ".", "append", "(", "sub_unit", ")", "else", ":", "# By omission all the channels were selected.", "chn_list_standardized", ".", "append", "(", "available_channels", "[", "device", "]", ")", "return", "chn_list_standardized" ]
Function used for checking whether the elements in "channels" input are coincident with the available channels. ---------- Parameters ---------- channels : list [[mac_address_1_channel_1 <int>, mac_address_1_channel_2 <int>...], [mac_address_2_channel_1 <int>...]...] From which channels will the data be loaded. available_channels : dict Dictionary with the list of all the available channels per device. Returns ------- out : list A list of the selected channels is returned in a standardized format.
[ "Function", "used", "for", "checking", "weather", "the", "elements", "in", "channels", "input", "are", "coincident", "with", "the", "available", "channels", "." ]
aaa01d4125180b3a34f1e26e0d3ff08c23f666d3
https://github.com/biosignalsnotebooks/biosignalsnotebooks/blob/aaa01d4125180b3a34f1e26e0d3ff08c23f666d3/biosignalsnotebooks/build/lib/biosignalsnotebooks/load.py#L642-L683
train
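The channel check is a per-device membership test that falls back to "all available channels" when none are given. A pure-Python sketch of the same behaviour (device names and channel numbers are made up):

def standardize_channels(channels, available_channels):
    devices = list(available_channels)
    if channels is None:
        # by omission, keep every available channel per device
        return [list(available_channels[dev]) for dev in devices]
    for dev_nbr, dev in enumerate(devices):
        for chn in channels[dev_nbr]:
            if chn not in available_channels[dev]:
                raise RuntimeError("channel %s not available for %s" % (chn, dev))
    return channels

print(standardize_channels(None, {"dev_A": [1, 2, 3]}))      # [[1, 2, 3]]
print(standardize_channels([[1, 3]], {"dev_A": [1, 2, 3]}))  # [[1, 3]]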
biosignalsnotebooks/biosignalsnotebooks
biosignalsnotebooks/build/lib/biosignalsnotebooks/load.py
_available_channels
def _available_channels(devices, header):
    """
    Function used for the determination of the available channels in each device.

    ----------
    Parameters
    ----------
    devices : list ["mac_address_1" <str>, "mac_address_2" <str>...]
        List of devices selected by the user.

    header: dict
        Dictionary that contains auxiliary data of the acquisition.

    Returns
    -------
    out : dict
        Returns a dictionary where each device defines a key and the respective value will be a
        list of the available channels for the device.
    """
    # ------------------------ Definition of constants and variables ------------------------------
    chn_dict = {}

    # %%%%%%%%%%%%%%%%%%%%%% Access to the relevant data in the header %%%%%%%%%%%%%%%%%%%%%%%%%%%%
    for dev in devices:
        chn_dict[dev] = header[dev]["column labels"].keys()

    return chn_dict
python
def _available_channels(devices, header):
    """
    Function used for the determination of the available channels in each device.

    ----------
    Parameters
    ----------
    devices : list ["mac_address_1" <str>, "mac_address_2" <str>...]
        List of devices selected by the user.

    header: dict
        Dictionary that contains auxiliary data of the acquisition.

    Returns
    -------
    out : dict
        Returns a dictionary where each device defines a key and the respective value will be a
        list of the available channels for the device.
    """
    # ------------------------ Definition of constants and variables ------------------------------
    chn_dict = {}

    # %%%%%%%%%%%%%%%%%%%%%% Access to the relevant data in the header %%%%%%%%%%%%%%%%%%%%%%%%%%%%
    for dev in devices:
        chn_dict[dev] = header[dev]["column labels"].keys()

    return chn_dict
[ "def", "_available_channels", "(", "devices", ",", "header", ")", ":", "# ------------------------ Definition of constants and variables ------------------------------", "chn_dict", "=", "{", "}", "# %%%%%%%%%%%%%%%%%%%%%% Access to the relevant data in the header %%%%%%%%%%%%%%%%%%%%%%%%%%%%", "for", "dev", "in", "devices", ":", "chn_dict", "[", "dev", "]", "=", "header", "[", "dev", "]", "[", "\"column labels\"", "]", ".", "keys", "(", ")", "return", "chn_dict" ]
Function used for the determination of the available channels in each device. ---------- Parameters ---------- devices : list ["mac_address_1" <str>, "mac_address_2" <str>...] List of devices selected by the user. header: dict Dictionary that contains auxiliary data of the acquisition. Returns ------- out : dict Returns a dictionary where each device defines a key and the respective value will be a list of the available channels for the device.
[ "Function", "used", "for", "the", "determination", "of", "the", "available", "channels", "in", "each", "device", "." ]
aaa01d4125180b3a34f1e26e0d3ff08c23f666d3
https://github.com/biosignalsnotebooks/biosignalsnotebooks/blob/aaa01d4125180b3a34f1e26e0d3ff08c23f666d3/biosignalsnotebooks/build/lib/biosignalsnotebooks/load.py#L686-L714
train
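Listing the available channels is essentially a dictionary comprehension over the header. Sketch with a made-up header:

header = {"dev_A": {"column labels": {1: 2, 2: 3}},
          "dev_B": {"column labels": {1: 2}}}           # made-up header structure

available = {dev: list(header[dev]["column labels"]) for dev in header}
print(available)   # {'dev_A': [1, 2], 'dev_B': [1]}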
biosignalsnotebooks/biosignalsnotebooks
biosignalsnotebooks/build/lib/biosignalsnotebooks/load.py
_check_dev_type
def _check_dev_type(devices, dev_list):
    """
    Function used for checking weather the "devices" field only contain devices used during the
    acquisition.

    ----------
    Parameters
    ----------
    devices : list ["mac_address_1" <str>, "mac_address_2" <str>...]
        List of devices selected by the user.

    dev_list : list
        List of available devices in the acquisition file.

    Returns
    -------
    out : list
        Returns a standardized list of devices.
    """
    if devices is not None:
        for device in devices:
            if device in dev_list:
                # List element is one of the available devices.
                continue
            else:
                raise RuntimeError("At least one of the specified devices is not available in the "
                                   "acquisition file.")
        out = devices
    else:
        out = dev_list

    return out
python
def _check_dev_type(devices, dev_list):
    """
    Function used for checking weather the "devices" field only contain devices used during the
    acquisition.

    ----------
    Parameters
    ----------
    devices : list ["mac_address_1" <str>, "mac_address_2" <str>...]
        List of devices selected by the user.

    dev_list : list
        List of available devices in the acquisition file.

    Returns
    -------
    out : list
        Returns a standardized list of devices.
    """
    if devices is not None:
        for device in devices:
            if device in dev_list:
                # List element is one of the available devices.
                continue
            else:
                raise RuntimeError("At least one of the specified devices is not available in the "
                                   "acquisition file.")
        out = devices
    else:
        out = dev_list

    return out
[ "def", "_check_dev_type", "(", "devices", ",", "dev_list", ")", ":", "if", "devices", "is", "not", "None", ":", "for", "device", "in", "devices", ":", "if", "device", "in", "dev_list", ":", "# List element is one of the available devices.", "continue", "else", ":", "raise", "RuntimeError", "(", "\"At least one of the specified devices is not available in the \"", "\"acquisition file.\"", ")", "out", "=", "devices", "else", ":", "out", "=", "dev_list", "return", "out" ]
Function used for checking whether the "devices" field only contains devices used during the acquisition. ---------- Parameters ---------- devices : list ["mac_address_1" <str>, "mac_address_2" <str>...] List of devices selected by the user. dev_list : list List of available devices in the acquisition file. Returns ------- out : list Returns a standardized list of devices.
[ "Function", "used", "for", "checking", "weather", "the", "devices", "field", "only", "contain", "devices", "used", "during", "the", "acquisition", "." ]
aaa01d4125180b3a34f1e26e0d3ff08c23f666d3
https://github.com/biosignalsnotebooks/biosignalsnotebooks/blob/aaa01d4125180b3a34f1e26e0d3ff08c23f666d3/biosignalsnotebooks/build/lib/biosignalsnotebooks/load.py#L717-L750
train
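A minimal usage sketch for the _check_dev_type record above. The MAC addresses are hypothetical, and the helper is assumed to be in scope exactly as defined in the record.

# Hypothetical device lists; the helper validates the user's selection against
# the devices actually present in the acquisition file.
dev_list = ["00:07:80:3B:46:61", "00:07:80:3B:46:62"]
print(_check_dev_type(["00:07:80:3B:46:61"], dev_list))  # the selection is returned unchanged
print(_check_dev_type(None, dev_list))                   # no selection -> the full device list
try:
    _check_dev_type(["AA:BB:CC:DD:EE:FF"], dev_list)     # unknown device
except RuntimeError as exc:
    print(exc)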
biosignalsnotebooks/biosignalsnotebooks
biosignalsnotebooks/build/lib/biosignalsnotebooks/load.py
_file_type
def _file_type(file): """ Function intended for identification of the file type. ---------- Parameters ---------- file : file path File path. Returns ------- out : str Identified file type. """ # %%%%%%%%%%%%%%%%%%%%%%%%%%%%% Verification of file type %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% if "." in file: # File with known extension. file_type = file.split(".")[-1] else: # File without known extension. file_type = magic.from_file(file, mime=True).split("/")[-1] return file_type
python
def _file_type(file): """ Function intended for identification of the file type. ---------- Parameters ---------- file : file path File path. Returns ------- out : str Identified file type. """ # %%%%%%%%%%%%%%%%%%%%%%%%%%%%% Verification of file type %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% if "." in file: # File with known extension. file_type = file.split(".")[-1] else: # File without known extension. file_type = magic.from_file(file, mime=True).split("/")[-1] return file_type
[ "def", "_file_type", "(", "file", ")", ":", "# %%%%%%%%%%%%%%%%%%%%%%%%%%%%% Verification of file type %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%", "if", "\".\"", "in", "file", ":", "# File with known extension.", "file_type", "=", "file", ".", "split", "(", "\".\"", ")", "[", "-", "1", "]", "else", ":", "# File without known extension.", "file_type", "=", "magic", ".", "from_file", "(", "file", ",", "mime", "=", "True", ")", ".", "split", "(", "\"/\"", ")", "[", "-", "1", "]", "return", "file_type" ]
Function intended for identification of the file type. ---------- Parameters ---------- file : file path File path. Returns ------- out : str Identified file type.
[ "Function", "intended", "for", "identification", "of", "the", "file", "type", "." ]
aaa01d4125180b3a34f1e26e0d3ff08c23f666d3
https://github.com/biosignalsnotebooks/biosignalsnotebooks/blob/aaa01d4125180b3a34f1e26e0d3ff08c23f666d3/biosignalsnotebooks/build/lib/biosignalsnotebooks/load.py#L753-L775
train
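A short, hedged usage sketch for the _file_type record above; it assumes the python-magic package backs the magic module imported by that module, and the file names are placeholders.

print(_file_type("acquisition.txt"))    # 'txt', taken straight from the extension
print(_file_type("acquisition_noext"))  # MIME subtype guessed from content, e.g. 'plain'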
KE-works/pykechain
pykechain/models/scope.py
Scope.team
def team(self): """Team to which the scope is assigned.""" team_dict = self._json_data.get('team') if team_dict and team_dict.get('id'): return self._client.team(id=team_dict.get('id')) else: return None
python
def team(self): """Team to which the scope is assigned.""" team_dict = self._json_data.get('team') if team_dict and team_dict.get('id'): return self._client.team(id=team_dict.get('id')) else: return None
[ "def", "team", "(", "self", ")", ":", "team_dict", "=", "self", ".", "_json_data", ".", "get", "(", "'team'", ")", "if", "team_dict", "and", "team_dict", ".", "get", "(", "'id'", ")", ":", "return", "self", ".", "_client", ".", "team", "(", "id", "=", "team_dict", ".", "get", "(", "'id'", ")", ")", "else", ":", "return", "None" ]
Team to which the scope is assigned.
[ "Team", "to", "which", "the", "scope", "is", "assigned", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/scope.py#L56-L62
train
KE-works/pykechain
pykechain/models/scope.py
Scope.parts
def parts(self, *args, **kwargs): """Retrieve parts belonging to this scope. See :class:`pykechain.Client.parts` for available parameters. """ return self._client.parts(*args, bucket=self.bucket.get('id'), **kwargs)
python
def parts(self, *args, **kwargs): """Retrieve parts belonging to this scope. See :class:`pykechain.Client.parts` for available parameters. """ return self._client.parts(*args, bucket=self.bucket.get('id'), **kwargs)
[ "def", "parts", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_client", ".", "parts", "(", "*", "args", ",", "bucket", "=", "self", ".", "bucket", ".", "get", "(", "'id'", ")", ",", "*", "*", "kwargs", ")" ]
Retrieve parts belonging to this scope. See :class:`pykechain.Client.parts` for available parameters.
[ "Retrieve", "parts", "belonging", "to", "this", "scope", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/scope.py#L76-L81
train
KE-works/pykechain
pykechain/models/scope.py
Scope.part
def part(self, *args, **kwargs): """Retrieve a single part belonging to this scope. See :class:`pykechain.Client.part` for available parameters. """ return self._client.part(*args, bucket=self.bucket.get('id'), **kwargs)
python
def part(self, *args, **kwargs): """Retrieve a single part belonging to this scope. See :class:`pykechain.Client.part` for available parameters. """ return self._client.part(*args, bucket=self.bucket.get('id'), **kwargs)
[ "def", "part", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_client", ".", "part", "(", "*", "args", ",", "bucket", "=", "self", ".", "bucket", ".", "get", "(", "'id'", ")", ",", "*", "*", "kwargs", ")" ]
Retrieve a single part belonging to this scope. See :class:`pykechain.Client.part` for available parameters.
[ "Retrieve", "a", "single", "part", "belonging", "to", "this", "scope", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/scope.py#L83-L88
train
KE-works/pykechain
pykechain/models/scope.py
Scope.create_model
def create_model(self, parent, name, multiplicity=Multiplicity.ZERO_MANY): """Create a single part model in this scope. See :class:`pykechain.Client.create_model` for available parameters. """ return self._client.create_model(parent, name, multiplicity=multiplicity)
python
def create_model(self, parent, name, multiplicity=Multiplicity.ZERO_MANY): """Create a single part model in this scope. See :class:`pykechain.Client.create_model` for available parameters. """ return self._client.create_model(parent, name, multiplicity=multiplicity)
[ "def", "create_model", "(", "self", ",", "parent", ",", "name", ",", "multiplicity", "=", "Multiplicity", ".", "ZERO_MANY", ")", ":", "return", "self", ".", "_client", ".", "create_model", "(", "parent", ",", "name", ",", "multiplicity", "=", "multiplicity", ")" ]
Create a single part model in this scope. See :class:`pykechain.Client.create_model` for available parameters.
[ "Create", "a", "single", "part", "model", "in", "this", "scope", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/scope.py#L90-L95
train
KE-works/pykechain
pykechain/models/scope.py
Scope.model
def model(self, *args, **kwargs): """Retrieve a single model belonging to this scope. See :class:`pykechain.Client.model` for available parameters. """ return self._client.model(*args, bucket=self.bucket.get('id'), **kwargs)
python
def model(self, *args, **kwargs): """Retrieve a single model belonging to this scope. See :class:`pykechain.Client.model` for available parameters. """ return self._client.model(*args, bucket=self.bucket.get('id'), **kwargs)
[ "def", "model", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_client", ".", "model", "(", "*", "args", ",", "bucket", "=", "self", ".", "bucket", ".", "get", "(", "'id'", ")", ",", "*", "*", "kwargs", ")" ]
Retrieve a single model belonging to this scope. See :class:`pykechain.Client.model` for available parameters.
[ "Retrieve", "a", "single", "model", "belonging", "to", "this", "scope", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/scope.py#L97-L102
train
KE-works/pykechain
pykechain/models/scope.py
Scope.activities
def activities(self, *args, **kwargs): """Retrieve activities belonging to this scope. See :class:`pykechain.Client.activities` for available parameters. """ if self._client.match_app_version(label='wim', version='<2.0.0', default=True): return self._client.activities(*args, scope=self.id, **kwargs) else: return self._client.activities(*args, scope_id=self.id, **kwargs)
python
def activities(self, *args, **kwargs): """Retrieve activities belonging to this scope. See :class:`pykechain.Client.activities` for available parameters. """ if self._client.match_app_version(label='wim', version='<2.0.0', default=True): return self._client.activities(*args, scope=self.id, **kwargs) else: return self._client.activities(*args, scope_id=self.id, **kwargs)
[ "def", "activities", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "_client", ".", "match_app_version", "(", "label", "=", "'wim'", ",", "version", "=", "'<2.0.0'", ",", "default", "=", "True", ")", ":", "return", "self", ".", "_client", ".", "activities", "(", "*", "args", ",", "scope", "=", "self", ".", "id", ",", "*", "*", "kwargs", ")", "else", ":", "return", "self", ".", "_client", ".", "activities", "(", "*", "args", ",", "scope_id", "=", "self", ".", "id", ",", "*", "*", "kwargs", ")" ]
Retrieve activities belonging to this scope. See :class:`pykechain.Client.activities` for available parameters.
[ "Retrieve", "activities", "belonging", "to", "this", "scope", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/scope.py#L104-L112
train
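A minimal sketch of how the Scope.activities wrapper above might be called; the server URL, token and scope name are hypothetical. The version check in the record only decides whether scope or scope_id is forwarded to Client.activities.

from pykechain import Client

client = Client(url="https://kec.example.com")  # hypothetical server
client.login(token="<api token>")               # credentials elided
project = client.scope(name="Bike Project")     # hypothetical scope name
tasks = project.activities()                    # only activities of this scope are returned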
KE-works/pykechain
pykechain/models/scope.py
Scope.create_activity
def create_activity(self, *args, **kwargs): """Create a new activity belonging to this scope. See :class:`pykechain.Client.create_activity` for available parameters. """ if self._client.match_app_version(label='wim', version='<2.0.0', default=True): return self._client.create_activity(self.process, *args, **kwargs) else: return self._client.create_activity(self.workflow_root, *args, **kwargs)
python
def create_activity(self, *args, **kwargs): """Create a new activity belonging to this scope. See :class:`pykechain.Client.create_activity` for available parameters. """ if self._client.match_app_version(label='wim', version='<2.0.0', default=True): return self._client.create_activity(self.process, *args, **kwargs) else: return self._client.create_activity(self.workflow_root, *args, **kwargs)
[ "def", "create_activity", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "_client", ".", "match_app_version", "(", "label", "=", "'wim'", ",", "version", "=", "'<2.0.0'", ",", "default", "=", "True", ")", ":", "return", "self", ".", "_client", ".", "create_activity", "(", "self", ".", "process", ",", "*", "args", ",", "*", "*", "kwargs", ")", "else", ":", "return", "self", ".", "_client", ".", "create_activity", "(", "self", ".", "workflow_root", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Create a new activity belonging to this scope. See :class:`pykechain.Client.create_activity` for available parameters.
[ "Create", "a", "new", "activity", "belonging", "to", "this", "scope", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/scope.py#L124-L132
train
KE-works/pykechain
pykechain/models/scope.py
Scope.create_service
def create_service(self, *args, **kwargs): """Create a service in the current scope. See :class:`pykechain.Client.create_service` for available parameters. .. versionadded:: 1.13 """ return self._client.create_service(*args, scope=self.id, **kwargs)
python
def create_service(self, *args, **kwargs): """Create a service in the current scope. See :class:`pykechain.Client.create_service` for available parameters. .. versionadded:: 1.13 """ return self._client.create_service(*args, scope=self.id, **kwargs)
[ "def", "create_service", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_client", ".", "create_service", "(", "*", "args", ",", "scope", "=", "self", ".", "id", ",", "*", "*", "kwargs", ")" ]
Create a service in the current scope. See :class:`pykechain.Client.create_service` for available parameters. .. versionadded:: 1.13
[ "Create", "a", "service", "in", "the", "current", "scope", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/scope.py#L143-L150
train
KE-works/pykechain
pykechain/models/scope.py
Scope.service
def service(self, *args, **kwargs): """Retrieve a single service belonging to this scope. See :class:`pykechain.Client.service` for available parameters. .. versionadded:: 1.13 """ return self._client.service(*args, scope=self.id, **kwargs)
python
def service(self, *args, **kwargs): """Retrieve a single service belonging to this scope. See :class:`pykechain.Client.service` for available parameters. .. versionadded:: 1.13 """ return self._client.service(*args, scope=self.id, **kwargs)
[ "def", "service", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_client", ".", "service", "(", "*", "args", ",", "scope", "=", "self", ".", "id", ",", "*", "*", "kwargs", ")" ]
Retrieve a single service belonging to this scope. See :class:`pykechain.Client.service` for available parameters. .. versionadded:: 1.13
[ "Retrieve", "a", "single", "service", "belonging", "to", "this", "scope", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/scope.py#L152-L159
train
KE-works/pykechain
pykechain/models/scope.py
Scope.service_execution
def service_execution(self, *args, **kwargs): """Retrieve a single service execution belonging to this scope. See :class:`pykechain.Client.service_execution` for available parameters. .. versionadded:: 1.13 """ return self._client.service_execution(*args, scope=self.id, **kwargs)
python
def service_execution(self, *args, **kwargs): """Retrieve a single service execution belonging to this scope. See :class:`pykechain.Client.service_execution` for available parameters. .. versionadded:: 1.13 """ return self._client.service_execution(*args, scope=self.id, **kwargs)
[ "def", "service_execution", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_client", ".", "service_execution", "(", "*", "args", ",", "scope", "=", "self", ".", "id", ",", "*", "*", "kwargs", ")" ]
Retrieve a single service execution belonging to this scope. See :class:`pykechain.Client.service_execution` for available parameters. .. versionadded:: 1.13
[ "Retrieve", "a", "single", "service", "execution", "belonging", "to", "this", "scope", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/scope.py#L170-L177
train
KE-works/pykechain
pykechain/models/scope.py
Scope.members
def members(self, is_manager=None): """ Retrieve members of the scope. :param is_manager: (optional) set to True to return only Scope members that are also managers. :type is_manager: bool :return: List of members (usernames) Examples -------- >>> members = project.members() >>> managers = project.members(is_manager=True) """ if not is_manager: return [member for member in self._json_data['members'] if member['is_active']] else: return [member for member in self._json_data['members'] if member.get('is_active', False) and member.get('is_manager', False)]
python
def members(self, is_manager=None): """ Retrieve members of the scope. :param is_manager: (optional) set to True to return only Scope members that are also managers. :type is_manager: bool :return: List of members (usernames) Examples -------- >>> members = project.members() >>> managers = project.members(is_manager=True) """ if not is_manager: return [member for member in self._json_data['members'] if member['is_active']] else: return [member for member in self._json_data['members'] if member.get('is_active', False) and member.get('is_manager', False)]
[ "def", "members", "(", "self", ",", "is_manager", "=", "None", ")", ":", "if", "not", "is_manager", ":", "return", "[", "member", "for", "member", "in", "self", ".", "_json_data", "[", "'members'", "]", "if", "member", "[", "'is_active'", "]", "]", "else", ":", "return", "[", "member", "for", "member", "in", "self", ".", "_json_data", "[", "'members'", "]", "if", "member", ".", "get", "(", "'is_active'", ",", "False", ")", "and", "member", ".", "get", "(", "'is_manager'", ",", "False", ")", "]" ]
Retrieve members of the scope. :param is_manager: (optional) set to True to return only Scope members that are also managers. :type is_manager: bool :return: List of members (usernames) Examples -------- >>> members = project.members() >>> managers = project.members(is_manager=True)
[ "Retrieve", "members", "of", "the", "scope", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/scope.py#L179-L197
train
KE-works/pykechain
pykechain/models/scope.py
Scope.add_member
def add_member(self, member): """ Add a single member to the scope. You may only edit the list of members if the pykechain credentials allow this. :param member: single username to be added to the scope list of members :type member: basestring :raises APIError: when unable to update the scope member """ select_action = 'add_member' self._update_scope_project_team(select_action=select_action, user=member, user_type='member')
python
def add_member(self, member): """ Add a single member to the scope. You may only edit the list of members if the pykechain credentials allow this. :param member: single username to be added to the scope list of members :type member: basestring :raises APIError: when unable to update the scope member """ select_action = 'add_member' self._update_scope_project_team(select_action=select_action, user=member, user_type='member')
[ "def", "add_member", "(", "self", ",", "member", ")", ":", "select_action", "=", "'add_member'", "self", ".", "_update_scope_project_team", "(", "select_action", "=", "select_action", ",", "user", "=", "member", ",", "user_type", "=", "'member'", ")" ]
Add a single member to the scope. You may only edit the list of members if the pykechain credentials allow this. :param member: single username to be added to the scope list of members :type member: basestring :raises APIError: when unable to update the scope member
[ "Add", "a", "single", "member", "to", "the", "scope", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/scope.py#L199-L211
train
KE-works/pykechain
pykechain/models/scope.py
Scope.remove_member
def remove_member(self, member): """ Remove a single member from the scope. :param member: single username to be removed from the scope list of members :type member: basestring :raises APIError: when unable to update the scope member """ select_action = 'remove_member' self._update_scope_project_team(select_action=select_action, user=member, user_type='member')
python
def remove_member(self, member): """ Remove a single member from the scope. :param member: single username to be removed from the scope list of members :type member: basestring :raises APIError: when unable to update the scope member """ select_action = 'remove_member' self._update_scope_project_team(select_action=select_action, user=member, user_type='member')
[ "def", "remove_member", "(", "self", ",", "member", ")", ":", "select_action", "=", "'remove_member'", "self", ".", "_update_scope_project_team", "(", "select_action", "=", "select_action", ",", "user", "=", "member", ",", "user_type", "=", "'member'", ")" ]
Remove a single member from the scope. :param member: single username to be removed from the scope list of members :type member: basestring :raises APIError: when unable to update the scope member
[ "Remove", "a", "single", "member", "from", "the", "scope", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/scope.py#L213-L223
train
KE-works/pykechain
pykechain/models/scope.py
Scope.add_manager
def add_manager(self, manager): """ Add a single manager to the scope. :param manager: single username to be added to the scope list of managers :type manager: basestring :raises APIError: when unable to update the scope manager """ select_action = 'add_manager' self._update_scope_project_team(select_action=select_action, user=manager, user_type='manager')
python
def add_manager(self, manager): """ Add a single manager to the scope. :param manager: single username to be added to the scope list of managers :type manager: basestring :raises APIError: when unable to update the scope manager """ select_action = 'add_manager' self._update_scope_project_team(select_action=select_action, user=manager, user_type='manager')
[ "def", "add_manager", "(", "self", ",", "manager", ")", ":", "select_action", "=", "'add_manager'", "self", ".", "_update_scope_project_team", "(", "select_action", "=", "select_action", ",", "user", "=", "manager", ",", "user_type", "=", "'manager'", ")" ]
Add a single manager to the scope. :param manager: single username to be added to the scope list of managers :type manager: basestring :raises APIError: when unable to update the scope manager
[ "Add", "a", "single", "manager", "to", "the", "scope", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/scope.py#L225-L235
train
KE-works/pykechain
pykechain/models/scope.py
Scope.remove_manager
def remove_manager(self, manager): """ Remove a single manager from the scope. :param manager: single username to be removed from the scope list of managers :type manager: basestring :raises APIError: when unable to update the scope manager """ select_action = 'remove_manager' self._update_scope_project_team(select_action=select_action, user=manager, user_type='manager')
python
def remove_manager(self, manager): """ Remove a single manager from the scope. :param manager: single username to be removed from the scope list of managers :type manager: basestring :raises APIError: when unable to update the scope manager """ select_action = 'remove_manager' self._update_scope_project_team(select_action=select_action, user=manager, user_type='manager')
[ "def", "remove_manager", "(", "self", ",", "manager", ")", ":", "select_action", "=", "'remove_manager'", "self", ".", "_update_scope_project_team", "(", "select_action", "=", "select_action", ",", "user", "=", "manager", ",", "user_type", "=", "'manager'", ")" ]
Remove a single manager from the scope. :param manager: single username to be removed from the scope list of managers :type manager: basestring :raises APIError: when unable to update the scope manager
[ "Remove", "a", "single", "manager", "from", "the", "scope", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/scope.py#L237-L247
train
KE-works/pykechain
pykechain/models/scope.py
Scope._update_scope_project_team
def _update_scope_project_team(self, select_action, user, user_type): """ Update the Project Team of the Scope. Updates include addition or removal of managers or members. :param select_action: type of action to be applied :type select_action: basestring :param user: the username of the user to which the action applies :type user: basestring :param user_type: the type of the user (member or manager) :type user_type: basestring :raises APIError: When unable to update the scope project team. """ if isinstance(user, str): users = self._client._retrieve_users() manager_object = next((item for item in users['results'] if item["username"] == user), None) if manager_object: url = self._client._build_url('scope', scope_id=self.id) r = self._client._request('PUT', url, params={'select_action': select_action}, data={ 'user_id': manager_object['pk'] }) if r.status_code != requests.codes.ok: # pragma: no cover raise APIError("Could not {} {} in Scope".format(select_action.split('_')[0], user_type)) else: raise NotFoundError("User {} does not exist".format(user)) else: raise TypeError("User {} should be defined as a string".format(user))
python
def _update_scope_project_team(self, select_action, user, user_type): """ Update the Project Team of the Scope. Updates include addition or removal of managers or members. :param select_action: type of action to be applied :type select_action: basestring :param user: the username of the user to which the action applies :type user: basestring :param user_type: the type of the user (member or manager) :type user_type: basestring :raises APIError: When unable to update the scope project team. """ if isinstance(user, str): users = self._client._retrieve_users() manager_object = next((item for item in users['results'] if item["username"] == user), None) if manager_object: url = self._client._build_url('scope', scope_id=self.id) r = self._client._request('PUT', url, params={'select_action': select_action}, data={ 'user_id': manager_object['pk'] }) if r.status_code != requests.codes.ok: # pragma: no cover raise APIError("Could not {} {} in Scope".format(select_action.split('_')[0], user_type)) else: raise NotFoundError("User {} does not exist".format(user)) else: raise TypeError("User {} should be defined as a string".format(user))
[ "def", "_update_scope_project_team", "(", "self", ",", "select_action", ",", "user", ",", "user_type", ")", ":", "if", "isinstance", "(", "user", ",", "str", ")", ":", "users", "=", "self", ".", "_client", ".", "_retrieve_users", "(", ")", "manager_object", "=", "next", "(", "(", "item", "for", "item", "in", "users", "[", "'results'", "]", "if", "item", "[", "\"username\"", "]", "==", "user", ")", ",", "None", ")", "if", "manager_object", ":", "url", "=", "self", ".", "_client", ".", "_build_url", "(", "'scope'", ",", "scope_id", "=", "self", ".", "id", ")", "r", "=", "self", ".", "_client", ".", "_request", "(", "'PUT'", ",", "url", ",", "params", "=", "{", "'select_action'", ":", "select_action", "}", ",", "data", "=", "{", "'user_id'", ":", "manager_object", "[", "'pk'", "]", "}", ")", "if", "r", ".", "status_code", "!=", "requests", ".", "codes", ".", "ok", ":", "# pragma: no cover", "raise", "APIError", "(", "\"Could not {} {} in Scope\"", ".", "format", "(", "select_action", ".", "split", "(", "'_'", ")", "[", "0", "]", ",", "user_type", ")", ")", "else", ":", "raise", "NotFoundError", "(", "\"User {} does not exist\"", ".", "format", "(", "user", ")", ")", "else", ":", "raise", "TypeError", "(", "\"User {} should be defined as a string\"", ".", "format", "(", "user", ")", ")" ]
Update the Project Team of the Scope. Updates include addition or removal of managers or members. :param select_action: type of action to be applied :type select_action: basestring :param user: the username of the user to which the action applies :type user: basestring :param user_type: the type of the user (member or manager) :type user_type: basestring :raises APIError: When unable to update the scope project team.
[ "Update", "the", "Project", "Team", "of", "the", "Scope", ".", "Updates", "include", "addition", "or", "removal", "of", "managers", "or", "members", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/scope.py#L249-L275
train
KE-works/pykechain
pykechain/models/scope.py
Scope.clone
def clone(self, *args, **kwargs): """ Clone current scope. See :class:`pykechain.Client.clone_scope` for available parameters. .. versionadded:: 2.6.0 """ return self._client.clone_scope(*args, source_scope=self, **kwargs)
python
def clone(self, *args, **kwargs): """ Clone current scope. See :class:`pykechain.Client.clone_scope` for available parameters. .. versionadded:: 2.6.0 """ return self._client.clone_scope(*args, source_scope=self, **kwargs)
[ "def", "clone", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_client", ".", "clone_scope", "(", "*", "args", ",", "source_scope", "=", "self", ",", "*", "*", "kwargs", ")" ]
Clone current scope. See :class:`pykechain.Client.clone_scope` for available parameters. .. versionadded:: 2.6.0
[ "Clone", "current", "scope", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/scope.py#L413-L421
train
hfurubotten/enturclient
enturclient/dto.py
Place.name
def name(self) -> str: """Friendly name for the stop place or platform""" if self.is_platform: if self._data["publicCode"]: return self._data['name'] + " Platform " + \ self._data["publicCode"] else: return self._data['name'] + " Platform " + \ self.place_id.split(':')[-1] else: return self._data['name']
python
def name(self) -> str: """Friendly name for the stop place or platform""" if self.is_platform: if self._data["publicCode"]: return self._data['name'] + " Platform " + \ self._data["publicCode"] else: return self._data['name'] + " Platform " + \ self.place_id.split(':')[-1] else: return self._data['name']
[ "def", "name", "(", "self", ")", "->", "str", ":", "if", "self", ".", "is_platform", ":", "if", "self", ".", "_data", "[", "\"publicCode\"", "]", ":", "return", "self", ".", "_data", "[", "'name'", "]", "+", "\" Platform \"", "+", "self", ".", "_data", "[", "\"publicCode\"", "]", "else", ":", "return", "self", ".", "_data", "[", "'name'", "]", "+", "\" Platform \"", "+", "self", ".", "place_id", ".", "split", "(", "':'", ")", "[", "-", "1", "]", "else", ":", "return", "self", ".", "_data", "[", "'name'", "]" ]
Friendly name for the stop place or platform
[ "Friendly", "name", "for", "the", "stop", "place", "or", "platform" ]
8230f9e9cf5b3a4911e860bc8cbe621231aa5ae4
https://github.com/hfurubotten/enturclient/blob/8230f9e9cf5b3a4911e860bc8cbe621231aa5ae4/enturclient/dto.py#L18-L28
train
SuryaSankar/flask-sqlalchemy-booster
flask_sqlalchemy_booster/custom_collections.py
MappedDictOfLists.remove
def remove(self, value, _sa_initiator=None): """Remove an item by value, consulting the keyfunc for the key.""" key = self.keyfunc(value) # Let self[key] raise if key is not in this collection # testlib.pragma exempt:__ne__ if not self.__contains__(key) or value not in self[key]: raise sa_exc.InvalidRequestError( "Can not remove '%s': collection holds '%s' for key '%s'. " "Possible cause: is the MappedCollection key function " "based on mutable properties or properties that only obtain " "values after flush?" % (value, self[key], key)) self.__getitem__(key, _sa_initiator).remove(value)
python
def remove(self, value, _sa_initiator=None): """Remove an item by value, consulting the keyfunc for the key.""" key = self.keyfunc(value) # Let self[key] raise if key is not in this collection # testlib.pragma exempt:__ne__ if not self.__contains__(key) or value not in self[key]: raise sa_exc.InvalidRequestError( "Can not remove '%s': collection holds '%s' for key '%s'. " "Possible cause: is the MappedCollection key function " "based on mutable properties or properties that only obtain " "values after flush?" % (value, self[key], key)) self.__getitem__(key, _sa_initiator).remove(value)
[ "def", "remove", "(", "self", ",", "value", ",", "_sa_initiator", "=", "None", ")", ":", "key", "=", "self", ".", "keyfunc", "(", "value", ")", "# Let self[key] raise if key is not in this collection", "# testlib.pragma exempt:__ne__", "if", "not", "self", ".", "__contains__", "(", "key", ")", "or", "value", "not", "in", "self", "[", "key", "]", ":", "raise", "sa_exc", ".", "InvalidRequestError", "(", "\"Can not remove '%s': collection holds '%s' for key '%s'. \"", "\"Possible cause: is the MappedCollection key function \"", "\"based on mutable properties or properties that only obtain \"", "\"values after flush?\"", "%", "(", "value", ",", "self", "[", "key", "]", ",", "key", ")", ")", "self", ".", "__getitem__", "(", "key", ",", "_sa_initiator", ")", ".", "remove", "(", "value", ")" ]
Remove an item by value, consulting the keyfunc for the key.
[ "Remove", "an", "item", "by", "value", "consulting", "the", "keyfunc", "for", "the", "key", "." ]
444048d167ab7718f758e943665ef32d101423a5
https://github.com/SuryaSankar/flask-sqlalchemy-booster/blob/444048d167ab7718f758e943665ef32d101423a5/flask_sqlalchemy_booster/custom_collections.py#L20-L33
train
manikos/django-progressiveimagefield
progressiveimagefield/jinja/filters.py
progressive
def progressive(image_field, alt_text=''): """ Used as a Jinja2 filter, this function returns a safe HTML chunk. Usage (in the HTML template): {{ obj.image|progressive }} :param django.db.models.fields.files.ImageFieldFile image_field: image :param str alt_text: str :return: a safe HTML template ready to be rendered """ if not isinstance(image_field, ImageFieldFile): raise ValueError('"image_field" argument must be an ImageField.') for engine in engines.all(): if isinstance(engine, BaseEngine) and hasattr(engine, 'env'): env = engine.env if isinstance(env, Environment): context = render_progressive_field(image_field, alt_text) template = env.get_template( 'progressiveimagefield/render_field.html' ) rendered = template.render(**context) return Markup(rendered) return ''
python
def progressive(image_field, alt_text=''): """ Used as a Jinja2 filter, this function returns a safe HTML chunk. Usage (in the HTML template): {{ obj.image|progressive }} :param django.db.models.fields.files.ImageFieldFile image_field: image :param str alt_text: str :return: a safe HTML template ready to be rendered """ if not isinstance(image_field, ImageFieldFile): raise ValueError('"image_field" argument must be an ImageField.') for engine in engines.all(): if isinstance(engine, BaseEngine) and hasattr(engine, 'env'): env = engine.env if isinstance(env, Environment): context = render_progressive_field(image_field, alt_text) template = env.get_template( 'progressiveimagefield/render_field.html' ) rendered = template.render(**context) return Markup(rendered) return ''
[ "def", "progressive", "(", "image_field", ",", "alt_text", "=", "''", ")", ":", "if", "not", "isinstance", "(", "image_field", ",", "ImageFieldFile", ")", ":", "raise", "ValueError", "(", "'\"image_field\" argument must be an ImageField.'", ")", "for", "engine", "in", "engines", ".", "all", "(", ")", ":", "if", "isinstance", "(", "engine", ",", "BaseEngine", ")", "and", "hasattr", "(", "engine", ",", "'env'", ")", ":", "env", "=", "engine", ".", "env", "if", "isinstance", "(", "env", ",", "Environment", ")", ":", "context", "=", "render_progressive_field", "(", "image_field", ",", "alt_text", ")", "template", "=", "env", ".", "get_template", "(", "'progressiveimagefield/render_field.html'", ")", "rendered", "=", "template", ".", "render", "(", "*", "*", "context", ")", "return", "Markup", "(", "rendered", ")", "return", "''" ]
Used as a Jinja2 filter, this function returns a safe HTML chunk. Usage (in the HTML template): {{ obj.image|progressive }} :param django.db.models.fields.files.ImageFieldFile image_field: image :param str alt_text: str :return: a safe HTML template ready to be rendered
[ "Used", "as", "a", "Jinja2", "filter", "this", "function", "returns", "a", "safe", "HTML", "chunk", "." ]
a432c79d23d87ea8944ac252ae7d15df1e4f3072
https://github.com/manikos/django-progressiveimagefield/blob/a432c79d23d87ea8944ac252ae7d15df1e4f3072/progressiveimagefield/jinja/filters.py#L17-L42
train
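One common way a filter like progressive above gets exposed to templates is through a project-level Jinja2 environment factory; this is a hedged sketch, not part of the package, and the factory name is an assumption.

from jinja2 import Environment
from progressiveimagefield.jinja.filters import progressive

def environment(**options):
    # Hypothetical factory referenced from TEMPLATES["OPTIONS"]["environment"];
    # registering the callable enables {{ obj.image|progressive }} in templates.
    env = Environment(**options)
    env.filters["progressive"] = progressive
    return env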
Thermondo/viewflow-extensions
viewflow_extensions/views.py
SavableViewActivationMixin.get_form
def get_form(self, form_class=None): """If the task was only saved, treat all form fields as not required.""" form = super().get_form(form_class) if self._save: make_form_or_formset_fields_not_required(form) return form
python
def get_form(self, form_class=None): """If the task was only saved, treat all form fields as not required.""" form = super().get_form(form_class) if self._save: make_form_or_formset_fields_not_required(form) return form
[ "def", "get_form", "(", "self", ",", "form_class", "=", "None", ")", ":", "form", "=", "super", "(", ")", ".", "get_form", "(", "form_class", ")", "if", "self", ".", "_save", ":", "make_form_or_formset_fields_not_required", "(", "form", ")", "return", "form" ]
If the task was only saved, treat all form fields as not required.
[ "If", "the", "task", "was", "only", "saved", "treat", "all", "form", "fields", "as", "not", "required", "." ]
5d2bbfe28ced7dda3e6832b96ea031c1b871053e
https://github.com/Thermondo/viewflow-extensions/blob/5d2bbfe28ced7dda3e6832b96ea031c1b871053e/viewflow_extensions/views.py#L40-L45
train
Thermondo/viewflow-extensions
viewflow_extensions/views.py
SavableViewActivationMixin.save_task
def save_task(self): """Transition to save the task and return to ``ASSIGNED`` state.""" task = self.request.activation.task task.status = STATUS.ASSIGNED task.save()
python
def save_task(self): """Transition to save the task and return to ``ASSIGNED`` state.""" task = self.request.activation.task task.status = STATUS.ASSIGNED task.save()
[ "def", "save_task", "(", "self", ")", ":", "task", "=", "self", ".", "request", ".", "activation", ".", "task", "task", ".", "status", "=", "STATUS", ".", "ASSIGNED", "task", ".", "save", "(", ")" ]
Transition to save the task and return to ``ASSIGNED`` state.
[ "Transition", "to", "save", "the", "task", "and", "return", "to", "ASSIGNED", "state", "." ]
5d2bbfe28ced7dda3e6832b96ea031c1b871053e
https://github.com/Thermondo/viewflow-extensions/blob/5d2bbfe28ced7dda3e6832b96ea031c1b871053e/viewflow_extensions/views.py#L47-L51
train
Thermondo/viewflow-extensions
viewflow_extensions/views.py
SavableViewActivationMixin.activation_done
def activation_done(self, *args, **kwargs): """Complete the ``activation`` or save only, depending on form submit.""" if self._save: self.save_task() else: super().activation_done(*args, **kwargs)
python
def activation_done(self, *args, **kwargs): """Complete the ``activation`` or save only, depending on form submit.""" if self._save: self.save_task() else: super().activation_done(*args, **kwargs)
[ "def", "activation_done", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "_save", ":", "self", ".", "save_task", "(", ")", "else", ":", "super", "(", ")", ".", "activation_done", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
Complete the ``activation`` or save only, depending on form submit.
[ "Complete", "the", "activation", "or", "save", "only", "depending", "on", "form", "submit", "." ]
5d2bbfe28ced7dda3e6832b96ea031c1b871053e
https://github.com/Thermondo/viewflow-extensions/blob/5d2bbfe28ced7dda3e6832b96ea031c1b871053e/viewflow_extensions/views.py#L53-L58
train
biosignalsnotebooks/biosignalsnotebooks
biosignalsnotebooks/build/lib/biosignalsnotebooks/external_packages/novainstrumentation/niplot.py
niplot
def niplot(): """ This script extends the native matplotlib keyboard bindings. This script allows using the `up`, `down`, `left`, and `right` keys to move the visualization window. Zooming can be performed using the `+` and `-` keys. Finally, the scroll wheel can be used to zoom under the cursor. Returns ------- """ fig = gcf() cid = fig.canvas.mpl_connect('key_press_event', # @UnusedVariable on_key_press) cid = fig.canvas.mpl_connect('key_release_event', # @UnusedVariable on_key_release) cid = fig.canvas.mpl_connect('scroll_event', zoom)
python
def niplot(): """ This script extends the native matplotlib keyboard bindings. This script allows using the `up`, `down`, `left`, and `right` keys to move the visualization window. Zooming can be performed using the `+` and `-` keys. Finally, the scroll wheel can be used to zoom under the cursor. Returns ------- """ fig = gcf() cid = fig.canvas.mpl_connect('key_press_event', # @UnusedVariable on_key_press) cid = fig.canvas.mpl_connect('key_release_event', # @UnusedVariable on_key_release) cid = fig.canvas.mpl_connect('scroll_event', zoom)
[ "def", "niplot", "(", ")", ":", "fig", "=", "gcf", "(", ")", "cid", "=", "fig", ".", "canvas", ".", "mpl_connect", "(", "'key_press_event'", ",", "# @UnusedVariable", "on_key_press", ")", "cid", "=", "fig", ".", "canvas", ".", "mpl_connect", "(", "'key_release_event'", ",", "# @UnusedVariable", "on_key_release", ")", "cid", "=", "fig", ".", "canvas", ".", "mpl_connect", "(", "'scroll_event'", ",", "zoom", ")" ]
This script extends the native matplotlib keyboard bindings. This script allows using the `up`, `down`, `left`, and `right` keys to move the visualization window. Zooming can be performed using the `+` and `-` keys. Finally, the scroll wheel can be used to zoom under the cursor. Returns -------
[ "This", "script", "extends", "the", "native", "matplotlib", "keyboard", "bindings", ".", "This", "script", "allows", "using", "the", "up", "down", "left", "and", "right", "keys", "to", "move", "the", "visualization", "window", ".", "Zooming", "can", "be", "performed", "using", "the", "+", "and", "-", "keys", ".", "Finally", "the", "scroll", "wheel", "can", "be", "used", "to", "zoom", "under", "the", "cursor", "." ]
aaa01d4125180b3a34f1e26e0d3ff08c23f666d3
https://github.com/biosignalsnotebooks/biosignalsnotebooks/blob/aaa01d4125180b3a34f1e26e0d3ff08c23f666d3/biosignalsnotebooks/build/lib/biosignalsnotebooks/external_packages/novainstrumentation/niplot.py#L101-L117
train
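A short usage sketch for niplot above: plot something with pyplot first, then attach the handlers. The import path is assumed from the package layout in the record.

import numpy as np
from matplotlib import pyplot as plt
from novainstrumentation import niplot  # import path assumed from the record

t = np.linspace(0, 10, 1000)
plt.plot(t, np.sin(2 * np.pi * t))
niplot()    # arrow keys pan, '+'/'-' and the scroll wheel zoom the current figure
plt.show()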
biosignalsnotebooks/biosignalsnotebooks
biosignalsnotebooks/build/lib/biosignalsnotebooks/__notebook_support__.py
acquire_subsamples_gp1
def acquire_subsamples_gp1(input_data, file_name=None): """ Function invoked for plotting a grid-plot with 3x2 format, showing the differences in ECG signals accordingly to the chosen sampling frequency. Applied in the cell with tag "subsampling_grid_plot_1". ---------- Parameters ---------- input_data : dict Dictionary with ECG signal to present. file_name : str Path containing the destination folder where the Bokeh figure will be stored. """ # Generation of the HTML file where the plot will be stored. #file_name = _generate_bokeh_file(file_name) # Number of acquired samples (Original sample_rate = 4000 Hz) fs_orig = 4000 nbr_samples_orig = len(input_data) data_interp = {"4000": {}} data_interp["4000"]["data"] = input_data data_interp["4000"]["time"] = numpy.linspace(0, nbr_samples_orig / fs_orig, nbr_samples_orig) # Constants time_orig = data_interp["4000"]["time"] data_orig = data_interp["4000"]["data"] # ============ Interpolation of data accordingly to the desired sampling frequency ============ # sample_rate in [3000, 1000, 500, 200, 100] - Some of the available sample frequencies at Plux # acquisition systems # sample_rate in [50, 20] - Non-functional sampling frequencies (Not available at Plux devices # because of their limited application) for sample_rate in [3000, 1000, 500, 200, 100, 50, 20]: fs_str = str(sample_rate) nbr_samples_interp = int((nbr_samples_orig * sample_rate) / fs_orig) data_interp[fs_str] = {} data_interp[fs_str]["time"] = numpy.linspace(0, nbr_samples_orig / fs_orig, nbr_samples_interp) data_interp[fs_str]["data"] = numpy.interp(data_interp[fs_str]["time"], time_orig, data_orig) # List that store the figure handler. list_figures = [] # Generation of Bokeh Figures. for iter_nbr, sample_rate in enumerate(["4000", "3000", "1000", "500", "200", "100"]): # If figure number is a multiple of 3 or if we are generating the first figure... if iter_nbr == 0 or iter_nbr % 2 == 0: list_figures.append([]) # Plotting phase. list_figures[-1].append(figure(x_axis_label='Time (s)', y_axis_label='Raw Data', title="Sampling Frequency: " + sample_rate + " Hz", **opensignals_kwargs("figure"))) list_figures[-1][-1].line(data_interp[sample_rate]["time"][:int(sample_rate)], data_interp[sample_rate]["data"][:int(sample_rate)], **opensignals_kwargs("line"))
python
def acquire_subsamples_gp1(input_data, file_name=None): """ Function invoked for plotting a grid-plot with 3x2 format, showing the differences in ECG signals accordingly to the chosen sampling frequency. Applied in the cell with tag "subsampling_grid_plot_1". ---------- Parameters ---------- input_data : dict Dictionary with ECG signal to present. file_name : str Path containing the destination folder where the Bokeh figure will be stored. """ # Generation of the HTML file where the plot will be stored. #file_name = _generate_bokeh_file(file_name) # Number of acquired samples (Original sample_rate = 4000 Hz) fs_orig = 4000 nbr_samples_orig = len(input_data) data_interp = {"4000": {}} data_interp["4000"]["data"] = input_data data_interp["4000"]["time"] = numpy.linspace(0, nbr_samples_orig / fs_orig, nbr_samples_orig) # Constants time_orig = data_interp["4000"]["time"] data_orig = data_interp["4000"]["data"] # ============ Interpolation of data accordingly to the desired sampling frequency ============ # sample_rate in [3000, 1000, 500, 200, 100] - Some of the available sample frequencies at Plux # acquisition systems # sample_rate in [50, 20] - Non-functional sampling frequencies (Not available at Plux devices # because of their limited application) for sample_rate in [3000, 1000, 500, 200, 100, 50, 20]: fs_str = str(sample_rate) nbr_samples_interp = int((nbr_samples_orig * sample_rate) / fs_orig) data_interp[fs_str] = {} data_interp[fs_str]["time"] = numpy.linspace(0, nbr_samples_orig / fs_orig, nbr_samples_interp) data_interp[fs_str]["data"] = numpy.interp(data_interp[fs_str]["time"], time_orig, data_orig) # List that store the figure handler. list_figures = [] # Generation of Bokeh Figures. for iter_nbr, sample_rate in enumerate(["4000", "3000", "1000", "500", "200", "100"]): # If figure number is a multiple of 3 or if we are generating the first figure... if iter_nbr == 0 or iter_nbr % 2 == 0: list_figures.append([]) # Plotting phase. list_figures[-1].append(figure(x_axis_label='Time (s)', y_axis_label='Raw Data', title="Sampling Frequency: " + sample_rate + " Hz", **opensignals_kwargs("figure"))) list_figures[-1][-1].line(data_interp[sample_rate]["time"][:int(sample_rate)], data_interp[sample_rate]["data"][:int(sample_rate)], **opensignals_kwargs("line"))
[ "def", "acquire_subsamples_gp1", "(", "input_data", ",", "file_name", "=", "None", ")", ":", "# Generation of the HTML file where the plot will be stored.", "#file_name = _generate_bokeh_file(file_name)", "# Number of acquired samples (Original sample_rate = 4000 Hz)", "fs_orig", "=", "4000", "nbr_samples_orig", "=", "len", "(", "input_data", ")", "data_interp", "=", "{", "\"4000\"", ":", "{", "}", "}", "data_interp", "[", "\"4000\"", "]", "[", "\"data\"", "]", "=", "input_data", "data_interp", "[", "\"4000\"", "]", "[", "\"time\"", "]", "=", "numpy", ".", "linspace", "(", "0", ",", "nbr_samples_orig", "/", "fs_orig", ",", "nbr_samples_orig", ")", "# Constants", "time_orig", "=", "data_interp", "[", "\"4000\"", "]", "[", "\"time\"", "]", "data_orig", "=", "data_interp", "[", "\"4000\"", "]", "[", "\"data\"", "]", "# ============ Interpolation of data accordingly to the desired sampling frequency ============", "# sample_rate in [3000, 1000, 500, 200, 100] - Some of the available sample frequencies at Plux", "# acquisition systems", "# sample_rate in [50, 20] - Non-functional sampling frequencies (Not available at Plux devices", "# because of their limited application)", "for", "sample_rate", "in", "[", "3000", ",", "1000", ",", "500", ",", "200", ",", "100", ",", "50", ",", "20", "]", ":", "fs_str", "=", "str", "(", "sample_rate", ")", "nbr_samples_interp", "=", "int", "(", "(", "nbr_samples_orig", "*", "sample_rate", ")", "/", "fs_orig", ")", "data_interp", "[", "fs_str", "]", "=", "{", "}", "data_interp", "[", "fs_str", "]", "[", "\"time\"", "]", "=", "numpy", ".", "linspace", "(", "0", ",", "nbr_samples_orig", "/", "fs_orig", ",", "nbr_samples_interp", ")", "data_interp", "[", "fs_str", "]", "[", "\"data\"", "]", "=", "numpy", ".", "interp", "(", "data_interp", "[", "fs_str", "]", "[", "\"time\"", "]", ",", "time_orig", ",", "data_orig", ")", "# List that store the figure handler.", "list_figures", "=", "[", "]", "# Generation of Bokeh Figures.", "for", "iter_nbr", ",", "sample_rate", "in", "enumerate", "(", "[", "\"4000\"", ",", "\"3000\"", ",", "\"1000\"", ",", "\"500\"", ",", "\"200\"", ",", "\"100\"", "]", ")", ":", "# If figure number is a multiple of 3 or if we are generating the first figure...", "if", "iter_nbr", "==", "0", "or", "iter_nbr", "%", "2", "==", "0", ":", "list_figures", ".", "append", "(", "[", "]", ")", "# Plotting phase.", "list_figures", "[", "-", "1", "]", ".", "append", "(", "figure", "(", "x_axis_label", "=", "'Time (s)'", ",", "y_axis_label", "=", "'Raw Data'", ",", "title", "=", "\"Sampling Frequency: \"", "+", "sample_rate", "+", "\" Hz\"", ",", "*", "*", "opensignals_kwargs", "(", "\"figure\"", ")", ")", ")", "list_figures", "[", "-", "1", "]", "[", "-", "1", "]", ".", "line", "(", "data_interp", "[", "sample_rate", "]", "[", "\"time\"", "]", "[", ":", "int", "(", "sample_rate", ")", "]", ",", "data_interp", "[", "sample_rate", "]", "[", "\"data\"", "]", "[", ":", "int", "(", "sample_rate", ")", "]", ",", "*", "*", "opensignals_kwargs", "(", "\"line\"", ")", ")" ]
Function invoked for plotting a grid-plot with 3x2 format, showing the differences in ECG signals accordingly to the chosen sampling frequency. Applied in the cell with tag "subsampling_grid_plot_1". ---------- Parameters ---------- input_data : dict Dictionary with ECG signal to present. file_name : str Path containing the destination folder where the Bokeh figure will be stored.
[ "Function", "invoked", "for", "plotting", "a", "grid", "-", "plot", "with", "3x2", "format", "showing", "the", "differences", "in", "ECG", "signals", "accordingly", "to", "the", "chosen", "sampling", "frequency", "." ]
aaa01d4125180b3a34f1e26e0d3ff08c23f666d3
https://github.com/biosignalsnotebooks/biosignalsnotebooks/blob/aaa01d4125180b3a34f1e26e0d3ff08c23f666d3/biosignalsnotebooks/build/lib/biosignalsnotebooks/__notebook_support__.py#L206-L266
train
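The resampling step inside acquire_subsamples_gp1 above reduces to numpy.interp; this standalone sketch reproduces that step on a synthetic signal instead of the ECG data.

import numpy as np

fs_orig, fs_new = 4000, 500                  # original and target sampling rates
t_orig = np.arange(0, 1, 1 / fs_orig)        # 1 s of data at 4000 Hz
x_orig = np.sin(2 * np.pi * 5 * t_orig)      # synthetic 5 Hz signal
n_new = int(len(x_orig) * fs_new / fs_orig)  # number of samples after subsampling
t_new = np.linspace(0, len(x_orig) / fs_orig, n_new)
x_new = np.interp(t_new, t_orig, x_orig)     # linear interpolation onto the new time grid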
biosignalsnotebooks/biosignalsnotebooks
biosignalsnotebooks/build/lib/biosignalsnotebooks/__notebook_support__.py
download
def download(link, out): """ Downloading data from websites, such as previously acquired physiological signals, is an extremely relevant task, taking into consideration that, without data, processing cannot take place. With the current function a file can be easily downloaded through the "link" input. ---------- Parameters ---------- link : str String with the url that contains the file to be downloaded. out : str Name of the downloaded file (with extension). A destination path can also be included. """ # [Source: https://stackoverflow.com/questions/7243750/download-file-from-web-in-python-3] r = requests.get(link) with open(out, 'wb') as outfile: outfile.write(r.content)
python
def download(link, out): """ Downloading data from websites, such as previously acquired physiological signals, is an extremely relevant task, taking into consideration that, without data, processing cannot take place. With the current function a file can be easily downloaded through the "link" input. ---------- Parameters ---------- link : str String with the url that contains the file to be downloaded. out : str Name of the downloaded file (with extension). A destination path can also be included. """ # [Source: https://stackoverflow.com/questions/7243750/download-file-from-web-in-python-3] r = requests.get(link) with open(out, 'wb') as outfile: outfile.write(r.content)
[ "def", "download", "(", "link", ",", "out", ")", ":", "# [Source: https://stackoverflow.com/questions/7243750/download-file-from-web-in-python-3]", "r", "=", "requests", ".", "get", "(", "link", ")", "with", "open", "(", "out", ",", "'wb'", ")", "as", "outfile", ":", "outfile", ".", "write", "(", "r", ".", "content", ")" ]
Downloading data from websites, such as previously acquired physiological signals, is an extremely relevant task, taking into consideration that, without data, processing cannot take place. With the current function a file can be easily downloaded through the "link" input. ---------- Parameters ---------- link : str String with the url that contains the file to be downloaded. out : str Name of the downloaded file (with extension). A destination path can also be included.
[ "Downloading", "data", "from", "websites", "such", "as", "previously", "acquired", "physiological", "signals", "is", "an", "extremely", "relevant", "task", "taking", "into", "consideration", "that", "without", "data", "processing", "cannot", "take", "place", "." ]
aaa01d4125180b3a34f1e26e0d3ff08c23f666d3
https://github.com/biosignalsnotebooks/biosignalsnotebooks/blob/aaa01d4125180b3a34f1e26e0d3ff08c23f666d3/biosignalsnotebooks/build/lib/biosignalsnotebooks/__notebook_support__.py#L1496-L1517
train
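A one-line usage sketch for download above; the URL and output name are placeholders.

download("https://example.com/sample_ecg.txt", "sample_ecg.txt")  # writes the response body to disk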
biosignalsnotebooks/biosignalsnotebooks
biosignalsnotebooks/build/lib/biosignalsnotebooks/external_packages/novainstrumentation/peaks.py
argrelmin
def argrelmin(data, axis=0, order=1, mode='clip'): """ Calculate the relative minima of `data`. .. versionadded:: 0.11.0 Parameters ---------- data : ndarray Array in which to find the relative minima. axis : int, optional Axis over which to select from `data`. Default is 0. order : int, optional How many points on each side to use for the comparison to consider ``comparator(n, n+x)`` to be True. mode : str, optional How the edges of the vector are treated. Available options are 'wrap' (wrap around) or 'clip' (treat overflow as the same as the last (or first) element). Default 'clip'. See numpy.take Returns ------- extrema : ndarray Indices of the minima, as an array of integers. See also -------- argrelextrema, argrelmax Notes ----- This function uses `argrelextrema` with np.less as comparator. """ return argrelextrema(data, np.less, axis, order, mode)
python
def argrelmin(data, axis=0, order=1, mode='clip'): """ Calculate the relative minima of `data`. .. versionadded:: 0.11.0 Parameters ---------- data : ndarray Array in which to find the relative minima. axis : int, optional Axis over which to select from `data`. Default is 0. order : int, optional How many points on each side to use for the comparison to consider ``comparator(n, n+x)`` to be True. mode : str, optional How the edges of the vector are treated. Available options are 'wrap' (wrap around) or 'clip' (treat overflow as the same as the last (or first) element). Default 'clip'. See numpy.take Returns ------- extrema : ndarray Indices of the minima, as an array of integers. See also -------- argrelextrema, argrelmax Notes ----- This function uses `argrelextrema` with np.less as comparator. """ return argrelextrema(data, np.less, axis, order, mode)
[ "def", "argrelmin", "(", "data", ",", "axis", "=", "0", ",", "order", "=", "1", ",", "mode", "=", "'clip'", ")", ":", "return", "argrelextrema", "(", "data", ",", "np", ".", "less", ",", "axis", ",", "order", ",", "mode", ")" ]
Calculate the relative minima of `data`. .. versionadded:: 0.11.0 Parameters ---------- data : ndarray Array in which to find the relative minima. axis : int, optional Axis over which to select from `data`. Default is 0. order : int, optional How many points on each side to use for the comparison to consider ``comparator(n, n+x)`` to be True. mode : str, optional How the edges of the vector are treated. Available options are 'wrap' (wrap around) or 'clip' (treat overflow as the same as the last (or first) element). Default 'clip'. See numpy.take Returns ------- extrema : ndarray Indices of the minima, as an array of integers. See also -------- argrelextrema, argrelmax Notes ----- This function uses `argrelextrema` with np.less as comparator.
[ "Calculate", "the", "relative", "minima", "of", "data", "." ]
aaa01d4125180b3a34f1e26e0d3ff08c23f666d3
https://github.com/biosignalsnotebooks/biosignalsnotebooks/blob/aaa01d4125180b3a34f1e26e0d3ff08c23f666d3/biosignalsnotebooks/build/lib/biosignalsnotebooks/external_packages/novainstrumentation/peaks.py#L75-L110
train
biosignalsnotebooks/biosignalsnotebooks
biosignalsnotebooks/build/lib/biosignalsnotebooks/external_packages/novainstrumentation/peaks.py
argrelmax
def argrelmax(data, axis=0, order=1, mode='clip'): """ Calculate the relative maxima of `data`. .. versionadded:: 0.11.0 Parameters ---------- data : ndarray Array in which to find the relative maxima. axis : int, optional Axis over which to select from `data`. Default is 0. order : int, optional How many points on each side to use for the comparison to consider ``comparator(n, n+x)`` to be True. mode : str, optional How the edges of the vector are treated. Available options are 'wrap' (wrap around) or 'clip' (treat overflow as the same as the last (or first) element). Default 'clip'. See `numpy.take`. Returns ------- extrema : ndarray Indices of the maxima, as an array of integers. See also -------- argrelextrema, argrelmin Notes ----- This function uses `argrelextrema` with np.greater as comparator. """ return argrelextrema(data, np.greater, axis, order, mode)
python
def argrelmax(data, axis=0, order=1, mode='clip'): """ Calculate the relative maxima of `data`. .. versionadded:: 0.11.0 Parameters ---------- data : ndarray Array in which to find the relative maxima. axis : int, optional Axis over which to select from `data`. Default is 0. order : int, optional How many points on each side to use for the comparison to consider ``comparator(n, n+x)`` to be True. mode : str, optional How the edges of the vector are treated. Available options are 'wrap' (wrap around) or 'clip' (treat overflow as the same as the last (or first) element). Default 'clip'. See `numpy.take`. Returns ------- extrema : ndarray Indices of the maxima, as an array of integers. See also -------- argrelextrema, argrelmin Notes ----- This function uses `argrelextrema` with np.greater as comparator. """ return argrelextrema(data, np.greater, axis, order, mode)
[ "def", "argrelmax", "(", "data", ",", "axis", "=", "0", ",", "order", "=", "1", ",", "mode", "=", "'clip'", ")", ":", "return", "argrelextrema", "(", "data", ",", "np", ".", "greater", ",", "axis", ",", "order", ",", "mode", ")" ]
Calculate the relative maxima of `data`. .. versionadded:: 0.11.0 Parameters ---------- data : ndarray Array in which to find the relative maxima. axis : int, optional Axis over which to select from `data`. Default is 0. order : int, optional How many points on each side to use for the comparison to consider ``comparator(n, n+x)`` to be True. mode : str, optional How the edges of the vector are treated. Available options are 'wrap' (wrap around) or 'clip' (treat overflow as the same as the last (or first) element). Default 'clip'. See `numpy.take`. Returns ------- extrema : ndarray Indices of the maxima, as an array of integers. See also -------- argrelextrema, argrelmin Notes ----- This function uses `argrelextrema` with np.greater as comparator.
[ "Calculate", "the", "relative", "maxima", "of", "data", "." ]
aaa01d4125180b3a34f1e26e0d3ff08c23f666d3
https://github.com/biosignalsnotebooks/biosignalsnotebooks/blob/aaa01d4125180b3a34f1e26e0d3ff08c23f666d3/biosignalsnotebooks/build/lib/biosignalsnotebooks/external_packages/novainstrumentation/peaks.py#L113-L148
train
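The same kind of sketch for argrelmax; the import path is again an assumption taken from the record's path field.
import numpy as np
from novainstrumentation.peaks import argrelmax   # assumed path

x = np.array([0.0, 2.0, 1.0, 3.0, 0.5])
print(argrelmax(x)[0])   # -> [1 3], the strict local maxima of x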
biosignalsnotebooks/biosignalsnotebooks
biosignalsnotebooks/build/lib/biosignalsnotebooks/external_packages/novainstrumentation/peaks.py
peaks
def peaks(signal, tol=None): """ This function detects all the peaks of a signal and returns those time positions. To reduce the amount of peaks detected, a threshold is introduced so only the peaks above that value are considered. Parameters ---------- x: array-like the signal with the peaks to detect. tol: int the threshold used to limit the peak detection. in case none is given, the value used is the minimum of the signal (detection of peaks in all signal) Returns ------- peaks: array-like the time sample where the peak occurs. Example ------- >>> peaks([1,2,4,3,2,5,7,7,4,9,2]) array([2, 9]) >>> peaks([1,2,-4,-3,-5,4,5]) array([1, 3]) >>> peaks([1,-4,-3,4,5],0) array([], dtype=int32) """ if (tol is None): tol = min(signal) pks = argrelmax(clip(signal, tol, signal.max())) return pks[0]
python
def peaks(signal, tol=None): """ This function detects all the peaks of a signal and returns those time positions. To reduce the amount of peaks detected, a threshold is introduced so only the peaks above that value are considered. Parameters ---------- x: array-like the signal with the peaks to detect. tol: int the threshold used to limit the peak detection. in case none is given, the value used is the minimum of the signal (detection of peaks in all signal) Returns ------- peaks: array-like the time sample where the peak occurs. Example ------- >>> peaks([1,2,4,3,2,5,7,7,4,9,2]) array([2, 9]) >>> peaks([1,2,-4,-3,-5,4,5]) array([1, 3]) >>> peaks([1,-4,-3,4,5],0) array([], dtype=int32) """ if (tol is None): tol = min(signal) pks = argrelmax(clip(signal, tol, signal.max())) return pks[0]
[ "def", "peaks", "(", "signal", ",", "tol", "=", "None", ")", ":", "if", "(", "tol", "is", "None", ")", ":", "tol", "=", "min", "(", "signal", ")", "pks", "=", "argrelmax", "(", "clip", "(", "signal", ",", "tol", ",", "signal", ".", "max", "(", ")", ")", ")", "return", "pks", "[", "0", "]" ]
This function detects all the peaks of a signal and returns those time positions. To reduce the number of peaks detected, a threshold is introduced so only the peaks above that value are considered. Parameters ---------- signal: array-like the signal with the peaks to detect. tol: int the threshold used to limit the peak detection. In case none is given, the value used is the minimum of the signal (detection of peaks in the whole signal) Returns ------- peaks: array-like the time sample where the peak occurs. Example ------- >>> peaks([1,2,4,3,2,5,7,7,4,9,2]) array([2, 9]) >>> peaks([1,2,-4,-3,-5,4,5]) array([1, 3]) >>> peaks([1,-4,-3,4,5],0) array([], dtype=int32)
[ "This", "function", "detects", "all", "the", "peaks", "of", "a", "signal", "and", "returns", "those", "time", "positions", ".", "To", "reduce", "the", "amount", "of", "peaks", "detected", "a", "threshold", "is", "introduced", "so", "only", "the", "peaks", "above", "that", "value", "are", "considered", "." ]
aaa01d4125180b3a34f1e26e0d3ff08c23f666d3
https://github.com/biosignalsnotebooks/biosignalsnotebooks/blob/aaa01d4125180b3a34f1e26e0d3ff08c23f666d3/biosignalsnotebooks/build/lib/biosignalsnotebooks/external_packages/novainstrumentation/peaks.py#L197-L229
train
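A usage sketch for the peaks record above. The implementation calls signal.max(), so a numpy array is passed rather than the plain lists shown in the doctest; the import path is an assumption.
import numpy as np
from novainstrumentation.peaks import peaks   # assumed path

sig = np.array([1, 2, 4, 3, 2, 5, 7, 7, 4, 9, 2])
print(peaks(sig))          # -> [2 9], every local maximum (tol defaults to the signal minimum)
print(peaks(sig, tol=5))   # -> [9], only the peak that rises above the threshold of 5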
KE-works/pykechain
pykechain/helpers.py
get_project
def get_project(url=None, username=None, password=None, token=None, scope=None, scope_id=None, env_filename=None, status=ScopeStatus.ACTIVE): """ Retrieve and return the KE-chain project to be used throughout an app. This helper is made to bootstrap a pykechain enabled python script or an jupyter notebook with the correct project (technically this is a `pykechain.models.Scope` model). When no parameters are passed in this function, it will try to retrieve `url`, `token`, `scope` (or `scope_id`) from the environment variables or a neatly placed '.env' file. when the environment variable KECHAIN_FORCE_ENV_USE is set to true, (or ok, on, 1, yes) then the use of environmentvariables for the retrieval of the scope are enforced. The following environment variables can be set:: KECHAIN_URL - full url of KE-chain where to connect to eg: 'https://<some>.ke-chain.com' KECHAIN_TOKEN - authentication token for the KE-chain user provided from KE-chain user account control KECHAIN_USERNAME - the username for the credentials KECHAIN_PASSWORD - the password for the credentials KECHAIN_SCOPE - the name of the project / scope. Should be unique, otherwise use scope_id KECHAIN_SCOPE_ID - the UUID of the project / scope. KECHAIN_FORCE_ENV_USE - set to 'true', '1', 'ok', or 'yes' to always use the environment variables. KECHAIN_SCOPE_STATUS - the status of the Scope to retrieve, defaults to None to retrieve all scopes .. versionadded:: 1.12 :param url: (optional) url of KE-chain :type url: basestring or None :param username: (optional) username for authentication (together with password, if not token) :type username: basestring or None :param password: (optional) password for username/password authentication (together with username, if not token) :type password: basestring or None :param token: (optional) token for authentication (if not username/password) :type token: basestring or None :param scope: (optional) name of the scope to retrieve from KE-chain. :type scope: basestring or None :param scope_id: (optional) UUID of the scope to retrieve and return from KE-chain :type scope_id: basestring or None :param env_filename: (optional) name of the environment filename to bootstrap the Client :type env_filename: basestring or None :param status: (optional) status of the scope to retrieve, defaults to :attr:`enums.Scopestatus.ACTIVE` :type status: basestring or None :return: pykechain.models.Scope :raises NotFoundError: If the scope could not be found :raises ClientError: If the client connection to KE-chain was unsuccessful :raises APIError: If other Errors occur to retrieve the scope Example ------- An example with parameters provided >>> from pykechain import get_project >>> project = get_project(url='http://localhost:8000', ... username='foo', password='bar', scope='1st!') >>> print(project.name) 1st An example with a .env file on disk:: # This is an .env file on disk. 
KECHAIN_TOKEN=bd9377793f7e74a29dbb11fce969 KECHAIN_URL=http://localhost:8080 KECHAIN_SCOPE_ID=c9f0-228e-4d3a-9dc0-ec5a75d7 >>> project = get_project(env_filename='/path/to/.env') >>> project.id c9f0-228e-4d3a-9dc0-ec5a75d7 An example for get_project that will extract all from the environment variables >>> env_vars = os.environ >>> env_vars.get('KECHAIN_TOKEN') bd9377793f7e74a29dbb11fce969 >>> env_vars.get('KECHAIN_URL') http://localhost:8080 >>> env_vars.get('KECHAIN_SCOPE') Bike Project >>> project = get_project() >>> project.name Bike Project """ if env.bool(kecenv.KECHAIN_FORCE_ENV_USE, default=False): if not os.getenv(kecenv.KECHAIN_URL): raise ClientError( "Error: KECHAIN_URL should be provided as environment variable (use of env vars is enforced)") if not (os.getenv(kecenv.KECHAIN_TOKEN) or (os.getenv(kecenv.KECHAIN_PASSWORD) and os.getenv(kecenv.KECHAIN_PASSWORD))): raise ClientError("Error: KECHAIN_TOKEN or KECHAIN_USERNAME and KECHAIN_PASSWORD should be provided as " "environment variable(s) (use of env vars is enforced)") if not (os.getenv(kecenv.KECHAIN_SCOPE) or os.getenv(kecenv.KECHAIN_SCOPE_ID)): raise ClientError("Error: KECHAIN_SCOPE or KECHAIN_SCOPE_ID should be provided as environment variable " "(use of env vars is enforced)") if env.bool(kecenv.KECHAIN_FORCE_ENV_USE, default=False) or \ not any((url, username, password, token, scope, scope_id)): client = Client.from_env(env_filename=env_filename) scope_id = env(kecenv.KECHAIN_SCOPE_ID, default=None) scope = env(kecenv.KECHAIN_SCOPE, default=None) status = env(kecenv.KECHAIN_SCOPE_STATUS, default=None) elif (url and ((username and password) or (token)) and (scope or scope_id)) and \ not env.bool(kecenv.KECHAIN_FORCE_ENV_USE, default=False): client = Client(url=url) client.login(username=username, password=password, token=token) else: raise ClientError("Error: insufficient arguments to connect to KE-chain. " "See documentation of `pykechain.get_project()`") if scope_id: return client.scope(pk=scope_id, status=status) else: return client.scope(name=scope, status=status)
python
def get_project(url=None, username=None, password=None, token=None, scope=None, scope_id=None, env_filename=None, status=ScopeStatus.ACTIVE): """ Retrieve and return the KE-chain project to be used throughout an app. This helper is made to bootstrap a pykechain enabled python script or an jupyter notebook with the correct project (technically this is a `pykechain.models.Scope` model). When no parameters are passed in this function, it will try to retrieve `url`, `token`, `scope` (or `scope_id`) from the environment variables or a neatly placed '.env' file. when the environment variable KECHAIN_FORCE_ENV_USE is set to true, (or ok, on, 1, yes) then the use of environmentvariables for the retrieval of the scope are enforced. The following environment variables can be set:: KECHAIN_URL - full url of KE-chain where to connect to eg: 'https://<some>.ke-chain.com' KECHAIN_TOKEN - authentication token for the KE-chain user provided from KE-chain user account control KECHAIN_USERNAME - the username for the credentials KECHAIN_PASSWORD - the password for the credentials KECHAIN_SCOPE - the name of the project / scope. Should be unique, otherwise use scope_id KECHAIN_SCOPE_ID - the UUID of the project / scope. KECHAIN_FORCE_ENV_USE - set to 'true', '1', 'ok', or 'yes' to always use the environment variables. KECHAIN_SCOPE_STATUS - the status of the Scope to retrieve, defaults to None to retrieve all scopes .. versionadded:: 1.12 :param url: (optional) url of KE-chain :type url: basestring or None :param username: (optional) username for authentication (together with password, if not token) :type username: basestring or None :param password: (optional) password for username/password authentication (together with username, if not token) :type password: basestring or None :param token: (optional) token for authentication (if not username/password) :type token: basestring or None :param scope: (optional) name of the scope to retrieve from KE-chain. :type scope: basestring or None :param scope_id: (optional) UUID of the scope to retrieve and return from KE-chain :type scope_id: basestring or None :param env_filename: (optional) name of the environment filename to bootstrap the Client :type env_filename: basestring or None :param status: (optional) status of the scope to retrieve, defaults to :attr:`enums.Scopestatus.ACTIVE` :type status: basestring or None :return: pykechain.models.Scope :raises NotFoundError: If the scope could not be found :raises ClientError: If the client connection to KE-chain was unsuccessful :raises APIError: If other Errors occur to retrieve the scope Example ------- An example with parameters provided >>> from pykechain import get_project >>> project = get_project(url='http://localhost:8000', ... username='foo', password='bar', scope='1st!') >>> print(project.name) 1st An example with a .env file on disk:: # This is an .env file on disk. 
KECHAIN_TOKEN=bd9377793f7e74a29dbb11fce969 KECHAIN_URL=http://localhost:8080 KECHAIN_SCOPE_ID=c9f0-228e-4d3a-9dc0-ec5a75d7 >>> project = get_project(env_filename='/path/to/.env') >>> project.id c9f0-228e-4d3a-9dc0-ec5a75d7 An example for get_project that will extract all from the environment variables >>> env_vars = os.environ >>> env_vars.get('KECHAIN_TOKEN') bd9377793f7e74a29dbb11fce969 >>> env_vars.get('KECHAIN_URL') http://localhost:8080 >>> env_vars.get('KECHAIN_SCOPE') Bike Project >>> project = get_project() >>> project.name Bike Project """ if env.bool(kecenv.KECHAIN_FORCE_ENV_USE, default=False): if not os.getenv(kecenv.KECHAIN_URL): raise ClientError( "Error: KECHAIN_URL should be provided as environment variable (use of env vars is enforced)") if not (os.getenv(kecenv.KECHAIN_TOKEN) or (os.getenv(kecenv.KECHAIN_PASSWORD) and os.getenv(kecenv.KECHAIN_PASSWORD))): raise ClientError("Error: KECHAIN_TOKEN or KECHAIN_USERNAME and KECHAIN_PASSWORD should be provided as " "environment variable(s) (use of env vars is enforced)") if not (os.getenv(kecenv.KECHAIN_SCOPE) or os.getenv(kecenv.KECHAIN_SCOPE_ID)): raise ClientError("Error: KECHAIN_SCOPE or KECHAIN_SCOPE_ID should be provided as environment variable " "(use of env vars is enforced)") if env.bool(kecenv.KECHAIN_FORCE_ENV_USE, default=False) or \ not any((url, username, password, token, scope, scope_id)): client = Client.from_env(env_filename=env_filename) scope_id = env(kecenv.KECHAIN_SCOPE_ID, default=None) scope = env(kecenv.KECHAIN_SCOPE, default=None) status = env(kecenv.KECHAIN_SCOPE_STATUS, default=None) elif (url and ((username and password) or (token)) and (scope or scope_id)) and \ not env.bool(kecenv.KECHAIN_FORCE_ENV_USE, default=False): client = Client(url=url) client.login(username=username, password=password, token=token) else: raise ClientError("Error: insufficient arguments to connect to KE-chain. " "See documentation of `pykechain.get_project()`") if scope_id: return client.scope(pk=scope_id, status=status) else: return client.scope(name=scope, status=status)
[ "def", "get_project", "(", "url", "=", "None", ",", "username", "=", "None", ",", "password", "=", "None", ",", "token", "=", "None", ",", "scope", "=", "None", ",", "scope_id", "=", "None", ",", "env_filename", "=", "None", ",", "status", "=", "ScopeStatus", ".", "ACTIVE", ")", ":", "if", "env", ".", "bool", "(", "kecenv", ".", "KECHAIN_FORCE_ENV_USE", ",", "default", "=", "False", ")", ":", "if", "not", "os", ".", "getenv", "(", "kecenv", ".", "KECHAIN_URL", ")", ":", "raise", "ClientError", "(", "\"Error: KECHAIN_URL should be provided as environment variable (use of env vars is enforced)\"", ")", "if", "not", "(", "os", ".", "getenv", "(", "kecenv", ".", "KECHAIN_TOKEN", ")", "or", "(", "os", ".", "getenv", "(", "kecenv", ".", "KECHAIN_PASSWORD", ")", "and", "os", ".", "getenv", "(", "kecenv", ".", "KECHAIN_PASSWORD", ")", ")", ")", ":", "raise", "ClientError", "(", "\"Error: KECHAIN_TOKEN or KECHAIN_USERNAME and KECHAIN_PASSWORD should be provided as \"", "\"environment variable(s) (use of env vars is enforced)\"", ")", "if", "not", "(", "os", ".", "getenv", "(", "kecenv", ".", "KECHAIN_SCOPE", ")", "or", "os", ".", "getenv", "(", "kecenv", ".", "KECHAIN_SCOPE_ID", ")", ")", ":", "raise", "ClientError", "(", "\"Error: KECHAIN_SCOPE or KECHAIN_SCOPE_ID should be provided as environment variable \"", "\"(use of env vars is enforced)\"", ")", "if", "env", ".", "bool", "(", "kecenv", ".", "KECHAIN_FORCE_ENV_USE", ",", "default", "=", "False", ")", "or", "not", "any", "(", "(", "url", ",", "username", ",", "password", ",", "token", ",", "scope", ",", "scope_id", ")", ")", ":", "client", "=", "Client", ".", "from_env", "(", "env_filename", "=", "env_filename", ")", "scope_id", "=", "env", "(", "kecenv", ".", "KECHAIN_SCOPE_ID", ",", "default", "=", "None", ")", "scope", "=", "env", "(", "kecenv", ".", "KECHAIN_SCOPE", ",", "default", "=", "None", ")", "status", "=", "env", "(", "kecenv", ".", "KECHAIN_SCOPE_STATUS", ",", "default", "=", "None", ")", "elif", "(", "url", "and", "(", "(", "username", "and", "password", ")", "or", "(", "token", ")", ")", "and", "(", "scope", "or", "scope_id", ")", ")", "and", "not", "env", ".", "bool", "(", "kecenv", ".", "KECHAIN_FORCE_ENV_USE", ",", "default", "=", "False", ")", ":", "client", "=", "Client", "(", "url", "=", "url", ")", "client", ".", "login", "(", "username", "=", "username", ",", "password", "=", "password", ",", "token", "=", "token", ")", "else", ":", "raise", "ClientError", "(", "\"Error: insufficient arguments to connect to KE-chain. \"", "\"See documentation of `pykechain.get_project()`\"", ")", "if", "scope_id", ":", "return", "client", ".", "scope", "(", "pk", "=", "scope_id", ",", "status", "=", "status", ")", "else", ":", "return", "client", ".", "scope", "(", "name", "=", "scope", ",", "status", "=", "status", ")" ]
Retrieve and return the KE-chain project to be used throughout an app. This helper is made to bootstrap a pykechain enabled python script or an jupyter notebook with the correct project (technically this is a `pykechain.models.Scope` model). When no parameters are passed in this function, it will try to retrieve `url`, `token`, `scope` (or `scope_id`) from the environment variables or a neatly placed '.env' file. when the environment variable KECHAIN_FORCE_ENV_USE is set to true, (or ok, on, 1, yes) then the use of environmentvariables for the retrieval of the scope are enforced. The following environment variables can be set:: KECHAIN_URL - full url of KE-chain where to connect to eg: 'https://<some>.ke-chain.com' KECHAIN_TOKEN - authentication token for the KE-chain user provided from KE-chain user account control KECHAIN_USERNAME - the username for the credentials KECHAIN_PASSWORD - the password for the credentials KECHAIN_SCOPE - the name of the project / scope. Should be unique, otherwise use scope_id KECHAIN_SCOPE_ID - the UUID of the project / scope. KECHAIN_FORCE_ENV_USE - set to 'true', '1', 'ok', or 'yes' to always use the environment variables. KECHAIN_SCOPE_STATUS - the status of the Scope to retrieve, defaults to None to retrieve all scopes .. versionadded:: 1.12 :param url: (optional) url of KE-chain :type url: basestring or None :param username: (optional) username for authentication (together with password, if not token) :type username: basestring or None :param password: (optional) password for username/password authentication (together with username, if not token) :type password: basestring or None :param token: (optional) token for authentication (if not username/password) :type token: basestring or None :param scope: (optional) name of the scope to retrieve from KE-chain. :type scope: basestring or None :param scope_id: (optional) UUID of the scope to retrieve and return from KE-chain :type scope_id: basestring or None :param env_filename: (optional) name of the environment filename to bootstrap the Client :type env_filename: basestring or None :param status: (optional) status of the scope to retrieve, defaults to :attr:`enums.Scopestatus.ACTIVE` :type status: basestring or None :return: pykechain.models.Scope :raises NotFoundError: If the scope could not be found :raises ClientError: If the client connection to KE-chain was unsuccessful :raises APIError: If other Errors occur to retrieve the scope Example ------- An example with parameters provided >>> from pykechain import get_project >>> project = get_project(url='http://localhost:8000', ... username='foo', password='bar', scope='1st!') >>> print(project.name) 1st An example with a .env file on disk:: # This is an .env file on disk. KECHAIN_TOKEN=bd9377793f7e74a29dbb11fce969 KECHAIN_URL=http://localhost:8080 KECHAIN_SCOPE_ID=c9f0-228e-4d3a-9dc0-ec5a75d7 >>> project = get_project(env_filename='/path/to/.env') >>> project.id c9f0-228e-4d3a-9dc0-ec5a75d7 An example for get_project that will extract all from the environment variables >>> env_vars = os.environ >>> env_vars.get('KECHAIN_TOKEN') bd9377793f7e74a29dbb11fce969 >>> env_vars.get('KECHAIN_URL') http://localhost:8080 >>> env_vars.get('KECHAIN_SCOPE') Bike Project >>> project = get_project() >>> project.name Bike Project
[ "Retrieve", "and", "return", "the", "KE", "-", "chain", "project", "to", "be", "used", "throughout", "an", "app", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/helpers.py#L9-L118
train
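A short sketch of the two documented ways to call the get_project helper above; the URL and credentials are the placeholder values from the record's own docstring, not real ones.
from pykechain import get_project

# explicit arguments, mirroring the record's docstring example (placeholder credentials):
project = get_project(url='http://localhost:8000', username='foo', password='bar', scope='1st!')
print(project.name)   # -> 1st

# or rely on the KECHAIN_* environment variables / a .env file described above:
project = get_project(env_filename='/path/to/.env')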
mikeboers/MultiMap
multimap.py
MultiMap._rebuild_key_ids
def _rebuild_key_ids(self): """Rebuild the internal key to index mapping.""" self._key_ids = collections.defaultdict(list) for i, x in enumerate(self._pairs): self._key_ids[x[0]].append(i)
python
def _rebuild_key_ids(self): """Rebuild the internal key to index mapping.""" self._key_ids = collections.defaultdict(list) for i, x in enumerate(self._pairs): self._key_ids[x[0]].append(i)
[ "def", "_rebuild_key_ids", "(", "self", ")", ":", "self", ".", "_key_ids", "=", "collections", ".", "defaultdict", "(", "list", ")", "for", "i", ",", "x", "in", "enumerate", "(", "self", ".", "_pairs", ")", ":", "self", ".", "_key_ids", "[", "x", "[", "0", "]", "]", ".", "append", "(", "i", ")" ]
Rebuild the internal key to index mapping.
[ "Rebuild", "the", "internal", "key", "to", "index", "mapping", "." ]
0251e5d5df693cc247b4ac5b95adfdd10e3bec04
https://github.com/mikeboers/MultiMap/blob/0251e5d5df693cc247b4ac5b95adfdd10e3bec04/multimap.py#L71-L75
train
mikeboers/MultiMap
multimap.py
MultiMap.iteritems
def iteritems(self): """Iterator across all the non-duplicate keys and their values. Only yields the first key of duplicates. """ keys_yielded = set() for k, v in self._pairs: if k not in keys_yielded: keys_yielded.add(k) yield k, v
python
def iteritems(self): """Iterator across all the non-duplicate keys and their values. Only yields the first key of duplicates. """ keys_yielded = set() for k, v in self._pairs: if k not in keys_yielded: keys_yielded.add(k) yield k, v
[ "def", "iteritems", "(", "self", ")", ":", "keys_yielded", "=", "set", "(", ")", "for", "k", ",", "v", "in", "self", ".", "_pairs", ":", "if", "k", "not", "in", "keys_yielded", ":", "keys_yielded", ".", "add", "(", "k", ")", "yield", "k", ",", "v" ]
Iterator across all the non-duplicate keys and their values. Only yields the first key of duplicates.
[ "Iterator", "across", "all", "the", "non", "-", "duplicate", "keys", "and", "their", "values", ".", "Only", "yields", "the", "first", "key", "of", "duplicates", "." ]
0251e5d5df693cc247b4ac5b95adfdd10e3bec04
https://github.com/mikeboers/MultiMap/blob/0251e5d5df693cc247b4ac5b95adfdd10e3bec04/multimap.py#L206-L216
train
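The first-occurrence-wins behaviour of iteritems above can be shown without constructing a MultiMap (its constructor is not part of this record); this standalone sketch just re-applies the loop from the code to a plain list of pairs.
pairs = [('a', 1), ('b', 2), ('a', 3)]
keys_yielded = set()
for k, v in pairs:
    if k not in keys_yielded:   # the duplicate ('a', 3) is skipped, as iteritems would do
        keys_yielded.add(k)
        print(k, v)             # prints: a 1, then b 2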
KE-works/pykechain
pykechain/models/team.py
Team._update
def _update(self, resource, update_dict=None, params=None, **kwargs): """Update the object.""" url = self._client._build_url(resource, **kwargs) response = self._client._request('PUT', url, json=update_dict, params=params) if response.status_code != requests.codes.ok: # pragma: no cover raise APIError("Could not update {} ({})".format(self.__class__.__name__, response.json().get('results'))) else: self.refresh()
python
def _update(self, resource, update_dict=None, params=None, **kwargs): """Update the object.""" url = self._client._build_url(resource, **kwargs) response = self._client._request('PUT', url, json=update_dict, params=params) if response.status_code != requests.codes.ok: # pragma: no cover raise APIError("Could not update {} ({})".format(self.__class__.__name__, response.json().get('results'))) else: self.refresh()
[ "def", "_update", "(", "self", ",", "resource", ",", "update_dict", "=", "None", ",", "params", "=", "None", ",", "*", "*", "kwargs", ")", ":", "url", "=", "self", ".", "_client", ".", "_build_url", "(", "resource", ",", "*", "*", "kwargs", ")", "response", "=", "self", ".", "_client", ".", "_request", "(", "'PUT'", ",", "url", ",", "json", "=", "update_dict", ",", "params", "=", "params", ")", "if", "response", ".", "status_code", "!=", "requests", ".", "codes", ".", "ok", ":", "# pragma: no cover", "raise", "APIError", "(", "\"Could not update {} ({})\"", ".", "format", "(", "self", ".", "__class__", ".", "__name__", ",", "response", ".", "json", "(", ")", ".", "get", "(", "'results'", ")", ")", ")", "else", ":", "self", ".", "refresh", "(", ")" ]
Update the object.
[ "Update", "the", "object", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/team.py#L26-L34
train
KE-works/pykechain
pykechain/models/team.py
Team.members
def members(self, role=None): """Members of the team. You may provide the role in the team, to retrieve only the teammmber with that role. Normally there is a single owner, that has administration rights of the team. Normal team members do not have any rights to administer the team itself such as altering the teamname, team image and team members. Administrators do have the right to administer the the team members. :param role: (optional) member belonging to a role :class:`pykechain.enums.TeamRoles` to return. :type role: basestring or None :raises IllegalArgumentError: when providing incorrect roles :return: list of dictionaries with members (pk, username, role, email) Example ------- >>> my_team = client.team(name='My own team') >>> my_team.members() [{"pk":1, "username"="first user", "role"="OWNER", "email":"[email protected]"}, ...] """ if role and role not in TeamRoles.values(): raise IllegalArgumentError("role should be one of `TeamRoles` {}, got '{}'".format(TeamRoles.values(), role)) member_list = self._json_data.get('members') if role: return [teammember for teammember in member_list if teammember.get('role') == role] else: return member_list
python
def members(self, role=None): """Members of the team. You may provide the role in the team, to retrieve only the teammmber with that role. Normally there is a single owner, that has administration rights of the team. Normal team members do not have any rights to administer the team itself such as altering the teamname, team image and team members. Administrators do have the right to administer the the team members. :param role: (optional) member belonging to a role :class:`pykechain.enums.TeamRoles` to return. :type role: basestring or None :raises IllegalArgumentError: when providing incorrect roles :return: list of dictionaries with members (pk, username, role, email) Example ------- >>> my_team = client.team(name='My own team') >>> my_team.members() [{"pk":1, "username"="first user", "role"="OWNER", "email":"[email protected]"}, ...] """ if role and role not in TeamRoles.values(): raise IllegalArgumentError("role should be one of `TeamRoles` {}, got '{}'".format(TeamRoles.values(), role)) member_list = self._json_data.get('members') if role: return [teammember for teammember in member_list if teammember.get('role') == role] else: return member_list
[ "def", "members", "(", "self", ",", "role", "=", "None", ")", ":", "if", "role", "and", "role", "not", "in", "TeamRoles", ".", "values", "(", ")", ":", "raise", "IllegalArgumentError", "(", "\"role should be one of `TeamRoles` {}, got '{}'\"", ".", "format", "(", "TeamRoles", ".", "values", "(", ")", ",", "role", ")", ")", "member_list", "=", "self", ".", "_json_data", ".", "get", "(", "'members'", ")", "if", "role", ":", "return", "[", "teammember", "for", "teammember", "in", "member_list", "if", "teammember", ".", "get", "(", "'role'", ")", "==", "role", "]", "else", ":", "return", "member_list" ]
Members of the team. You may provide the role in the team, to retrieve only the team members with that role. Normally there is a single owner, who has administration rights of the team. Normal team members do not have any rights to administer the team itself such as altering the team name, team image and team members. Administrators do have the right to administer the team members. :param role: (optional) member belonging to a role :class:`pykechain.enums.TeamRoles` to return. :type role: basestring or None :raises IllegalArgumentError: when providing incorrect roles :return: list of dictionaries with members (pk, username, role, email) Example ------- >>> my_team = client.team(name='My own team') >>> my_team.members() [{"pk":1, "username"="first user", "role"="OWNER", "email":"[email protected]"}, ...]
[ "Members", "of", "the", "team", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/team.py#L36-L63
train
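A usage sketch for Team.members above. It follows the record's own doctest, so client is assumed to be an already authenticated pykechain Client, and the 'OWNER' role string is the value that appears in the doctest output.
my_team = client.team(name='My own team')   # client: an authenticated pykechain Client, as in the doctest
owners = my_team.members(role='OWNER')      # only members holding the OWNER role
print([m['username'] for m in owners])      # members() returns plain dicts with pk/username/role/email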
KE-works/pykechain
pykechain/models/team.py
Team.scopes
def scopes(self, **kwargs): """Scopes associated to the team.""" return self._client.scopes(team=self.id, **kwargs)
python
def scopes(self, **kwargs): """Scopes associated to the team.""" return self._client.scopes(team=self.id, **kwargs)
[ "def", "scopes", "(", "self", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_client", ".", "scopes", "(", "team", "=", "self", ".", "id", ",", "*", "*", "kwargs", ")" ]
Scopes associated to the team.
[ "Scopes", "associated", "to", "the", "team", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/team.py#L136-L138
train
samuelcolvin/grablib
grablib/build.py
insert_hash
def insert_hash(path: Path, content: Union[str, bytes], *, hash_length=7, hash_algorithm=hashlib.md5): """ Insert a hash based on the content into the path after the first dot. hash_length 7 matches git commit short references """ if isinstance(content, str): content = content.encode() hash_ = hash_algorithm(content).hexdigest()[:hash_length] if '.' in path.name: new_name = re.sub(r'\.', f'.{hash_}.', path.name, count=1) else: new_name = f'{path.name}.{hash_}' return path.with_name(new_name)
python
def insert_hash(path: Path, content: Union[str, bytes], *, hash_length=7, hash_algorithm=hashlib.md5): """ Insert a hash based on the content into the path after the first dot. hash_length 7 matches git commit short references """ if isinstance(content, str): content = content.encode() hash_ = hash_algorithm(content).hexdigest()[:hash_length] if '.' in path.name: new_name = re.sub(r'\.', f'.{hash_}.', path.name, count=1) else: new_name = f'{path.name}.{hash_}' return path.with_name(new_name)
[ "def", "insert_hash", "(", "path", ":", "Path", ",", "content", ":", "Union", "[", "str", ",", "bytes", "]", ",", "*", ",", "hash_length", "=", "7", ",", "hash_algorithm", "=", "hashlib", ".", "md5", ")", ":", "if", "isinstance", "(", "content", ",", "str", ")", ":", "content", "=", "content", ".", "encode", "(", ")", "hash_", "=", "hash_algorithm", "(", "content", ")", ".", "hexdigest", "(", ")", "[", ":", "hash_length", "]", "if", "'.'", "in", "path", ".", "name", ":", "new_name", "=", "re", ".", "sub", "(", "r'\\.'", ",", "f'.{hash_}.'", ",", "path", ".", "name", ",", "count", "=", "1", ")", "else", ":", "new_name", "=", "f'{path.name}.{hash_}'", "return", "path", ".", "with_name", "(", "new_name", ")" ]
Insert a hash based on the content into the path after the first dot. hash_length 7 matches git commit short references
[ "Insert", "a", "hash", "based", "on", "the", "content", "into", "the", "path", "after", "the", "first", "dot", "." ]
2fca8a3950f29fb2a97a7bd75c0839060a91cedf
https://github.com/samuelcolvin/grablib/blob/2fca8a3950f29fb2a97a7bd75c0839060a91cedf/grablib/build.py#L20-L33
train
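A sketch for insert_hash above; the file name and content are invented, and the hash in the comment is shown schematically rather than computed here.
from pathlib import Path
from grablib.build import insert_hash   # import path taken from the record's path field

hashed = insert_hash(Path('static/app.min.js'), 'body { color: red }')
print(hashed)   # e.g. static/app.<7-hex-chars>.min.js: the md5-based hash goes after the first dot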
KE-works/pykechain
pykechain/enums.py
Enum.options
def options(cls): """Provide a sorted list of options.""" return sorted((value, name) for (name, value) in cls.__dict__.items() if not name.startswith('__'))
python
def options(cls): """Provide a sorted list of options.""" return sorted((value, name) for (name, value) in cls.__dict__.items() if not name.startswith('__'))
[ "def", "options", "(", "cls", ")", ":", "return", "sorted", "(", "(", "value", ",", "name", ")", "for", "(", "name", ",", "value", ")", "in", "cls", ".", "__dict__", ".", "items", "(", ")", "if", "not", "name", ".", "startswith", "(", "'__'", ")", ")" ]
Provide a sorted list of options.
[ "Provide", "a", "sorted", "list", "of", "options", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/enums.py#L14-L16
train
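A small sketch for the options classmethod above. ScopeStatus is used as the concrete enum because the get_project record earlier in this section references ScopeStatus.ACTIVE; that it subclasses this Enum base and lives in pykechain.enums (the record's path) is an assumption.
from pykechain.enums import ScopeStatus   # assumed location, taken from the record's path field

print(ScopeStatus.options())   # sorted (value, name) pairs for every non-dunder class attribute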
mozilla/FoxPuppet
foxpuppet/windows/browser/window.py
BrowserWindow.navbar
def navbar(self): """Provide access to the Navigation Bar. Returns: :py:class:`NavBar`: FoxPuppet NavBar object. """ window = BaseWindow(self.selenium, self.selenium.current_window_handle) with self.selenium.context(self.selenium.CONTEXT_CHROME): el = self.selenium.find_element(*self._nav_bar_locator) return NavBar(window, el)
python
def navbar(self): """Provide access to the Navigation Bar. Returns: :py:class:`NavBar`: FoxPuppet NavBar object. """ window = BaseWindow(self.selenium, self.selenium.current_window_handle) with self.selenium.context(self.selenium.CONTEXT_CHROME): el = self.selenium.find_element(*self._nav_bar_locator) return NavBar(window, el)
[ "def", "navbar", "(", "self", ")", ":", "window", "=", "BaseWindow", "(", "self", ".", "selenium", ",", "self", ".", "selenium", ".", "current_window_handle", ")", "with", "self", ".", "selenium", ".", "context", "(", "self", ".", "selenium", ".", "CONTEXT_CHROME", ")", ":", "el", "=", "self", ".", "selenium", ".", "find_element", "(", "*", "self", ".", "_nav_bar_locator", ")", "return", "NavBar", "(", "window", ",", "el", ")" ]
Provide access to the Navigation Bar. Returns: :py:class:`NavBar`: FoxPuppet NavBar object.
[ "Provide", "access", "to", "the", "Navigation", "Bar", "." ]
6575eb4c72fd024c986b254e198c8b4e6f68cddd
https://github.com/mozilla/FoxPuppet/blob/6575eb4c72fd024c986b254e198c8b4e6f68cddd/foxpuppet/windows/browser/window.py#L30-L40
train
mozilla/FoxPuppet
foxpuppet/windows/browser/window.py
BrowserWindow.notification
def notification(self): """Provide access to the currently displayed notification. Returns: :py:class:`BaseNotification`: FoxPuppet BaseNotification object. """ with self.selenium.context(self.selenium.CONTEXT_CHROME): try: root = self.selenium.find_element(*self._notification_locator) return BaseNotification.create(self, root) except NoSuchElementException: pass try: notifications = self.selenium.find_elements( *self._app_menu_notification_locator ) root = next(n for n in notifications if n.is_displayed()) return BaseNotification.create(self, root) except StopIteration: pass return None
python
def notification(self): """Provide access to the currently displayed notification. Returns: :py:class:`BaseNotification`: FoxPuppet BaseNotification object. """ with self.selenium.context(self.selenium.CONTEXT_CHROME): try: root = self.selenium.find_element(*self._notification_locator) return BaseNotification.create(self, root) except NoSuchElementException: pass try: notifications = self.selenium.find_elements( *self._app_menu_notification_locator ) root = next(n for n in notifications if n.is_displayed()) return BaseNotification.create(self, root) except StopIteration: pass return None
[ "def", "notification", "(", "self", ")", ":", "with", "self", ".", "selenium", ".", "context", "(", "self", ".", "selenium", ".", "CONTEXT_CHROME", ")", ":", "try", ":", "root", "=", "self", ".", "selenium", ".", "find_element", "(", "*", "self", ".", "_notification_locator", ")", "return", "BaseNotification", ".", "create", "(", "self", ",", "root", ")", "except", "NoSuchElementException", ":", "pass", "try", ":", "notifications", "=", "self", ".", "selenium", ".", "find_elements", "(", "*", "self", ".", "_app_menu_notification_locator", ")", "root", "=", "next", "(", "n", "for", "n", "in", "notifications", "if", "n", ".", "is_displayed", "(", ")", ")", "return", "BaseNotification", ".", "create", "(", "self", ",", "root", ")", "except", "StopIteration", ":", "pass", "return", "None" ]
Provide access to the currently displayed notification. Returns: :py:class:`BaseNotification`: FoxPuppet BaseNotification object.
[ "Provide", "access", "to", "the", "currently", "displayed", "notification", "." ]
6575eb4c72fd024c986b254e198c8b4e6f68cddd
https://github.com/mozilla/FoxPuppet/blob/6575eb4c72fd024c986b254e198c8b4e6f68cddd/foxpuppet/windows/browser/window.py#L43-L64
train
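A hedged sketch for the notification property above. It needs a live Selenium-driven Firefox; the FoxPuppet(selenium).browser entry point used here is an assumption about the library's top-level API rather than something shown in this record.
from selenium.webdriver import Firefox
from foxpuppet import FoxPuppet   # assumed top-level entry point

browser = FoxPuppet(Firefox()).browser   # a BrowserWindow like the one documented above
current = browser.notification
if current is None:
    print('no notification is currently displayed')
else:
    print(type(current).__name__)        # some BaseNotification subclass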
mozilla/FoxPuppet
foxpuppet/windows/browser/window.py
BrowserWindow.wait_for_notification
def wait_for_notification(self, notification_class=BaseNotification): """Wait for the specified notification to be displayed. Args: notification_class (:py:class:`BaseNotification`, optional): The notification class to wait for. If `None` is specified it will wait for any notification to be closed. Defaults to `BaseNotification`. Returns: :py:class:`BaseNotification`: Firefox notification. """ if notification_class: if notification_class is BaseNotification: message = "No notification was shown." else: message = "{0} was not shown.".format(notification_class.__name__) self.wait.until( lambda _: isinstance(self.notification, notification_class), message=message, ) return self.notification else: self.wait.until( lambda _: self.notification is None, message="Unexpected notification shown.", )
python
def wait_for_notification(self, notification_class=BaseNotification): """Wait for the specified notification to be displayed. Args: notification_class (:py:class:`BaseNotification`, optional): The notification class to wait for. If `None` is specified it will wait for any notification to be closed. Defaults to `BaseNotification`. Returns: :py:class:`BaseNotification`: Firefox notification. """ if notification_class: if notification_class is BaseNotification: message = "No notification was shown." else: message = "{0} was not shown.".format(notification_class.__name__) self.wait.until( lambda _: isinstance(self.notification, notification_class), message=message, ) return self.notification else: self.wait.until( lambda _: self.notification is None, message="Unexpected notification shown.", )
[ "def", "wait_for_notification", "(", "self", ",", "notification_class", "=", "BaseNotification", ")", ":", "if", "notification_class", ":", "if", "notification_class", "is", "BaseNotification", ":", "message", "=", "\"No notification was shown.\"", "else", ":", "message", "=", "\"{0} was not shown.\"", ".", "format", "(", "notification_class", ".", "__name__", ")", "self", ".", "wait", ".", "until", "(", "lambda", "_", ":", "isinstance", "(", "self", ".", "notification", ",", "notification_class", ")", ",", "message", "=", "message", ",", ")", "return", "self", ".", "notification", "else", ":", "self", ".", "wait", ".", "until", "(", "lambda", "_", ":", "self", ".", "notification", "is", "None", ",", "message", "=", "\"Unexpected notification shown.\"", ",", ")" ]
Wait for the specified notification to be displayed. Args: notification_class (:py:class:`BaseNotification`, optional): The notification class to wait for. If `None` is specified it will wait for any notification to be closed. Defaults to `BaseNotification`. Returns: :py:class:`BaseNotification`: Firefox notification.
[ "Wait", "for", "the", "specified", "notification", "to", "be", "displayed", "." ]
6575eb4c72fd024c986b254e198c8b4e6f68cddd
https://github.com/mozilla/FoxPuppet/blob/6575eb4c72fd024c986b254e198c8b4e6f68cddd/foxpuppet/windows/browser/window.py#L66-L93
train
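A sketch for wait_for_notification above, with the same assumption about how the BrowserWindow is obtained; passing None waits for any open notification to be closed, exactly as the docstring states.
from selenium.webdriver import Firefox
from foxpuppet import FoxPuppet   # assumed top-level entry point

browser = FoxPuppet(Firefox()).browser
shown = browser.wait_for_notification()   # blocks until some BaseNotification appears
browser.wait_for_notification(None)       # blocks until no notification is displayed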
mozilla/FoxPuppet
foxpuppet/windows/browser/window.py
BrowserWindow.open_window
def open_window(self, private=False): """Open a new browser window. Args: private (bool): Optional parameter to open a private browsing window. Defaults to False. Returns: :py:class:`BrowserWindow`: Opened window. """ handles_before = self.selenium.window_handles self.switch_to() with self.selenium.context(self.selenium.CONTEXT_CHROME): # Opens private or non-private window self.selenium.find_element(*self._file_menu_button_locator).click() if private: self.selenium.find_element( *self._file_menu_private_window_locator ).click() else: self.selenium.find_element( *self._file_menu_new_window_button_locator ).click() return self.wait.until( expected.new_browser_window_is_opened(self.selenium, handles_before), message="No new browser window opened", )
python
def open_window(self, private=False): """Open a new browser window. Args: private (bool): Optional parameter to open a private browsing window. Defaults to False. Returns: :py:class:`BrowserWindow`: Opened window. """ handles_before = self.selenium.window_handles self.switch_to() with self.selenium.context(self.selenium.CONTEXT_CHROME): # Opens private or non-private window self.selenium.find_element(*self._file_menu_button_locator).click() if private: self.selenium.find_element( *self._file_menu_private_window_locator ).click() else: self.selenium.find_element( *self._file_menu_new_window_button_locator ).click() return self.wait.until( expected.new_browser_window_is_opened(self.selenium, handles_before), message="No new browser window opened", )
[ "def", "open_window", "(", "self", ",", "private", "=", "False", ")", ":", "handles_before", "=", "self", ".", "selenium", ".", "window_handles", "self", ".", "switch_to", "(", ")", "with", "self", ".", "selenium", ".", "context", "(", "self", ".", "selenium", ".", "CONTEXT_CHROME", ")", ":", "# Opens private or non-private window", "self", ".", "selenium", ".", "find_element", "(", "*", "self", ".", "_file_menu_button_locator", ")", ".", "click", "(", ")", "if", "private", ":", "self", ".", "selenium", ".", "find_element", "(", "*", "self", ".", "_file_menu_private_window_locator", ")", ".", "click", "(", ")", "else", ":", "self", ".", "selenium", ".", "find_element", "(", "*", "self", ".", "_file_menu_new_window_button_locator", ")", ".", "click", "(", ")", "return", "self", ".", "wait", ".", "until", "(", "expected", ".", "new_browser_window_is_opened", "(", "self", ".", "selenium", ",", "handles_before", ")", ",", "message", "=", "\"No new browser window opened\"", ",", ")" ]
Open a new browser window. Args: private (bool): Optional parameter to open a private browsing window. Defaults to False. Returns: :py:class:`BrowserWindow`: Opened window.
[ "Open", "a", "new", "browser", "window", "." ]
6575eb4c72fd024c986b254e198c8b4e6f68cddd
https://github.com/mozilla/FoxPuppet/blob/6575eb4c72fd024c986b254e198c8b4e6f68cddd/foxpuppet/windows/browser/window.py#L115-L144
train
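A sketch for open_window above, same assumption about the entry point; the returned object is another BrowserWindow, so switch_to() (used inside open_window itself) is available on it.
from selenium.webdriver import Firefox
from foxpuppet import FoxPuppet   # assumed top-level entry point

browser = FoxPuppet(Firefox()).browser
new_window = browser.open_window()                  # regular browsing window
private_window = browser.open_window(private=True)  # private browsing window
private_window.switch_to()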
SuryaSankar/flask-sqlalchemy-booster
flask_sqlalchemy_booster/model_booster/dictizable_mixin.py
DictizableMixin.to_serializable_dict
def to_serializable_dict(self, attrs_to_serialize=None, rels_to_expand=None, rels_to_serialize=None, key_modifications=None): """ An alias for `todict` """ return self.todict( attrs_to_serialize=attrs_to_serialize, rels_to_expand=rels_to_expand, rels_to_serialize=rels_to_serialize, key_modifications=key_modifications)
python
def to_serializable_dict(self, attrs_to_serialize=None, rels_to_expand=None, rels_to_serialize=None, key_modifications=None): """ An alias for `todict` """ return self.todict( attrs_to_serialize=attrs_to_serialize, rels_to_expand=rels_to_expand, rels_to_serialize=rels_to_serialize, key_modifications=key_modifications)
[ "def", "to_serializable_dict", "(", "self", ",", "attrs_to_serialize", "=", "None", ",", "rels_to_expand", "=", "None", ",", "rels_to_serialize", "=", "None", ",", "key_modifications", "=", "None", ")", ":", "return", "self", ".", "todict", "(", "attrs_to_serialize", "=", "attrs_to_serialize", ",", "rels_to_expand", "=", "rels_to_expand", ",", "rels_to_serialize", "=", "rels_to_serialize", ",", "key_modifications", "=", "key_modifications", ")" ]
An alias for `todict`
[ "An", "alias", "for", "todict" ]
444048d167ab7718f758e943665ef32d101423a5
https://github.com/SuryaSankar/flask-sqlalchemy-booster/blob/444048d167ab7718f758e943665ef32d101423a5/flask_sqlalchemy_booster/model_booster/dictizable_mixin.py#L309-L319
train
SuryaSankar/flask-sqlalchemy-booster
flask_sqlalchemy_booster/model_booster/dictizable_mixin.py
DictizableMixin.serialize_attrs
def serialize_attrs(self, *args): """Converts and instance to a dictionary with only the specified attributes as keys Args: *args (list): The attributes to serialize Examples: >>> customer = Customer.create(name="James Bond", email="[email protected]", phone="007", city="London") >>> customer.serialize_attrs('name', 'email') {'name': u'James Bond', 'email': u'[email protected]'} """ # return dict([(a, getattr(self, a)) for a in args]) cls = type(self) result = {} # result = { # a: getattr(self, a) # for a in args # if hasattr(cls, a) and # a not in cls.attrs_forbidden_for_serialization() # } for a in args: if hasattr(cls, a) and a not in cls.attrs_forbidden_for_serialization(): val = getattr(self, a) if is_list_like(val): result[a] = list(val) else: result[a] = val return result
python
def serialize_attrs(self, *args): """Converts and instance to a dictionary with only the specified attributes as keys Args: *args (list): The attributes to serialize Examples: >>> customer = Customer.create(name="James Bond", email="[email protected]", phone="007", city="London") >>> customer.serialize_attrs('name', 'email') {'name': u'James Bond', 'email': u'[email protected]'} """ # return dict([(a, getattr(self, a)) for a in args]) cls = type(self) result = {} # result = { # a: getattr(self, a) # for a in args # if hasattr(cls, a) and # a not in cls.attrs_forbidden_for_serialization() # } for a in args: if hasattr(cls, a) and a not in cls.attrs_forbidden_for_serialization(): val = getattr(self, a) if is_list_like(val): result[a] = list(val) else: result[a] = val return result
[ "def", "serialize_attrs", "(", "self", ",", "*", "args", ")", ":", "# return dict([(a, getattr(self, a)) for a in args])", "cls", "=", "type", "(", "self", ")", "result", "=", "{", "}", "# result = {", "# a: getattr(self, a)", "# for a in args", "# if hasattr(cls, a) and", "# a not in cls.attrs_forbidden_for_serialization()", "# }", "for", "a", "in", "args", ":", "if", "hasattr", "(", "cls", ",", "a", ")", "and", "a", "not", "in", "cls", ".", "attrs_forbidden_for_serialization", "(", ")", ":", "val", "=", "getattr", "(", "self", ",", "a", ")", "if", "is_list_like", "(", "val", ")", ":", "result", "[", "a", "]", "=", "list", "(", "val", ")", "else", ":", "result", "[", "a", "]", "=", "val", "return", "result" ]
Converts an instance to a dictionary with only the specified attributes as keys Args: *args (list): The attributes to serialize Examples: >>> customer = Customer.create(name="James Bond", email="[email protected]", phone="007", city="London") >>> customer.serialize_attrs('name', 'email') {'name': u'James Bond', 'email': u'[email protected]'}
[ "Converts", "and", "instance", "to", "a", "dictionary", "with", "only", "the", "specified", "attributes", "as", "keys" ]
444048d167ab7718f758e943665ef32d101423a5
https://github.com/SuryaSankar/flask-sqlalchemy-booster/blob/444048d167ab7718f758e943665ef32d101423a5/flask_sqlalchemy_booster/model_booster/dictizable_mixin.py#L532-L563
train
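A sketch for serialize_attrs above that simply re-runs the record's own doctest; the Customer model belongs to the calling application and is not defined in this record, so treat it as an assumption.
customer = Customer.create(name="James Bond", email="[email protected]", phone="007", city="London")
print(customer.serialize_attrs('name', 'email'))
# -> {'name': u'James Bond', 'email': u'[email protected]'}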
biosignalsnotebooks/biosignalsnotebooks
biosignalsnotebooks/build/lib/biosignalsnotebooks/external_packages/novainstrumentation/freq_analysis.py
fundamental_frequency
def fundamental_frequency(s,FS): # TODO: review fundamental frequency to guarantee that f0 exists # suggestion peak level should be bigger # TODO: explain code """Compute fundamental frequency along the specified axes. Parameters ---------- s: ndarray input from which fundamental frequency is computed. FS: int sampling frequency Returns ------- f0: int its integer multiple best explain the content of the signal spectrum. """ s = s - mean(s) f, fs = plotfft(s, FS, doplot=False) #fs = smooth(fs, 50.0) fs = fs[1:int(len(fs) / 2)] f = f[1:int(len(f) / 2)] cond = find(f > 0.5)[0] bp = bigPeaks(fs[cond:], 0) if bp==[]: f0=0 else: bp = bp + cond f0 = f[min(bp)] return f0
python
def fundamental_frequency(s,FS): # TODO: review fundamental frequency to guarantee that f0 exists # suggestion peak level should be bigger # TODO: explain code """Compute fundamental frequency along the specified axes. Parameters ---------- s: ndarray input from which fundamental frequency is computed. FS: int sampling frequency Returns ------- f0: int its integer multiple best explain the content of the signal spectrum. """ s = s - mean(s) f, fs = plotfft(s, FS, doplot=False) #fs = smooth(fs, 50.0) fs = fs[1:int(len(fs) / 2)] f = f[1:int(len(f) / 2)] cond = find(f > 0.5)[0] bp = bigPeaks(fs[cond:], 0) if bp==[]: f0=0 else: bp = bp + cond f0 = f[min(bp)] return f0
[ "def", "fundamental_frequency", "(", "s", ",", "FS", ")", ":", "# TODO: review fundamental frequency to guarantee that f0 exists ", "# suggestion peak level should be bigger ", "# TODO: explain code", "s", "=", "s", "-", "mean", "(", "s", ")", "f", ",", "fs", "=", "plotfft", "(", "s", ",", "FS", ",", "doplot", "=", "False", ")", "#fs = smooth(fs, 50.0)", "fs", "=", "fs", "[", "1", ":", "int", "(", "len", "(", "fs", ")", "/", "2", ")", "]", "f", "=", "f", "[", "1", ":", "int", "(", "len", "(", "f", ")", "/", "2", ")", "]", "cond", "=", "find", "(", "f", ">", "0.5", ")", "[", "0", "]", "bp", "=", "bigPeaks", "(", "fs", "[", "cond", ":", "]", ",", "0", ")", "if", "bp", "==", "[", "]", ":", "f0", "=", "0", "else", ":", "bp", "=", "bp", "+", "cond", "f0", "=", "f", "[", "min", "(", "bp", ")", "]", "return", "f0" ]
Compute fundamental frequency along the specified axes. Parameters ---------- s: ndarray input from which fundamental frequency is computed. FS: int sampling frequency Returns ------- f0: float the fundamental frequency; its integer multiples best explain the content of the signal spectrum.
[ "Compute", "fundamental", "frequency", "along", "the", "specified", "axes", "." ]
aaa01d4125180b3a34f1e26e0d3ff08c23f666d3
https://github.com/biosignalsnotebooks/biosignalsnotebooks/blob/aaa01d4125180b3a34f1e26e0d3ff08c23f666d3/biosignalsnotebooks/build/lib/biosignalsnotebooks/external_packages/novainstrumentation/freq_analysis.py#L11-L49
train
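A sketch for fundamental_frequency above. The import path is inferred from the record's path field (the vendored novainstrumentation package), and the 5 Hz figure in the comment is what the peak-picking logic should give for this synthetic signal, not a guaranteed output.
import numpy as np
from novainstrumentation.freq_analysis import fundamental_frequency   # assumed path

fs = 100.0
t = np.arange(0, 10, 1 / fs)
sig = np.sin(2 * np.pi * 5 * t) + 0.5 * np.sin(2 * np.pi * 10 * t)
print(fundamental_frequency(sig, fs))   # expected to land close to 5 (Hz)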
biosignalsnotebooks/biosignalsnotebooks
biosignalsnotebooks/build/lib/biosignalsnotebooks/external_packages/novainstrumentation/freq_analysis.py
max_frequency
def max_frequency (sig,FS): """Compute max frequency along the specified axes. Parameters ---------- sig: ndarray input from which max frequency is computed. FS: int sampling frequency Returns ------- f_max: int 0.95 of max_frequency using cumsum. """ f, fs = plotfft(sig, FS, doplot=False) t = cumsum(fs) ind_mag = find (t>t[-1]*0.95)[0] f_max=f[ind_mag] return f_max
python
def max_frequency (sig,FS): """Compute max frequency along the specified axes. Parameters ---------- sig: ndarray input from which max frequency is computed. FS: int sampling frequency Returns ------- f_max: int 0.95 of max_frequency using cumsum. """ f, fs = plotfft(sig, FS, doplot=False) t = cumsum(fs) ind_mag = find (t>t[-1]*0.95)[0] f_max=f[ind_mag] return f_max
[ "def", "max_frequency", "(", "sig", ",", "FS", ")", ":", "f", ",", "fs", "=", "plotfft", "(", "sig", ",", "FS", ",", "doplot", "=", "False", ")", "t", "=", "cumsum", "(", "fs", ")", "ind_mag", "=", "find", "(", "t", ">", "t", "[", "-", "1", "]", "*", "0.95", ")", "[", "0", "]", "f_max", "=", "f", "[", "ind_mag", "]", "return", "f_max" ]
Compute max frequency along the specified axes. Parameters ---------- sig: ndarray input from which max frequency is computed. FS: int sampling frequency Returns ------- f_max: float the frequency below which 95% of the cumulative spectral magnitude (cumsum) is contained.
[ "Compute", "max", "frequency", "along", "the", "specified", "axes", "." ]
aaa01d4125180b3a34f1e26e0d3ff08c23f666d3
https://github.com/biosignalsnotebooks/biosignalsnotebooks/blob/aaa01d4125180b3a34f1e26e0d3ff08c23f666d3/biosignalsnotebooks/build/lib/biosignalsnotebooks/external_packages/novainstrumentation/freq_analysis.py#L52-L72
train
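A matching sketch for max_frequency above, same assumed import path; for a signal with components at 5 Hz and 10 Hz the 95% cumulative-magnitude point should sit near the upper component.
import numpy as np
from novainstrumentation.freq_analysis import max_frequency   # assumed path

fs = 100.0
t = np.arange(0, 10, 1 / fs)
sig = np.sin(2 * np.pi * 5 * t) + 0.5 * np.sin(2 * np.pi * 10 * t)
print(max_frequency(sig, fs))   # roughly the upper edge of where the spectral magnitude lives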
biosignalsnotebooks/biosignalsnotebooks
biosignalsnotebooks/build/lib/biosignalsnotebooks/external_packages/novainstrumentation/freq_analysis.py
median_frequency
def median_frequency(sig,FS): """Compute median frequency along the specified axes. Parameters ---------- sig: ndarray input from which median frequency is computed. FS: int sampling frequency Returns ------- f_max: int 0.50 of max_frequency using cumsum. """ f, fs = plotfft(sig, FS, doplot=False) t = cumsum(fs) ind_mag = find (t>t[-1]*0.50)[0] f_median=f[ind_mag] return f_median
python
def median_frequency(sig,FS): """Compute median frequency along the specified axes. Parameters ---------- sig: ndarray input from which median frequency is computed. FS: int sampling frequency Returns ------- f_max: int 0.50 of max_frequency using cumsum. """ f, fs = plotfft(sig, FS, doplot=False) t = cumsum(fs) ind_mag = find (t>t[-1]*0.50)[0] f_median=f[ind_mag] return f_median
[ "def", "median_frequency", "(", "sig", ",", "FS", ")", ":", "f", ",", "fs", "=", "plotfft", "(", "sig", ",", "FS", ",", "doplot", "=", "False", ")", "t", "=", "cumsum", "(", "fs", ")", "ind_mag", "=", "find", "(", "t", ">", "t", "[", "-", "1", "]", "*", "0.50", ")", "[", "0", "]", "f_median", "=", "f", "[", "ind_mag", "]", "return", "f_median" ]
Compute median frequency along the specified axes. Parameters ---------- sig: ndarray input from which median frequency is computed. FS: int sampling frequency Returns ------- f_median: float the frequency below which 50% of the cumulative spectral magnitude (cumsum) is contained.
[ "Compute", "median", "frequency", "along", "the", "specified", "axes", "." ]
aaa01d4125180b3a34f1e26e0d3ff08c23f666d3
https://github.com/biosignalsnotebooks/biosignalsnotebooks/blob/aaa01d4125180b3a34f1e26e0d3ff08c23f666d3/biosignalsnotebooks/build/lib/biosignalsnotebooks/external_packages/novainstrumentation/freq_analysis.py#L75-L95
train
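And the corresponding sketch for median_frequency above, which reads off the 50% point of the same cumulative magnitude curve; the import path is again an assumption.
import numpy as np
from novainstrumentation.freq_analysis import median_frequency   # assumed path

fs = 100.0
t = np.arange(0, 10, 1 / fs)
sig = np.sin(2 * np.pi * 5 * t) + 0.5 * np.sin(2 * np.pi * 10 * t)
print(median_frequency(sig, fs))   # the frequency that splits the spectral magnitude in half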
kytos/kytos-utils
kytos/cli/commands/napps/parser.py
call
def call(subcommand, args): """Call a subcommand passing the args.""" args['<napp>'] = parse_napps(args['<napp>']) func = getattr(NAppsAPI, subcommand) func(args)
python
def call(subcommand, args): """Call a subcommand passing the args.""" args['<napp>'] = parse_napps(args['<napp>']) func = getattr(NAppsAPI, subcommand) func(args)
[ "def", "call", "(", "subcommand", ",", "args", ")", ":", "args", "[", "'<napp>'", "]", "=", "parse_napps", "(", "args", "[", "'<napp>'", "]", ")", "func", "=", "getattr", "(", "NAppsAPI", ",", "subcommand", ")", "func", "(", "args", ")" ]
Call a subcommand passing the args.
[ "Call", "a", "subcommand", "passing", "the", "args", "." ]
b4750c618d15cff75970ea6124bda4d2b9a33578
https://github.com/kytos/kytos-utils/blob/b4750c618d15cff75970ea6124bda4d2b9a33578/kytos/cli/commands/napps/parser.py#L57-L61
train
kytos/kytos-utils
kytos/cli/commands/napps/parser.py
parse_napp
def parse_napp(napp_id):
    """Convert a napp_id in tuple with username, napp name and version.

    Args:
        napp_id: String with the form 'username/napp[:version]' (version is
            optional). If no version is found, it will be None.

    Returns:
        tuple: A tuple with (username, napp, version)

    Raises:
        KytosException: If a NApp does not have the form _username/name_.

    """
    # `napp_id` regex, composed by two mandatory parts (username, napp_name)
    # and one optional (version).
    # username and napp_name need to start with a letter, are composed of
    # letters, numbers and uderscores and must have at least three characters.
    # They are separated by a colon.
    # version is optional and can take any format. Is is separated by a hyphen,
    # if a version is defined.
    regex = r'([a-zA-Z][a-zA-Z0-9_]{2,})/([a-zA-Z][a-zA-Z0-9_]{2,}):?(.+)?'
    compiled_regex = re.compile(regex)

    matched = compiled_regex.fullmatch(napp_id)
    if not matched:
        msg = '"{}" NApp has not the form username/napp_name[:version].'
        raise KytosException(msg.format(napp_id))

    return matched.groups()
python
def parse_napp(napp_id):
    """Convert a napp_id in tuple with username, napp name and version.

    Args:
        napp_id: String with the form 'username/napp[:version]' (version is
            optional). If no version is found, it will be None.

    Returns:
        tuple: A tuple with (username, napp, version)

    Raises:
        KytosException: If a NApp does not have the form _username/name_.

    """
    # `napp_id` regex, composed by two mandatory parts (username, napp_name)
    # and one optional (version).
    # username and napp_name need to start with a letter, are composed of
    # letters, numbers and uderscores and must have at least three characters.
    # They are separated by a colon.
    # version is optional and can take any format. Is is separated by a hyphen,
    # if a version is defined.
    regex = r'([a-zA-Z][a-zA-Z0-9_]{2,})/([a-zA-Z][a-zA-Z0-9_]{2,}):?(.+)?'
    compiled_regex = re.compile(regex)

    matched = compiled_regex.fullmatch(napp_id)
    if not matched:
        msg = '"{}" NApp has not the form username/napp_name[:version].'
        raise KytosException(msg.format(napp_id))

    return matched.groups()
[ "def", "parse_napp", "(", "napp_id", ")", ":", "# `napp_id` regex, composed by two mandatory parts (username, napp_name)", "# and one optional (version).", "# username and napp_name need to start with a letter, are composed of", "# letters, numbers and uderscores and must have at least three characters.", "# They are separated by a colon.", "# version is optional and can take any format. Is is separated by a hyphen,", "# if a version is defined.", "regex", "=", "r'([a-zA-Z][a-zA-Z0-9_]{2,})/([a-zA-Z][a-zA-Z0-9_]{2,}):?(.+)?'", "compiled_regex", "=", "re", ".", "compile", "(", "regex", ")", "matched", "=", "compiled_regex", ".", "fullmatch", "(", "napp_id", ")", "if", "not", "matched", ":", "msg", "=", "'\"{}\" NApp has not the form username/napp_name[:version].'", "raise", "KytosException", "(", "msg", ".", "format", "(", "napp_id", ")", ")", "return", "matched", ".", "groups", "(", ")" ]
Convert a napp_id in tuple with username, napp name and version.

Args:
    napp_id: String with the form 'username/napp[:version]' (version is
        optional). If no version is found, it will be None.

Returns:
    tuple: A tuple with (username, napp, version)

Raises:
    KytosException: If a NApp does not have the form _username/name_.
[ "Convert", "a", "napp_id", "in", "tuple", "with", "username", "napp", "name", "and", "version", "." ]
b4750c618d15cff75970ea6124bda4d2b9a33578
https://github.com/kytos/kytos-utils/blob/b4750c618d15cff75970ea6124bda4d2b9a33578/kytos/cli/commands/napps/parser.py#L88-L118
train
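To see what the regex in parse_napp accepts, the pattern can be exercised directly with re.fullmatch. The NApp ids below are made-up examples, not entries from the NApps server.

import re

pattern = re.compile(r'([a-zA-Z][a-zA-Z0-9_]{2,})/([a-zA-Z][a-zA-Z0-9_]{2,}):?(.+)?')

print(pattern.fullmatch('kytos/of_core').groups())        # ('kytos', 'of_core', None)
print(pattern.fullmatch('kytos/of_core:1.2.3').groups())  # ('kytos', 'of_core', '1.2.3')
print(pattern.fullmatch('bad-id'))                        # None -> parse_napp raises KytosException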
biosignalsnotebooks/biosignalsnotebooks
biosignalsnotebooks/build/lib/biosignalsnotebooks/factory.py
_generate_notebook_header
def _generate_notebook_header(notebook_object, notebook_type, notebook_title="Notebook Title", tags="tags",
                              difficulty_stars=1, notebook_description="Notebook Description"):
    """
    Internal function that is used for generation of the generic notebooks header.

    ----------
    Parameters
    ----------
    notebook_object : notebook object
        Object of "notebook" class where the header will be created.

    notebook_type : str
        Notebook type:
        - "Main_Files_Signal_Samples"
        - "Main_Files_By_Category"
        - "Main_Files_By_Difficulty"
        - "Main_Files_By_Tag"
        - "Acquire"
        - "Open"
        - "Visualise"
        - "Process"
        - "Detect"
        - "Extract"
        - "Train_and_Classify"
        - "Explain"

    notebook_title : None or str
        The Notebook title should only be defined when 'notebook_type' is:
        - "Acquire"
        - "Open"
        - "Visualise"
        - "Process"
        - "Detect"
        - "Extract"
        - "Train_and_Classify"
        - "Explain"

    tags : str
        Sequence of tags that characterize the Notebook.

    difficulty_stars : int
        This input defines the difficulty level of the Notebook instructions.

    notebook_description : str
        An introductory text to present the Notebook and involve the reader.
    """
    # ============================= Creation of Header ====================================
    header_temp = HEADER_ALL_CATEGORIES.replace("header_image_color_i",
                                                "header_image_color_" + str(NOTEBOOK_KEYS[notebook_type]))
    header_temp = header_temp.replace("header_image_i", "header_image_" + str(NOTEBOOK_KEYS[notebook_type]))
    header_temp = header_temp.replace("Notebook Title", notebook_title)
    notebook_object["cells"].append(nb.v4.new_markdown_cell(header_temp,
                                                            **{"metadata": {"tags": ["intro_info_title"]}}))

    # =============== Inclusion of the div with "Difficulty" and "Tags" ===================
    tags_and_diff = HEADER_TAGS.replace('<td class="shield_right" id="tags">tags</td>',
                                        '<td class="shield_right" id="tags">' + "&#9729;".join(tags) + '</td>')
    for star in range(1, 6):
        if star <= difficulty_stars:
            tags_and_diff = tags_and_diff.replace("fa fa-star " + str(star), "fa fa-star " "checked")
        else:
            tags_and_diff = tags_and_diff.replace("fa fa-star " + str(star), "fa fa-star")
    notebook_object["cells"].append(nb.v4.new_markdown_cell(tags_and_diff,
                                                            **{"metadata": {"tags": ["intro_info_tags"]}}))

    # ================= Insertion of the div reserved to the Notebook Description ==================
    notebook_object["cells"].append(nb.v4.new_markdown_cell(notebook_description,
                                                            **{"metadata": {"tags": ["test"]}}))
    notebook_object["cells"].append(nb.v4.new_markdown_cell(SEPARATOR))

    # ======================= Insertion of a blank Markdown and Code cell ==========================
    notebook_object["cells"].append(nb.v4.new_markdown_cell(MD_EXAMPLES))
    notebook_object["cells"].append(nb.v4.new_code_cell(CODE_EXAMPLES))
python
def _generate_notebook_header(notebook_object, notebook_type, notebook_title="Notebook Title", tags="tags",
                              difficulty_stars=1, notebook_description="Notebook Description"):
    """
    Internal function that is used for generation of the generic notebooks header.

    ----------
    Parameters
    ----------
    notebook_object : notebook object
        Object of "notebook" class where the header will be created.

    notebook_type : str
        Notebook type:
        - "Main_Files_Signal_Samples"
        - "Main_Files_By_Category"
        - "Main_Files_By_Difficulty"
        - "Main_Files_By_Tag"
        - "Acquire"
        - "Open"
        - "Visualise"
        - "Process"
        - "Detect"
        - "Extract"
        - "Train_and_Classify"
        - "Explain"

    notebook_title : None or str
        The Notebook title should only be defined when 'notebook_type' is:
        - "Acquire"
        - "Open"
        - "Visualise"
        - "Process"
        - "Detect"
        - "Extract"
        - "Train_and_Classify"
        - "Explain"

    tags : str
        Sequence of tags that characterize the Notebook.

    difficulty_stars : int
        This input defines the difficulty level of the Notebook instructions.

    notebook_description : str
        An introductory text to present the Notebook and involve the reader.
    """
    # ============================= Creation of Header ====================================
    header_temp = HEADER_ALL_CATEGORIES.replace("header_image_color_i",
                                                "header_image_color_" + str(NOTEBOOK_KEYS[notebook_type]))
    header_temp = header_temp.replace("header_image_i", "header_image_" + str(NOTEBOOK_KEYS[notebook_type]))
    header_temp = header_temp.replace("Notebook Title", notebook_title)
    notebook_object["cells"].append(nb.v4.new_markdown_cell(header_temp,
                                                            **{"metadata": {"tags": ["intro_info_title"]}}))

    # =============== Inclusion of the div with "Difficulty" and "Tags" ===================
    tags_and_diff = HEADER_TAGS.replace('<td class="shield_right" id="tags">tags</td>',
                                        '<td class="shield_right" id="tags">' + "&#9729;".join(tags) + '</td>')
    for star in range(1, 6):
        if star <= difficulty_stars:
            tags_and_diff = tags_and_diff.replace("fa fa-star " + str(star), "fa fa-star " "checked")
        else:
            tags_and_diff = tags_and_diff.replace("fa fa-star " + str(star), "fa fa-star")
    notebook_object["cells"].append(nb.v4.new_markdown_cell(tags_and_diff,
                                                            **{"metadata": {"tags": ["intro_info_tags"]}}))

    # ================= Insertion of the div reserved to the Notebook Description ==================
    notebook_object["cells"].append(nb.v4.new_markdown_cell(notebook_description,
                                                            **{"metadata": {"tags": ["test"]}}))
    notebook_object["cells"].append(nb.v4.new_markdown_cell(SEPARATOR))

    # ======================= Insertion of a blank Markdown and Code cell ==========================
    notebook_object["cells"].append(nb.v4.new_markdown_cell(MD_EXAMPLES))
    notebook_object["cells"].append(nb.v4.new_code_cell(CODE_EXAMPLES))
[ "def", "_generate_notebook_header", "(", "notebook_object", ",", "notebook_type", ",", "notebook_title", "=", "\"Notebook Title\"", ",", "tags", "=", "\"tags\"", ",", "difficulty_stars", "=", "1", ",", "notebook_description", "=", "\"Notebook Description\"", ")", ":", "# ============================= Creation of Header ====================================", "header_temp", "=", "HEADER_ALL_CATEGORIES", ".", "replace", "(", "\"header_image_color_i\"", ",", "\"header_image_color_\"", "+", "str", "(", "NOTEBOOK_KEYS", "[", "notebook_type", "]", ")", ")", "header_temp", "=", "header_temp", ".", "replace", "(", "\"header_image_i\"", ",", "\"header_image_\"", "+", "str", "(", "NOTEBOOK_KEYS", "[", "notebook_type", "]", ")", ")", "header_temp", "=", "header_temp", ".", "replace", "(", "\"Notebook Title\"", ",", "notebook_title", ")", "notebook_object", "[", "\"cells\"", "]", ".", "append", "(", "nb", ".", "v4", ".", "new_markdown_cell", "(", "header_temp", ",", "*", "*", "{", "\"metadata\"", ":", "{", "\"tags\"", ":", "[", "\"intro_info_title\"", "]", "}", "}", ")", ")", "# =============== Inclusion of the div with \"Difficulty\" and \"Tags\" ===================", "tags_and_diff", "=", "HEADER_TAGS", ".", "replace", "(", "'<td class=\"shield_right\" id=\"tags\">tags</td>'", ",", "'<td class=\"shield_right\" id=\"tags\">'", "+", "\"&#9729;\"", ".", "join", "(", "tags", ")", "+", "'</td>'", ")", "for", "star", "in", "range", "(", "1", ",", "6", ")", ":", "if", "star", "<=", "difficulty_stars", ":", "tags_and_diff", "=", "tags_and_diff", ".", "replace", "(", "\"fa fa-star \"", "+", "str", "(", "star", ")", ",", "\"fa fa-star \"", "\"checked\"", ")", "else", ":", "tags_and_diff", "=", "tags_and_diff", ".", "replace", "(", "\"fa fa-star \"", "+", "str", "(", "star", ")", ",", "\"fa fa-star\"", ")", "notebook_object", "[", "\"cells\"", "]", ".", "append", "(", "nb", ".", "v4", ".", "new_markdown_cell", "(", "tags_and_diff", ",", "*", "*", "{", "\"metadata\"", ":", "{", "\"tags\"", ":", "[", "\"intro_info_tags\"", "]", "}", "}", ")", ")", "# ================= Insertion of the div reserved to the Notebook Description ==================", "notebook_object", "[", "\"cells\"", "]", ".", "append", "(", "nb", ".", "v4", ".", "new_markdown_cell", "(", "notebook_description", ",", "*", "*", "{", "\"metadata\"", ":", "{", "\"tags\"", ":", "[", "\"test\"", "]", "}", "}", ")", ")", "notebook_object", "[", "\"cells\"", "]", ".", "append", "(", "nb", ".", "v4", ".", "new_markdown_cell", "(", "SEPARATOR", ")", ")", "# ======================= Insertion of a blank Markdown and Code cell ==========================", "notebook_object", "[", "\"cells\"", "]", ".", "append", "(", "nb", ".", "v4", ".", "new_markdown_cell", "(", "MD_EXAMPLES", ")", ")", "notebook_object", "[", "\"cells\"", "]", ".", "append", "(", "nb", ".", "v4", ".", "new_code_cell", "(", "CODE_EXAMPLES", ")", ")" ]
Internal function that is used for generation of the generic notebooks header.

----------
Parameters
----------
notebook_object : notebook object
    Object of "notebook" class where the header will be created.

notebook_type : str
    Notebook type:
    - "Main_Files_Signal_Samples"
    - "Main_Files_By_Category"
    - "Main_Files_By_Difficulty"
    - "Main_Files_By_Tag"
    - "Acquire"
    - "Open"
    - "Visualise"
    - "Process"
    - "Detect"
    - "Extract"
    - "Train_and_Classify"
    - "Explain"

notebook_title : None or str
    The Notebook title should only be defined when 'notebook_type' is:
    - "Acquire"
    - "Open"
    - "Visualise"
    - "Process"
    - "Detect"
    - "Extract"
    - "Train_and_Classify"
    - "Explain"

tags : str
    Sequence of tags that characterize the Notebook.

difficulty_stars : int
    This input defines the difficulty level of the Notebook instructions.

notebook_description : str
    An introductory text to present the Notebook and involve the reader.
[ "Internal", "function", "that", "is", "used", "for", "generation", "of", "the", "generic", "notebooks", "header", "." ]
aaa01d4125180b3a34f1e26e0d3ff08c23f666d3
https://github.com/biosignalsnotebooks/biosignalsnotebooks/blob/aaa01d4125180b3a34f1e26e0d3ff08c23f666d3/biosignalsnotebooks/build/lib/biosignalsnotebooks/factory.py#L414-L492
train
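The header generator above relies on module-level templates (HEADER_ALL_CATEGORIES, HEADER_TAGS, NOTEBOOK_KEYS, SEPARATOR, ...) defined elsewhere in the package. A stripped-down, self-contained sketch of the same cell-appending mechanics with the public nbformat API; the template string and output filename are placeholders, not the package's real templates.

import nbformat as nb

template = "# Notebook Title\n*difficulty: {stars}*"   # stand-in for HEADER_ALL_CATEGORIES
notebook_object = nb.v4.new_notebook()

header = template.replace("Notebook Title", "ECG Analysis").format(stars="*" * 3)
notebook_object["cells"].append(
    nb.v4.new_markdown_cell(header, **{"metadata": {"tags": ["intro_info_title"]}}))
notebook_object["cells"].append(nb.v4.new_code_cell("import biosignalsnotebooks as bsnb"))

nb.write(notebook_object, "generated_notebook.ipynb")  # hypothetical output path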
fancybits/pychannels
pychannels/__init__.py
Channels._request
def _request(self, method, path, params=None):
    """Make the actual request and returns the parsed response."""
    url = self._base_url + path

    try:
        if method == 'GET':
            response = requests.get(url, timeout=TIMEOUT)
        elif method == "POST":
            response = requests.post(url, params, timeout=TIMEOUT)
        elif method == "PUT":
            response = requests.put(url, params, timeout=TIMEOUT)
        elif method == "DELETE":
            response = requests.delete(url, timeout=TIMEOUT)

        if response:
            return response.json()
        else:
            return {'status': 'error'}
    except requests.exceptions.HTTPError:
        return {'status': 'error'}
    except requests.exceptions.Timeout:
        return {'status': 'offline'}
    except requests.exceptions.RequestException:
        return {'status': 'offline'}
python
def _request(self, method, path, params=None):
    """Make the actual request and returns the parsed response."""
    url = self._base_url + path

    try:
        if method == 'GET':
            response = requests.get(url, timeout=TIMEOUT)
        elif method == "POST":
            response = requests.post(url, params, timeout=TIMEOUT)
        elif method == "PUT":
            response = requests.put(url, params, timeout=TIMEOUT)
        elif method == "DELETE":
            response = requests.delete(url, timeout=TIMEOUT)

        if response:
            return response.json()
        else:
            return {'status': 'error'}
    except requests.exceptions.HTTPError:
        return {'status': 'error'}
    except requests.exceptions.Timeout:
        return {'status': 'offline'}
    except requests.exceptions.RequestException:
        return {'status': 'offline'}
[ "def", "_request", "(", "self", ",", "method", ",", "path", ",", "params", "=", "None", ")", ":", "url", "=", "self", ".", "_base_url", "+", "path", "try", ":", "if", "method", "==", "'GET'", ":", "response", "=", "requests", ".", "get", "(", "url", ",", "timeout", "=", "TIMEOUT", ")", "elif", "method", "==", "\"POST\"", ":", "response", "=", "requests", ".", "post", "(", "url", ",", "params", ",", "timeout", "=", "TIMEOUT", ")", "elif", "method", "==", "\"PUT\"", ":", "response", "=", "requests", ".", "put", "(", "url", ",", "params", ",", "timeout", "=", "TIMEOUT", ")", "elif", "method", "==", "\"DELETE\"", ":", "response", "=", "requests", ".", "delete", "(", "url", ",", "timeout", "=", "TIMEOUT", ")", "if", "response", ":", "return", "response", ".", "json", "(", ")", "else", ":", "return", "{", "'status'", ":", "'error'", "}", "except", "requests", ".", "exceptions", ".", "HTTPError", ":", "return", "{", "'status'", ":", "'error'", "}", "except", "requests", ".", "exceptions", ".", "Timeout", ":", "return", "{", "'status'", ":", "'offline'", "}", "except", "requests", ".", "exceptions", ".", "RequestException", ":", "return", "{", "'status'", ":", "'offline'", "}" ]
Make the actual request and returns the parsed response.
[ "Make", "the", "actual", "request", "and", "returns", "the", "parsed", "response", "." ]
080f269b6d17d4622a0787000befe31bebc1a15d
https://github.com/fancybits/pychannels/blob/080f269b6d17d4622a0787000befe31bebc1a15d/pychannels/__init__.py#L30-L53
train
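One detail worth noting in _request above: the `if response:` branch relies on requests.Response truthiness, which is False for 4xx/5xx status codes, so HTTP error responses fall into the {'status': 'error'} path even though no exception is raised. A small offline demonstration of that behaviour (the Response objects are constructed by hand purely for illustration):

import requests

resp = requests.Response()
resp.status_code = 404
print(bool(resp))   # False -> _request would return {'status': 'error'}

resp.status_code = 200
print(bool(resp))   # True  -> _request would call response.json()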
uranusjr/django-gunicorn
djgunicorn/config.py
post_worker_init
def post_worker_init(worker):
    """Hook into Gunicorn to display message after launching.

    This mimics the behaviour of Django's stock runserver command.
    """
    quit_command = 'CTRL-BREAK' if sys.platform == 'win32' else 'CONTROL-C'
    sys.stdout.write(
        "Django version {djangover}, Gunicorn version {gunicornver}, "
        "using settings {settings!r}\n"
        "Starting development server at {urls}\n"
        "Quit the server with {quit_command}.\n".format(
            djangover=django.get_version(),
            gunicornver=gunicorn.__version__,
            settings=os.environ.get('DJANGO_SETTINGS_MODULE'),
            urls=', '.join('http://{0}/'.format(b) for b in worker.cfg.bind),
            quit_command=quit_command,
        ),
    )
python
def post_worker_init(worker):
    """Hook into Gunicorn to display message after launching.

    This mimics the behaviour of Django's stock runserver command.
    """
    quit_command = 'CTRL-BREAK' if sys.platform == 'win32' else 'CONTROL-C'
    sys.stdout.write(
        "Django version {djangover}, Gunicorn version {gunicornver}, "
        "using settings {settings!r}\n"
        "Starting development server at {urls}\n"
        "Quit the server with {quit_command}.\n".format(
            djangover=django.get_version(),
            gunicornver=gunicorn.__version__,
            settings=os.environ.get('DJANGO_SETTINGS_MODULE'),
            urls=', '.join('http://{0}/'.format(b) for b in worker.cfg.bind),
            quit_command=quit_command,
        ),
    )
[ "def", "post_worker_init", "(", "worker", ")", ":", "quit_command", "=", "'CTRL-BREAK'", "if", "sys", ".", "platform", "==", "'win32'", "else", "'CONTROL-C'", "sys", ".", "stdout", ".", "write", "(", "\"Django version {djangover}, Gunicorn version {gunicornver}, \"", "\"using settings {settings!r}\\n\"", "\"Starting development server at {urls}\\n\"", "\"Quit the server with {quit_command}.\\n\"", ".", "format", "(", "djangover", "=", "django", ".", "get_version", "(", ")", ",", "gunicornver", "=", "gunicorn", ".", "__version__", ",", "settings", "=", "os", ".", "environ", ".", "get", "(", "'DJANGO_SETTINGS_MODULE'", ")", ",", "urls", "=", "', '", ".", "join", "(", "'http://{0}/'", ".", "format", "(", "b", ")", "for", "b", "in", "worker", ".", "cfg", ".", "bind", ")", ",", "quit_command", "=", "quit_command", ",", ")", ",", ")" ]
Hook into Gunicorn to display message after launching.

This mimics the behaviour of Django's stock runserver command.
[ "Hook", "into", "Gunicorn", "to", "display", "message", "after", "launching", "." ]
4fb16f48048ff5fff8f889a007f376236646497b
https://github.com/uranusjr/django-gunicorn/blob/4fb16f48048ff5fff8f889a007f376236646497b/djgunicorn/config.py#L18-L35
train
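post_worker_init is one of Gunicorn's standard server hooks; djgunicorn wires it up through a generated configuration. For comparison, a minimal hand-written gunicorn.conf.py sketch that uses the same hook directly; the WSGI module name is a placeholder.

# gunicorn.conf.py -- start with: gunicorn -c gunicorn.conf.py myproject.wsgi
bind = ['127.0.0.1:8000']
workers = 1

def post_worker_init(worker):
    # runs inside each worker right after it has been initialised
    worker.log.info("worker %s ready, serving on %s", worker.pid, worker.cfg.bind)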
KE-works/pykechain
pykechain/models/property_attachment.py
AttachmentProperty.value
def value(self):
    """Retrieve the data value of this attachment.

    Will show the filename of the attachment if there is an attachment available otherwise None
    Use save_as in order to download as a file.

    Example
    -------
    >>> file_attachment_property = project.part('Bike').property('file_attachment')
    >>> if file_attachment_property.value:
    ...     file_attachment_property.save_as('file.ext')
    ... else:
    ...     print('file attachment not set, its value is None')
    """
    if 'value' in self._json_data and self._json_data['value']:
        return "[Attachment: {}]".format(self._json_data['value'].split('/')[-1])
    else:
        return None
python
def value(self):
    """Retrieve the data value of this attachment.

    Will show the filename of the attachment if there is an attachment available otherwise None
    Use save_as in order to download as a file.

    Example
    -------
    >>> file_attachment_property = project.part('Bike').property('file_attachment')
    >>> if file_attachment_property.value:
    ...     file_attachment_property.save_as('file.ext')
    ... else:
    ...     print('file attachment not set, its value is None')
    """
    if 'value' in self._json_data and self._json_data['value']:
        return "[Attachment: {}]".format(self._json_data['value'].split('/')[-1])
    else:
        return None
[ "def", "value", "(", "self", ")", ":", "if", "'value'", "in", "self", ".", "_json_data", "and", "self", ".", "_json_data", "[", "'value'", "]", ":", "return", "\"[Attachment: {}]\"", ".", "format", "(", "self", ".", "_json_data", "[", "'value'", "]", ".", "split", "(", "'/'", ")", "[", "-", "1", "]", ")", "else", ":", "return", "None" ]
Retrieve the data value of this attachment.

Will show the filename of the attachment if there is an attachment available otherwise None
Use save_as in order to download as a file.

Example
-------
>>> file_attachment_property = project.part('Bike').property('file_attachment')
>>> if file_attachment_property.value:
...     file_attachment_property.save_as('file.ext')
... else:
...     print('file attachment not set, its value is None')
[ "Retrieve", "the", "data", "value", "of", "this", "attachment", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/property_attachment.py#L32-L51
train
KE-works/pykechain
pykechain/models/property_attachment.py
AttachmentProperty.filename
def filename(self):
    """Filename of the attachment, without the full 'attachment' path."""
    if self.value and 'value' in self._json_data and self._json_data['value']:
        return self._json_data['value'].split('/')[-1]
    return None
python
def filename(self):
    """Filename of the attachment, without the full 'attachment' path."""
    if self.value and 'value' in self._json_data and self._json_data['value']:
        return self._json_data['value'].split('/')[-1]
    return None
[ "def", "filename", "(", "self", ")", ":", "if", "self", ".", "value", "and", "'value'", "in", "self", ".", "_json_data", "and", "self", ".", "_json_data", "[", "'value'", "]", ":", "return", "self", ".", "_json_data", "[", "'value'", "]", ".", "split", "(", "'/'", ")", "[", "-", "1", "]", "return", "None" ]
Filename of the attachment, without the full 'attachment' path.
[ "Filename", "of", "the", "attachment", "without", "the", "full", "attachment", "path", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/property_attachment.py#L68-L72
train
KE-works/pykechain
pykechain/models/property_attachment.py
AttachmentProperty.upload
def upload(self, data, **kwargs):
    """Upload a file to the attachment property.

    When providing a :class:`matplotlib.figure.Figure` object as data, the figure is uploaded as PNG.
    For this, `matplotlib`_ should be installed.

    :param data: File path of the file to upload (a :class:`matplotlib.figure.Figure` is also accepted)
    :type data: basestring
    :raises APIError: When unable to upload the file to KE-chain
    :raises OSError: When the path to the file is incorrect or file could not be found

    .. _matplotlib: https://matplotlib.org/
    """
    try:
        import matplotlib.figure

        if isinstance(data, matplotlib.figure.Figure):
            self._upload_plot(data, **kwargs)
            return
    except ImportError:
        pass

    if isinstance(data, str):
        with open(data, 'rb') as fp:
            self._upload(fp)
    else:
        self._upload_json(data, **kwargs)
python
def upload(self, data, **kwargs):
    """Upload a file to the attachment property.

    When providing a :class:`matplotlib.figure.Figure` object as data, the figure is uploaded as PNG.
    For this, `matplotlib`_ should be installed.

    :param data: File path of the file to upload (a :class:`matplotlib.figure.Figure` is also accepted)
    :type data: basestring
    :raises APIError: When unable to upload the file to KE-chain
    :raises OSError: When the path to the file is incorrect or file could not be found

    .. _matplotlib: https://matplotlib.org/
    """
    try:
        import matplotlib.figure

        if isinstance(data, matplotlib.figure.Figure):
            self._upload_plot(data, **kwargs)
            return
    except ImportError:
        pass

    if isinstance(data, str):
        with open(data, 'rb') as fp:
            self._upload(fp)
    else:
        self._upload_json(data, **kwargs)
[ "def", "upload", "(", "self", ",", "data", ",", "*", "*", "kwargs", ")", ":", "try", ":", "import", "matplotlib", ".", "figure", "if", "isinstance", "(", "data", ",", "matplotlib", ".", "figure", ".", "Figure", ")", ":", "self", ".", "_upload_plot", "(", "data", ",", "*", "*", "kwargs", ")", "return", "except", "ImportError", ":", "pass", "if", "isinstance", "(", "data", ",", "str", ")", ":", "with", "open", "(", "data", ",", "'rb'", ")", "as", "fp", ":", "self", ".", "_upload", "(", "fp", ")", "else", ":", "self", ".", "_upload_json", "(", "data", ",", "*", "*", "kwargs", ")" ]
Upload a file to the attachment property.

When providing a :class:`matplotlib.figure.Figure` object as data, the figure is uploaded as PNG.
For this, `matplotlib`_ should be installed.

:param data: File path of the file to upload (a :class:`matplotlib.figure.Figure` is also accepted)
:type data: basestring
:raises APIError: When unable to upload the file to KE-chain
:raises OSError: When the path to the file is incorrect or file could not be found

.. _matplotlib: https://matplotlib.org/
[ "Upload", "a", "file", "to", "the", "attachment", "property", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/property_attachment.py#L90-L116
train
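Tying the attachment-property methods above together, a usage sketch in the spirit of the docstring example shown earlier in this record group; `project` is assumed to be a pykechain scope obtained beforehand, and the part, property and file names are the docstring's fictional 'Bike' example.

import matplotlib.pyplot as plt

file_attachment_property = project.part('Bike').property('file_attachment')

# upload a plain file from disk
file_attachment_property.upload('report.pdf')

# or upload a matplotlib figure, which is stored as PNG
fig, ax = plt.subplots()
ax.plot([0, 1, 2], [0, 1, 4])
file_attachment_property.upload(fig)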
KE-works/pykechain
pykechain/models/property_attachment.py
AttachmentProperty.save_as
def save_as(self, filename):
    """Download the attachment to a file.

    :param filename: File path
    :type filename: basestring
    :raises APIError: When unable to download the data
    :raises OSError: When unable to save the data to disk
    """
    with open(filename, 'w+b') as f:
        for chunk in self._download():
            f.write(chunk)
python
def save_as(self, filename):
    """Download the attachment to a file.

    :param filename: File path
    :type filename: basestring
    :raises APIError: When unable to download the data
    :raises OSError: When unable to save the data to disk
    """
    with open(filename, 'w+b') as f:
        for chunk in self._download():
            f.write(chunk)
[ "def", "save_as", "(", "self", ",", "filename", ")", ":", "with", "open", "(", "filename", ",", "'w+b'", ")", "as", "f", ":", "for", "chunk", "in", "self", ".", "_download", "(", ")", ":", "f", ".", "write", "(", "chunk", ")" ]
Download the attachment to a file.

:param filename: File path
:type filename: basestring
:raises APIError: When unable to download the data
:raises OSError: When unable to save the data to disk
[ "Download", "the", "attachment", "to", "a", "file", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/property_attachment.py#L118-L128
train
apihackers/devpi-semantic-ui
devpi_semantic_ui/__init__.py
devpiserver_cmdline_run
def devpiserver_cmdline_run(xom):
    '''
    Load theme when `theme` parameter is 'semantic-ui'.
    '''
    if xom.config.args.theme == 'semantic-ui':
        xom.config.args.theme = resource_filename('devpi_semantic_ui', '')
        xom.log.info("Semantic UI Theme loaded")
python
def devpiserver_cmdline_run(xom):
    '''
    Load theme when `theme` parameter is 'semantic-ui'.
    '''
    if xom.config.args.theme == 'semantic-ui':
        xom.config.args.theme = resource_filename('devpi_semantic_ui', '')
        xom.log.info("Semantic UI Theme loaded")
[ "def", "devpiserver_cmdline_run", "(", "xom", ")", ":", "if", "xom", ".", "config", ".", "args", ".", "theme", "==", "'semantic-ui'", ":", "xom", ".", "config", ".", "args", ".", "theme", "=", "resource_filename", "(", "'devpi_semantic_ui'", ",", "''", ")", "xom", ".", "log", ".", "info", "(", "\"Semantic UI Theme loaded\"", ")" ]
Load theme when `theme` parameter is 'semantic-ui'.
[ "Load", "theme", "when", "theme", "parameter", "is", "semantic", "-", "ui", "." ]
32bab6a7c3441c855d7005f088c48e7a1af5a72c
https://github.com/apihackers/devpi-semantic-ui/blob/32bab6a7c3441c855d7005f088c48e7a1af5a72c/devpi_semantic_ui/__init__.py#L6-L12
train
thomasdelaet/python-velbus
velbus/modules/vmb4ry.py
VMB4RYModule.is_on
def is_on(self, channel):
    """
    Check if a switch is turned on

    :return: bool
    """
    if channel in self._is_on:
        return self._is_on[channel]
    return False
python
def is_on(self, channel):
    """
    Check if a switch is turned on

    :return: bool
    """
    if channel in self._is_on:
        return self._is_on[channel]
    return False
[ "def", "is_on", "(", "self", ",", "channel", ")", ":", "if", "channel", "in", "self", ".", "_is_on", ":", "return", "self", ".", "_is_on", "[", "channel", "]", "return", "False" ]
Check if a switch is turned on

:return: bool
[ "Check", "if", "a", "switch", "is", "turned", "on" ]
af2f8af43f1a24bf854eff9f3126fd7b5c41b3dd
https://github.com/thomasdelaet/python-velbus/blob/af2f8af43f1a24bf854eff9f3126fd7b5c41b3dd/velbus/modules/vmb4ry.py#L19-L27
train
thomasdelaet/python-velbus
velbus/modules/vmb4ry.py
VMB4RYModule.turn_on
def turn_on(self, channel, callback=None):
    """
    Turn on switch.

    :return: None
    """
    if callback is None:
        def callb():
            """No-op"""
            pass
        callback = callb
    message = velbus.SwitchRelayOnMessage(self._address)
    message.relay_channels = [channel]
    self._controller.send(message, callback)
python
def turn_on(self, channel, callback=None):
    """
    Turn on switch.

    :return: None
    """
    if callback is None:
        def callb():
            """No-op"""
            pass
        callback = callb
    message = velbus.SwitchRelayOnMessage(self._address)
    message.relay_channels = [channel]
    self._controller.send(message, callback)
[ "def", "turn_on", "(", "self", ",", "channel", ",", "callback", "=", "None", ")", ":", "if", "callback", "is", "None", ":", "def", "callb", "(", ")", ":", "\"\"\"No-op\"\"\"", "pass", "callback", "=", "callb", "message", "=", "velbus", ".", "SwitchRelayOnMessage", "(", "self", ".", "_address", ")", "message", ".", "relay_channels", "=", "[", "channel", "]", "self", ".", "_controller", ".", "send", "(", "message", ",", "callback", ")" ]
Turn on switch.

:return: None
[ "Turn", "on", "switch", "." ]
af2f8af43f1a24bf854eff9f3126fd7b5c41b3dd
https://github.com/thomasdelaet/python-velbus/blob/af2f8af43f1a24bf854eff9f3126fd7b5c41b3dd/velbus/modules/vmb4ry.py#L29-L42
train
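turn_on (and turn_off below) defaults the callback to a locally defined no-op so that the controller can always invoke a callable. The same pattern in isolation, with invented names, since it is a common way to make callbacks optional without sprinkling None checks downstream:

def send_with_optional_callback(payload, callback=None):
    if callback is None:
        def callback():
            """No-op stand-in so the transport can call it unconditionally."""
            pass
    print('sending', payload)
    callback()

send_with_optional_callback('relay on')                        # falls back to the no-op
send_with_optional_callback('relay on', lambda: print('ack'))  # caller-supplied hook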
thomasdelaet/python-velbus
velbus/modules/vmb4ry.py
VMB4RYModule.turn_off
def turn_off(self, channel, callback=None):
    """
    Turn off switch.

    :return: None
    """
    if callback is None:
        def callb():
            """No-op"""
            pass
        callback = callb
    message = velbus.SwitchRelayOffMessage(self._address)
    message.relay_channels = [channel]
    self._controller.send(message, callback)
python
def turn_off(self, channel, callback=None):
    """
    Turn off switch.

    :return: None
    """
    if callback is None:
        def callb():
            """No-op"""
            pass
        callback = callb
    message = velbus.SwitchRelayOffMessage(self._address)
    message.relay_channels = [channel]
    self._controller.send(message, callback)
[ "def", "turn_off", "(", "self", ",", "channel", ",", "callback", "=", "None", ")", ":", "if", "callback", "is", "None", ":", "def", "callb", "(", ")", ":", "\"\"\"No-op\"\"\"", "pass", "callback", "=", "callb", "message", "=", "velbus", ".", "SwitchRelayOffMessage", "(", "self", ".", "_address", ")", "message", ".", "relay_channels", "=", "[", "channel", "]", "self", ".", "_controller", ".", "send", "(", "message", ",", "callback", ")" ]
Turn off switch.

:return: None
[ "Turn", "off", "switch", "." ]
af2f8af43f1a24bf854eff9f3126fd7b5c41b3dd
https://github.com/thomasdelaet/python-velbus/blob/af2f8af43f1a24bf854eff9f3126fd7b5c41b3dd/velbus/modules/vmb4ry.py#L44-L57
train
kizniche/sht-sensor
sht_sensor/sensor.py
Sht.read_dew_point
def read_dew_point(self, t=None, rh=None):
    'With t and rh provided, does not access the hardware.'
    if t is None:
        t, rh = self.read_t(), None
    if rh is None:
        rh = self.read_rh(t)
    t_range = 'water' if t >= 0 else 'ice'
    tn, m = self.c.tn[t_range], self.c.m[t_range]
    return (  # ch 4.4
        tn * (math.log(rh / 100.0) + (m * t) / (tn + t))
        / (m - math.log(rh / 100.0) - m * t / (tn + t)))
python
def read_dew_point(self, t=None, rh=None):
    'With t and rh provided, does not access the hardware.'
    if t is None:
        t, rh = self.read_t(), None
    if rh is None:
        rh = self.read_rh(t)
    t_range = 'water' if t >= 0 else 'ice'
    tn, m = self.c.tn[t_range], self.c.m[t_range]
    return (  # ch 4.4
        tn * (math.log(rh / 100.0) + (m * t) / (tn + t))
        / (m - math.log(rh / 100.0) - m * t / (tn + t)))
[ "def", "read_dew_point", "(", "self", ",", "t", "=", "None", ",", "rh", "=", "None", ")", ":", "if", "t", "is", "None", ":", "t", ",", "rh", "=", "self", ".", "read_t", "(", ")", ",", "None", "if", "rh", "is", "None", ":", "rh", "=", "self", ".", "read_rh", "(", "t", ")", "t_range", "=", "'water'", "if", "t", ">=", "0", "else", "'ice'", "tn", ",", "m", "=", "self", ".", "c", ".", "tn", "[", "t_range", "]", ",", "self", ".", "c", ".", "m", "[", "t_range", "]", "return", "(", "# ch 4.4", "tn", "*", "(", "math", ".", "log", "(", "rh", "/", "100.0", ")", "+", "(", "m", "*", "t", ")", "/", "(", "tn", "+", "t", ")", ")", "/", "(", "m", "-", "math", ".", "log", "(", "rh", "/", "100.0", ")", "-", "m", "*", "t", "/", "(", "tn", "+", "t", ")", ")", ")" ]
With t and rh provided, does not access the hardware.
[ "With", "t", "and", "rh", "provided", "does", "not", "access", "the", "hardware", "." ]
e44758327eec781297e68f3f59b6937b7c5758e3
https://github.com/kizniche/sht-sensor/blob/e44758327eec781297e68f3f59b6937b7c5758e3/sht_sensor/sensor.py#L319-L327
train
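The return expression in read_dew_point is the Magnus dew-point formula ("ch 4.4" points at the sensor vendor's humidity application note). A standalone numeric check, assuming the commonly quoted over-water constants Tn = 243.12 °C and m = 17.62; the actual values live in the sensor class's constant table, so treat these numbers as an assumption.

import math

def dew_point_sketch(t, rh, tn=243.12, m=17.62):
    # gamma groups the shared term, so the record's expression becomes tn * gamma / (m - gamma)
    gamma = math.log(rh / 100.0) + (m * t) / (tn + t)
    return tn * gamma / (m - gamma)

print(round(dew_point_sketch(25.0, 60.0), 1))  # roughly 16.7 degC for 25 degC at 60% RH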
KE-works/pykechain
pykechain/models/property_selectlist.py
SelectListProperty._put_options
def _put_options(self, options_list):
    """Save the options to KE-chain.

    Makes a single API call.

    :param options_list: list of options to set.
    :raises APIError: when unable to update the options
    """
    new_options = self._options.copy()  # make a full copy of the dict not to only link it and update dict in place
    new_options.update({"value_choices": options_list})
    validate(new_options, options_json_schema)

    url = self._client._build_url('property', property_id=self.id)
    response = self._client._request('PUT', url, json={'options': new_options})

    if response.status_code != 200:  # pragma: no cover
        raise APIError("Could not update property value. Response: {}".format(str(response)))
    else:
        self._options = new_options
python
def _put_options(self, options_list):
    """Save the options to KE-chain.

    Makes a single API call.

    :param options_list: list of options to set.
    :raises APIError: when unable to update the options
    """
    new_options = self._options.copy()  # make a full copy of the dict not to only link it and update dict in place
    new_options.update({"value_choices": options_list})
    validate(new_options, options_json_schema)

    url = self._client._build_url('property', property_id=self.id)
    response = self._client._request('PUT', url, json={'options': new_options})

    if response.status_code != 200:  # pragma: no cover
        raise APIError("Could not update property value. Response: {}".format(str(response)))
    else:
        self._options = new_options
[ "def", "_put_options", "(", "self", ",", "options_list", ")", ":", "new_options", "=", "self", ".", "_options", ".", "copy", "(", ")", "# make a full copy of the dict not to only link it and update dict in place", "new_options", ".", "update", "(", "{", "\"value_choices\"", ":", "options_list", "}", ")", "validate", "(", "new_options", ",", "options_json_schema", ")", "url", "=", "self", ".", "_client", ".", "_build_url", "(", "'property'", ",", "property_id", "=", "self", ".", "id", ")", "response", "=", "self", ".", "_client", ".", "_request", "(", "'PUT'", ",", "url", ",", "json", "=", "{", "'options'", ":", "new_options", "}", ")", "if", "response", ".", "status_code", "!=", "200", ":", "# pragma: no cover", "raise", "APIError", "(", "\"Could not update property value. Response: {}\"", ".", "format", "(", "str", "(", "response", ")", ")", ")", "else", ":", "self", ".", "_options", "=", "new_options" ]
Save the options to KE-chain.

Makes a single API call.

:param options_list: list of options to set.
:raises APIError: when unable to update the options
[ "Save", "the", "options", "to", "KE", "-", "chain", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/property_selectlist.py#L85-L103
train
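The copy-validate-PUT sequence in _put_options is a generally useful pattern: work on a copy so the cached options stay intact on failure, validate the payload against a JSON schema before sending, and only replace the local cache once the server accepted the change. A generic sketch with jsonschema and requests; the URL, schema and timeout are illustrative, not KE-chain's.

import requests
from jsonschema import validate

schema = {"type": "object", "properties": {"value_choices": {"type": "array"}}}

def put_options_sketch(endpoint_url, current_options, options_list):
    new_options = dict(current_options)            # copy, never mutate the cache in place
    new_options["value_choices"] = options_list
    validate(new_options, schema)                  # raises jsonschema.ValidationError on bad input
    response = requests.put(endpoint_url, json={"options": new_options}, timeout=10)
    if response.status_code != 200:
        raise RuntimeError("Could not update options: {}".format(response))
    return new_options                             # caller swaps in its cache only now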
Thermondo/viewflow-extensions
viewflow_extensions/utils.py
make_form_or_formset_fields_not_required
def make_form_or_formset_fields_not_required(form_or_formset):
    """Take a Form or FormSet and set all fields to not required."""
    if isinstance(form_or_formset, BaseFormSet):
        for single_form in form_or_formset:
            make_form_fields_not_required(single_form)
    else:
        make_form_fields_not_required(form_or_formset)
python
def make_form_or_formset_fields_not_required(form_or_formset):
    """Take a Form or FormSet and set all fields to not required."""
    if isinstance(form_or_formset, BaseFormSet):
        for single_form in form_or_formset:
            make_form_fields_not_required(single_form)
    else:
        make_form_fields_not_required(form_or_formset)
[ "def", "make_form_or_formset_fields_not_required", "(", "form_or_formset", ")", ":", "if", "isinstance", "(", "form_or_formset", ",", "BaseFormSet", ")", ":", "for", "single_form", "in", "form_or_formset", ":", "make_form_fields_not_required", "(", "single_form", ")", "else", ":", "make_form_fields_not_required", "(", "form_or_formset", ")" ]
Take a Form or FormSet and set all fields to not required.
[ "Take", "a", "Form", "or", "FormSet", "and", "set", "all", "fields", "to", "not", "required", "." ]
5d2bbfe28ced7dda3e6832b96ea031c1b871053e
https://github.com/Thermondo/viewflow-extensions/blob/5d2bbfe28ced7dda3e6832b96ea031c1b871053e/viewflow_extensions/utils.py#L4-L10
train
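The branch above leans on the fact that iterating a BaseFormSet yields its member forms. A quick sketch of the helper in action with a throwaway Django form class; settings.configure() is only there so the snippet can run outside a project, and the form itself is invented.

from django.conf import settings
settings.configure()   # minimal settings so the sketch runs standalone

from django import forms
from django.forms import formset_factory
from viewflow_extensions.utils import make_form_or_formset_fields_not_required

class NameForm(forms.Form):
    name = forms.CharField()   # required by default

NameFormSet = formset_factory(NameForm, extra=2)
formset = NameFormSet()
make_form_or_formset_fields_not_required(formset)

# every field in every member form is now optional
print(all(not form.fields['name'].required for form in formset))   # True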
KE-works/pykechain
pykechain/models/activity.py
Activity.scope_id
def scope_id(self):
    """
    ID of the scope this Activity belongs to.

    This property will always produce a scope_id, even when the scope object was not included in an earlier
    response.

    When the :class:`Scope` is not included in this task, it will make an additional call to the KE-chain API.

    :return: the scope id (uuid string)
    :raises NotFoundError: if the scope could not be found
    """
    if self.scope:
        scope_id = self.scope and self.scope.get('id')
    else:
        pseudo_self = self._client.activity(pk=self.id, fields="id,scope")
        if pseudo_self.scope and pseudo_self.scope.get('id'):
            self.scope = pseudo_self.scope
            scope_id = self.scope.get('id')
        else:
            raise NotFoundError("This activity '{}'({}) does not belong to a scope, something is weird!".
                                format(self.name, self.id))
    return scope_id
python
def scope_id(self):
    """
    ID of the scope this Activity belongs to.

    This property will always produce a scope_id, even when the scope object was not included in an earlier
    response.

    When the :class:`Scope` is not included in this task, it will make an additional call to the KE-chain API.

    :return: the scope id (uuid string)
    :raises NotFoundError: if the scope could not be found
    """
    if self.scope:
        scope_id = self.scope and self.scope.get('id')
    else:
        pseudo_self = self._client.activity(pk=self.id, fields="id,scope")
        if pseudo_self.scope and pseudo_self.scope.get('id'):
            self.scope = pseudo_self.scope
            scope_id = self.scope.get('id')
        else:
            raise NotFoundError("This activity '{}'({}) does not belong to a scope, something is weird!".
                                format(self.name, self.id))
    return scope_id
[ "def", "scope_id", "(", "self", ")", ":", "if", "self", ".", "scope", ":", "scope_id", "=", "self", ".", "scope", "and", "self", ".", "scope", ".", "get", "(", "'id'", ")", "else", ":", "pseudo_self", "=", "self", ".", "_client", ".", "activity", "(", "pk", "=", "self", ".", "id", ",", "fields", "=", "\"id,scope\"", ")", "if", "pseudo_self", ".", "scope", "and", "pseudo_self", ".", "scope", ".", "get", "(", "'id'", ")", ":", "self", ".", "scope", "=", "pseudo_self", ".", "scope", "scope_id", "=", "self", ".", "scope", ".", "get", "(", "'id'", ")", "else", ":", "raise", "NotFoundError", "(", "\"This activity '{}'({}) does not belong to a scope, something is weird!\"", ".", "format", "(", "self", ".", "name", ",", "self", ".", "id", ")", ")", "return", "scope_id" ]
ID of the scope this Activity belongs to.

This property will always produce a scope_id, even when the scope object was not included in an earlier response.

When the :class:`Scope` is not included in this task, it will make an additional call to the KE-chain API.

:return: the scope id (uuid string)
:raises NotFoundError: if the scope could not be found
[ "ID", "of", "the", "scope", "this", "Activity", "belongs", "to", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/activity.py#L26-L48
train
KE-works/pykechain
pykechain/models/activity.py
Activity.is_rootlevel
def is_rootlevel(self):
    """
    Determine if Activity is at the root level of a project.

    :return: Return True if it is a root level activity, otherwise return False
    :rtype: bool
    """
    container_id = self._json_data.get('container')
    if container_id:
        return container_id == self._json_data.get('root_container')
    else:
        return False
python
def is_rootlevel(self):
    """
    Determine if Activity is at the root level of a project.

    :return: Return True if it is a root level activity, otherwise return False
    :rtype: bool
    """
    container_id = self._json_data.get('container')
    if container_id:
        return container_id == self._json_data.get('root_container')
    else:
        return False
[ "def", "is_rootlevel", "(", "self", ")", ":", "container_id", "=", "self", ".", "_json_data", ".", "get", "(", "'container'", ")", "if", "container_id", ":", "return", "container_id", "==", "self", ".", "_json_data", ".", "get", "(", "'root_container'", ")", "else", ":", "return", "False" ]
Determine if Activity is at the root level of a project.

:return: Return True if it is a root level activity, otherwise return False
:rtype: bool
[ "Determine", "if", "Activity", "is", "at", "the", "root", "level", "of", "a", "project", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/activity.py#L54-L65
train
KE-works/pykechain
pykechain/models/activity.py
Activity.is_configured
def is_configured(self):
    """
    Determine if the Activity is configured with input and output properties.

    Makes an additional lightweight call to the API to determine if any associated models are there.

    :return: Return True if it is configured, otherwise return False
    :rtype: bool
    """
    # check configured based on if we get at least 1 part back
    associated_models = self.parts(category=Category.MODEL, limit=1)
    if associated_models:
        return True
    else:
        return False
python
def is_configured(self):
    """
    Determine if the Activity is configured with input and output properties.

    Makes an additional lightweight call to the API to determine if any associated models are there.

    :return: Return True if it is configured, otherwise return False
    :rtype: bool
    """
    # check configured based on if we get at least 1 part back
    associated_models = self.parts(category=Category.MODEL, limit=1)
    if associated_models:
        return True
    else:
        return False
[ "def", "is_configured", "(", "self", ")", ":", "# check configured based on if we get at least 1 part back", "associated_models", "=", "self", ".", "parts", "(", "category", "=", "Category", ".", "MODEL", ",", "limit", "=", "1", ")", "if", "associated_models", ":", "return", "True", "else", ":", "return", "False" ]
Determine if the Activity is configured with input and output properties.

Makes an additional lightweight call to the API to determine if any associated models are there.

:return: Return True if it is configured, otherwise return False
:rtype: bool
[ "Determine", "if", "the", "Activity", "is", "configured", "with", "input", "and", "output", "properties", "." ]
b0296cf34328fd41660bf6f0b9114fd0167c40c4
https://github.com/KE-works/pykechain/blob/b0296cf34328fd41660bf6f0b9114fd0167c40c4/pykechain/models/activity.py#L85-L99
train