repo (string, lengths 7-55) | path (string, lengths 4-127) | func_name (string, lengths 1-88) | original_string (string, lengths 75-19.8k) | language (string, 1 class) | code (string, lengths 75-19.8k) | code_tokens (sequence) | docstring (string, lengths 3-17.3k) | docstring_tokens (sequence) | sha (string, length 40) | url (string, lengths 87-242) | partition (string, 1 class)
---|---|---|---|---|---|---|---|---|---|---|---|
thilux/tvdb_client | tvdb_client/clients/ApiV2Client.py | ApiV2Client.get_series_episodes | def get_series_episodes(self, series_id, episode_number=None, aired_season=None, aired_episode=None,
dvd_season=None, dvd_episode=None, imdb_id=None, page=1):
"""
Retrieves all episodes for a particular series given its TheTVDB id and filtered by additional optional details.
:param series_id: The TheTVDB id of the series.
:param episode_number: The optional absolute episode number.
:param aired_season: The optional aired season number.
:param aired_episode: The optional aired episode number.
:param dvd_season: The optional DVD season number.
:param dvd_episode: The optional DVD episode number.
:param imdb_id: The optional IMDB Id of the series.
:param page: The page number. If none is provided, 1 is used by default.
:return: a python dictionary with either the result of the search or an error from TheTVDB.
"""
arguments = locals()
optional_parameters = {'episode_number': 'absoluteNumber', 'aired_season': 'airedSeason',
'aired_episode': 'airedEpisode', 'dvd_season': 'dvdSeason', 'dvd_episode': 'dvdEpisode',
'imdb_id': 'imdbId', 'page': 'page'}
query_string = utils.query_param_string_from_option_args(optional_parameters, arguments)
raw_response = requests_util.run_request('get', self.API_BASE_URL + '/series/%d/episodes/query?%s' %
(series_id, query_string), headers=self.__get_header_with_auth())
return self.parse_raw_response(raw_response) | python | def get_series_episodes(self, series_id, episode_number=None, aired_season=None, aired_episode=None,
dvd_season=None, dvd_episode=None, imdb_id=None, page=1):
"""
Retrieves all episodes for a particular series given its TheTVDB id and filtered by additional optional details.
:param series_id: The TheTVDB id of the series.
:param episode_number: The optional absolute episode number.
:param aired_season: The optional aired season number.
:param aired_episode: The optional aired episode number.
:param dvd_season: The optional DVD season number.
:param dvd_episode: The optional DVD episode number.
:param imdb_id: The optional IMDB Id of the series.
:param page: The page number. If none is provided, 1 is used by default.
:return: a python dictionary with either the result of the search or an error from TheTVDB.
"""
arguments = locals()
optional_parameters = {'episode_number': 'absoluteNumber', 'aired_season': 'airedSeason',
'aired_episode': 'airedEpisode', 'dvd_season': 'dvdSeason', 'dvd_episode': 'dvdEpisode',
'imdb_id': 'imdbId', 'page': 'page'}
query_string = utils.query_param_string_from_option_args(optional_parameters, arguments)
raw_response = requests_util.run_request('get', self.API_BASE_URL + '/series/%d/episodes/query?%s' %
(series_id, query_string), headers=self.__get_header_with_auth())
return self.parse_raw_response(raw_response) | [
"def",
"get_series_episodes",
"(",
"self",
",",
"series_id",
",",
"episode_number",
"=",
"None",
",",
"aired_season",
"=",
"None",
",",
"aired_episode",
"=",
"None",
",",
"dvd_season",
"=",
"None",
",",
"dvd_episode",
"=",
"None",
",",
"imdb_id",
"=",
"None",
",",
"page",
"=",
"1",
")",
":",
"arguments",
"=",
"locals",
"(",
")",
"optional_parameters",
"=",
"{",
"'episode_number'",
":",
"'absoluteNumber'",
",",
"'aired_season'",
":",
"'airedSeason'",
",",
"'aired_episode'",
":",
"'airedEpisode'",
",",
"'dvd_season'",
":",
"'dvdSeason'",
",",
"'dvd_episode'",
":",
"'dvdEpisode'",
",",
"'imdb_id'",
":",
"'imdbId'",
",",
"'page'",
":",
"'page'",
"}",
"query_string",
"=",
"utils",
".",
"query_param_string_from_option_args",
"(",
"optional_parameters",
",",
"arguments",
")",
"raw_response",
"=",
"requests_util",
".",
"run_request",
"(",
"'get'",
",",
"self",
".",
"API_BASE_URL",
"+",
"'/series/%d/episodes/query?%s'",
"%",
"(",
"series_id",
",",
"query_string",
")",
",",
"headers",
"=",
"self",
".",
"__get_header_with_auth",
"(",
")",
")",
"return",
"self",
".",
"parse_raw_response",
"(",
"raw_response",
")"
] | Retrieves all episodes for a particular series given its TheTVDB id and filtered by additional optional details.
:param series_id: The TheTVDB id of the series.
:param episode_number: The optional absolute episode number.
:param aired_season: The optional aired season number.
:param aired_episode: The optional aired episode number.
:param dvd_season: The optional DVD season number.
:param dvd_episode: The optional DVD episode number.
:param imdb_id: The optional IMDB Id of the series.
:param page: The page number. If none is provided, 1 is used by default.
:return: a python dictionary with either the result of the search or an error from TheTVDB. | [
"Retrieves",
"all",
"episodes",
"for",
"a",
"particular",
"series",
"given",
"its",
"TheTVDB",
"and",
"filtered",
"by",
"additional",
"optional",
"details",
"."
] | 2d5106f260367c0abe1284683697874df6343f78 | https://github.com/thilux/tvdb_client/blob/2d5106f260367c0abe1284683697874df6343f78/tvdb_client/clients/ApiV2Client.py#L167-L194 | train |
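
A minimal usage sketch for the `get_series_episodes` record above. It assumes an already-authenticated `ApiV2Client` instance named `client`; how the client is constructed and authenticated is not shown in this record, and the series id used here is only an illustrative value.

```python
# Hypothetical, minimal call against the method shown above.
# `client` is assumed to be an authenticated tvdb_client ApiV2Client.
result = client.get_series_episodes(series_id=121361, aired_season=1, page=1)

# Per the docstring, the return value is a plain Python dict holding either
# the search result or an error payload from TheTVDB.
print(result)
```
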
thilux/tvdb_client | tvdb_client/clients/ApiV2Client.py | ApiV2Client.get_updated | def get_updated(self, from_time, to_time=None):
"""
Retrieves a list of series that have changed on TheTVDB since a provided from time parameter and optionally up to a
specified to time.
:param from_time: An epoch representation of the date from which to restrict the query.
:param to_time: An optional epoch representation of the date to which to restrict the query.
:return: a python dictionary with either the result of the search or an error from TheTVDB.
"""
arguments = locals()
optional_parameters = {'to_time': 'toTime'}
query_string = 'fromTime=%s&%s' % (from_time,
utils.query_param_string_from_option_args(optional_parameters, arguments))
raw_response = requests_util.run_request('get', self.API_BASE_URL + '/updated/query?%s' % query_string,
headers=self.__get_header_with_auth())
return self.parse_raw_response(raw_response) | python | def get_updated(self, from_time, to_time=None):
"""
Retrieves a list of series that have changed on TheTVDB since a provided from time parameter and optionally up to a
specified to time.
:param from_time: An epoch representation of the date from which to restrict the query.
:param to_time: An optional epoch representation of the date to which to restrict the query.
:return: a python dictionary with either the result of the search or an error from TheTVDB.
"""
arguments = locals()
optional_parameters = {'to_time': 'toTime'}
query_string = 'fromTime=%s&%s' % (from_time,
utils.query_param_string_from_option_args(optional_parameters, arguments))
raw_response = requests_util.run_request('get', self.API_BASE_URL + '/updated/query?%s' % query_string,
headers=self.__get_header_with_auth())
return self.parse_raw_response(raw_response) | [
"def",
"get_updated",
"(",
"self",
",",
"from_time",
",",
"to_time",
"=",
"None",
")",
":",
"arguments",
"=",
"locals",
"(",
")",
"optional_parameters",
"=",
"{",
"'to_time'",
":",
"'toTime'",
"}",
"query_string",
"=",
"'fromTime=%s&%s'",
"%",
"(",
"from_time",
",",
"utils",
".",
"query_param_string_from_option_args",
"(",
"optional_parameters",
",",
"arguments",
")",
")",
"raw_response",
"=",
"requests_util",
".",
"run_request",
"(",
"'get'",
",",
"self",
".",
"API_BASE_URL",
"+",
"'/uodated/query?%s'",
"%",
"query_string",
",",
"headers",
"=",
"self",
".",
"__get_header_with_auth",
"(",
")",
")",
"return",
"self",
".",
"parse_raw_response",
"(",
"raw_response",
")"
] | Retrieves a list of series that have changed on TheTVDB since a provided from time parameter and optionally up to a
specified to time.
:param from_time: An epoch representation of the date from which to restrict the query.
:param to_time: An optional epoch representation of the date to which to restrict the query.
:return: a python dictionary with either the result of the search or an error from TheTVDB. | [
"Retrives",
"a",
"list",
"of",
"series",
"that",
"have",
"changed",
"on",
"TheTVDB",
"since",
"a",
"provided",
"from",
"time",
"parameter",
"and",
"optionally",
"to",
"an",
"specified",
"to",
"time",
"."
] | 2d5106f260367c0abe1284683697874df6343f78 | https://github.com/thilux/tvdb_client/blob/2d5106f260367c0abe1284683697874df6343f78/tvdb_client/clients/ApiV2Client.py#L252-L271 | train |
thilux/tvdb_client | tvdb_client/clients/ApiV2Client.py | ApiV2Client.get_user | def get_user(self):
"""
Retrieves information about the user currently using the api.
:return: a python dictionary with either the result of the search or an error from TheTVDB.
"""
return self.parse_raw_response(requests_util.run_request('get', self.API_BASE_URL + '/user',
headers=self.__get_header_with_auth())) | python | def get_user(self):
"""
Retrieves information about the user currently using the api.
:return: a python dictionary with either the result of the search or an error from TheTVDB.
"""
return self.parse_raw_response(requests_util.run_request('get', self.API_BASE_URL + '/user',
headers=self.__get_header_with_auth())) | [
"def",
"get_user",
"(",
"self",
")",
":",
"return",
"self",
".",
"parse_raw_response",
"(",
"requests_util",
".",
"run_request",
"(",
"'get'",
",",
"self",
".",
"API_BASE_URL",
"+",
"'/user'",
",",
"headers",
"=",
"self",
".",
"__get_header_with_auth",
"(",
")",
")",
")"
] | Retrieves information about the user currently using the api.
:return: a python dictionary with either the result of the search or an error from TheTVDB. | [
"Retrieves",
"information",
"about",
"the",
"user",
"currently",
"using",
"the",
"api",
"."
] | 2d5106f260367c0abe1284683697874df6343f78 | https://github.com/thilux/tvdb_client/blob/2d5106f260367c0abe1284683697874df6343f78/tvdb_client/clients/ApiV2Client.py#L274-L282 | train |
thilux/tvdb_client | tvdb_client/clients/ApiV2Client.py | ApiV2Client.get_user_favorites | def get_user_favorites(self):
"""
Retrieves the list of tv series the current user has flagged as favorite.
:return: a python dictionary with either the result of the search or an error from TheTVDB.
"""
return self.parse_raw_response(requests_util.run_request('get', self.API_BASE_URL + '/user/favorites',
headers=self.__get_header_with_auth())) | python | def get_user_favorites(self):
"""
Retrieves the list of tv series the current user has flagged as favorite.
:return: a python dictionary with either the result of the search or an error from TheTVDB.
"""
return self.parse_raw_response(requests_util.run_request('get', self.API_BASE_URL + '/user/favorites',
headers=self.__get_header_with_auth())) | [
"def",
"get_user_favorites",
"(",
"self",
")",
":",
"return",
"self",
".",
"parse_raw_response",
"(",
"requests_util",
".",
"run_request",
"(",
"'get'",
",",
"self",
".",
"API_BASE_URL",
"+",
"'/user/favorites'",
",",
"headers",
"=",
"self",
".",
"__get_header_with_auth",
"(",
")",
")",
")"
] | Retrieves the list of tv series the current user has flagged as favorite.
:return: a python dictionary with either the result of the search or an error from TheTVDB. | [
"Retrieves",
"the",
"list",
"of",
"tv",
"series",
"the",
"current",
"user",
"has",
"flagged",
"as",
"favorite",
"."
] | 2d5106f260367c0abe1284683697874df6343f78 | https://github.com/thilux/tvdb_client/blob/2d5106f260367c0abe1284683697874df6343f78/tvdb_client/clients/ApiV2Client.py#L285-L293 | train |
thilux/tvdb_client | tvdb_client/clients/ApiV2Client.py | ApiV2Client.delete_user_favorite | def delete_user_favorite(self, series_id):
"""
Deletes the series of the provided id from the favorites list of the current user.
:param series_id: The TheTVDB id of the series.
:return: a python dictionary with either the result of the search or an error from TheTVDB.
"""
return self.parse_raw_response(requests_util.run_request('delete',
self.API_BASE_URL + '/user/favorites/%d' % series_id,
headers=self.__get_header_with_auth())) | python | def delete_user_favorite(self, series_id):
"""
Deletes the series of the provided id from the favorites list of the current user.
:param series_id: The TheTVDB id of the series.
:return: a python dictionary with either the result of the search or an error from TheTVDB.
"""
return self.parse_raw_response(requests_util.run_request('delete',
self.API_BASE_URL + '/user/favorites/%d' % series_id,
headers=self.__get_header_with_auth())) | [
"def",
"delete_user_favorite",
"(",
"self",
",",
"series_id",
")",
":",
"return",
"self",
".",
"parse_raw_response",
"(",
"requests_util",
".",
"run_request",
"(",
"'delete'",
",",
"self",
".",
"API_BASE_URL",
"+",
"'/user/favorites/%d'",
"%",
"series_id",
",",
"headers",
"=",
"self",
".",
"__get_header_with_auth",
"(",
")",
")",
")"
] | Deletes the series of the provided id from the favorites list of the current user.
:param series_id: The TheTVDB id of the series.
:return: a python dictionary with either the result of the search or an error from TheTVDB. | [
"Deletes",
"the",
"series",
"of",
"the",
"provided",
"id",
"from",
"the",
"favorites",
"list",
"of",
"the",
"current",
"user",
"."
] | 2d5106f260367c0abe1284683697874df6343f78 | https://github.com/thilux/tvdb_client/blob/2d5106f260367c0abe1284683697874df6343f78/tvdb_client/clients/ApiV2Client.py#L296-L306 | train |
thilux/tvdb_client | tvdb_client/clients/ApiV2Client.py | ApiV2Client.__get_user_ratings | def __get_user_ratings(self):
"""
Returns a list of the ratings provided by the current user.
:return: a python dictionary with either the result of the search or an error from TheTVDB.
"""
return self.parse_raw_response(requests_util.run_request('get', self.API_BASE_URL + '/user/ratings',
headers=self.__get_header_with_auth())) | python | def __get_user_ratings(self):
"""
Returns a list of the ratings provided by the current user.
:return: a python dictionary with either the result of the search or an error from TheTVDB.
"""
return self.parse_raw_response(requests_util.run_request('get', self.API_BASE_URL + '/user/ratings',
headers=self.__get_header_with_auth())) | [
"def",
"__get_user_ratings",
"(",
"self",
")",
":",
"return",
"self",
".",
"parse_raw_response",
"(",
"requests_util",
".",
"run_request",
"(",
"'get'",
",",
"self",
".",
"API_BASE_URL",
"+",
"'/user/ratings'",
",",
"headers",
"=",
"self",
".",
"__get_header_with_auth",
"(",
")",
")",
")"
] | Returns a list of the ratings provided by the current user.
:return: a python dictionary with either the result of the search or an error from TheTVDB. | [
"Returns",
"a",
"list",
"of",
"the",
"ratings",
"provided",
"by",
"the",
"current",
"user",
"."
] | 2d5106f260367c0abe1284683697874df6343f78 | https://github.com/thilux/tvdb_client/blob/2d5106f260367c0abe1284683697874df6343f78/tvdb_client/clients/ApiV2Client.py#L322-L330 | train |
thilux/tvdb_client | tvdb_client/clients/ApiV2Client.py | ApiV2Client.get_user_ratings | def get_user_ratings(self, item_type=None):
"""
Returns a list of the ratings for the type of item provided, for the current user.
:param item_type: One of: series, episode or banner.
:return: a python dictionary with either the result of the search or an error from TheTVDB.
"""
if item_type:
query_string = 'itemType=%s' % item_type
return self.parse_raw_response(
requests_util.run_request('get', self.API_BASE_URL + '/user/ratings/query?%s' % query_string,
headers=self.__get_header_with_auth()))
else:
return self.__get_user_ratings() | python | def get_user_ratings(self, item_type=None):
"""
Returns a list of the ratings for the type of item provided, for the current user.
:param item_type: One of: series, episode or banner.
:return: a python dictionary with either the result of the search or an error from TheTVDB.
"""
if item_type:
query_string = 'itemType=%s' % item_type
return self.parse_raw_response(
requests_util.run_request('get', self.API_BASE_URL + '/user/ratings/query?%s' % query_string,
headers=self.__get_header_with_auth()))
else:
return self.__get_user_ratings() | [
"def",
"get_user_ratings",
"(",
"self",
",",
"item_type",
"=",
"None",
")",
":",
"if",
"item_type",
":",
"query_string",
"=",
"'itemType=%s'",
"%",
"item_type",
"return",
"self",
".",
"parse_raw_response",
"(",
"requests_util",
".",
"run_request",
"(",
"'get'",
",",
"self",
".",
"API_BASE_URL",
"+",
"'/user/ratings/qeury?%s'",
"%",
"query_string",
",",
"headers",
"=",
"self",
".",
"__get_header_with_auth",
"(",
")",
")",
")",
"else",
":",
"return",
"self",
".",
"__get_user_ratings",
"(",
")"
] | Returns a list of the ratings for the type of item provided, for the current user.
:param item_type: One of: series, episode or banner.
:return: a python dictionary with either the result of the search or an error from TheTVDB. | [
"Returns",
"a",
"list",
"of",
"the",
"ratings",
"for",
"the",
"type",
"of",
"item",
"provided",
"for",
"the",
"current",
"user",
"."
] | 2d5106f260367c0abe1284683697874df6343f78 | https://github.com/thilux/tvdb_client/blob/2d5106f260367c0abe1284683697874df6343f78/tvdb_client/clients/ApiV2Client.py#L333-L348 | train |
thilux/tvdb_client | tvdb_client/clients/ApiV2Client.py | ApiV2Client.add_user_rating | def add_user_rating(self, item_type, item_id, item_rating):
"""
Adds the rating for the item indicated for the current user.
:param item_type: One of: series, episode, banner.
:param item_id: The TheTVDB id of the item.
:param item_rating: The rating from 0 to 10.
:return:
"""
raw_response = requests_util.run_request('put',
self.API_BASE_URL + '/user/ratings/%s/%d/%d' %
(item_type, item_id, item_rating),
headers=self.__get_header_with_auth())
return self.parse_raw_response(raw_response) | python | def add_user_rating(self, item_type, item_id, item_rating):
"""
Adds the rating for the item indicated for the current user.
:param item_type: One of: series, episode, banner.
:param item_id: The TheTVDB id of the item.
:param item_rating: The rating from 0 to 10.
:return:
"""
raw_response = requests_util.run_request('put',
self.API_BASE_URL + '/user/ratings/%s/%d/%d' %
(item_type, item_id, item_rating),
headers=self.__get_header_with_auth())
return self.parse_raw_response(raw_response) | [
"def",
"add_user_rating",
"(",
"self",
",",
"item_type",
",",
"item_id",
",",
"item_rating",
")",
":",
"raw_response",
"=",
"requests_util",
".",
"run_request",
"(",
"'put'",
",",
"self",
".",
"API_BASE_URL",
"+",
"'/user/ratings/%s/%d/%d'",
"%",
"(",
"item_type",
",",
"item_id",
",",
"item_rating",
")",
",",
"headers",
"=",
"self",
".",
"__get_header_with_auth",
"(",
")",
")",
"return",
"self",
".",
"parse_raw_response",
"(",
"raw_response",
")"
] | Adds the rating for the item indicated for the current user.
:param item_type: One of: series, episode, banner.
:param item_id: The TheTVDB id of the item.
:param item_rating: The rating from 0 to 10.
:return: | [
"Adds",
"the",
"rating",
"for",
"the",
"item",
"indicated",
"for",
"the",
"current",
"user",
"."
] | 2d5106f260367c0abe1284683697874df6343f78 | https://github.com/thilux/tvdb_client/blob/2d5106f260367c0abe1284683697874df6343f78/tvdb_client/clients/ApiV2Client.py#L351-L366 | train |
thilux/tvdb_client | tvdb_client/clients/ApiV2Client.py | ApiV2Client.delete_user_rating | def delete_user_rating(self, item_type, item_id):
"""
Deletes, from the list of ratings of the current user, the rating provided for the specified element type.
:param item_type: One of: series, episode, banner.
:param item_id: The TheTVDB Id of the item.
:return: a python dictionary with either the result of the search or an error from TheTVDB.
"""
raw_response = requests_util.run_request('delete',
self.API_BASE_URL + '/user/ratings/%s/%d' %
(item_type, item_id), headers=self.__get_header_with_auth())
return self.parse_raw_response(raw_response) | python | def delete_user_rating(self, item_type, item_id):
"""
Deletes, from the list of ratings of the current user, the rating provided for the specified element type.
:param item_type: One of: series, episode, banner.
:param item_id: The TheTVDB Id of the item.
:return: a python dictionary with either the result of the search or an error from TheTVDB.
"""
raw_response = requests_util.run_request('delete',
self.API_BASE_URL + '/user/ratings/%s/%d' %
(item_type, item_id), headers=self.__get_header_with_auth())
return self.parse_raw_response(raw_response) | [
"def",
"delete_user_rating",
"(",
"self",
",",
"item_type",
",",
"item_id",
")",
":",
"raw_response",
"=",
"requests_util",
".",
"run_request",
"(",
"'delete'",
",",
"self",
".",
"API_BASE_URL",
"+",
"'/user/ratings/%s/%d'",
"%",
"(",
"item_type",
",",
"item_id",
")",
",",
"headers",
"=",
"self",
".",
"__get_header_with_auth",
"(",
")",
")",
"return",
"self",
".",
"parse_raw_response",
"(",
"raw_response",
")"
] | Deletes, from the list of ratings of the current user, the rating provided for the specified element type.
:param item_type: One of: series, episode, banner.
:param item_id: The TheTVDB Id of the item.
:return: a python dictionary with either the result of the search or an error from TheTVDB. | [
"Deletes",
"from",
"the",
"list",
"of",
"rating",
"of",
"the",
"current",
"user",
"the",
"rating",
"provided",
"for",
"the",
"specified",
"element",
"type",
"."
] | 2d5106f260367c0abe1284683697874df6343f78 | https://github.com/thilux/tvdb_client/blob/2d5106f260367c0abe1284683697874df6343f78/tvdb_client/clients/ApiV2Client.py#L369-L382 | train |
thilux/tvdb_client | tvdb_client/clients/ApiV2Client.py | ApiV2Client.get_episode | def get_episode(self, episode_id):
"""
Returns the full information of the episode belonging to the Id provided.
:param episode_id: The TheTVDB id of the episode.
:return: a python dictionary with either the result of the search or an error from TheTVDB.
"""
raw_response = requests_util.run_request('get', self.API_BASE_URL + '/episodes/%d' % episode_id,
headers=self.__get_header_with_auth())
return self.parse_raw_response(raw_response) | python | def get_episode(self, episode_id):
"""
Returns the full information of the episode belonging to the Id provided.
:param episode_id: The TheTVDB id of the episode.
:return: a python dictionary with either the result of the search or an error from TheTVDB.
"""
raw_response = requests_util.run_request('get', self.API_BASE_URL + '/episodes/%d' % episode_id,
headers=self.__get_header_with_auth())
return self.parse_raw_response(raw_response) | [
"def",
"get_episode",
"(",
"self",
",",
"episode_id",
")",
":",
"raw_response",
"=",
"requests_util",
".",
"run_request",
"(",
"'get'",
",",
"self",
".",
"API_BASE_URL",
"+",
"'/episodes/%d'",
"%",
"episode_id",
",",
"headers",
"=",
"self",
".",
"__get_header_with_auth",
"(",
")",
")",
"return",
"self",
".",
"parse_raw_response",
"(",
"raw_response",
")"
] | Returns the full information of the episode belonging to the Id provided.
:param episode_id: The TheTVDB id of the episode.
:return: a python dictionary with either the result of the search or an error from TheTVDB. | [
"Returns",
"the",
"full",
"information",
"of",
"the",
"episode",
"belonging",
"to",
"the",
"Id",
"provided",
"."
] | 2d5106f260367c0abe1284683697874df6343f78 | https://github.com/thilux/tvdb_client/blob/2d5106f260367c0abe1284683697874df6343f78/tvdb_client/clients/ApiV2Client.py#L385-L396 | train |
thilux/tvdb_client | tvdb_client/clients/ApiV2Client.py | ApiV2Client.get_languages | def get_languages(self):
"""
Returns a list of all language options available in TheTVDB.
:return: a python dictionary with either the result of the search or an error from TheTVDB.
"""
raw_response = requests_util.run_request('get', self.API_BASE_URL + '/languages',
headers=self.__get_header_with_auth())
return self.parse_raw_response(raw_response) | python | def get_languages(self):
"""
Returns a list of all language options available in TheTVDB.
:return: a python dictionary with either the result of the search or an error from TheTVDB.
"""
raw_response = requests_util.run_request('get', self.API_BASE_URL + '/languages',
headers=self.__get_header_with_auth())
return self.parse_raw_response(raw_response) | [
"def",
"get_languages",
"(",
"self",
")",
":",
"raw_response",
"=",
"requests_util",
".",
"run_request",
"(",
"'get'",
",",
"self",
".",
"API_BASE_URL",
"+",
"'/languages'",
",",
"headers",
"=",
"self",
".",
"__get_header_with_auth",
"(",
")",
")",
"return",
"self",
".",
"parse_raw_response",
"(",
"raw_response",
")"
] | Returns a list of all language options available in TheTVDB.
:return: a python dictionary with either the result of the search or an error from TheTVDB. | [
"Returns",
"a",
"list",
"of",
"all",
"language",
"options",
"available",
"in",
"TheTVDB",
"."
] | 2d5106f260367c0abe1284683697874df6343f78 | https://github.com/thilux/tvdb_client/blob/2d5106f260367c0abe1284683697874df6343f78/tvdb_client/clients/ApiV2Client.py#L399-L409 | train |
thilux/tvdb_client | tvdb_client/clients/ApiV2Client.py | ApiV2Client.get_language | def get_language(self, language_id):
"""
Retrieves information about the language of the given id.
:param language_id: The TheTVDB Id of the language.
:return: a python dictionary with either the result of the search or an error from TheTVDB.
"""
raw_response = requests_util.run_request('get', self.API_BASE_URL + '/languages/%d' % language_id,
headers=self.__get_header_with_auth())
return self.parse_raw_response(raw_response) | python | def get_language(self, language_id):
"""
Retrieves information about the language of the given id.
:param language_id: The TheTVDB Id of the language.
:return: a python dictionary with either the result of the search or an error from TheTVDB.
"""
raw_response = requests_util.run_request('get', self.API_BASE_URL + '/languages/%d' % language_id,
headers=self.__get_header_with_auth())
return self.parse_raw_response(raw_response) | [
"def",
"get_language",
"(",
"self",
",",
"language_id",
")",
":",
"raw_response",
"=",
"requests_util",
".",
"run_request",
"(",
"'get'",
",",
"self",
".",
"API_BASE_URL",
"+",
"'/languages/%d'",
"%",
"language_id",
",",
"headers",
"=",
"self",
".",
"__get_header_with_auth",
"(",
")",
")",
"return",
"self",
".",
"parse_raw_response",
"(",
"raw_response",
")"
] | Retrieves information about the language of the given id.
:param language_id: The TheTVDB Id of the language.
:return: a python dictionary with either the result of the search or an error from TheTVDB. | [
"Retrieves",
"information",
"about",
"the",
"language",
"of",
"the",
"given",
"id",
"."
] | 2d5106f260367c0abe1284683697874df6343f78 | https://github.com/thilux/tvdb_client/blob/2d5106f260367c0abe1284683697874df6343f78/tvdb_client/clients/ApiV2Client.py#L412-L423 | train |
CenturyLinkCloud/clc-python-sdk | src/clc/APIv1/__init__.py | SetCredentials | def SetCredentials(api_key,api_passwd):
"""Establish API key and password associated with APIv1 commands."""
global V1_API_KEY
global V1_API_PASSWD
global _V1_ENABLED
_V1_ENABLED = True
V1_API_KEY = api_key
V1_API_PASSWD = api_passwd | python | def SetCredentials(api_key,api_passwd):
"""Establish API key and password associated with APIv1 commands."""
global V1_API_KEY
global V1_API_PASSWD
global _V1_ENABLED
_V1_ENABLED = True
V1_API_KEY = api_key
V1_API_PASSWD = api_passwd | [
"def",
"SetCredentials",
"(",
"api_key",
",",
"api_passwd",
")",
":",
"global",
"V1_API_KEY",
"global",
"V1_API_PASSWD",
"global",
"_V1_ENABLED",
"_V1_ENABLED",
"=",
"True",
"V1_API_KEY",
"=",
"api_key",
"V1_API_PASSWD",
"=",
"api_passwd"
] | Establish API key and password associated with APIv1 commands. | [
"Establish",
"API",
"key",
"and",
"password",
"associated",
"with",
"APIv1",
"commands",
"."
] | f4dba40c627cb08dd4b7d0d277e8d67578010b05 | https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/__init__.py#L54-L61 | train |
vmonaco/pohmm | pohmm/utils.py | normalize | def normalize(A, axis=None, inplace=False):
"""
Normalize the input array so that it sums to 1.
Parameters
----------
A: array, shape (n_samples, n_features)
Non-normalized input data.
axis: int
Dimension along which normalization is performed.
Returns
-------
normalized_A: array, shape (n_samples, n_features)
A with values normalized (summing to 1) along the prescribed axis
"""
if not inplace:
A = A.copy()
A += np.finfo(float).eps
Asum = A.sum(axis)
if axis and A.ndim > 1:
# Make sure we don't divide by zero.
Asum[Asum == 0] = 1
shape = list(A.shape)
shape[axis] = 1
Asum.shape = shape
A /= Asum
return A | python | def normalize(A, axis=None, inplace=False):
"""
Normalize the input array so that it sums to 1.
Parameters
----------
A: array, shape (n_samples, n_features)
Non-normalized input data.
axis: int
Dimension along which normalization is performed.
Returns
-------
normalized_A: array, shape (n_samples, n_features)
A with values normalized (summing to 1) along the prescribed axis
"""
if not inplace:
A = A.copy()
A += np.finfo(float).eps
Asum = A.sum(axis)
if axis and A.ndim > 1:
# Make sure we don't divide by zero.
Asum[Asum == 0] = 1
shape = list(A.shape)
shape[axis] = 1
Asum.shape = shape
A /= Asum
return A | [
"def",
"normalize",
"(",
"A",
",",
"axis",
"=",
"None",
",",
"inplace",
"=",
"False",
")",
":",
"if",
"not",
"inplace",
":",
"A",
"=",
"A",
".",
"copy",
"(",
")",
"A",
"+=",
"np",
".",
"finfo",
"(",
"float",
")",
".",
"eps",
"Asum",
"=",
"A",
".",
"sum",
"(",
"axis",
")",
"if",
"axis",
"and",
"A",
".",
"ndim",
">",
"1",
":",
"# Make sure we don't divide by zero.",
"Asum",
"[",
"Asum",
"==",
"0",
"]",
"=",
"1",
"shape",
"=",
"list",
"(",
"A",
".",
"shape",
")",
"shape",
"[",
"axis",
"]",
"=",
"1",
"Asum",
".",
"shape",
"=",
"shape",
"A",
"/=",
"Asum",
"return",
"A"
] | Normalize the input array so that it sums to 1.
Parameters
----------
A: array, shape (n_samples, n_features)
Non-normalized input data.
axis: int
Dimension along which normalization is performed.
Returns
-------
normalized_A: array, shape (n_samples, n_features)
A with values normalized (summing to 1) along the prescribed axis | [
"Normalize",
"the",
"input",
"array",
"so",
"that",
"it",
"sums",
"to",
"1",
"."
] | c00f8a62d3005a171d424549a55d46c421859ae9 | https://github.com/vmonaco/pohmm/blob/c00f8a62d3005a171d424549a55d46c421859ae9/pohmm/utils.py#L40-L68 | train |
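
A minimal sketch of the `normalize` helper above, assuming the function from this record is in scope; it rescales an array so it sums to 1, optionally per row.

```python
import numpy as np

# Rows of a count matrix rescaled into per-row probability distributions.
A = np.array([[2.0, 6.0, 2.0],
              [1.0, 1.0, 2.0]])
row_stochastic = normalize(A, axis=1)
print(row_stochastic.sum(axis=1))        # -> [1. 1.] (up to the small eps added inside)

# With axis=None the whole array is rescaled to sum to 1.
print(normalize(np.array([3.0, 1.0])))   # -> approximately [0.75 0.25]
```
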
vmonaco/pohmm | pohmm/utils.py | ph2full | def ph2full(ptrans, htrans):
"""
Convert a p-state transition matrix and h-state matrices to the full transition matrix
The full transmat has N=n_pstates*n_hstates states
"""
n_pstates = len(ptrans)
n_hstates = len(htrans[0, 0])
N = n_pstates * n_hstates
trans = np.zeros((N, N))
for pidx in range(n_pstates):
for hidx in range(n_hstates):
trans[pidx * n_hstates + hidx] = (ptrans[pidx, :, np.newaxis] * htrans[pidx, :, hidx]).flatten()
return trans | python | def ph2full(ptrans, htrans):
"""
Convert a p-state transition matrix and h-state matrices to the full transition matrix
The full transmat has N=n_pstates*n_hstates states
"""
n_pstates = len(ptrans)
n_hstates = len(htrans[0, 0])
N = n_pstates * n_hstates
trans = np.zeros((N, N))
for pidx in range(n_pstates):
for hidx in range(n_hstates):
trans[pidx * n_hstates + hidx] = (ptrans[pidx, :, np.newaxis] * htrans[pidx, :, hidx]).flatten()
return trans | [
"def",
"ph2full",
"(",
"ptrans",
",",
"htrans",
")",
":",
"n_pstates",
"=",
"len",
"(",
"ptrans",
")",
"n_hstates",
"=",
"len",
"(",
"htrans",
"[",
"0",
",",
"0",
"]",
")",
"N",
"=",
"n_pstates",
"*",
"n_hstates",
"trans",
"=",
"np",
".",
"zeros",
"(",
"(",
"N",
",",
"N",
")",
")",
"for",
"pidx",
"in",
"range",
"(",
"n_pstates",
")",
":",
"for",
"hidx",
"in",
"range",
"(",
"n_hstates",
")",
":",
"trans",
"[",
"pidx",
"*",
"n_hstates",
"+",
"hidx",
"]",
"=",
"(",
"ptrans",
"[",
"pidx",
",",
":",
",",
"np",
".",
"newaxis",
"]",
"*",
"htrans",
"[",
"pidx",
",",
":",
",",
"hidx",
"]",
")",
".",
"flatten",
"(",
")",
"return",
"trans"
] | Convert a p-state transition matrix and h-state matrices to the full transition matrix
The full transmat has N=n_pstates*n_hstates states | [
"Convert",
"a",
"p",
"-",
"state",
"transition",
"matrix",
"and",
"h",
"-",
"state",
"matrices",
"to",
"the",
"full",
"transation",
"matrix"
] | c00f8a62d3005a171d424549a55d46c421859ae9 | https://github.com/vmonaco/pohmm/blob/c00f8a62d3005a171d424549a55d46c421859ae9/pohmm/utils.py#L71-L85 | train |
vmonaco/pohmm | pohmm/utils.py | full2ph | def full2ph(trans, n_pstates):
"""
Convert a full transmat to the respective p-state and h-state transmats
"""
n_hstates = len(trans) / n_pstates
htrans = np.zeros((n_pstates, n_pstates, n_hstates, n_hstates))
for pidx1, pidx2 in product(range(n_pstates), range(n_pstates)):
idx1 = pidx1 * n_hstates
idx2 = pidx2 * n_hstates
htrans[pidx1, pidx2] = trans[idx1:idx1 + n_hstates, idx2:idx2 + n_hstates]
ptrans = normalize(htrans.sum(axis=-1).sum(axis=-1), axis=1)
htrans = normalize(htrans, axis=3)
return ptrans, htrans | python | def full2ph(trans, n_pstates):
"""
Convert a full transmat to the respective p-state and h-state transmats
"""
n_hstates = len(trans) / n_pstates
htrans = np.zeros((n_pstates, n_pstates, n_hstates, n_hstates))
for pidx1, pidx2 in product(range(n_pstates), range(n_pstates)):
idx1 = pidx1 * n_hstates
idx2 = pidx2 * n_hstates
htrans[pidx1, pidx2] = trans[idx1:idx1 + n_hstates, idx2:idx2 + n_hstates]
ptrans = normalize(htrans.sum(axis=-1).sum(axis=-1), axis=1)
htrans = normalize(htrans, axis=3)
return ptrans, htrans | [
"def",
"full2ph",
"(",
"trans",
",",
"n_pstates",
")",
":",
"n_hstates",
"=",
"len",
"(",
"trans",
")",
"/",
"n_pstates",
"htrans",
"=",
"np",
".",
"zeros",
"(",
"(",
"n_pstates",
",",
"n_pstates",
",",
"n_hstates",
",",
"n_hstates",
")",
")",
"for",
"pidx1",
",",
"pidx2",
"in",
"product",
"(",
"range",
"(",
"n_pstates",
")",
",",
"range",
"(",
"n_pstates",
")",
")",
":",
"idx1",
"=",
"pidx1",
"*",
"n_hstates",
"idx2",
"=",
"pidx2",
"*",
"n_hstates",
"htrans",
"[",
"pidx1",
",",
"pidx2",
"]",
"=",
"trans",
"[",
"idx1",
":",
"idx1",
"+",
"n_hstates",
",",
"idx2",
":",
"idx2",
"+",
"n_hstates",
"]",
"ptrans",
"=",
"normalize",
"(",
"htrans",
".",
"sum",
"(",
"axis",
"=",
"-",
"1",
")",
".",
"sum",
"(",
"axis",
"=",
"-",
"1",
")",
",",
"axis",
"=",
"1",
")",
"htrans",
"=",
"normalize",
"(",
"htrans",
",",
"axis",
"=",
"3",
")",
"return",
"ptrans",
",",
"htrans"
] | Convert a full transmat to the respective p-state and h-state transmats | [
"Convert",
"a",
"full",
"transmat",
"to",
"the",
"respective",
"p",
"-",
"state",
"and",
"h",
"-",
"state",
"transmats"
] | c00f8a62d3005a171d424549a55d46c421859ae9 | https://github.com/vmonaco/pohmm/blob/c00f8a62d3005a171d424549a55d46c421859ae9/pohmm/utils.py#L88-L103 | train |
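
A small sketch of `ph2full` from the record above, assuming the function is in scope. Note that the companion `full2ph` computes `n_hstates = len(trans) / n_pstates`, which only yields an integer under Python 2 division; under Python 3 it would need `//` before the `np.zeros` call succeeds.

```python
import numpy as np

# Expand a 2-p-state / 2-h-state model into its full 4x4 transition matrix.
ptrans = np.array([[0.7, 0.3],
                   [0.4, 0.6]])                         # p-state transition matrix
htrans = np.tile(np.array([[0.9, 0.1],
                           [0.2, 0.8]]), (2, 2, 1, 1))  # same h-state matrix for every (p, p') pair
full = ph2full(ptrans, htrans)
print(full.shape)         # -> (4, 4)
print(full.sum(axis=1))   # -> each row sums to ~1 for stochastic inputs
```
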
vmonaco/pohmm | pohmm/utils.py | gen_stochastic_matrix | def gen_stochastic_matrix(size, random_state=None):
"""
Generate a uniformly-random stochastic array or matrix
"""
if not type(size) is tuple:
size = (1, size)
assert len(size) == 2
n = random_state.uniform(size=(size[0], size[1] - 1))
n = np.concatenate([np.zeros((size[0], 1)), n, np.ones((size[0], 1))], axis=1)
A = np.diff(np.sort(n))
return A.squeeze() | python | def gen_stochastic_matrix(size, random_state=None):
"""
Generate a uniformly-random stochastic array or matrix
"""
if not type(size) is tuple:
size = (1, size)
assert len(size) == 2
n = random_state.uniform(size=(size[0], size[1] - 1))
n = np.concatenate([np.zeros((size[0], 1)), n, np.ones((size[0], 1))], axis=1)
A = np.diff(np.sort(n))
return A.squeeze() | [
"def",
"gen_stochastic_matrix",
"(",
"size",
",",
"random_state",
"=",
"None",
")",
":",
"if",
"not",
"type",
"(",
"size",
")",
"is",
"tuple",
":",
"size",
"=",
"(",
"1",
",",
"size",
")",
"assert",
"len",
"(",
"size",
")",
"==",
"2",
"n",
"=",
"random_state",
".",
"uniform",
"(",
"size",
"=",
"(",
"size",
"[",
"0",
"]",
",",
"size",
"[",
"1",
"]",
"-",
"1",
")",
")",
"n",
"=",
"np",
".",
"concatenate",
"(",
"[",
"np",
".",
"zeros",
"(",
"(",
"size",
"[",
"0",
"]",
",",
"1",
")",
")",
",",
"n",
",",
"np",
".",
"ones",
"(",
"(",
"size",
"[",
"0",
"]",
",",
"1",
")",
")",
"]",
",",
"axis",
"=",
"1",
")",
"A",
"=",
"np",
".",
"diff",
"(",
"np",
".",
"sort",
"(",
"n",
")",
")",
"return",
"A",
".",
"squeeze",
"(",
")"
] | Generate a uniformly-random stochastic array or matrix | [
"Generate",
"a",
"unfiformly",
"-",
"random",
"stochastic",
"array",
"or",
"matrix"
] | c00f8a62d3005a171d424549a55d46c421859ae9 | https://github.com/vmonaco/pohmm/blob/c00f8a62d3005a171d424549a55d46c421859ae9/pohmm/utils.py#L106-L119 | train |
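
A usage sketch for `gen_stochastic_matrix` above, assuming the function is in scope. Despite the `random_state=None` default, the body calls `random_state.uniform(...)`, so a NumPy `RandomState` has to be supplied.

```python
import numpy as np

rs = np.random.RandomState(0)
row = gen_stochastic_matrix(4, random_state=rs)        # 1-D stochastic vector
mat = gen_stochastic_matrix((3, 4), random_state=rs)   # 3x4 row-stochastic matrix
print(row.sum())        # -> 1.0
print(mat.sum(axis=1))  # -> [1. 1. 1.]
```
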
vmonaco/pohmm | pohmm/utils.py | steadystate | def steadystate(A, max_iter=100):
"""
Empirically determine the steady state probabilities from a stochastic matrix
"""
P = np.linalg.matrix_power(A, max_iter)
# Determine the unique rows in A
v = []
for i in range(len(P)):
if not np.any([np.allclose(P[i], vi, ) for vi in v]):
v.append(P[i])
return normalize(np.sum(v, axis=0)) | python | def steadystate(A, max_iter=100):
"""
Empirically determine the steady state probabilities from a stochastic matrix
"""
P = np.linalg.matrix_power(A, max_iter)
# Determine the unique rows in A
v = []
for i in range(len(P)):
if not np.any([np.allclose(P[i], vi, ) for vi in v]):
v.append(P[i])
return normalize(np.sum(v, axis=0)) | [
"def",
"steadystate",
"(",
"A",
",",
"max_iter",
"=",
"100",
")",
":",
"P",
"=",
"np",
".",
"linalg",
".",
"matrix_power",
"(",
"A",
",",
"max_iter",
")",
"# Determine the unique rows in A",
"v",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"P",
")",
")",
":",
"if",
"not",
"np",
".",
"any",
"(",
"[",
"np",
".",
"allclose",
"(",
"P",
"[",
"i",
"]",
",",
"vi",
",",
")",
"for",
"vi",
"in",
"v",
"]",
")",
":",
"v",
".",
"append",
"(",
"P",
"[",
"i",
"]",
")",
"return",
"normalize",
"(",
"np",
".",
"sum",
"(",
"v",
",",
"axis",
"=",
"0",
")",
")"
] | Empirically determine the steady state probabilities from a stochastic matrix | [
"Empirically",
"determine",
"the",
"steady",
"state",
"probabilities",
"from",
"a",
"stochastic",
"matrix"
] | c00f8a62d3005a171d424549a55d46c421859ae9 | https://github.com/vmonaco/pohmm/blob/c00f8a62d3005a171d424549a55d46c421859ae9/pohmm/utils.py#L122-L134 | train |
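
A sketch of `steadystate` above, assuming both it and the module's `normalize` are in scope; it recovers the stationary distribution of a small Markov chain by raising the transition matrix to a high power.

```python
import numpy as np

A = np.array([[0.9, 0.1],
              [0.3, 0.7]])   # 2-state Markov chain; exact stationary vector is [0.75, 0.25]
pi = steadystate(A)
print(pi)        # -> approximately [0.75 0.25]
print(pi @ A)    # -> approximately equal to pi (stationarity check)
```
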
teepark/greenhouse | greenhouse/io/ipc.py | pipe | def pipe():
"""create an inter-process communication pipe
:returns:
a pair of :class:`File` objects ``(read, write)`` for the two ends of
the pipe
"""
r, w = os.pipe()
return File.fromfd(r, 'rb'), File.fromfd(w, 'wb') | python | def pipe():
"""create an inter-process communication pipe
:returns:
a pair of :class:`File` objects ``(read, write)`` for the two ends of
the pipe
"""
r, w = os.pipe()
return File.fromfd(r, 'rb'), File.fromfd(w, 'wb') | [
"def",
"pipe",
"(",
")",
":",
"r",
",",
"w",
"=",
"os",
".",
"pipe",
"(",
")",
"return",
"File",
".",
"fromfd",
"(",
"r",
",",
"'rb'",
")",
",",
"File",
".",
"fromfd",
"(",
"w",
",",
"'wb'",
")"
] | create an inter-process communication pipe
:returns:
a pair of :class:`File` objects ``(read, write)`` for the two ends of
the pipe | [
"create",
"an",
"inter",
"-",
"process",
"communication",
"pipe"
] | 8fd1be4f5443ba090346b5ec82fdbeb0a060d956 | https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/io/ipc.py#L11-L19 | train |
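
A sketch of the `pipe` helper above. It assumes the returned greenhouse `File` wrappers expose the usual `write()`/`read()`/`close()` file interface, as the docstring implies, and that buffered data is flushed when the write end is closed.

```python
from greenhouse.io.ipc import pipe

reader, writer = pipe()
writer.write(b"hello from the write end\n")
writer.close()              # closing flushes and lets the read end see EOF
print(reader.read())        # -> b'hello from the write end\n'
reader.close()
```
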
cltl/KafNafParserPy | KafNafParserPy/span_data.py | Cspan.get_id_head | def get_id_head(self):
'''
Returns the id of the target that is set as "head"
@rtype: string
@return: the target id (or None) of the head target
'''
id_head = None
for target_node in self:
if target_node.is_head():
id_head = target_node.get_id()
break
return id_head | python | def get_id_head(self):
'''
Returns the id of the target that is set as "head"
@rtype: string
@return: the target id (or None) of the head target
'''
id_head = None
for target_node in self:
if target_node.is_head():
id_head = target_node.get_id()
break
return id_head | [
"def",
"get_id_head",
"(",
"self",
")",
":",
"id_head",
"=",
"None",
"for",
"target_node",
"in",
"self",
":",
"if",
"target_node",
".",
"is_head",
"(",
")",
":",
"id_head",
"=",
"target_node",
".",
"get_id",
"(",
")",
"break",
"return",
"id_head"
] | Returns the id of the target that is set as "head"
@rtype: string
@return: the target id (or None) of the head target | [
"Returns",
"the",
"id",
"of",
"the",
"target",
"that",
"is",
"set",
"as",
"head"
] | 9bc32e803c176404b255ba317479b8780ed5f569 | https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/span_data.py#L99-L110 | train |
cltl/KafNafParserPy | KafNafParserPy/span_data.py | Cspan.add_target_id | def add_target_id(self,this_id):
"""
Adds a new target to the span with the specified id
@type this_id: string
@param this_id: the id of the new target
"""
new_target = Ctarget()
new_target.set_id(this_id)
self.node.append(new_target.get_node()) | python | def add_target_id(self,this_id):
"""
Adds a new target to the span with the specified id
@type this_id: string
@param this_id: the id of the new target
"""
new_target = Ctarget()
new_target.set_id(this_id)
self.node.append(new_target.get_node()) | [
"def",
"add_target_id",
"(",
"self",
",",
"this_id",
")",
":",
"new_target",
"=",
"Ctarget",
"(",
")",
"new_target",
".",
"set_id",
"(",
"this_id",
")",
"self",
".",
"node",
".",
"append",
"(",
"new_target",
".",
"get_node",
"(",
")",
")"
] | Adds a new target to the span with the specified id
@type this_id: string
@param this_id: the id of the new target | [
"Adds",
"a",
"new",
"target",
"to",
"the",
"span",
"with",
"the",
"specified",
"id"
] | 9bc32e803c176404b255ba317479b8780ed5f569 | https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/span_data.py#L112-L120 | train |
cltl/KafNafParserPy | KafNafParserPy/span_data.py | Cspan.create_from_ids | def create_from_ids(self,list_ids):
"""
Adds new targets to the span with the specified ids
@type list_ids: list
@param list_ids: list of identifiers
"""
for this_id in list_ids:
new_target = Ctarget()
new_target.set_id(this_id)
self.node.append(new_target.get_node()) | python | def create_from_ids(self,list_ids):
"""
Adds new targets to the span with the specified ids
@type list_ids: list
@param list_ids: list of identifiers
"""
for this_id in list_ids:
new_target = Ctarget()
new_target.set_id(this_id)
self.node.append(new_target.get_node()) | [
"def",
"create_from_ids",
"(",
"self",
",",
"list_ids",
")",
":",
"for",
"this_id",
"in",
"list_ids",
":",
"new_target",
"=",
"Ctarget",
"(",
")",
"new_target",
".",
"set_id",
"(",
"this_id",
")",
"self",
".",
"node",
".",
"append",
"(",
"new_target",
".",
"get_node",
"(",
")",
")"
] | Adds new targets to the span with the specified ids
@type list_ids: list
@param list_ids: list of identifiers | [
"Adds",
"new",
"targets",
"to",
"the",
"span",
"with",
"the",
"specified",
"ids"
] | 9bc32e803c176404b255ba317479b8780ed5f569 | https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/span_data.py#L122-L131 | train |
cltl/KafNafParserPy | KafNafParserPy/span_data.py | Cspan.create_from_targets | def create_from_targets(self,list_targs):
"""
Adds new targets to the span that are defined in a list
@type list_targs: list
@param list_targs: list of Ctargets
"""
for this_target in list_targs:
self.node.append(this_target.get_node()) | python | def create_from_targets(self,list_targs):
"""
Adds new targets to the span that are defined in a list
@type list_targs: list
@param list_targs: list of Ctargets
"""
for this_target in list_targs:
self.node.append(this_target.get_node()) | [
"def",
"create_from_targets",
"(",
"self",
",",
"list_targs",
")",
":",
"for",
"this_target",
"in",
"list_targs",
":",
"self",
".",
"node",
".",
"append",
"(",
"this_target",
".",
"get_node",
"(",
")",
")"
] | Adds new targets to the span that are defined in a list
@type list_targs: list
@param list_targs: list of Ctargets | [
"Adds",
"new",
"targets",
"to",
"the",
"span",
"that",
"are",
"defined",
"in",
"a",
"list"
] | 9bc32e803c176404b255ba317479b8780ed5f569 | https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/span_data.py#L133-L140 | train |
cltl/KafNafParserPy | KafNafParserPy/attribution_data.py | Cattribution.get_statement | def get_statement(self, statement_id):
"""
Returns the statement object for the supplied identifier
@type statement_id: string
@param statement_id: statement identifier
"""
if statement_id in self.idx:
return Cstatement(self.idx[statement_id], self.type)
else:
return None | python | def get_statement(self, statement_id):
"""
Returns the statement object for the supplied identifier
@type statement_id: string
@param statement_id: statement identifier
"""
if statement_id in self.idx:
return Cstatement(self.idx[statement_id], self.type)
else:
return None | [
"def",
"get_statement",
"(",
"self",
",",
"statement_id",
")",
":",
"if",
"statement_id",
"in",
"self",
".",
"idx",
":",
"return",
"Cstatement",
"(",
"self",
".",
"idx",
"[",
"statement_id",
"]",
",",
"self",
".",
"type",
")",
"else",
":",
"return",
"None"
] | Returns the statement object for the supplied identifier
@type statement_id: string
@param statement_id: statement identifier | [
"Returns",
"the",
"statement",
"object",
"for",
"the",
"supplied",
"identifier"
] | 9bc32e803c176404b255ba317479b8780ed5f569 | https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/attribution_data.py#L279-L288 | train |
cltl/KafNafParserPy | KafNafParserPy/attribution_data.py | Cattribution.add_statement | def add_statement(self, statement_obj):
"""
Adds a statement object to the layer
@type statement_obj: L{Cstatement}
@param statement_obj: the statement object
"""
if statement_obj.get_id() in self.idx:
raise ValueError("Statement with id {} already exists!"
.format(statement_obj.get_id()))
self.node.append(statement_obj.get_node())
self.idx[statement_obj.get_id()] = statement_obj | python | def add_statement(self, statement_obj):
"""
Adds a statement object to the layer
@type statement_obj: L{Cstatement}
@param statement_obj: the statement object
"""
if statement_obj.get_id() in self.idx:
raise ValueError("Statement with id {} already exists!"
.format(statement_obj.get_id()))
self.node.append(statement_obj.get_node())
self.idx[statement_obj.get_id()] = statement_obj | [
"def",
"add_statement",
"(",
"self",
",",
"statement_obj",
")",
":",
"if",
"statement_obj",
".",
"get_id",
"(",
")",
"in",
"self",
".",
"idx",
":",
"raise",
"ValueError",
"(",
"\"Statement with id {} already exists!\"",
".",
"format",
"(",
"statement_obj",
".",
"get_id",
"(",
")",
")",
")",
"self",
".",
"node",
".",
"append",
"(",
"statement_obj",
".",
"get_node",
"(",
")",
")",
"self",
".",
"idx",
"[",
"statement_obj",
".",
"get_id",
"(",
")",
"]",
"=",
"statement_obj"
] | Adds a statement object to the layer
@type statement_obj: L{Cstatement}
@param statement_obj: the statement object | [
"Adds",
"a",
"statement",
"object",
"to",
"the",
"layer"
] | 9bc32e803c176404b255ba317479b8780ed5f569 | https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/attribution_data.py#L290-L300 | train |
CenturyLinkCloud/clc-python-sdk | src/clc/APIv2/datacenter.py | Datacenter.RootGroup | def RootGroup(self):
"""Returns group object for datacenter root group.
>>> clc.v2.Datacenter().RootGroup()
<clc.APIv2.group.Group object at 0x105feacd0>
>>> print _
WA1 Hardware
"""
return(clc.v2.Group(id=self.root_group_id,alias=self.alias,session=self.session)) | python | def RootGroup(self):
"""Returns group object for datacenter root group.
>>> clc.v2.Datacenter().RootGroup()
<clc.APIv2.group.Group object at 0x105feacd0>
>>> print _
WA1 Hardware
"""
return(clc.v2.Group(id=self.root_group_id,alias=self.alias,session=self.session)) | [
"def",
"RootGroup",
"(",
"self",
")",
":",
"return",
"(",
"clc",
".",
"v2",
".",
"Group",
"(",
"id",
"=",
"self",
".",
"root_group_id",
",",
"alias",
"=",
"self",
".",
"alias",
",",
"session",
"=",
"self",
".",
"session",
")",
")"
] | Returns group object for datacenter root group.
>>> clc.v2.Datacenter().RootGroup()
<clc.APIv2.group.Group object at 0x105feacd0>
>>> print _
WA1 Hardware | [
"Returns",
"group",
"object",
"for",
"datacenter",
"root",
"group",
"."
] | f4dba40c627cb08dd4b7d0d277e8d67578010b05 | https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/datacenter.py#L81-L91 | train |
pyfca/pyfca | pyfca/implications.py | LL | def LL(n):
"""constructs the LL context"""
if (n<=0):return Context('0')
else:
LL1=LL(n-1)
r1 = C1(3**(n-1),2**(n-1)) - LL1 - LL1
r2 = LL1 - LL1 - LL1
return r1 + r2 | python | def LL(n):
"""constructs the LL context"""
if (n<=0):return Context('0')
else:
LL1=LL(n-1)
r1 = C1(3**(n-1),2**(n-1)) - LL1 - LL1
r2 = LL1 - LL1 - LL1
return r1 + r2 | [
"def",
"LL",
"(",
"n",
")",
":",
"if",
"(",
"n",
"<=",
"0",
")",
":",
"return",
"Context",
"(",
"'0'",
")",
"else",
":",
"LL1",
"=",
"LL",
"(",
"n",
"-",
"1",
")",
"r1",
"=",
"C1",
"(",
"3",
"**",
"(",
"n",
"-",
"1",
")",
",",
"2",
"**",
"(",
"n",
"-",
"1",
")",
")",
"-",
"LL1",
"-",
"LL1",
"r2",
"=",
"LL1",
"-",
"LL1",
"-",
"LL1",
"return",
"r1",
"+",
"r2"
] | constructs the LL context | [
"constructs",
"the",
"LL",
"context"
] | cf8cea9e76076dbf4bb3f38996dcb5491b0eb0b0 | https://github.com/pyfca/pyfca/blob/cf8cea9e76076dbf4bb3f38996dcb5491b0eb0b0/pyfca/implications.py#L532-L539 | train |
pyfca/pyfca | pyfca/implications.py | HH | def HH(n):
"""constructs the HH context"""
if (n<=0):return Context('1')
else:
LL1=LL(n-1)
HH1=HH(n-1)
r1 = C1(3**(n-1),2**(n-1)) - LL1 - HH1
r2 = HH1 - HH1 - HH1
return r1 + r2 | python | def HH(n):
"""constructs the HH context"""
if (n<=0):return Context('1')
else:
LL1=LL(n-1)
HH1=HH(n-1)
r1 = C1(3**(n-1),2**(n-1)) - LL1 - HH1
r2 = HH1 - HH1 - HH1
return r1 + r2 | [
"def",
"HH",
"(",
"n",
")",
":",
"if",
"(",
"n",
"<=",
"0",
")",
":",
"return",
"Context",
"(",
"'1'",
")",
"else",
":",
"LL1",
"=",
"LL",
"(",
"n",
"-",
"1",
")",
"HH1",
"=",
"HH",
"(",
"n",
"-",
"1",
")",
"r1",
"=",
"C1",
"(",
"3",
"**",
"(",
"n",
"-",
"1",
")",
",",
"2",
"**",
"(",
"n",
"-",
"1",
")",
")",
"-",
"LL1",
"-",
"HH1",
"r2",
"=",
"HH1",
"-",
"HH1",
"-",
"HH1",
"return",
"r1",
"+",
"r2"
] | constructs the HH context | [
"constructs",
"the",
"HH",
"context"
] | cf8cea9e76076dbf4bb3f38996dcb5491b0eb0b0 | https://github.com/pyfca/pyfca/blob/cf8cea9e76076dbf4bb3f38996dcb5491b0eb0b0/pyfca/implications.py#L540-L548 | train |
pyfca/pyfca | pyfca/implications.py | AA | def AA(n):
"""constructs the AA context"""
if (n<=1):return Context('10\n00')
else:
AA1=AA(n-1)
r1 = C1(2**(n-1),2**(n-1)) - AA1
r2 = AA1 - AA1
return r1 + r2 | python | def AA(n):
"""constructs the AA context"""
if (n<=1):return Context('10\n00')
else:
AA1=AA(n-1)
r1 = C1(2**(n-1),2**(n-1)) - AA1
r2 = AA1 - AA1
return r1 + r2 | [
"def",
"AA",
"(",
"n",
")",
":",
"if",
"(",
"n",
"<=",
"1",
")",
":",
"return",
"Context",
"(",
"'10\\n00'",
")",
"else",
":",
"AA1",
"=",
"AA",
"(",
"n",
"-",
"1",
")",
"r1",
"=",
"C1",
"(",
"2",
"**",
"(",
"n",
"-",
"1",
")",
",",
"2",
"**",
"(",
"n",
"-",
"1",
")",
")",
"-",
"AA1",
"r2",
"=",
"AA1",
"-",
"AA1",
"return",
"r1",
"+",
"r2"
] | constructs the AA context | [
"constructs",
"the",
"AA",
"context"
] | cf8cea9e76076dbf4bb3f38996dcb5491b0eb0b0 | https://github.com/pyfca/pyfca/blob/cf8cea9e76076dbf4bb3f38996dcb5491b0eb0b0/pyfca/implications.py#L550-L557 | train |
pyfca/pyfca | pyfca/implications.py | BB | def BB(n):
"""constructs the BB context"""
if (n<=1):return Context('0\n1')
else:
BB1=BB(n-1)
AA1=AA(n-1)
r1 = C1((n-1)*2**(n-2),2**(n-1)) - AA1 - BB1
r2 = BB1 - C1(2**(n-1),2**(n-1)) - BB1;
return r1 + r2 | python | def BB(n):
"""constructs the BB context"""
if (n<=1):return Context('0\n1')
else:
BB1=BB(n-1)
AA1=AA(n-1)
r1 = C1((n-1)*2**(n-2),2**(n-1)) - AA1 - BB1
r2 = BB1 - C1(2**(n-1),2**(n-1)) - BB1;
return r1 + r2 | [
"def",
"BB",
"(",
"n",
")",
":",
"if",
"(",
"n",
"<=",
"1",
")",
":",
"return",
"Context",
"(",
"'0\\n1'",
")",
"else",
":",
"BB1",
"=",
"BB",
"(",
"n",
"-",
"1",
")",
"AA1",
"=",
"AA",
"(",
"n",
"-",
"1",
")",
"r1",
"=",
"C1",
"(",
"(",
"n",
"-",
"1",
")",
"*",
"2",
"**",
"(",
"n",
"-",
"2",
")",
",",
"2",
"**",
"(",
"n",
"-",
"1",
")",
")",
"-",
"AA1",
"-",
"BB1",
"r2",
"=",
"BB1",
"-",
"C1",
"(",
"2",
"**",
"(",
"n",
"-",
"1",
")",
",",
"2",
"**",
"(",
"n",
"-",
"1",
")",
")",
"-",
"BB1",
"return",
"r1",
"+",
"r2"
] | constructs the BB context | [
"constructs",
"the",
"BB",
"context"
] | cf8cea9e76076dbf4bb3f38996dcb5491b0eb0b0 | https://github.com/pyfca/pyfca/blob/cf8cea9e76076dbf4bb3f38996dcb5491b0eb0b0/pyfca/implications.py#L558-L566 | train |
nickpandolfi/Cyther | cyther/instructions.py | Instruction.processAndSetDefaults | def processAndSetDefaults(self):
"""
The heart of the 'Instruction' object. This method will make sure that
all fields not entered will be defaulted to a correct value. Also
checks for incongruities in the data, if it was entered by the user.
"""
# INPUT, OUTPUT, GIVEN + BUILDABLE DEPS
if not self.input:
raise ValueError(NO_INPUT_FILE)
if not self.output:
# Build directory must exist, right?
if not self.build_directory:
File()
pass # Can it be built? / reference self.output_format for this
else:
pass # if it is not congruent with other info provided
if not self.build_directory:
pass # Initialize it
for dependency in self.given_dependencies:
pass # Check if the dependency exists
if self.output_format != self.output.getType():
raise ValueError("")
# Given dependencies must actually exist!
# output_name must be at a lower extension level than input_name
# The build directory
return | python | def processAndSetDefaults(self):
"""
The heart of the 'Instruction' object. This method will make sure that
all fields not entered will be defaulted to a correct value. Also
checks for incongruities in the data, if it was entered by the user.
"""
# INPUT, OUTPUT, GIVEN + BUILDABLE DEPS
if not self.input:
raise ValueError(NO_INPUT_FILE)
if not self.output:
# Build directory must exist, right?
if not self.build_directory:
File()
pass # Can it be built? / reference self.output_format for this
else:
pass # if it is not congruent with other info provided
if not self.build_directory:
pass # Initialize it
for dependency in self.given_dependencies:
pass # Check if the dependency exists
if self.output_format != self.output.getType():
raise ValueError("")
# Given dependencies must actually exist!
# output_name must be at a lower extension level than input_name
# The build directory
return | [
"def",
"processAndSetDefaults",
"(",
"self",
")",
":",
"# INPUT, OUTPUT, GIVEN + BUILDABLE DEPS",
"if",
"not",
"self",
".",
"input",
":",
"raise",
"ValueError",
"(",
"NO_INPUT_FILE",
")",
"if",
"not",
"self",
".",
"output",
":",
"# Build directory must exist, right?",
"if",
"not",
"self",
".",
"build_directory",
":",
"File",
"(",
")",
"pass",
"# Can it be built? / reference self.output_format for this",
"else",
":",
"pass",
"# if it is not congruent with other info provided",
"if",
"not",
"self",
".",
"build_directory",
":",
"pass",
"# Initialize it",
"for",
"dependency",
"in",
"self",
".",
"given_dependencies",
":",
"pass",
"# Check if the dependcy exists",
"if",
"self",
".",
"output_format",
"!=",
"self",
".",
"output",
".",
"getType",
"(",
")",
":",
"raise",
"ValueError",
"(",
"\"\"",
")",
"# Given dependencies must actually exist!",
"# output_name must be at a lower extenion level than input_name",
"# The build directory",
"return"
] | The heart of the 'Instruction' object. This method will make sure that
all fields not entered will be defaulted to a correct value. Also
checks for incongruities in the data, if it was entered by the user. | [
"The",
"heart",
"of",
"the",
"Instruction",
"object",
".",
"This",
"method",
"will",
"make",
"sure",
"that",
"all",
"fields",
"not",
"entered",
"will",
"be",
"defaulted",
"to",
"a",
"correct",
"value",
".",
"Also",
"checks",
"for",
"incongruities",
"in",
"the",
"data",
"entered",
"if",
"it",
"was",
"by",
"the",
"user",
"."
] | 9fb0bd77af594008aa6ee8af460aa8c953abf5bc | https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/instructions.py#L53-L82 | train |
teepark/greenhouse | greenhouse/scheduler.py | greenlet | def greenlet(func, args=(), kwargs=None):
"""create a new greenlet from a function and arguments
:param func: the function the new greenlet should run
:type func: function
:param args: any positional arguments for the function
:type args: tuple
:param kwargs: any keyword arguments for the function
:type kwargs: dict or None
the only major difference between this function and that of the basic
greenlet api is that this one sets the new greenlet's parent to be the
greenhouse main loop greenlet, which is a requirement for greenlets that
will wind up in the greenhouse scheduler.
"""
if args or kwargs:
def target():
return func(*args, **(kwargs or {}))
else:
target = func
return compat.greenlet(target, state.mainloop) | python | def greenlet(func, args=(), kwargs=None):
"""create a new greenlet from a function and arguments
:param func: the function the new greenlet should run
:type func: function
:param args: any positional arguments for the function
:type args: tuple
:param kwargs: any keyword arguments for the function
:type kwargs: dict or None
the only major difference between this function and that of the basic
greenlet api is that this one sets the new greenlet's parent to be the
greenhouse main loop greenlet, which is a requirement for greenlets that
will wind up in the greenhouse scheduler.
"""
if args or kwargs:
def target():
return func(*args, **(kwargs or {}))
else:
target = func
return compat.greenlet(target, state.mainloop) | [
"def",
"greenlet",
"(",
"func",
",",
"args",
"=",
"(",
")",
",",
"kwargs",
"=",
"None",
")",
":",
"if",
"args",
"or",
"kwargs",
":",
"def",
"target",
"(",
")",
":",
"return",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"(",
"kwargs",
"or",
"{",
"}",
")",
")",
"else",
":",
"target",
"=",
"func",
"return",
"compat",
".",
"greenlet",
"(",
"target",
",",
"state",
".",
"mainloop",
")"
] | create a new greenlet from a function and arguments
:param func: the function the new greenlet should run
:type func: function
:param args: any positional arguments for the function
:type args: tuple
:param kwargs: any keyword arguments for the function
:type kwargs: dict or None
the only major difference between this function and that of the basic
greenlet api is that this one sets the new greenlet's parent to be the
greenhouse main loop greenlet, which is a requirement for greenlets that
will wind up in the greenhouse scheduler. | [
"create",
"a",
"new",
"greenlet",
"from",
"a",
"function",
"and",
"arguments"
] | 8fd1be4f5443ba090346b5ec82fdbeb0a060d956 | https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L208-L228 | train |
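A minimal usage sketch for the greenlet() helper in the record above. The import path greenhouse.scheduler is assumed from the record's file path, and the example only covers creation plus hand-off to the scheduler (schedule() is documented in the next record); driving the main loop is outside this record.

from greenhouse import scheduler  # import path assumed from greenhouse/scheduler.py

def greet(name, punctuation="!"):
    print("hello %s%s" % (name, punctuation))

# wrap the call in a greenlet parented to the greenhouse main loop
glet = scheduler.greenlet(greet, args=("world",), kwargs={"punctuation": "?"})

# creation does not schedule it; hand it to the scheduler explicitly
scheduler.schedule(glet)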
teepark/greenhouse | greenhouse/scheduler.py | schedule | def schedule(target=None, args=(), kwargs=None):
"""insert a greenlet into the scheduler
If provided a function, it is wrapped in a new greenlet
:param target: what to schedule
:type target: function or greenlet
:param args:
arguments for the function (only used if ``target`` is a function)
:type args: tuple
:param kwargs:
keyword arguments for the function (only used if ``target`` is a
function)
:type kwargs: dict or None
:returns: the ``target`` argument
This function can also be used as a decorator, either preloading ``args``
and/or ``kwargs`` or not:
>>> @schedule
>>> def f():
... print 'hello from f'
>>> @schedule(args=('world',))
>>> def f(name):
... print 'hello %s' % name
"""
if target is None:
def decorator(target):
return schedule(target, args=args, kwargs=kwargs)
return decorator
if isinstance(target, compat.greenlet) or target is compat.main_greenlet:
glet = target
else:
glet = greenlet(target, args, kwargs)
state.paused.append(glet)
return target | python | def schedule(target=None, args=(), kwargs=None):
"""insert a greenlet into the scheduler
If provided a function, it is wrapped in a new greenlet
:param target: what to schedule
:type target: function or greenlet
:param args:
arguments for the function (only used if ``target`` is a function)
:type args: tuple
:param kwargs:
keyword arguments for the function (only used if ``target`` is a
function)
:type kwargs: dict or None
:returns: the ``target`` argument
This function can also be used as a decorator, either preloading ``args``
and/or ``kwargs`` or not:
>>> @schedule
>>> def f():
... print 'hello from f'
>>> @schedule(args=('world',))
>>> def f(name):
... print 'hello %s' % name
"""
if target is None:
def decorator(target):
return schedule(target, args=args, kwargs=kwargs)
return decorator
if isinstance(target, compat.greenlet) or target is compat.main_greenlet:
glet = target
else:
glet = greenlet(target, args, kwargs)
state.paused.append(glet)
return target | [
"def",
"schedule",
"(",
"target",
"=",
"None",
",",
"args",
"=",
"(",
")",
",",
"kwargs",
"=",
"None",
")",
":",
"if",
"target",
"is",
"None",
":",
"def",
"decorator",
"(",
"target",
")",
":",
"return",
"schedule",
"(",
"target",
",",
"args",
"=",
"args",
",",
"kwargs",
"=",
"kwargs",
")",
"return",
"decorator",
"if",
"isinstance",
"(",
"target",
",",
"compat",
".",
"greenlet",
")",
"or",
"target",
"is",
"compat",
".",
"main_greenlet",
":",
"glet",
"=",
"target",
"else",
":",
"glet",
"=",
"greenlet",
"(",
"target",
",",
"args",
",",
"kwargs",
")",
"state",
".",
"paused",
".",
"append",
"(",
"glet",
")",
"return",
"target"
] | insert a greenlet into the scheduler
If provided a function, it is wrapped in a new greenlet
:param target: what to schedule
:type target: function or greenlet
:param args:
arguments for the function (only used if ``target`` is a function)
:type args: tuple
:param kwargs:
keyword arguments for the function (only used if ``target`` is a
function)
:type kwargs: dict or None
:returns: the ``target`` argument
This function can also be used as a decorator, either preloading ``args``
and/or ``kwargs`` or not:
>>> @schedule
>>> def f():
... print 'hello from f'
>>> @schedule(args=('world',))
>>> def f(name):
... print 'hello %s' % name | [
"insert",
"a",
"greenlet",
"into",
"the",
"scheduler"
] | 8fd1be4f5443ba090346b5ec82fdbeb0a060d956 | https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L263-L300 | train |
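Both calling conventions that the schedule() docstring above describes, in one hedged sketch (same assumed import path; the queued work only runs once control passes back to the greenhouse scheduler).

from greenhouse import scheduler

# plain call form: wrap a function and queue it
def job(x, y):
    print("sum is %d" % (x + y))

scheduler.schedule(job, args=(2, 3))

# decorator form with preloaded kwargs, as in the docstring examples
@scheduler.schedule(kwargs={"name": "world"})
def greeter(name):
    print("hello %s" % name)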
teepark/greenhouse | greenhouse/scheduler.py | schedule_at | def schedule_at(unixtime, target=None, args=(), kwargs=None):
"""insert a greenlet into the scheduler to be run at a set time
If provided a function, it is wrapped in a new greenlet
:param unixtime:
the unix timestamp at which the new greenlet should be started
:type unixtime: int or float
:param target: what to schedule
:type target: function or greenlet
:param args:
arguments for the function (only used if ``target`` is a function)
:type args: tuple
:param kwargs:
keyword arguments for the function (only used if ``target`` is a
function)
:type kwargs: dict or None
:returns: the ``target`` argument
This function can also be used as a decorator:
>>> @schedule_at(1296423834)
>>> def f():
... print 'hello from f'
and args/kwargs can also be preloaded:
>>> @schedule_at(1296423834, args=('world',))
>>> def f(name):
... print 'hello %s' % name
"""
if target is None:
def decorator(target):
return schedule_at(unixtime, target, args=args, kwargs=kwargs)
return decorator
if isinstance(target, compat.greenlet) or target is compat.main_greenlet:
glet = target
else:
glet = greenlet(target, args, kwargs)
state.timed_paused.insert(unixtime, glet)
return target | python | def schedule_at(unixtime, target=None, args=(), kwargs=None):
"""insert a greenlet into the scheduler to be run at a set time
If provided a function, it is wrapped in a new greenlet
:param unixtime:
the unix timestamp at which the new greenlet should be started
:type unixtime: int or float
:param target: what to schedule
:type target: function or greenlet
:param args:
arguments for the function (only used if ``target`` is a function)
:type args: tuple
:param kwargs:
keyword arguments for the function (only used if ``target`` is a
function)
:type kwargs: dict or None
:returns: the ``target`` argument
This function can also be used as a decorator:
>>> @schedule_at(1296423834)
>>> def f():
... print 'hello from f'
and args/kwargs can also be preloaded:
>>> @schedule_at(1296423834, args=('world',))
>>> def f(name):
... print 'hello %s' % name
"""
if target is None:
def decorator(target):
return schedule_at(unixtime, target, args=args, kwargs=kwargs)
return decorator
if isinstance(target, compat.greenlet) or target is compat.main_greenlet:
glet = target
else:
glet = greenlet(target, args, kwargs)
state.timed_paused.insert(unixtime, glet)
return target | [
"def",
"schedule_at",
"(",
"unixtime",
",",
"target",
"=",
"None",
",",
"args",
"=",
"(",
")",
",",
"kwargs",
"=",
"None",
")",
":",
"if",
"target",
"is",
"None",
":",
"def",
"decorator",
"(",
"target",
")",
":",
"return",
"schedule_at",
"(",
"unixtime",
",",
"target",
",",
"args",
"=",
"args",
",",
"kwargs",
"=",
"kwargs",
")",
"return",
"decorator",
"if",
"isinstance",
"(",
"target",
",",
"compat",
".",
"greenlet",
")",
"or",
"target",
"is",
"compat",
".",
"main_greenlet",
":",
"glet",
"=",
"target",
"else",
":",
"glet",
"=",
"greenlet",
"(",
"target",
",",
"args",
",",
"kwargs",
")",
"state",
".",
"timed_paused",
".",
"insert",
"(",
"unixtime",
",",
"glet",
")",
"return",
"target"
] | insert a greenlet into the scheduler to be run at a set time
If provided a function, it is wrapped in a new greenlet
:param unixtime:
the unix timestamp at which the new greenlet should be started
:type unixtime: int or float
:param target: what to schedule
:type target: function or greenlet
:param args:
arguments for the function (only used if ``target`` is a function)
:type args: tuple
:param kwargs:
keyword arguments for the function (only used if ``target`` is a
function)
:type kwargs: dict or None
:returns: the ``target`` argument
This function can also be used as a decorator:
>>> @schedule_at(1296423834)
>>> def f():
... print 'hello from f'
and args/kwargs can also be preloaded:
>>> @schedule_at(1296423834, args=('world',))
>>> def f(name):
... print 'hello %s' % name | [
"insert",
"a",
"greenlet",
"into",
"the",
"scheduler",
"to",
"be",
"run",
"at",
"a",
"set",
"time"
] | 8fd1be4f5443ba090346b5ec82fdbeb0a060d956 | https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L303-L344 | train |
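A sketch of schedule_at(), which takes an absolute unix timestamp rather than a delay (import path assumed as before).

import time
from greenhouse import scheduler

def reminder(msg):
    print("reminder: %s" % msg)

# run roughly ten seconds from now, expressed as an absolute timestamp
scheduler.schedule_at(time.time() + 10, reminder, args=("stretch",))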
teepark/greenhouse | greenhouse/scheduler.py | schedule_in | def schedule_in(secs, target=None, args=(), kwargs=None):
"""insert a greenlet into the scheduler to run after a set time
If provided a function, it is wrapped in a new greenlet
:param secs: the number of seconds to wait before running the target
:type unixtime: int or float
:param target: what to schedule
:type target: function or greenlet
:param args:
arguments for the function (only used if ``target`` is a function)
:type args: tuple
:param kwargs:
keyword arguments for the function (only used if ``target`` is a
function)
:type kwargs: dict or None
:returns: the ``target`` argument
This function can also be used as a decorator:
>>> @schedule_in(30)
>>> def f():
... print 'hello from f'
and args/kwargs can also be preloaded:
>>> @schedule_in(30, args=('world',))
>>> def f(name):
... print 'hello %s' % name
"""
return schedule_at(time.time() + secs, target, args, kwargs) | python | def schedule_in(secs, target=None, args=(), kwargs=None):
"""insert a greenlet into the scheduler to run after a set time
If provided a function, it is wrapped in a new greenlet
:param secs: the number of seconds to wait before running the target
:type unixtime: int or float
:param target: what to schedule
:type target: function or greenlet
:param args:
arguments for the function (only used if ``target`` is a function)
:type args: tuple
:param kwargs:
keyword arguments for the function (only used if ``target`` is a
function)
:type kwargs: dict or None
:returns: the ``target`` argument
This function can also be used as a decorator:
>>> @schedule_in(30)
>>> def f():
... print 'hello from f'
and args/kwargs can also be preloaded:
>>> @schedule_in(30, args=('world',))
>>> def f(name):
... print 'hello %s' % name
"""
return schedule_at(time.time() + secs, target, args, kwargs) | [
"def",
"schedule_in",
"(",
"secs",
",",
"target",
"=",
"None",
",",
"args",
"=",
"(",
")",
",",
"kwargs",
"=",
"None",
")",
":",
"return",
"schedule_at",
"(",
"time",
".",
"time",
"(",
")",
"+",
"secs",
",",
"target",
",",
"args",
",",
"kwargs",
")"
] | insert a greenlet into the scheduler to run after a set time
If provided a function, it is wrapped in a new greenlet
:param secs: the number of seconds to wait before running the target
:type unixtime: int or float
:param target: what to schedule
:type target: function or greenlet
:param args:
arguments for the function (only used if ``target`` is a function)
:type args: tuple
:param kwargs:
keyword arguments for the function (only used if ``target`` is a
function)
:type kwargs: dict or None
:returns: the ``target`` argument
This function can also be used as a decorator:
>>> @schedule_in(30)
>>> def f():
... print 'hello from f'
and args/kwargs can also be preloaded:
>>> @schedule_in(30, args=('world',))
>>> def f(name):
... print 'hello %s' % name | [
"insert",
"a",
"greenlet",
"into",
"the",
"scheduler",
"to",
"run",
"after",
"a",
"set",
"time"
] | 8fd1be4f5443ba090346b5ec82fdbeb0a060d956 | https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L347-L378 | train |
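schedule_in() is the relative-delay convenience wrapper over schedule_at(); a sketch using the decorator form:

from greenhouse import scheduler

# queue the call for thirty seconds after registration
@scheduler.schedule_in(30, args=("world",))
def delayed_hello(name):
    print("hello %s, thirty seconds later" % name)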
teepark/greenhouse | greenhouse/scheduler.py | schedule_recurring | def schedule_recurring(interval, target=None, maxtimes=0, starting_at=0,
args=(), kwargs=None):
"""insert a greenlet into the scheduler to run regularly at an interval
If provided a function, it is wrapped in a new greenlet
:param interval: the number of seconds between invocations
:type interval: int or float
:param target: what to schedule
:type target: function or greenlet
:param maxtimes: if provided, do not run more than ``maxtimes`` iterations
:type maxtimes: int
:param starting_at:
the unix timestamp of when to schedule it for the first time (defaults
to the time of the ``schedule_recurring`` call)
:type starting_at: int or float
:param args: arguments for the function
:type args: tuple
:param kwargs: keyword arguments for the function
:type kwargs: dict or None
:returns: the ``target`` argument
This function can also be used as a decorator:
>>> @schedule_recurring(30)
>>> def f():
... print "the regular 'hello' from f"
and args/kwargs can also be preloaded:
>>> @schedule_recurring(30, args=('world',))
>>> def f(name):
... print 'the regular hello %s' % name
"""
starting_at = starting_at or time.time()
if target is None:
def decorator(target):
return schedule_recurring(
interval, target, maxtimes, starting_at, args, kwargs)
return decorator
func = target
if isinstance(target, compat.greenlet) or target is compat.main_greenlet:
if target.dead:
raise TypeError("can't schedule a dead greenlet")
func = target.run
def run_and_schedule_one(tstamp, count):
# pass in the time scheduled instead of just checking
# time.time() so that delays don't add up
if not maxtimes or count < maxtimes:
tstamp += interval
func(*args, **(kwargs or {}))
schedule_at(tstamp, run_and_schedule_one,
args=(tstamp, count + 1))
firstrun = starting_at + interval
schedule_at(firstrun, run_and_schedule_one, args=(firstrun, 0))
return target | python | def schedule_recurring(interval, target=None, maxtimes=0, starting_at=0,
args=(), kwargs=None):
"""insert a greenlet into the scheduler to run regularly at an interval
If provided a function, it is wrapped in a new greenlet
:param interval: the number of seconds between invocations
:type interval: int or float
:param target: what to schedule
:type target: function or greenlet
:param maxtimes: if provided, do not run more than ``maxtimes`` iterations
:type maxtimes: int
:param starting_at:
the unix timestamp of when to schedule it for the first time (defaults
to the time of the ``schedule_recurring`` call)
:type starting_at: int or float
:param args: arguments for the function
:type args: tuple
:param kwargs: keyword arguments for the function
:type kwargs: dict or None
:returns: the ``target`` argument
This function can also be used as a decorator:
>>> @schedule_recurring(30)
>>> def f():
... print "the regular 'hello' from f"
and args/kwargs can also be preloaded:
>>> @schedule_recurring(30, args=('world',))
>>> def f(name):
... print 'the regular hello %s' % name
"""
starting_at = starting_at or time.time()
if target is None:
def decorator(target):
return schedule_recurring(
interval, target, maxtimes, starting_at, args, kwargs)
return decorator
func = target
if isinstance(target, compat.greenlet) or target is compat.main_greenlet:
if target.dead:
raise TypeError("can't schedule a dead greenlet")
func = target.run
def run_and_schedule_one(tstamp, count):
# pass in the time scheduled instead of just checking
# time.time() so that delays don't add up
if not maxtimes or count < maxtimes:
tstamp += interval
func(*args, **(kwargs or {}))
schedule_at(tstamp, run_and_schedule_one,
args=(tstamp, count + 1))
firstrun = starting_at + interval
schedule_at(firstrun, run_and_schedule_one, args=(firstrun, 0))
return target | [
"def",
"schedule_recurring",
"(",
"interval",
",",
"target",
"=",
"None",
",",
"maxtimes",
"=",
"0",
",",
"starting_at",
"=",
"0",
",",
"args",
"=",
"(",
")",
",",
"kwargs",
"=",
"None",
")",
":",
"starting_at",
"=",
"starting_at",
"or",
"time",
".",
"time",
"(",
")",
"if",
"target",
"is",
"None",
":",
"def",
"decorator",
"(",
"target",
")",
":",
"return",
"schedule_recurring",
"(",
"interval",
",",
"target",
",",
"maxtimes",
",",
"starting_at",
",",
"args",
",",
"kwargs",
")",
"return",
"decorator",
"func",
"=",
"target",
"if",
"isinstance",
"(",
"target",
",",
"compat",
".",
"greenlet",
")",
"or",
"target",
"is",
"compat",
".",
"main_greenlet",
":",
"if",
"target",
".",
"dead",
":",
"raise",
"TypeError",
"(",
"\"can't schedule a dead greenlet\"",
")",
"func",
"=",
"target",
".",
"run",
"def",
"run_and_schedule_one",
"(",
"tstamp",
",",
"count",
")",
":",
"# pass in the time scheduled instead of just checking",
"# time.time() so that delays don't add up",
"if",
"not",
"maxtimes",
"or",
"count",
"<",
"maxtimes",
":",
"tstamp",
"+=",
"interval",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"(",
"kwargs",
"or",
"{",
"}",
")",
")",
"schedule_at",
"(",
"tstamp",
",",
"run_and_schedule_one",
",",
"args",
"=",
"(",
"tstamp",
",",
"count",
"+",
"1",
")",
")",
"firstrun",
"=",
"starting_at",
"+",
"interval",
"schedule_at",
"(",
"firstrun",
",",
"run_and_schedule_one",
",",
"args",
"=",
"(",
"firstrun",
",",
"0",
")",
")",
"return",
"target"
] | insert a greenlet into the scheduler to run regularly at an interval
If provided a function, it is wrapped in a new greenlet
:param interval: the number of seconds between invocations
:type interval: int or float
:param target: what to schedule
:type target: function or greenlet
:param maxtimes: if provided, do not run more than ``maxtimes`` iterations
:type maxtimes: int
:param starting_at:
the unix timestamp of when to schedule it for the first time (defaults
to the time of the ``schedule_recurring`` call)
:type starting_at: int or float
:param args: arguments for the function
:type args: tuple
:param kwargs: keyword arguments for the function
:type kwargs: dict or None
:returns: the ``target`` argument
This function can also be used as a decorator:
>>> @schedule_recurring(30)
>>> def f():
... print "the regular 'hello' from f"
and args/kwargs can also be preloaded:
>>> @schedule_recurring(30, args=('world',))
>>> def f(name):
... print 'the regular hello %s' % name | [
"insert",
"a",
"greenlet",
"into",
"the",
"scheduler",
"to",
"run",
"regularly",
"at",
"an",
"interval"
] | 8fd1be4f5443ba090346b5ec82fdbeb0a060d956 | https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L381-L442 | train |
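A sketch for schedule_recurring(); maxtimes caps the repetitions so the example eventually stops on its own.

from greenhouse import scheduler

# print a heartbeat every five seconds, at most three times
@scheduler.schedule_recurring(5, maxtimes=3)
def heartbeat():
    print("still alive")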
teepark/greenhouse | greenhouse/scheduler.py | schedule_exception | def schedule_exception(exception, target):
"""schedule a greenlet to have an exception raised in it immediately
:param exception: the exception to raise in the greenlet
:type exception: Exception
:param target: the greenlet that should receive the exception
:type target: greenlet
"""
if not isinstance(target, compat.greenlet):
raise TypeError("can only schedule exceptions for greenlets")
if target.dead:
raise ValueError("can't send exceptions to a dead greenlet")
schedule(target)
state.to_raise[target] = exception | python | def schedule_exception(exception, target):
"""schedule a greenlet to have an exception raised in it immediately
:param exception: the exception to raise in the greenlet
:type exception: Exception
:param target: the greenlet that should receive the exception
:type target: greenlet
"""
if not isinstance(target, compat.greenlet):
raise TypeError("can only schedule exceptions for greenlets")
if target.dead:
raise ValueError("can't send exceptions to a dead greenlet")
schedule(target)
state.to_raise[target] = exception | [
"def",
"schedule_exception",
"(",
"exception",
",",
"target",
")",
":",
"if",
"not",
"isinstance",
"(",
"target",
",",
"compat",
".",
"greenlet",
")",
":",
"raise",
"TypeError",
"(",
"\"can only schedule exceptions for greenlets\"",
")",
"if",
"target",
".",
"dead",
":",
"raise",
"ValueError",
"(",
"\"can't send exceptions to a dead greenlet\"",
")",
"schedule",
"(",
"target",
")",
"state",
".",
"to_raise",
"[",
"target",
"]",
"=",
"exception"
] | schedule a greenlet to have an exception raised in it immediately
:param exception: the exception to raise in the greenlet
:type exception: Exception
:param target: the greenlet that should receive the exception
:type target: greenlet | [
"schedule",
"a",
"greenlet",
"to",
"have",
"an",
"exception",
"raised",
"in",
"it",
"immediately"
] | 8fd1be4f5443ba090346b5ec82fdbeb0a060d956 | https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L445-L458 | train |
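The three exception-scheduling variants share a shape: schedule_exception() fires immediately, while schedule_exception_at() and schedule_exception_in() (documented in the two records that follow) take a timestamp or a delay. A combined sketch, with the worker greenlet as a stand-in:

from greenhouse import scheduler

class Shutdown(Exception):
    pass

# stand-in for a long-running greenlet created elsewhere in the program
worker = scheduler.greenlet(lambda: None)

scheduler.schedule_exception(Shutdown("stop now"), worker)
# scheduler.schedule_exception_at(some_unix_timestamp, Shutdown(), worker)  # hypothetical timestamp
# scheduler.schedule_exception_in(60, Shutdown(), worker)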
teepark/greenhouse | greenhouse/scheduler.py | schedule_exception_at | def schedule_exception_at(unixtime, exception, target):
"""schedule a greenlet to have an exception raised at a unix timestamp
:param unixtime: when to raise the exception in the target
:type unixtime: int or float
:param exception: the exception to raise in the greenlet
:type exception: Exception
:param target: the greenlet that should receive the exception
:type target: greenlet
"""
if not isinstance(target, compat.greenlet):
raise TypeError("can only schedule exceptions for greenlets")
if target.dead:
raise ValueError("can't send exceptions to a dead greenlet")
schedule_at(unixtime, target)
state.to_raise[target] = exception | python | def schedule_exception_at(unixtime, exception, target):
"""schedule a greenlet to have an exception raised at a unix timestamp
:param unixtime: when to raise the exception in the target
:type unixtime: int or float
:param exception: the exception to raise in the greenlet
:type exception: Exception
:param target: the greenlet that should receive the exception
:type target: greenlet
"""
if not isinstance(target, compat.greenlet):
raise TypeError("can only schedule exceptions for greenlets")
if target.dead:
raise ValueError("can't send exceptions to a dead greenlet")
schedule_at(unixtime, target)
state.to_raise[target] = exception | [
"def",
"schedule_exception_at",
"(",
"unixtime",
",",
"exception",
",",
"target",
")",
":",
"if",
"not",
"isinstance",
"(",
"target",
",",
"compat",
".",
"greenlet",
")",
":",
"raise",
"TypeError",
"(",
"\"can only schedule exceptions for greenlets\"",
")",
"if",
"target",
".",
"dead",
":",
"raise",
"ValueError",
"(",
"\"can't send exceptions to a dead greenlet\"",
")",
"schedule_at",
"(",
"unixtime",
",",
"target",
")",
"state",
".",
"to_raise",
"[",
"target",
"]",
"=",
"exception"
] | schedule a greenlet to have an exception raised at a unix timestamp
:param unixtime: when to raise the exception in the target
:type unixtime: int or float
:param exception: the exception to raise in the greenlet
:type exception: Exception
:param target: the greenlet that should receive the exception
:type target: greenlet | [
"schedule",
"a",
"greenlet",
"to",
"have",
"an",
"exception",
"raised",
"at",
"a",
"unix",
"timestamp"
] | 8fd1be4f5443ba090346b5ec82fdbeb0a060d956 | https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L461-L476 | train |
teepark/greenhouse | greenhouse/scheduler.py | schedule_exception_in | def schedule_exception_in(secs, exception, target):
"""schedule a greenlet receive an exception after a number of seconds
:param secs: the number of seconds to wait before raising
:type secs: int or float
:param exception: the exception to raise in the greenlet
:type exception: Exception
:param target: the greenlet that should receive the exception
:type target: greenlet
"""
schedule_exception_at(time.time() + secs, exception, target) | python | def schedule_exception_in(secs, exception, target):
"""schedule a greenlet receive an exception after a number of seconds
:param secs: the number of seconds to wait before raising
:type secs: int or float
:param exception: the exception to raise in the greenlet
:type exception: Exception
:param target: the greenlet that should receive the exception
:type target: greenlet
"""
schedule_exception_at(time.time() + secs, exception, target) | [
"def",
"schedule_exception_in",
"(",
"secs",
",",
"exception",
",",
"target",
")",
":",
"schedule_exception_at",
"(",
"time",
".",
"time",
"(",
")",
"+",
"secs",
",",
"exception",
",",
"target",
")"
] | schedule a greenlet receive an exception after a number of seconds
:param secs: the number of seconds to wait before raising
:type secs: int or float
:param exception: the exception to raise in the greenlet
:type exception: Exception
:param target: the greenlet that should receive the exception
:type target: greenlet | [
"schedule",
"a",
"greenlet",
"receive",
"an",
"exception",
"after",
"a",
"number",
"of",
"seconds"
] | 8fd1be4f5443ba090346b5ec82fdbeb0a060d956 | https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L479-L489 | train |
teepark/greenhouse | greenhouse/scheduler.py | end | def end(target):
"""schedule a greenlet to be stopped immediately
:param target: the greenlet to end
:type target: greenlet
"""
if not isinstance(target, compat.greenlet):
raise TypeError("argument must be a greenlet")
if not target.dead:
schedule(target)
state.to_raise[target] = compat.GreenletExit() | python | def end(target):
"""schedule a greenlet to be stopped immediately
:param target: the greenlet to end
:type target: greenlet
"""
if not isinstance(target, compat.greenlet):
raise TypeError("argument must be a greenlet")
if not target.dead:
schedule(target)
state.to_raise[target] = compat.GreenletExit() | [
"def",
"end",
"(",
"target",
")",
":",
"if",
"not",
"isinstance",
"(",
"target",
",",
"compat",
".",
"greenlet",
")",
":",
"raise",
"TypeError",
"(",
"\"argument must be a greenlet\"",
")",
"if",
"not",
"target",
".",
"dead",
":",
"schedule",
"(",
"target",
")",
"state",
".",
"to_raise",
"[",
"target",
"]",
"=",
"compat",
".",
"GreenletExit",
"(",
")"
] | schedule a greenlet to be stopped immediately
:param target: the greenlet to end
:type target: greenlet | [
"schedule",
"a",
"greenlet",
"to",
"be",
"stopped",
"immediately"
] | 8fd1be4f5443ba090346b5ec82fdbeb0a060d956 | https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L492-L502 | train |
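end() schedules a GreenletExit rather than an arbitrary exception; a sketch stopping a worker:

from greenhouse import scheduler

def worker():
    print("working")  # imagine a long-running, cooperatively yielding task here

glet = scheduler.greenlet(worker)
scheduler.schedule(glet)

# later: ask the scheduler to raise GreenletExit inside it
scheduler.end(glet)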
teepark/greenhouse | greenhouse/scheduler.py | handle_exception | def handle_exception(klass, exc, tb, coro=None):
"""run all the registered exception handlers
the first 3 arguments to this function match the output of
``sys.exc_info()``
:param klass: the exception klass
:type klass: type
:param exc: the exception instance
:type exc: Exception
:param tb: the traceback object
:type tb: Traceback
:param coro:
behave as though the exception occurred in this coroutine (defaults to
the current coroutine)
:type coro: greenlet
exception handlers run would be all those added with
:func:`global_exception_handler`, and any added for the relevant coroutine
with :func:`local_exception_handler`.
"""
if coro is None:
coro = compat.getcurrent()
replacement = []
for weak in state.local_exception_handlers.get(coro, ()):
func = weak()
if func is None:
continue
try:
func(klass, exc, tb)
except Exception:
continue
replacement.append(weak)
if replacement:
state.local_exception_handlers[coro][:] = replacement
replacement = []
for weak in state.global_exception_handlers:
func = weak()
if func is None:
continue
try:
func(klass, exc, tb)
except Exception:
continue
replacement.append(weak)
state.global_exception_handlers[:] = replacement | python | def handle_exception(klass, exc, tb, coro=None):
"""run all the registered exception handlers
the first 3 arguments to this function match the output of
``sys.exc_info()``
:param klass: the exception klass
:type klass: type
:param exc: the exception instance
:type exc: Exception
:param tb: the traceback object
:type tb: Traceback
:param coro:
behave as though the exception occurred in this coroutine (defaults to
the current coroutine)
:type coro: greenlet
exception handlers run would be all those added with
:func:`global_exception_handler`, and any added for the relevant coroutine
with :func:`local_exception_handler`.
"""
if coro is None:
coro = compat.getcurrent()
replacement = []
for weak in state.local_exception_handlers.get(coro, ()):
func = weak()
if func is None:
continue
try:
func(klass, exc, tb)
except Exception:
continue
replacement.append(weak)
if replacement:
state.local_exception_handlers[coro][:] = replacement
replacement = []
for weak in state.global_exception_handlers:
func = weak()
if func is None:
continue
try:
func(klass, exc, tb)
except Exception:
continue
replacement.append(weak)
state.global_exception_handlers[:] = replacement | [
"def",
"handle_exception",
"(",
"klass",
",",
"exc",
",",
"tb",
",",
"coro",
"=",
"None",
")",
":",
"if",
"coro",
"is",
"None",
":",
"coro",
"=",
"compat",
".",
"getcurrent",
"(",
")",
"replacement",
"=",
"[",
"]",
"for",
"weak",
"in",
"state",
".",
"local_exception_handlers",
".",
"get",
"(",
"coro",
",",
"(",
")",
")",
":",
"func",
"=",
"weak",
"(",
")",
"if",
"func",
"is",
"None",
":",
"continue",
"try",
":",
"func",
"(",
"klass",
",",
"exc",
",",
"tb",
")",
"except",
"Exception",
":",
"continue",
"replacement",
".",
"append",
"(",
"weak",
")",
"if",
"replacement",
":",
"state",
".",
"local_exception_handlers",
"[",
"coro",
"]",
"[",
":",
"]",
"=",
"replacement",
"replacement",
"=",
"[",
"]",
"for",
"weak",
"in",
"state",
".",
"global_exception_handlers",
":",
"func",
"=",
"weak",
"(",
")",
"if",
"func",
"is",
"None",
":",
"continue",
"try",
":",
"func",
"(",
"klass",
",",
"exc",
",",
"tb",
")",
"except",
"Exception",
":",
"continue",
"replacement",
".",
"append",
"(",
"weak",
")",
"state",
".",
"global_exception_handlers",
"[",
":",
"]",
"=",
"replacement"
] | run all the registered exception handlers
the first 3 arguments to this function match the output of
``sys.exc_info()``
:param klass: the exception klass
:type klass: type
:param exc: the exception instance
:type exc: Exception
:param tb: the traceback object
:type tb: Traceback
:param coro:
behave as though the exception occurred in this coroutine (defaults to
the current coroutine)
:type coro: greenlet
exception handlers run would be all those added with
:func:`global_exception_handler`, and any added for the relevant coroutine
with :func:`local_exception_handler`. | [
"run",
"all",
"the",
"registered",
"exception",
"handlers"
] | 8fd1be4f5443ba090346b5ec82fdbeb0a060d956 | https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L606-L659 | train |
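handle_exception() is normally called by the scheduler itself, but the docstring's sys.exc_info() shape makes it easy to drive by hand; a sketch:

import sys
from greenhouse import scheduler

try:
    1 / 0
except ZeroDivisionError:
    klass, exc, tb = sys.exc_info()
    # runs every registered global handler plus the current coroutine's local ones
    scheduler.handle_exception(klass, exc, tb)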
teepark/greenhouse | greenhouse/scheduler.py | global_exception_handler | def global_exception_handler(handler):
"""add a callback for when an exception goes uncaught in any greenlet
:param handler:
the callback function. must be a function taking 3 arguments:
- ``klass`` the exception class
- ``exc`` the exception instance
- ``tb`` the traceback object
:type handler: function
Note also that the callback is only held by a weakref, so if all other refs
to the function are lost it will stop handling greenlets' exceptions
"""
if not hasattr(handler, "__call__"):
raise TypeError("exception handlers must be callable")
log.info("setting a new global exception handler")
state.global_exception_handlers.append(weakref.ref(handler))
return handler | python | def global_exception_handler(handler):
"""add a callback for when an exception goes uncaught in any greenlet
:param handler:
the callback function. must be a function taking 3 arguments:
- ``klass`` the exception class
- ``exc`` the exception instance
- ``tb`` the traceback object
:type handler: function
Note also that the callback is only held by a weakref, so if all other refs
to the function are lost it will stop handling greenlets' exceptions
"""
if not hasattr(handler, "__call__"):
raise TypeError("exception handlers must be callable")
log.info("setting a new global exception handler")
state.global_exception_handlers.append(weakref.ref(handler))
return handler | [
"def",
"global_exception_handler",
"(",
"handler",
")",
":",
"if",
"not",
"hasattr",
"(",
"handler",
",",
"\"__call__\"",
")",
":",
"raise",
"TypeError",
"(",
"\"exception handlers must be callable\"",
")",
"log",
".",
"info",
"(",
"\"setting a new global exception handler\"",
")",
"state",
".",
"global_exception_handlers",
".",
"append",
"(",
"weakref",
".",
"ref",
"(",
"handler",
")",
")",
"return",
"handler"
] | add a callback for when an exception goes uncaught in any greenlet
:param handler:
the callback function. must be a function taking 3 arguments:
- ``klass`` the exception class
- ``exc`` the exception instance
- ``tb`` the traceback object
:type handler: function
Note also that the callback is only held by a weakref, so if all other refs
to the function are lost it will stop handling greenlets' exceptions | [
"add",
"a",
"callback",
"for",
"when",
"an",
"exception",
"goes",
"uncaught",
"in",
"any",
"greenlet"
] | 8fd1be4f5443ba090346b5ec82fdbeb0a060d956 | https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L662-L682 | train |
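Registering and later removing a global handler (removal is documented in the next record). Because only a weakref is held, the handler is kept as a module-level function here so it stays alive.

import traceback
from greenhouse import scheduler

def log_uncaught(klass, exc, tb):
    print("uncaught %s in a greenlet:" % klass.__name__)
    traceback.print_tb(tb)

scheduler.global_exception_handler(log_uncaught)  # only a weak reference is stored
# ... later ...
scheduler.remove_global_exception_handler(log_uncaught)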
teepark/greenhouse | greenhouse/scheduler.py | remove_global_exception_handler | def remove_global_exception_handler(handler):
"""remove a callback from the list of global exception handlers
:param handler:
the callback, previously added via :func:`global_exception_handler`,
to remove
:type handler: function
:returns: bool, whether the handler was found (and therefore removed)
"""
for i, cb in enumerate(state.global_exception_handlers):
cb = cb()
if cb is not None and cb is handler:
state.global_exception_handlers.pop(i)
log.info("removing a global exception handler")
return True
return False | python | def remove_global_exception_handler(handler):
"""remove a callback from the list of global exception handlers
:param handler:
the callback, previously added via :func:`global_exception_handler`,
to remove
:type handler: function
:returns: bool, whether the handler was found (and therefore removed)
"""
for i, cb in enumerate(state.global_exception_handlers):
cb = cb()
if cb is not None and cb is handler:
state.global_exception_handlers.pop(i)
log.info("removing a global exception handler")
return True
return False | [
"def",
"remove_global_exception_handler",
"(",
"handler",
")",
":",
"for",
"i",
",",
"cb",
"in",
"enumerate",
"(",
"state",
".",
"global_exception_handlers",
")",
":",
"cb",
"=",
"cb",
"(",
")",
"if",
"cb",
"is",
"not",
"None",
"and",
"cb",
"is",
"handler",
":",
"state",
".",
"global_exception_handlers",
".",
"pop",
"(",
"i",
")",
"log",
".",
"info",
"(",
"\"removing a global exception handler\"",
")",
"return",
"True",
"return",
"False"
] | remove a callback from the list of global exception handlers
:param handler:
the callback, previously added via :func:`global_exception_handler`,
to remove
:type handler: function
:returns: bool, whether the handler was found (and therefore removed) | [
"remove",
"a",
"callback",
"from",
"the",
"list",
"of",
"global",
"exception",
"handlers"
] | 8fd1be4f5443ba090346b5ec82fdbeb0a060d956 | https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L685-L701 | train |
teepark/greenhouse | greenhouse/scheduler.py | local_exception_handler | def local_exception_handler(handler=None, coro=None):
"""add a callback for when an exception occurs in a particular greenlet
:param handler:
the callback function, must be a function taking 3 arguments:
- ``klass`` the exception class
- ``exc`` the exception instance
- ``tb`` the traceback object
:type handler: function
:param coro:
the coroutine for which to apply the exception handler (defaults to the
current coroutine)
:type coro: greenlet
"""
if handler is None:
return lambda h: local_exception_handler(h, coro)
if not hasattr(handler, "__call__"):
raise TypeError("exception handlers must be callable")
if coro is None:
coro = compat.getcurrent()
log.info("setting a new coroutine local exception handler")
state.local_exception_handlers.setdefault(coro, []).append(
weakref.ref(handler))
return handler | python | def local_exception_handler(handler=None, coro=None):
"""add a callback for when an exception occurs in a particular greenlet
:param handler:
the callback function, must be a function taking 3 arguments:
- ``klass`` the exception class
- ``exc`` the exception instance
- ``tb`` the traceback object
:type handler: function
:param coro:
the coroutine for which to apply the exception handler (defaults to the
current coroutine)
:type coro: greenlet
"""
if handler is None:
return lambda h: local_exception_handler(h, coro)
if not hasattr(handler, "__call__"):
raise TypeError("exception handlers must be callable")
if coro is None:
coro = compat.getcurrent()
log.info("setting a new coroutine local exception handler")
state.local_exception_handlers.setdefault(coro, []).append(
weakref.ref(handler))
return handler | [
"def",
"local_exception_handler",
"(",
"handler",
"=",
"None",
",",
"coro",
"=",
"None",
")",
":",
"if",
"handler",
"is",
"None",
":",
"return",
"lambda",
"h",
":",
"local_exception_handler",
"(",
"h",
",",
"coro",
")",
"if",
"not",
"hasattr",
"(",
"handler",
",",
"\"__call__\"",
")",
":",
"raise",
"TypeError",
"(",
"\"exception handlers must be callable\"",
")",
"if",
"coro",
"is",
"None",
":",
"coro",
"=",
"compat",
".",
"getcurrent",
"(",
")",
"log",
".",
"info",
"(",
"\"setting a new coroutine local exception handler\"",
")",
"state",
".",
"local_exception_handlers",
".",
"setdefault",
"(",
"coro",
",",
"[",
"]",
")",
".",
"append",
"(",
"weakref",
".",
"ref",
"(",
"handler",
")",
")",
"return",
"handler"
] | add a callback for when an exception occurs in a particular greenlet
:param handler:
the callback function, must be a function taking 3 arguments:
- ``klass`` the exception class
- ``exc`` the exception instance
- ``tb`` the traceback object
:type handler: function
:param coro:
the coroutine for which to apply the exception handler (defaults to the
current coroutine)
:type coro: greenlet | [
"add",
"a",
"callback",
"for",
"when",
"an",
"exception",
"occurs",
"in",
"a",
"particular",
"greenlet"
] | 8fd1be4f5443ba090346b5ec82fdbeb0a060d956 | https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L704-L733 | train |
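The per-coroutine variant can be used directly as a decorator against the current coroutine, since a bare callable is accepted as the handler; removal goes through remove_local_exception_handler(), documented in the next record.

from greenhouse import scheduler

@scheduler.local_exception_handler
def note_failure(klass, exc, tb):
    print("this coroutine raised %r" % (exc,))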
teepark/greenhouse | greenhouse/scheduler.py | remove_local_exception_handler | def remove_local_exception_handler(handler, coro=None):
"""remove a callback from the list of exception handlers for a coroutine
:param handler: the callback to remove
:type handler: function
:param coro: the coroutine for which to remove the local handler
:type coro: greenlet
:returns: bool, whether the handler was found (and therefore removed)
"""
if coro is None:
coro = compat.getcurrent()
for i, cb in enumerate(state.local_exception_handlers.get(coro, [])):
cb = cb()
if cb is not None and cb is handler:
state.local_exception_handlers[coro].pop(i)
log.info("removing a coroutine local exception handler")
return True
return False | python | def remove_local_exception_handler(handler, coro=None):
"""remove a callback from the list of exception handlers for a coroutine
:param handler: the callback to remove
:type handler: function
:param coro: the coroutine for which to remove the local handler
:type coro: greenlet
:returns: bool, whether the handler was found (and therefore removed)
"""
if coro is None:
coro = compat.getcurrent()
for i, cb in enumerate(state.local_exception_handlers.get(coro, [])):
cb = cb()
if cb is not None and cb is handler:
state.local_exception_handlers[coro].pop(i)
log.info("removing a coroutine local exception handler")
return True
return False | [
"def",
"remove_local_exception_handler",
"(",
"handler",
",",
"coro",
"=",
"None",
")",
":",
"if",
"coro",
"is",
"None",
":",
"coro",
"=",
"compat",
".",
"getcurrent",
"(",
")",
"for",
"i",
",",
"cb",
"in",
"enumerate",
"(",
"state",
".",
"local_exception_handlers",
".",
"get",
"(",
"coro",
",",
"[",
"]",
")",
")",
":",
"cb",
"=",
"cb",
"(",
")",
"if",
"cb",
"is",
"not",
"None",
"and",
"cb",
"is",
"handler",
":",
"state",
".",
"local_exception_handlers",
"[",
"coro",
"]",
".",
"pop",
"(",
"i",
")",
"log",
".",
"info",
"(",
"\"removing a coroutine local exception handler\"",
")",
"return",
"True",
"return",
"False"
] | remove a callback from the list of exception handlers for a coroutine
:param handler: the callback to remove
:type handler: function
:param coro: the coroutine for which to remove the local handler
:type coro: greenlet
:returns: bool, whether the handler was found (and therefore removed) | [
"remove",
"a",
"callback",
"from",
"the",
"list",
"of",
"exception",
"handlers",
"for",
"a",
"coroutine"
] | 8fd1be4f5443ba090346b5ec82fdbeb0a060d956 | https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L736-L755 | train |
teepark/greenhouse | greenhouse/scheduler.py | global_hook | def global_hook(handler):
"""add a callback to run in every switch between coroutines
:param handler:
the callback function, must be a function taking 2 arguments:
- the greenlet being switched from
- the greenlet being switched to
be aware that only a weak reference to this function will be held.
:type handler: function
"""
if not hasattr(handler, "__call__"):
raise TypeError("trace hooks must be callable")
log.info("setting a new global hook callback")
state.global_hooks.append(weakref.ref(handler))
return handler | python | def global_hook(handler):
"""add a callback to run in every switch between coroutines
:param handler:
the callback function, must be a function taking 2 arguments:
- the greenlet being switched from
- the greenlet being switched to
be aware that only a weak reference to this function will be held.
:type handler: function
"""
if not hasattr(handler, "__call__"):
raise TypeError("trace hooks must be callable")
log.info("setting a new global hook callback")
state.global_hooks.append(weakref.ref(handler))
return handler | [
"def",
"global_hook",
"(",
"handler",
")",
":",
"if",
"not",
"hasattr",
"(",
"handler",
",",
"\"__call__\"",
")",
":",
"raise",
"TypeError",
"(",
"\"trace hooks must be callable\"",
")",
"log",
".",
"info",
"(",
"\"setting a new global hook callback\"",
")",
"state",
".",
"global_hooks",
".",
"append",
"(",
"weakref",
".",
"ref",
"(",
"handler",
")",
")",
"return",
"handler"
] | add a callback to run in every switch between coroutines
:param handler:
the callback function, must be a function taking 2 arguments:
- the greenlet being switched from
- the greenlet being switched to
be aware that only a weak reference to this function will be held.
:type handler: function | [
"add",
"a",
"callback",
"to",
"run",
"in",
"every",
"switch",
"between",
"coroutines"
] | 8fd1be4f5443ba090346b5ec82fdbeb0a060d956 | https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L758-L776 | train |
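A sketch of a global trace hook that counts context switches; as with the exception handlers, only a weak reference is kept, so the callback lives at module level.

from greenhouse import scheduler

switches = {"count": 0}

def count_switch(coming_from, going_to):
    switches["count"] += 1

scheduler.global_hook(count_switch)
# ... later, to stop tracing ...
scheduler.remove_global_hook(count_switch)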
teepark/greenhouse | greenhouse/scheduler.py | remove_global_hook | def remove_global_hook(handler):
"""remove a callback from the list of global hooks
:param handler:
the callback function, previously added with global_hook, to remove
from the list of global hooks
:type handler: function
:returns: bool, whether the handler was removed from the global hooks
"""
for i, cb in enumerate(state.global_hooks):
cb = cb()
if cb is not None and cb is handler:
state.global_hooks.pop(i)
log.info("removing a global hook callback")
return True
return False | python | def remove_global_hook(handler):
"""remove a callback from the list of global hooks
:param handler:
the callback function, previously added with global_hook, to remove
from the list of global hooks
:type handler: function
:returns: bool, whether the handler was removed from the global hooks
"""
for i, cb in enumerate(state.global_hooks):
cb = cb()
if cb is not None and cb is handler:
state.global_hooks.pop(i)
log.info("removing a global hook callback")
return True
return False | [
"def",
"remove_global_hook",
"(",
"handler",
")",
":",
"for",
"i",
",",
"cb",
"in",
"enumerate",
"(",
"state",
".",
"global_hooks",
")",
":",
"cb",
"=",
"cb",
"(",
")",
"if",
"cb",
"is",
"not",
"None",
"and",
"cb",
"is",
"handler",
":",
"state",
".",
"global_hooks",
".",
"pop",
"(",
"i",
")",
"log",
".",
"info",
"(",
"\"removing a global hook callback\"",
")",
"return",
"True",
"return",
"False"
] | remove a callback from the list of global hooks
:param handler:
the callback function, previously added with global_hook, to remove
from the list of global hooks
:type handler: function
:returns: bool, whether the handler was removed from the global hooks | [
"remove",
"a",
"callback",
"from",
"the",
"list",
"of",
"global",
"hooks"
] | 8fd1be4f5443ba090346b5ec82fdbeb0a060d956 | https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L779-L795 | train |
teepark/greenhouse | greenhouse/scheduler.py | local_incoming_hook | def local_incoming_hook(handler=None, coro=None):
"""add a callback to run every time a greenlet is about to be switched to
:param handler:
the callback function, must be a function taking 2 arguments:
- an integer indicating whether it is being called as an incoming (1)
hook or an outgoing (2) hook (in this case it will always receive 1).
- the coroutine being switched into (in this case it will always be the
same as the one indicated by the ``coro`` argument to
``local_incoming_hook``.
Be aware that only a weak reference to this function will be held.
:type handler: function
:param coro:
the coroutine for which to apply the trace hook (defaults to current)
:type coro: greenlet
"""
if handler is None:
return lambda h: local_incoming_hook(h, coro)
if not hasattr(handler, "__call__"):
raise TypeError("trace hooks must be callable")
if coro is None:
coro = compat.getcurrent()
log.info("setting a coroutine incoming local hook callback")
state.local_to_hooks.setdefault(coro, []).append(
weakref.ref(handler))
return handler | python | def local_incoming_hook(handler=None, coro=None):
"""add a callback to run every time a greenlet is about to be switched to
:param handler:
the callback function, must be a function taking 2 arguments:
- an integer indicating whether it is being called as an incoming (1)
hook or an outgoing (2) hook (in this case it will always receive 1).
- the coroutine being switched into (in this case it will always be the
same as the one indicated by the ``coro`` argument to
``local_incoming_hook``.
Be aware that only a weak reference to this function will be held.
:type handler: function
:param coro:
the coroutine for which to apply the trace hook (defaults to current)
:type coro: greenlet
"""
if handler is None:
return lambda h: local_incoming_hook(h, coro)
if not hasattr(handler, "__call__"):
raise TypeError("trace hooks must be callable")
if coro is None:
coro = compat.getcurrent()
log.info("setting a coroutine incoming local hook callback")
state.local_to_hooks.setdefault(coro, []).append(
weakref.ref(handler))
return handler | [
"def",
"local_incoming_hook",
"(",
"handler",
"=",
"None",
",",
"coro",
"=",
"None",
")",
":",
"if",
"handler",
"is",
"None",
":",
"return",
"lambda",
"h",
":",
"local_incoming_hook",
"(",
"h",
",",
"coro",
")",
"if",
"not",
"hasattr",
"(",
"handler",
",",
"\"__call__\"",
")",
":",
"raise",
"TypeError",
"(",
"\"trace hooks must be callable\"",
")",
"if",
"coro",
"is",
"None",
":",
"coro",
"=",
"compat",
".",
"getcurrent",
"(",
")",
"log",
".",
"info",
"(",
"\"setting a coroutine incoming local hook callback\"",
")",
"state",
".",
"local_to_hooks",
".",
"setdefault",
"(",
"coro",
",",
"[",
"]",
")",
".",
"append",
"(",
"weakref",
".",
"ref",
"(",
"handler",
")",
")",
"return",
"handler"
] | add a callback to run every time a greenlet is about to be switched to
:param handler:
the callback function, must be a function taking 2 arguments:
- an integer indicating whether it is being called as an incoming (1)
hook or an outgoing (2) hook (in this case it will always receive 1).
- the coroutine being switched into (in this case it will always be the
same as the one indicated by the ``coro`` argument to
``local_incoming_hook``.
Be aware that only a weak reference to this function will be held.
:type handler: function
:param coro:
the coroutine for which to apply the trace hook (defaults to current)
:type coro: greenlet | [
"add",
"a",
"callback",
"to",
"run",
"every",
"time",
"a",
"greenlet",
"is",
"about",
"to",
"be",
"switched",
"to"
] | 8fd1be4f5443ba090346b5ec82fdbeb0a060d956 | https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L798-L830 | train |
teepark/greenhouse | greenhouse/scheduler.py | remove_local_incoming_hook | def remove_local_incoming_hook(handler, coro=None):
"""remove a callback from the incoming hooks for a particular coro
:param handler: the callback previously added via local_incoming_hook
:type handler: function
:param coro: the coroutine for which the hook should be removed
:type coro: greenlet
:returns: bool, whether the handler was found and removed
"""
if coro is None:
coro = compat.getcurrent()
for i, cb in enumerate(state.local_to_hooks.get(coro, [])):
cb = cb()
if cb is not None and cb is handler:
log.info("removing a coroutine incoming local hook callback")
state.local_to_hooks[coro].pop(i)
return True
return False | python | def remove_local_incoming_hook(handler, coro=None):
"""remove a callback from the incoming hooks for a particular coro
:param handler: the callback previously added via local_incoming_hook
:type handler: function
:param coro: the coroutine for which the hook should be removed
:type coro: greenlet
:returns: bool, whether the handler was found and removed
"""
if coro is None:
coro = compat.getcurrent()
for i, cb in enumerate(state.local_to_hooks.get(coro, [])):
cb = cb()
if cb is not None and cb is handler:
log.info("removing a coroutine incoming local hook callback")
state.local_to_hooks[coro].pop(i)
return True
return False | [
"def",
"remove_local_incoming_hook",
"(",
"handler",
",",
"coro",
"=",
"None",
")",
":",
"if",
"coro",
"is",
"None",
":",
"coro",
"=",
"compat",
".",
"getcurrent",
"(",
")",
"for",
"i",
",",
"cb",
"in",
"enumerate",
"(",
"state",
".",
"local_to_hooks",
".",
"get",
"(",
"coro",
",",
"[",
"]",
")",
")",
":",
"cb",
"=",
"cb",
"(",
")",
"if",
"cb",
"is",
"not",
"None",
"and",
"cb",
"is",
"handler",
":",
"log",
".",
"info",
"(",
"\"removing a coroutine incoming local hook callback\"",
")",
"state",
".",
"local_to_hooks",
"[",
"coro",
"]",
".",
"pop",
"(",
"i",
")",
"return",
"True",
"return",
"False"
] | remove a callback from the incoming hooks for a particular coro
:param handler: the callback previously added via local_incoming_hook
:type handler: function
:param coro: the coroutine for which the hook should be removed
:type coro: greenlet
:returns: bool, whether the handler was found and removed | [
"remove",
"a",
"callback",
"from",
"the",
"incoming",
"hooks",
"for",
"a",
"particular",
"coro"
] | 8fd1be4f5443ba090346b5ec82fdbeb0a060d956 | https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L833-L852 | train |
teepark/greenhouse | greenhouse/scheduler.py | local_outgoing_hook | def local_outgoing_hook(handler=None, coro=None):
"""add a callback to run every time a greenlet is switched away from
:param handler:
the callback function, must be a function taking 2 arguments:
- an integer indicating whether it is being called as an incoming (1)
hook or as an outgoing (2) hook (in this case it will always be 2).
- the coroutine being switched from (in this case it is the one
indicated by the ``coro`` argument to ``local_outgoing_hook``.
Be aware that only a weak reference to this function will be held.
:type handler: function
:param coro:
the coroutine for which to apply the trace hook (defaults to current)
:type coro: greenlet
"""
if handler is None:
return lambda h: local_outgoing_hook(h, coro)
if not hasattr(handler, "__call__"):
raise TypeError("trace hooks must be callable")
if coro is None:
coro = compat.getcurrent()
log.info("setting a coroutine local outgoing hook callback")
state.local_from_hooks.setdefault(coro, []).append(
weakref.ref(handler))
return handler | python | def local_outgoing_hook(handler=None, coro=None):
"""add a callback to run every time a greenlet is switched away from
:param handler:
the callback function, must be a function taking 2 arguments:
- an integer indicating whether it is being called as an incoming (1)
hook or as an outgoing (2) hook (in this case it will always be 2).
- the coroutine being switched from (in this case it is the one
indicated by the ``coro`` argument to ``local_outgoing_hook``.
Be aware that only a weak reference to this function will be held.
:type handler: function
:param coro:
the coroutine for which to apply the trace hook (defaults to current)
:type coro: greenlet
"""
if handler is None:
return lambda h: local_outgoing_hook(h, coro)
if not hasattr(handler, "__call__"):
raise TypeError("trace hooks must be callable")
if coro is None:
coro = compat.getcurrent()
log.info("setting a coroutine local outgoing hook callback")
state.local_from_hooks.setdefault(coro, []).append(
weakref.ref(handler))
return handler | [
"def",
"local_outgoing_hook",
"(",
"handler",
"=",
"None",
",",
"coro",
"=",
"None",
")",
":",
"if",
"handler",
"is",
"None",
":",
"return",
"lambda",
"h",
":",
"local_outgoing_hook",
"(",
"h",
",",
"coro",
")",
"if",
"not",
"hasattr",
"(",
"handler",
",",
"\"__call__\"",
")",
":",
"raise",
"TypeError",
"(",
"\"trace hooks must be callable\"",
")",
"if",
"coro",
"is",
"None",
":",
"coro",
"=",
"compat",
".",
"getcurrent",
"(",
")",
"log",
".",
"info",
"(",
"\"setting a coroutine local outgoing hook callback\"",
")",
"state",
".",
"local_from_hooks",
".",
"setdefault",
"(",
"coro",
",",
"[",
"]",
")",
".",
"append",
"(",
"weakref",
".",
"ref",
"(",
"handler",
")",
")",
"return",
"handler"
] | add a callback to run every time a greenlet is switched away from
:param handler:
the callback function, must be a function taking 2 arguments:
- an integer indicating whether it is being called as an incoming (1)
hook or as an outgoing (2) hook (in this case it will always be 2).
- the coroutine being switched from (in this case it is the one
indicated by the ``coro`` argument to ``local_outgoing_hook``.
Be aware that only a weak reference to this function will be held.
:type handler: function
:param coro:
the coroutine for which to apply the trace hook (defaults to current)
:type coro: greenlet | [
"add",
"a",
"callback",
"to",
"run",
"every",
"time",
"a",
"greenlet",
"is",
"switched",
"away",
"from"
] | 8fd1be4f5443ba090346b5ec82fdbeb0a060d956 | https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L855-L886 | train |
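The two local trace hooks above (incoming and outgoing) receive a direction flag, 1 or 2, plus the coroutine involved; a sketch registering one callback for both directions on the current coroutine:

from greenhouse import scheduler

def trace(direction, coro):
    # direction is 1 for incoming switches and 2 for outgoing ones
    label = "in" if direction == 1 else "out"
    print("switch %s for %r" % (label, coro))

scheduler.local_incoming_hook(trace)
scheduler.local_outgoing_hook(trace)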
teepark/greenhouse | greenhouse/scheduler.py | remove_local_outgoing_hook | def remove_local_outgoing_hook(handler, coro=None):
"""remove a callback from the outgoing hooks for a particular coro
:param handler: the callback previously added via local_outgoing_hook
:type handler: function
:param coro: the coroutine for which the hook should be removed
:type coro: greenlet
:returns: bool, whether the handler was found and removed
"""
if coro is None:
coro = compat.getcurrent()
for i, cb in enumerate(state.local_from_hooks.get(coro, [])):
cb = cb()
if cb is not None and cb is handler:
log.info("removing a coroutine outgoing local hook callback")
state.local_from_hooks[coro].pop(i)
return True
return False | python | def remove_local_outgoing_hook(handler, coro=None):
"""remove a callback from the outgoing hooks for a particular coro
:param handler: the callback previously added via local_outgoing_hook
:type handler: function
:param coro: the coroutine for which the hook should be removed
:type coro: greenlet
:returns: bool, whether the handler was found and removed
"""
if coro is None:
coro = compat.getcurrent()
for i, cb in enumerate(state.local_from_hooks.get(coro, [])):
cb = cb()
if cb is not None and cb is handler:
log.info("removing a coroutine outgoing local hook callback")
state.local_from_hooks[coro].pop(i)
return True
return False | [
"def",
"remove_local_outgoing_hook",
"(",
"handler",
",",
"coro",
"=",
"None",
")",
":",
"if",
"coro",
"is",
"None",
":",
"coro",
"=",
"compat",
".",
"getcurrent",
"(",
")",
"for",
"i",
",",
"cb",
"in",
"enumerate",
"(",
"state",
".",
"local_from_hooks",
".",
"get",
"(",
"coro",
",",
"[",
"]",
")",
")",
":",
"cb",
"=",
"cb",
"(",
")",
"if",
"cb",
"is",
"not",
"None",
"and",
"cb",
"is",
"handler",
":",
"log",
".",
"info",
"(",
"\"removing a coroutine outgoing local hook callback\"",
")",
"state",
".",
"local_from_hooks",
"[",
"coro",
"]",
".",
"pop",
"(",
"i",
")",
"return",
"True",
"return",
"False"
] | remove a callback from the outgoing hooks for a particular coro
:param handler: the callback previously added via local_outgoing_hook
:type handler: function
:param coro: the coroutine for which the hook should be removed
:type coro: greenlet
:returns: bool, whether the handler was found and removed | [
"remove",
"a",
"callback",
"from",
"the",
"outgoing",
"hooks",
"for",
"a",
"particular",
"coro"
] | 8fd1be4f5443ba090346b5ec82fdbeb0a060d956 | https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L889-L908 | train |
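Continuing the hypothetical sketch above, the same handler can later be unregistered; the boolean return value reports whether it was still registered (it may already have been dropped if only the weak reference remained).

    removed = scheduler.remove_local_outgoing_hook(log_switch_out)
    # removed is True only if the handler was found and popped from the hook list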
teepark/greenhouse | greenhouse/scheduler.py | set_ignore_interrupts | def set_ignore_interrupts(flag=True):
"""turn off EINTR-raising from emulated syscalls on interruption by signals
due to the nature of greenhouse's system call emulation,
``signal.siginterrupt`` can't be made to work with it. specifically,
greenhouse can't differentiate between different signals. so this function
toggles whether to restart for *ALL* or *NO* signals.
:param flag:
whether to turn EINTR exceptions off (``True``) or on (``False``)
:type flag: bool
"""
log.info("setting ignore_interrupts to %r" % flag)
state.ignore_interrupts = bool(flag) | python | def set_ignore_interrupts(flag=True):
"""turn off EINTR-raising from emulated syscalls on interruption by signals
due to the nature of greenhouse's system call emulation,
``signal.siginterrupt`` can't be made to work with it. specifically,
greenhouse can't differentiate between different signals. so this function
toggles whether to restart for *ALL* or *NO* signals.
:param flag:
whether to turn EINTR exceptions off (``True``) or on (``False``)
:type flag: bool
"""
log.info("setting ignore_interrupts to %r" % flag)
state.ignore_interrupts = bool(flag) | [
"def",
"set_ignore_interrupts",
"(",
"flag",
"=",
"True",
")",
":",
"log",
".",
"info",
"(",
"\"setting ignore_interrupts to %r\"",
"%",
"flag",
")",
"state",
".",
"ignore_interrupts",
"=",
"bool",
"(",
"flag",
")"
] | turn off EINTR-raising from emulated syscalls on interruption by signals
due to the nature of greenhouse's system call emulation,
``signal.siginterrupt`` can't be made to work with it. specifically,
greenhouse can't differentiate between different signals. so this function
toggles whether to restart for *ALL* or *NO* signals.
:param flag:
whether to turn EINTR exceptions off (``True``) or on (``False``)
:type flag: bool | [
"turn",
"off",
"EINTR",
"-",
"raising",
"from",
"emulated",
"syscalls",
"on",
"interruption",
"by",
"signals"
] | 8fd1be4f5443ba090346b5ec82fdbeb0a060d956 | https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L911-L924 | train |
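A short sketch of toggling the flag documented in the set_ignore_interrupts record, under the same assumed greenhouse.scheduler import. The setting is global: it applies to all signals or to none.

    from greenhouse import scheduler

    scheduler.set_ignore_interrupts(True)    # restart emulated syscalls on every signal
    scheduler.set_ignore_interrupts(False)   # let interrupted calls raise EINTR again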
teepark/greenhouse | greenhouse/scheduler.py | reset_poller | def reset_poller(poll=None):
"""replace the scheduler's poller, throwing away any pre-existing state
this is only really a good idea in the new child process after a fork(2).
"""
state.poller = poll or poller.best()
log.info("resetting fd poller, using %s" % type(state.poller).__name__) | python | def reset_poller(poll=None):
"""replace the scheduler's poller, throwing away any pre-existing state
this is only really a good idea in the new child process after a fork(2).
"""
state.poller = poll or poller.best()
log.info("resetting fd poller, using %s" % type(state.poller).__name__) | [
"def",
"reset_poller",
"(",
"poll",
"=",
"None",
")",
":",
"state",
".",
"poller",
"=",
"poll",
"or",
"poller",
".",
"best",
"(",
")",
"log",
".",
"info",
"(",
"\"resetting fd poller, using %s\"",
"%",
"type",
"(",
"state",
".",
"poller",
")",
".",
"__name__",
")"
] | replace the scheduler's poller, throwing away any pre-existing state
this is only really a good idea in the new child process after a fork(2). | [
"replace",
"the",
"scheduler",
"s",
"poller",
"throwing",
"away",
"any",
"pre",
"-",
"existing",
"state"
] | 8fd1be4f5443ba090346b5ec82fdbeb0a060d956 | https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L927-L933 | train |
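The reset_poller docstring recommends calling it only in a freshly forked child. A hedged, POSIX-only sketch of that pattern, again assuming the greenhouse.scheduler import:

    import os
    from greenhouse import scheduler

    pid = os.fork()
    if pid == 0:
        # child process: throw away the fd-poller state inherited from the parent
        scheduler.reset_poller()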
nickpandolfi/Cyther | cyther/tools.py | find_resource | def find_resource(r, *, pkg='cyther'):
"""
Finds a given cyther resource in the 'test' subdirectory in
'cyther' package
"""
file_path = pkg_resources.resource_filename(pkg, os.path.join('test', r))
if not os.path.isfile(file_path):
msg = "Resource '{}' does not exist"
raise FileNotFoundError(msg.format(file_path))
return file_path | python | def find_resource(r, *, pkg='cyther'):
"""
Finds a given cyther resource in the 'test' subdirectory in
'cyther' package
"""
file_path = pkg_resources.resource_filename(pkg, os.path.join('test', r))
if not os.path.isfile(file_path):
msg = "Resource '{}' does not exist"
raise FileNotFoundError(msg.format(file_path))
return file_path | [
"def",
"find_resource",
"(",
"r",
",",
"*",
",",
"pkg",
"=",
"'cyther'",
")",
":",
"file_path",
"=",
"pkg_resources",
".",
"resource_filename",
"(",
"pkg",
",",
"os",
".",
"path",
".",
"join",
"(",
"'test'",
",",
"r",
")",
")",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"file_path",
")",
":",
"msg",
"=",
"\"Resource '{}' does not exist\"",
"raise",
"FileNotFoundError",
"(",
"msg",
".",
"format",
"(",
"file_path",
")",
")",
"return",
"file_path"
] | Finds a given cyther resource in the 'test' subdirectory in
'cyther' package | [
"Finds",
"a",
"given",
"cyther",
"resource",
"in",
"the",
"test",
"subdirectory",
"in",
"cyther",
"package"
] | 9fb0bd77af594008aa6ee8af460aa8c953abf5bc | https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/tools.py#L17-L26 | train |
nickpandolfi/Cyther | cyther/tools.py | assert_output | def assert_output(output, assert_equal):
"""
Check that two outputs have the same contents as one another, even if they
aren't sorted yet
"""
sorted_output = sorted(output)
sorted_assert = sorted(assert_equal)
if sorted_output != sorted_assert:
raise ValueError(ASSERT_ERROR.format(sorted_output, sorted_assert)) | python | def assert_output(output, assert_equal):
"""
Check that two outputs have the same contents as one another, even if they
aren't sorted yet
"""
sorted_output = sorted(output)
sorted_assert = sorted(assert_equal)
if sorted_output != sorted_assert:
raise ValueError(ASSERT_ERROR.format(sorted_output, sorted_assert)) | [
"def",
"assert_output",
"(",
"output",
",",
"assert_equal",
")",
":",
"sorted_output",
"=",
"sorted",
"(",
"output",
")",
"sorted_assert",
"=",
"sorted",
"(",
"assert_equal",
")",
"if",
"sorted_output",
"!=",
"sorted_assert",
":",
"raise",
"ValueError",
"(",
"ASSERT_ERROR",
".",
"format",
"(",
"sorted_output",
",",
"sorted_assert",
")",
")"
] | Check that two outputs have the same contents as one another, even if they
aren't sorted yet | [
"Check",
"that",
"two",
"outputs",
"have",
"the",
"same",
"contents",
"as",
"one",
"another",
"even",
"if",
"they",
"aren",
"t",
"sorted",
"yet"
] | 9fb0bd77af594008aa6ee8af460aa8c953abf5bc | https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/tools.py#L78-L86 | train |
nickpandolfi/Cyther | cyther/tools.py | write_dict_to_file | def write_dict_to_file(file_path, obj):
"""
Write a dictionary of string keys to a file
"""
lines = []
for key, value in obj.items():
lines.append(key + ':' + repr(value) + '\n')
with open(file_path, 'w+') as file:
file.writelines(lines)
return None | python | def write_dict_to_file(file_path, obj):
"""
Write a dictionary of string keys to a file
"""
lines = []
for key, value in obj.items():
lines.append(key + ':' + repr(value) + '\n')
with open(file_path, 'w+') as file:
file.writelines(lines)
return None | [
"def",
"write_dict_to_file",
"(",
"file_path",
",",
"obj",
")",
":",
"lines",
"=",
"[",
"]",
"for",
"key",
",",
"value",
"in",
"obj",
".",
"items",
"(",
")",
":",
"lines",
".",
"append",
"(",
"key",
"+",
"':'",
"+",
"repr",
"(",
"value",
")",
"+",
"'\\n'",
")",
"with",
"open",
"(",
"file_path",
",",
"'w+'",
")",
"as",
"file",
":",
"file",
".",
"writelines",
"(",
"lines",
")",
"return",
"None"
] | Write a dictionary of string keys to a file | [
"Write",
"a",
"dictionary",
"of",
"string",
"keys",
"to",
"a",
"file"
] | 9fb0bd77af594008aa6ee8af460aa8c953abf5bc | https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/tools.py#L89-L100 | train |
nickpandolfi/Cyther | cyther/tools.py | read_dict_from_file | def read_dict_from_file(file_path):
"""
Read a dictionary of strings from a file
"""
with open(file_path) as file:
lines = file.read().splitlines()
obj = {}
for line in lines:
key, value = line.split(':', maxsplit=1)
obj[key] = eval(value)
return obj | python | def read_dict_from_file(file_path):
"""
Read a dictionary of strings from a file
"""
with open(file_path) as file:
lines = file.read().splitlines()
obj = {}
for line in lines:
key, value = line.split(':', maxsplit=1)
obj[key] = eval(value)
return obj | [
"def",
"read_dict_from_file",
"(",
"file_path",
")",
":",
"with",
"open",
"(",
"file_path",
")",
"as",
"file",
":",
"lines",
"=",
"file",
".",
"read",
"(",
")",
".",
"splitlines",
"(",
")",
"obj",
"=",
"{",
"}",
"for",
"line",
"in",
"lines",
":",
"key",
",",
"value",
"=",
"line",
".",
"split",
"(",
"':'",
",",
"maxsplit",
"=",
"1",
")",
"obj",
"[",
"key",
"]",
"=",
"eval",
"(",
"value",
")",
"return",
"obj"
] | Read a dictionary of strings from a file | [
"Read",
"a",
"dictionary",
"of",
"strings",
"from",
"a",
"file"
] | 9fb0bd77af594008aa6ee8af460aa8c953abf5bc | https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/tools.py#L103-L115 | train |
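write_dict_to_file and read_dict_from_file round-trip any dictionary whose keys are plain strings without ':' and whose values survive repr()/eval(). A small sketch, assuming both helpers are importable from cyther.tools as the record path suggests; the file name is arbitrary.

    from cyther.tools import write_dict_to_file, read_dict_from_file

    original = {'include_dirs': ['/usr/include/python3.5'], 'timeout': 30}
    write_dict_to_file('cyther_cache.txt', original)
    assert read_dict_from_file('cyther_cache.txt') == original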
nickpandolfi/Cyther | cyther/tools.py | get_input | def get_input(prompt, check, *, redo_prompt=None, repeat_prompt=False):
"""
Ask the user to input something on the terminal level, check their response
and ask again if they didn't answer correctly
"""
if isinstance(check, str):
check = (check,)
to_join = []
for item in check:
if item:
to_join.append(str(item))
else:
to_join.append("''")
prompt += " [{}]: ".format('/'.join(to_join))
if repeat_prompt:
redo_prompt = prompt
elif not redo_prompt:
redo_prompt = "Incorrect input, please choose from {}: " \
"".format(str(check))
if callable(check):
def _checker(r): return check(r)
elif isinstance(check, tuple):
def _checker(r): return r in check
else:
raise ValueError(RESPONSES_ERROR.format(type(check)))
response = input(prompt)
while not _checker(response):
print(response, type(response))
response = input(redo_prompt if redo_prompt else prompt)
return response | python | def get_input(prompt, check, *, redo_prompt=None, repeat_prompt=False):
"""
Ask the user to input something on the terminal level, check their response
and ask again if they didn't answer correctly
"""
if isinstance(check, str):
check = (check,)
to_join = []
for item in check:
if item:
to_join.append(str(item))
else:
to_join.append("''")
prompt += " [{}]: ".format('/'.join(to_join))
if repeat_prompt:
redo_prompt = prompt
elif not redo_prompt:
redo_prompt = "Incorrect input, please choose from {}: " \
"".format(str(check))
if callable(check):
def _checker(r): return check(r)
elif isinstance(check, tuple):
def _checker(r): return r in check
else:
raise ValueError(RESPONSES_ERROR.format(type(check)))
response = input(prompt)
while not _checker(response):
print(response, type(response))
response = input(redo_prompt if redo_prompt else prompt)
return response | [
"def",
"get_input",
"(",
"prompt",
",",
"check",
",",
"*",
",",
"redo_prompt",
"=",
"None",
",",
"repeat_prompt",
"=",
"False",
")",
":",
"if",
"isinstance",
"(",
"check",
",",
"str",
")",
":",
"check",
"=",
"(",
"check",
",",
")",
"to_join",
"=",
"[",
"]",
"for",
"item",
"in",
"check",
":",
"if",
"item",
":",
"to_join",
".",
"append",
"(",
"str",
"(",
"item",
")",
")",
"else",
":",
"to_join",
".",
"append",
"(",
"\"''\"",
")",
"prompt",
"+=",
"\" [{}]: \"",
".",
"format",
"(",
"'/'",
".",
"join",
"(",
"to_join",
")",
")",
"if",
"repeat_prompt",
":",
"redo_prompt",
"=",
"prompt",
"elif",
"not",
"redo_prompt",
":",
"redo_prompt",
"=",
"\"Incorrect input, please choose from {}: \"",
"\"\"",
".",
"format",
"(",
"str",
"(",
"check",
")",
")",
"if",
"callable",
"(",
"check",
")",
":",
"def",
"_checker",
"(",
"r",
")",
":",
"return",
"check",
"(",
"r",
")",
"elif",
"isinstance",
"(",
"check",
",",
"tuple",
")",
":",
"def",
"_checker",
"(",
"r",
")",
":",
"return",
"r",
"in",
"check",
"else",
":",
"raise",
"ValueError",
"(",
"RESPONSES_ERROR",
".",
"format",
"(",
"type",
"(",
"check",
")",
")",
")",
"response",
"=",
"input",
"(",
"prompt",
")",
"while",
"not",
"_checker",
"(",
"response",
")",
":",
"print",
"(",
"response",
",",
"type",
"(",
"response",
")",
")",
"response",
"=",
"input",
"(",
"redo_prompt",
"if",
"redo_prompt",
"else",
"prompt",
")",
"return",
"response"
] | Ask the user to input something on the terminal level, check their response
and ask again if they didn't answer correctly | [
"Ask",
"the",
"user",
"to",
"input",
"something",
"on",
"the",
"terminal",
"level",
"check",
"their",
"response",
"and",
"ask",
"again",
"if",
"they",
"didn",
"t",
"answer",
"correctly"
] | 9fb0bd77af594008aa6ee8af460aa8c953abf5bc | https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/tools.py#L121-L155 | train |
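get_input is interactive, so the sketch below only makes sense in a console session; the prompt text is invented and the import path is assumed from the record. With a tuple check, only the listed answers are accepted and anything else re-prompts.

    from cyther.tools import get_input

    # accepts exactly 'y', 'n', or an empty string (shown as '' in the prompt)
    answer = get_input("Overwrite the existing build?", ('y', 'n', ''))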
nickpandolfi/Cyther | cyther/tools.py | get_choice | def get_choice(prompt, choices):
"""
Asks for a single choice out of multiple items.
Given those items, and a prompt to ask the user with
"""
print()
checker = []
for offset, choice in enumerate(choices):
number = offset + 1
print("\t{}): '{}'\n".format(number, choice))
checker.append(str(number))
response = get_input(prompt, tuple(checker) + ('',))
if not response:
print("Exiting...")
exit()
offset = int(response) - 1
selected = choices[offset]
return selected | python | def get_choice(prompt, choices):
"""
Asks for a single choice out of multiple items.
Given those items, and a prompt to ask the user with
"""
print()
checker = []
for offset, choice in enumerate(choices):
number = offset + 1
print("\t{}): '{}'\n".format(number, choice))
checker.append(str(number))
response = get_input(prompt, tuple(checker) + ('',))
if not response:
print("Exiting...")
exit()
offset = int(response) - 1
selected = choices[offset]
return selected | [
"def",
"get_choice",
"(",
"prompt",
",",
"choices",
")",
":",
"print",
"(",
")",
"checker",
"=",
"[",
"]",
"for",
"offset",
",",
"choice",
"in",
"enumerate",
"(",
"choices",
")",
":",
"number",
"=",
"offset",
"+",
"1",
"print",
"(",
"\"\\t{}): '{}'\\n\"",
".",
"format",
"(",
"number",
",",
"choice",
")",
")",
"checker",
".",
"append",
"(",
"str",
"(",
"number",
")",
")",
"response",
"=",
"get_input",
"(",
"prompt",
",",
"tuple",
"(",
"checker",
")",
"+",
"(",
"''",
",",
")",
")",
"if",
"not",
"response",
":",
"print",
"(",
"\"Exiting...\"",
")",
"exit",
"(",
")",
"offset",
"=",
"int",
"(",
"response",
")",
"-",
"1",
"selected",
"=",
"choices",
"[",
"offset",
"]",
"return",
"selected"
] | Asks for a single choice out of multiple items.
Given those items, and a prompt to ask the user with | [
"Asks",
"for",
"a",
"single",
"choice",
"out",
"of",
"multiple",
"items",
".",
"Given",
"those",
"items",
"and",
"a",
"prompt",
"to",
"ask",
"the",
"user",
"with"
] | 9fb0bd77af594008aa6ee8af460aa8c953abf5bc | https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/tools.py#L158-L178 | train |
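get_choice layers a numbered menu on top of get_input and returns the chosen item itself; an empty answer exits the process. Another interactive sketch with made-up option names:

    from cyther.tools import get_choice

    compiler = get_choice("Which compiler should Cyther use?",
                          ['gcc', 'clang', 'msvc'])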
nickpandolfi/Cyther | cyther/tools.py | generateBatches | def generateBatches(tasks, givens):
"""
A function to generate a batch of commands to run in a specific order as to
meet all the dependencies for each command. For example, the commands with
no dependencies are run first, and the commands with the most deep
dependencies are run last
"""
_removeGivensFromTasks(tasks, givens)
batches = []
while tasks:
batch = set()
for task, dependencies in tasks.items():
if not dependencies:
batch.add(task)
if not batch:
_batchErrorProcessing(tasks)
for task in batch:
del tasks[task]
for task, dependencies in tasks.items():
for item in batch:
if item in dependencies:
tasks[task].remove(item)
batches.append(batch)
return batches | python | def generateBatches(tasks, givens):
"""
A function to generate a batch of commands to run in a specific order as to
meet all the dependencies for each command. For example, the commands with
no dependencies are run first, and the commands with the most deep
dependencies are run last
"""
_removeGivensFromTasks(tasks, givens)
batches = []
while tasks:
batch = set()
for task, dependencies in tasks.items():
if not dependencies:
batch.add(task)
if not batch:
_batchErrorProcessing(tasks)
for task in batch:
del tasks[task]
for task, dependencies in tasks.items():
for item in batch:
if item in dependencies:
tasks[task].remove(item)
batches.append(batch)
return batches | [
"def",
"generateBatches",
"(",
"tasks",
",",
"givens",
")",
":",
"_removeGivensFromTasks",
"(",
"tasks",
",",
"givens",
")",
"batches",
"=",
"[",
"]",
"while",
"tasks",
":",
"batch",
"=",
"set",
"(",
")",
"for",
"task",
",",
"dependencies",
"in",
"tasks",
".",
"items",
"(",
")",
":",
"if",
"not",
"dependencies",
":",
"batch",
".",
"add",
"(",
"task",
")",
"if",
"not",
"batch",
":",
"_batchErrorProcessing",
"(",
"tasks",
")",
"for",
"task",
"in",
"batch",
":",
"del",
"tasks",
"[",
"task",
"]",
"for",
"task",
",",
"dependencies",
"in",
"tasks",
".",
"items",
"(",
")",
":",
"for",
"item",
"in",
"batch",
":",
"if",
"item",
"in",
"dependencies",
":",
"tasks",
"[",
"task",
"]",
".",
"remove",
"(",
"item",
")",
"batches",
".",
"append",
"(",
"batch",
")",
"return",
"batches"
] | A function to generate a batch of commands to run in a specific order as to
meet all the dependencies for each command. For example, the commands with
no dependencies are run first, and the commands with the most deep
dependencies are run last | [
"A",
"function",
"to",
"generate",
"a",
"batch",
"of",
"commands",
"to",
"run",
"in",
"a",
"specific",
"order",
"as",
"to",
"meet",
"all",
"the",
"dependencies",
"for",
"each",
"command",
".",
"For",
"example",
"the",
"commands",
"with",
"no",
"dependencies",
"are",
"run",
"first",
"and",
"the",
"commands",
"with",
"the",
"most",
"deep",
"dependencies",
"are",
"run",
"last"
] | 9fb0bd77af594008aa6ee8af460aa8c953abf5bc | https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/tools.py#L191-L219 | train |
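A sketch of the dependency batching performed by generateBatches. Each task maps to the set of things it depends on, and entries in givens are assumed to be stripped from every dependency set by the helper the function calls first (that helper is not shown in the record, so this is an assumption). Note that tasks is consumed in place; the task names here are invented.

    from cyther.tools import generateBatches

    tasks = {
        'link':      {'compile_a', 'compile_b'},
        'compile_a': {'configure'},
        'compile_b': {'configure'},
        'configure': {'python'},
    }
    batches = generateBatches(tasks, {'python'})
    # expected: [{'configure'}, {'compile_a', 'compile_b'}, {'link'}]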
CenturyLinkCloud/clc-python-sdk | src/clc/APIv2/group.py | Groups.Get | def Get(self,key):
"""Get group by providing name, ID, description or other unique key.
If key is not unique and finds multiple matches only the first
will be returned
>>> clc.v2.Datacenter().Groups().Get("Default Group")
<clc.APIv2.group.Group object at 0x1065e5250>
"""
for group in self.groups:
if group.id.lower() == key.lower(): return(group)
elif group.name.lower() == key.lower(): return(group)
elif group.description.lower() == key.lower(): return(group)
raise(clc.CLCException("Group not found")) | python | def Get(self,key):
"""Get group by providing name, ID, description or other unique key.
If key is not unique and finds multiple matches only the first
will be returned
>>> clc.v2.Datacenter().Groups().Get("Default Group")
<clc.APIv2.group.Group object at 0x1065e5250>
"""
for group in self.groups:
if group.id.lower() == key.lower(): return(group)
elif group.name.lower() == key.lower(): return(group)
elif group.description.lower() == key.lower(): return(group)
raise(clc.CLCException("Group not found")) | [
"def",
"Get",
"(",
"self",
",",
"key",
")",
":",
"for",
"group",
"in",
"self",
".",
"groups",
":",
"if",
"group",
".",
"id",
".",
"lower",
"(",
")",
"==",
"key",
".",
"lower",
"(",
")",
":",
"return",
"(",
"group",
")",
"elif",
"group",
".",
"name",
".",
"lower",
"(",
")",
"==",
"key",
".",
"lower",
"(",
")",
":",
"return",
"(",
"group",
")",
"elif",
"group",
".",
"description",
".",
"lower",
"(",
")",
"==",
"key",
".",
"lower",
"(",
")",
":",
"return",
"(",
"group",
")",
"raise",
"(",
"clc",
".",
"CLCException",
"(",
"\"Group not found\"",
")",
")"
] | Get group by providing name, ID, description or other unique key.
If key is not unique and finds multiple matches only the first
will be returned
>>> clc.v2.Datacenter().Groups().Get("Default Group")
<clc.APIv2.group.Group object at 0x1065e5250> | [
"Get",
"group",
"by",
"providing",
"name",
"ID",
"description",
"or",
"other",
"unique",
"key",
"."
] | f4dba40c627cb08dd4b7d0d277e8d67578010b05 | https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/group.py#L65-L81 | train |
CenturyLinkCloud/clc-python-sdk | src/clc/APIv2/group.py | Groups.Search | def Search(self,key):
"""Search group list by providing partial name, ID, description or other key.
>>> clc.v2.Datacenter().Groups().Search("Default Group")
[<clc.APIv2.group.Group object at 0x1065b0f50>, <clc.APIv2.group.Group object at 0x1065b0d10>]
"""
results = []
for group in self.groups:
if group.id.lower().find(key.lower()) != -1: results.append(group)
elif group.name.lower().find(key.lower()) != -1: results.append(group)
elif group.description.lower().find(key.lower()) != -1: results.append(group)
return(results) | python | def Search(self,key):
"""Search group list by providing partial name, ID, description or other key.
>>> clc.v2.Datacenter().Groups().Search("Default Group")
[<clc.APIv2.group.Group object at 0x1065b0f50>, <clc.APIv2.group.Group object at 0x1065b0d10>]
"""
results = []
for group in self.groups:
if group.id.lower().find(key.lower()) != -1: results.append(group)
elif group.name.lower().find(key.lower()) != -1: results.append(group)
elif group.description.lower().find(key.lower()) != -1: results.append(group)
return(results) | [
"def",
"Search",
"(",
"self",
",",
"key",
")",
":",
"results",
"=",
"[",
"]",
"for",
"group",
"in",
"self",
".",
"groups",
":",
"if",
"group",
".",
"id",
".",
"lower",
"(",
")",
".",
"find",
"(",
"key",
".",
"lower",
"(",
")",
")",
"!=",
"-",
"1",
":",
"results",
".",
"append",
"(",
"group",
")",
"elif",
"group",
".",
"name",
".",
"lower",
"(",
")",
".",
"find",
"(",
"key",
".",
"lower",
"(",
")",
")",
"!=",
"-",
"1",
":",
"results",
".",
"append",
"(",
"group",
")",
"elif",
"group",
".",
"description",
".",
"lower",
"(",
")",
".",
"find",
"(",
"key",
".",
"lower",
"(",
")",
")",
"!=",
"-",
"1",
":",
"results",
".",
"append",
"(",
"group",
")",
"return",
"(",
"results",
")"
] | Search group list by providing partial name, ID, description or other key.
>>> clc.v2.Datacenter().Groups().Search("Default Group")
[<clc.APIv2.group.Group object at 0x1065b0f50>, <clc.APIv2.group.Group object at 0x1065b0d10>] | [
"Search",
"group",
"list",
"by",
"providing",
"partial",
"name",
"ID",
"description",
"or",
"other",
"key",
"."
] | f4dba40c627cb08dd4b7d0d277e8d67578010b05 | https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/group.py#L84-L98 | train |
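A usage sketch combining the two lookups above, following the >>> examples in their docstrings. The SetCredentials call is an assumption about the wider SDK (it does not appear in these records) and the group names are placeholders.

    import clc

    clc.v2.SetCredentials('api-key', 'api-passwd')    # assumed SDK call
    groups = clc.v2.Datacenter().Groups()             # as in the docstrings above

    default = groups.Get("Default Group")   # exact, case-insensitive match on id/name/description
    matches = groups.Search("Default")      # list of partial matches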
CenturyLinkCloud/clc-python-sdk | src/clc/APIv2/group.py | Group.GetAll | def GetAll(root_group_id,alias=None,session=None):
"""Gets a list of groups within a given account.
>>> clc.v2.Group.GetAll("wa1-4416")
[<clc.APIv2.group.Group object at 0x1065b0190>, <clc.APIv2.group.Group object at 0x1065b0dd0>]
"""
if not alias: alias = clc.v2.Account.GetAlias(session=session)
groups = []
for r in clc.v2.API.Call('GET','groups/%s/%s' % (alias,root_group_id),{}, session=session)['groups']:
groups.append(Group(id=r['id'],alias=alias,group_obj=r,session=session))
return(groups) | python | def GetAll(root_group_id,alias=None,session=None):
"""Gets a list of groups within a given account.
>>> clc.v2.Group.GetAll("wa1-4416")
[<clc.APIv2.group.Group object at 0x1065b0190>, <clc.APIv2.group.Group object at 0x1065b0dd0>]
"""
if not alias: alias = clc.v2.Account.GetAlias(session=session)
groups = []
for r in clc.v2.API.Call('GET','groups/%s/%s' % (alias,root_group_id),{}, session=session)['groups']:
groups.append(Group(id=r['id'],alias=alias,group_obj=r,session=session))
return(groups) | [
"def",
"GetAll",
"(",
"root_group_id",
",",
"alias",
"=",
"None",
",",
"session",
"=",
"None",
")",
":",
"if",
"not",
"alias",
":",
"alias",
"=",
"clc",
".",
"v2",
".",
"Account",
".",
"GetAlias",
"(",
"session",
"=",
"session",
")",
"groups",
"=",
"[",
"]",
"for",
"r",
"in",
"clc",
".",
"v2",
".",
"API",
".",
"Call",
"(",
"'GET'",
",",
"'groups/%s/%s'",
"%",
"(",
"alias",
",",
"root_group_id",
")",
",",
"{",
"}",
",",
"session",
"=",
"session",
")",
"[",
"'groups'",
"]",
":",
"groups",
".",
"append",
"(",
"Group",
"(",
"id",
"=",
"r",
"[",
"'id'",
"]",
",",
"alias",
"=",
"alias",
",",
"group_obj",
"=",
"r",
",",
"session",
"=",
"session",
")",
")",
"return",
"(",
"groups",
")"
] | Gets a list of groups within a given account.
>>> clc.v2.Group.GetAll("wa1-4416")
[<clc.APIv2.group.Group object at 0x1065b0190>, <clc.APIv2.group.Group object at 0x1065b0dd0>] | [
"Gets",
"a",
"list",
"of",
"groups",
"within",
"a",
"given",
"account",
"."
] | f4dba40c627cb08dd4b7d0d277e8d67578010b05 | https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/group.py#L105-L118 | train |
CenturyLinkCloud/clc-python-sdk | src/clc/APIv2/group.py | Group.Refresh | def Refresh(self):
"""Reloads the group object to synchronize with cloud representation.
>>> clc.v2.Group("wa-1234").Refresh()
"""
self.dirty = False
self.data = clc.v2.API.Call('GET','groups/%s/%s' % (self.alias,self.id), session=self.session)
self.data['changeInfo']['createdDate'] = clc.v2.time_utils.ZuluTSToSeconds(self.data['changeInfo']['createdDate'])
self.data['changeInfo']['modifiedDate'] = clc.v2.time_utils.ZuluTSToSeconds(self.data['changeInfo']['modifiedDate']) | python | def Refresh(self):
"""Reloads the group object to synchronize with cloud representation.
>>> clc.v2.Group("wa-1234").Refresh()
"""
self.dirty = False
self.data = clc.v2.API.Call('GET','groups/%s/%s' % (self.alias,self.id), session=self.session)
self.data['changeInfo']['createdDate'] = clc.v2.time_utils.ZuluTSToSeconds(self.data['changeInfo']['createdDate'])
self.data['changeInfo']['modifiedDate'] = clc.v2.time_utils.ZuluTSToSeconds(self.data['changeInfo']['modifiedDate']) | [
"def",
"Refresh",
"(",
"self",
")",
":",
"self",
".",
"dirty",
"=",
"False",
"self",
".",
"data",
"=",
"clc",
".",
"v2",
".",
"API",
".",
"Call",
"(",
"'GET'",
",",
"'groups/%s/%s'",
"%",
"(",
"self",
".",
"alias",
",",
"self",
".",
"id",
")",
",",
"session",
"=",
"self",
".",
"session",
")",
"self",
".",
"data",
"[",
"'changeInfo'",
"]",
"[",
"'createdDate'",
"]",
"=",
"clc",
".",
"v2",
".",
"time_utils",
".",
"ZuluTSToSeconds",
"(",
"self",
".",
"data",
"[",
"'changeInfo'",
"]",
"[",
"'createdDate'",
"]",
")",
"self",
".",
"data",
"[",
"'changeInfo'",
"]",
"[",
"'modifiedDate'",
"]",
"=",
"clc",
".",
"v2",
".",
"time_utils",
".",
"ZuluTSToSeconds",
"(",
"self",
".",
"data",
"[",
"'changeInfo'",
"]",
"[",
"'modifiedDate'",
"]",
")"
] | Reloads the group object to synchronize with cloud representation.
>>> clc.v2.Group("wa-1234").Refresh() | [
"Reloads",
"the",
"group",
"object",
"to",
"synchronize",
"with",
"cloud",
"representation",
"."
] | f4dba40c627cb08dd4b7d0d277e8d67578010b05 | https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/group.py#L152-L163 | train |
CenturyLinkCloud/clc-python-sdk | src/clc/APIv2/group.py | Group.Defaults | def Defaults(self,key):
"""Returns default configurations for resources deployed to this group.
If specified key is not defined returns None.
# {"cpu":{"inherited":false},"memoryGB":{"inherited":false},"networkId":{"inherited":false},
# "primaryDns":{"value":"172.17.1.26","inherited":true},"secondaryDns":{"value":"172.17.1.27","inherited":true},
# "templateName":{"value":"WIN2012DTC-64","inherited":false}}
"""
if not hasattr(self,'defaults'):
self.defaults = clc.v2.API.Call('GET','groups/%s/%s/defaults' % (self.alias,self.id), session=self.session)
try:
return(self.defaults[key]['value'])
except:
return(None) | python | def Defaults(self,key):
"""Returns default configurations for resources deployed to this group.
If specified key is not defined returns None.
# {"cpu":{"inherited":false},"memoryGB":{"inherited":false},"networkId":{"inherited":false},
# "primaryDns":{"value":"172.17.1.26","inherited":true},"secondaryDns":{"value":"172.17.1.27","inherited":true},
# "templateName":{"value":"WIN2012DTC-64","inherited":false}}
"""
if not hasattr(self,'defaults'):
self.defaults = clc.v2.API.Call('GET','groups/%s/%s/defaults' % (self.alias,self.id), session=self.session)
try:
return(self.defaults[key]['value'])
except:
return(None) | [
"def",
"Defaults",
"(",
"self",
",",
"key",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'defaults'",
")",
":",
"self",
".",
"defaults",
"=",
"clc",
".",
"v2",
".",
"API",
".",
"Call",
"(",
"'GET'",
",",
"'groups/%s/%s/defaults'",
"%",
"(",
"self",
".",
"alias",
",",
"self",
".",
"id",
")",
",",
"session",
"=",
"self",
".",
"session",
")",
"try",
":",
"return",
"(",
"self",
".",
"defaults",
"[",
"key",
"]",
"[",
"'value'",
"]",
")",
"except",
":",
"return",
"(",
"None",
")"
] | Returns default configurations for resources deployed to this group.
If specified key is not defined returns None.
# {"cpu":{"inherited":false},"memoryGB":{"inherited":false},"networkId":{"inherited":false},
# "primaryDns":{"value":"172.17.1.26","inherited":true},"secondaryDns":{"value":"172.17.1.27","inherited":true},
# "templateName":{"value":"WIN2012DTC-64","inherited":false}} | [
"Returns",
"default",
"configurations",
"for",
"resources",
"deployed",
"to",
"this",
"group",
"."
] | f4dba40c627cb08dd4b7d0d277e8d67578010b05 | https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/group.py#L166-L181 | train |
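Defaults lazily fetches the group's deployment defaults once and then returns the value for a single key, or None when that key carries no value (as the commented JSON in the record shows for cpu). A sketch reusing the group id from the docstring examples, with the credentials call assumed as before:

    import clc

    clc.v2.SetCredentials('api-key', 'api-passwd')
    group = clc.v2.Group("wa1-4416")

    print(group.Defaults('primaryDns'))   # e.g. '172.17.1.26'
    print(group.Defaults('cpu'))          # None when only "inherited" is reported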
CenturyLinkCloud/clc-python-sdk | src/clc/APIv2/group.py | Group.Subgroups | def Subgroups(self):
"""Returns a Groups object containing all child groups.
>>> clc.v2.Group("wa1-4416").Subgroups()
<clc.APIv2.group.Groups object at 0x105fa27d0>
"""
return(Groups(alias=self.alias,groups_lst=self.data['groups'],session=self.session)) | python | def Subgroups(self):
"""Returns a Groups object containing all child groups.
>>> clc.v2.Group("wa1-4416").Subgroups()
<clc.APIv2.group.Groups object at 0x105fa27d0>
"""
return(Groups(alias=self.alias,groups_lst=self.data['groups'],session=self.session)) | [
"def",
"Subgroups",
"(",
"self",
")",
":",
"return",
"(",
"Groups",
"(",
"alias",
"=",
"self",
".",
"alias",
",",
"groups_lst",
"=",
"self",
".",
"data",
"[",
"'groups'",
"]",
",",
"session",
"=",
"self",
".",
"session",
")",
")"
] | Returns a Groups object containing all child groups.
>>> clc.v2.Group("wa1-4416").Subgroups()
<clc.APIv2.group.Groups object at 0x105fa27d0> | [
"Returns",
"a",
"Groups",
"object",
"containing",
"all",
"child",
"groups",
"."
] | f4dba40c627cb08dd4b7d0d277e8d67578010b05 | https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/group.py#L184-L192 | train |
CenturyLinkCloud/clc-python-sdk | src/clc/APIv2/group.py | Group.Servers | def Servers(self):
"""Returns a Servers object containing all servers within the group.
>>> clc.v2.Group("wa1-4416").Servers()
<clc.APIv2.server.Servers object at 0x1065b0f10>
"""
return(clc.v2.Servers(
alias=self.alias,
servers_lst=[obj['id'] for obj in self.data['links'] if obj['rel']=='server'],
session=self.session)) | python | def Servers(self):
"""Returns a Servers object containing all servers within the group.
>>> clc.v2.Group("wa1-4416").Servers()
<clc.APIv2.server.Servers object at 0x1065b0f10>
"""
return(clc.v2.Servers(
alias=self.alias,
servers_lst=[obj['id'] for obj in self.data['links'] if obj['rel']=='server'],
session=self.session)) | [
"def",
"Servers",
"(",
"self",
")",
":",
"return",
"(",
"clc",
".",
"v2",
".",
"Servers",
"(",
"alias",
"=",
"self",
".",
"alias",
",",
"servers_lst",
"=",
"[",
"obj",
"[",
"'id'",
"]",
"for",
"obj",
"in",
"self",
".",
"data",
"[",
"'links'",
"]",
"if",
"obj",
"[",
"'rel'",
"]",
"==",
"'server'",
"]",
",",
"session",
"=",
"self",
".",
"session",
")",
")"
] | Returns a Servers object containing all servers within the group.
>>> clc.v2.Group("wa1-4416").Servers()
<clc.APIv2.server.Servers object at 0x1065b0f10> | [
"Returns",
"a",
"Servers",
"object",
"containing",
"all",
"servers",
"within",
"the",
"group",
"."
] | f4dba40c627cb08dd4b7d0d277e8d67578010b05 | https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/group.py#L224-L235 | train |
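Subgroups and Servers can be chained to walk from a root group down to its machines; Subgroups returns the same Groups wrapper whose Get and Search appear earlier in this section. Ids are taken from the docstring examples and the credentials call is assumed.

    import clc

    clc.v2.SetCredentials('api-key', 'api-passwd')
    root = clc.v2.Group("wa1-4416")

    default_group = root.Subgroups().Get("Default Group")
    servers = default_group.Servers()   # clc.APIv2.server.Servers object, per the docstring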
CenturyLinkCloud/clc-python-sdk | src/clc/APIv1/queue.py | Queue.List | def List(type='All'):
"""List of Queued requests and their current status details.
https://t3n.zendesk.com/entries/20350251-List-Queue-Requests
:param type: list items in the queue filtered by status (All, Pending, Complete, Error)
"""
r = clc.v1.API.Call('post','Queue/ListQueueRequests',{'ItemStatusType': Queue.item_status_type_map[type] })
if int(r['StatusCode']) == 0: return(r['Requests']) | python | def List(type='All'):
"""List of Queued requests and their current status details.
https://t3n.zendesk.com/entries/20350251-List-Queue-Requests
:param type: list items in the queue filtered by status (All, Pending, Complete, Error)
"""
r = clc.v1.API.Call('post','Queue/ListQueueRequests',{'ItemStatusType': Queue.item_status_type_map[type] })
if int(r['StatusCode']) == 0: return(r['Requests']) | [
"def",
"List",
"(",
"type",
"=",
"'All'",
")",
":",
"r",
"=",
"clc",
".",
"v1",
".",
"API",
".",
"Call",
"(",
"'post'",
",",
"'Queue/ListQueueRequests'",
",",
"{",
"'ItemStatusType'",
":",
"Queue",
".",
"item_status_type_map",
"[",
"type",
"]",
"}",
")",
"if",
"int",
"(",
"r",
"[",
"'StatusCode'",
"]",
")",
"==",
"0",
":",
"return",
"(",
"r",
"[",
"'Requests'",
"]",
")"
] | List of Queued requests and their current status details.
https://t3n.zendesk.com/entries/20350251-List-Queue-Requests
:param type: list items in the queue filtered by status (All, Pending, Complete, Error) | [
"List",
"of",
"Queued",
"requests",
"and",
"their",
"current",
"status",
"details",
"."
] | f4dba40c627cb08dd4b7d0d277e8d67578010b05 | https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/queue.py#L18-L26 | train |
CenturyLinkCloud/clc-python-sdk | src/clc/APIv2/network.py | Networks._Load | def _Load(self,location):
"""Load all networks associated with the given location.
https://www.centurylinkcloud.com/api-docs/v2/#get-network-list#request
"""
# https://api.ctl.io/v2-experimental/networks/ALIAS/WA1
for network in clc.v2.API.Call('GET','/v2-experimental/networks/%s/%s' % (self.alias,location),{},session=self.session):
self.networks.append(Network(id=network['id'],alias=self.alias,network_obj=network,session=self.session)) | python | def _Load(self,location):
"""Load all networks associated with the given location.
https://www.centurylinkcloud.com/api-docs/v2/#get-network-list#request
"""
# https://api.ctl.io/v2-experimental/networks/ALIAS/WA1
for network in clc.v2.API.Call('GET','/v2-experimental/networks/%s/%s' % (self.alias,location),{},session=self.session):
self.networks.append(Network(id=network['id'],alias=self.alias,network_obj=network,session=self.session)) | [
"def",
"_Load",
"(",
"self",
",",
"location",
")",
":",
"# https://api.ctl.io/v2-experimental/networks/ALIAS/WA1",
"for",
"network",
"in",
"clc",
".",
"v2",
".",
"API",
".",
"Call",
"(",
"'GET'",
",",
"'/v2-experimental/networks/%s/%s'",
"%",
"(",
"self",
".",
"alias",
",",
"location",
")",
",",
"{",
"}",
",",
"session",
"=",
"self",
".",
"session",
")",
":",
"self",
".",
"networks",
".",
"append",
"(",
"Network",
"(",
"id",
"=",
"network",
"[",
"'id'",
"]",
",",
"alias",
"=",
"self",
".",
"alias",
",",
"network_obj",
"=",
"network",
",",
"session",
"=",
"self",
".",
"session",
")",
")"
] | Load all networks associated with the given location.
https://www.centurylinkcloud.com/api-docs/v2/#get-network-list#request | [
"Load",
"all",
"networks",
"associated",
"with",
"the",
"given",
"location",
"."
] | f4dba40c627cb08dd4b7d0d277e8d67578010b05 | https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/network.py#L51-L59 | train |
CenturyLinkCloud/clc-python-sdk | src/clc/APIv2/network.py | Networks.Get | def Get(self,key):
"""Get network by providing name, ID, or other unique key.
If key is not unique and finds multiple matches only the first
will be returned
"""
for network in self.networks:
try:
if network.id == key: return(network)
if network.name == key: return(network)
if network.cidr == key: return(network)
except:
# We ignore malformed records with missing attributes
pass | python | def Get(self,key):
"""Get network by providing name, ID, or other unique key.
If key is not unique and finds multiple matches only the first
will be returned
"""
for network in self.networks:
try:
if network.id == key: return(network)
if network.name == key: return(network)
if network.cidr == key: return(network)
except:
# We ignore malformed records with missing attributes
pass | [
"def",
"Get",
"(",
"self",
",",
"key",
")",
":",
"for",
"network",
"in",
"self",
".",
"networks",
":",
"try",
":",
"if",
"network",
".",
"id",
"==",
"key",
":",
"return",
"(",
"network",
")",
"if",
"network",
".",
"name",
"==",
"key",
":",
"return",
"(",
"network",
")",
"if",
"network",
".",
"cidr",
"==",
"key",
":",
"return",
"(",
"network",
")",
"except",
":",
"# We ignore malformed records with missing attributes",
"pass"
] | Get network by providing name, ID, or other unique key.
If key is not unique and finds multiple matches only the first
will be returned | [
"Get",
"network",
"by",
"providing",
"name",
"ID",
"or",
"other",
"unique",
"key",
"."
] | f4dba40c627cb08dd4b7d0d277e8d67578010b05 | https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/network.py#L62-L76 | train |
CenturyLinkCloud/clc-python-sdk | src/clc/APIv2/network.py | Network.Create | def Create(alias=None,location=None,session=None):
"""Claims a new network within a given account.
https://www.ctl.io/api-docs/v2/#networks-claim-network
Returns operation id and link to check status
"""
if not alias: alias = clc.v2.Account.GetAlias(session=session)
if not location: location = clc.v2.Account.GetLocation(session=session)
return clc.v2.Requests(
clc.v2.API.Call('POST','/v2-experimental/networks/%s/%s/claim' % (alias, location),session=session),
alias=alias,
session=session) | python | def Create(alias=None,location=None,session=None):
"""Claims a new network within a given account.
https://www.ctl.io/api-docs/v2/#networks-claim-network
Returns operation id and link to check status
"""
if not alias: alias = clc.v2.Account.GetAlias(session=session)
if not location: location = clc.v2.Account.GetLocation(session=session)
return clc.v2.Requests(
clc.v2.API.Call('POST','/v2-experimental/networks/%s/%s/claim' % (alias, location),session=session),
alias=alias,
session=session) | [
"def",
"Create",
"(",
"alias",
"=",
"None",
",",
"location",
"=",
"None",
",",
"session",
"=",
"None",
")",
":",
"if",
"not",
"alias",
":",
"alias",
"=",
"clc",
".",
"v2",
".",
"Account",
".",
"GetAlias",
"(",
"session",
"=",
"session",
")",
"if",
"not",
"location",
":",
"location",
"=",
"clc",
".",
"v2",
".",
"Account",
".",
"GetLocation",
"(",
"session",
"=",
"session",
")",
"return",
"clc",
".",
"v2",
".",
"Requests",
"(",
"clc",
".",
"v2",
".",
"API",
".",
"Call",
"(",
"'POST'",
",",
"'/v2-experimental/networks/%s/%s/claim'",
"%",
"(",
"alias",
",",
"location",
")",
",",
"session",
"=",
"session",
")",
",",
"alias",
"=",
"alias",
",",
"session",
"=",
"session",
")"
] | Claims a new network within a given account.
https://www.ctl.io/api-docs/v2/#networks-claim-network
Returns operation id and link to check status | [
"Claims",
"a",
"new",
"network",
"within",
"a",
"given",
"account",
"."
] | f4dba40c627cb08dd4b7d0d277e8d67578010b05 | https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/network.py#L102-L116 | train |
CenturyLinkCloud/clc-python-sdk | src/clc/APIv2/network.py | Network.Delete | def Delete(self,location=None):
"""Releases the calling network.
https://www.ctl.io/api-docs/v2/#networks-release-network
Returns a 204 and no content
"""
if not location: location = clc.v2.Account.GetLocation(session=self.session)
return clc.v2.API.Call('POST','/v2-experimental/networks/%s/%s/%s/release' % (self.alias, location, self.id),
session=self.session) | python | def Delete(self,location=None):
"""Releases the calling network.
https://www.ctl.io/api-docs/v2/#networks-release-network
Returns a 204 and no content
"""
if not location: location = clc.v2.Account.GetLocation(session=self.session)
return clc.v2.API.Call('POST','/v2-experimental/networks/%s/%s/%s/release' % (self.alias, location, self.id),
session=self.session) | [
"def",
"Delete",
"(",
"self",
",",
"location",
"=",
"None",
")",
":",
"if",
"not",
"location",
":",
"location",
"=",
"clc",
".",
"v2",
".",
"Account",
".",
"GetLocation",
"(",
"session",
"=",
"self",
".",
"session",
")",
"return",
"clc",
".",
"v2",
".",
"API",
".",
"Call",
"(",
"'POST'",
",",
"'/v2-experimental/networks/%s/%s/%s/release'",
"%",
"(",
"self",
".",
"alias",
",",
"location",
",",
"self",
".",
"id",
")",
",",
"session",
"=",
"self",
".",
"session",
")"
] | Releases the calling network.
https://www.ctl.io/api-docs/v2/#networks-release-network
Returns a 204 and no content | [
"Releases",
"the",
"calling",
"network",
"."
] | f4dba40c627cb08dd4b7d0d277e8d67578010b05 | https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/network.py#L118-L129 | train |
CenturyLinkCloud/clc-python-sdk | src/clc/APIv2/network.py | Network.Update | def Update(self,name,description=None,location=None):
"""Updates the attributes of a given Network via PUT.
https://www.ctl.io/api-docs/v2/#networks-update-network
{
"name": "VLAN for Development Servers",
"description": "Development Servers on 11.22.33.0/24"
}
Returns a 204 and no content
"""
if not location: location = clc.v2.Account.GetLocation(session=self.session)
payload = {'name': name}
payload['description'] = description if description else self.description
r = clc.v2.API.Call('PUT','/v2-experimental/networks/%s/%s/%s' % (self.alias, location, self.id), payload, session=self.session)
self.name = self.data['name'] = name
if description: self.data['description'] = description | python | def Update(self,name,description=None,location=None):
"""Updates the attributes of a given Network via PUT.
https://www.ctl.io/api-docs/v2/#networks-update-network
{
"name": "VLAN for Development Servers",
"description": "Development Servers on 11.22.33.0/24"
}
Returns a 204 and no content
"""
if not location: location = clc.v2.Account.GetLocation(session=self.session)
payload = {'name': name}
payload['description'] = description if description else self.description
r = clc.v2.API.Call('PUT','/v2-experimental/networks/%s/%s/%s' % (self.alias, location, self.id), payload, session=self.session)
self.name = self.data['name'] = name
if description: self.data['description'] = description | [
"def",
"Update",
"(",
"self",
",",
"name",
",",
"description",
"=",
"None",
",",
"location",
"=",
"None",
")",
":",
"if",
"not",
"location",
":",
"location",
"=",
"clc",
".",
"v2",
".",
"Account",
".",
"GetLocation",
"(",
"session",
"=",
"self",
".",
"session",
")",
"payload",
"=",
"{",
"'name'",
":",
"name",
"}",
"payload",
"[",
"'description'",
"]",
"=",
"description",
"if",
"description",
"else",
"self",
".",
"description",
"r",
"=",
"clc",
".",
"v2",
".",
"API",
".",
"Call",
"(",
"'PUT'",
",",
"'/v2-experimental/networks/%s/%s/%s'",
"%",
"(",
"self",
".",
"alias",
",",
"location",
",",
"self",
".",
"id",
")",
",",
"payload",
",",
"session",
"=",
"self",
".",
"session",
")",
"self",
".",
"name",
"=",
"self",
".",
"data",
"[",
"'name'",
"]",
"=",
"name",
"if",
"description",
":",
"self",
".",
"data",
"[",
"'description'",
"]",
"=",
"description"
] | Updates the attributes of a given Network via PUT.
https://www.ctl.io/api-docs/v2/#networks-update-network
{
"name": "VLAN for Development Servers",
"description": "Development Servers on 11.22.33.0/24"
}
Returns a 204 and no content | [
"Updates",
"the",
"attributes",
"of",
"a",
"given",
"Network",
"via",
"PUT",
"."
] | f4dba40c627cb08dd4b7d0d277e8d67578010b05 | https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/network.py#L131-L152 | train |
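Taken together, the network records describe a claim/update/release lifecycle. In the sketch below only Network.Create and the method signatures are grounded in the records; the SetCredentials call is an assumption, and obtaining an existing Network instance is left out, so the update/release steps are shown as comments.

    import clc

    clc.v2.SetCredentials('api-key', 'api-passwd')    # assumed SDK call

    # claim a new network in WA1; returns a clc.v2.Requests object to poll for completion
    request = clc.v2.Network.Create(alias=None, location='WA1')

    # with an existing clc.v2.Network instance `net`, the rest of the lifecycle would be:
    #   net.Update('VLAN for Development Servers',
    #              description='Development Servers on 11.22.33.0/24', location='WA1')
    #   net.Delete(location='WA1')   # release; the API answers 204 with no body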
CenturyLinkCloud/clc-python-sdk | src/clc/APIv2/network.py | Network.Refresh | def Refresh(self, location=None):
"""Reloads the network object to synchronize with cloud representation.
>>> clc.v2.Network("f58148729bd94b02ae8b652f5c5feba3").Refresh()
GET https://api.ctl.io/v2-experimental/networks/{accountAlias}/{dataCenter}/{Network}?ipAddresses=none|claimed|free|all
"""
if not location: location = clc.v2.Account.GetLocation(session=self.session)
new_object = clc.v2.API.Call('GET','/v2-experimental/networks/%s/%s/%s' % (self.alias,location,self.id), session=self.session)
if new_object:
self.name = new_object['name']
self.data = new_object | python | def Refresh(self, location=None):
"""Reloads the network object to synchronize with cloud representation.
>>> clc.v2.Network("f58148729bd94b02ae8b652f5c5feba3").Refresh()
GET https://api.ctl.io/v2-experimental/networks/{accountAlias}/{dataCenter}/{Network}?ipAddresses=none|claimed|free|all
"""
if not location: location = clc.v2.Account.GetLocation(session=self.session)
new_object = clc.v2.API.Call('GET','/v2-experimental/networks/%s/%s/%s' % (self.alias,location,self.id), session=self.session)
if new_object:
self.name = new_object['name']
self.data = new_object | [
"def",
"Refresh",
"(",
"self",
",",
"location",
"=",
"None",
")",
":",
"if",
"not",
"location",
":",
"location",
"=",
"clc",
".",
"v2",
".",
"Account",
".",
"GetLocation",
"(",
"session",
"=",
"self",
".",
"session",
")",
"new_object",
"=",
"clc",
".",
"v2",
".",
"API",
".",
"Call",
"(",
"'GET'",
",",
"'/v2-experimental/networks/%s/%s/%s'",
"%",
"(",
"self",
".",
"alias",
",",
"location",
",",
"self",
".",
"id",
")",
",",
"session",
"=",
"self",
".",
"session",
")",
"if",
"new_object",
":",
"self",
".",
"name",
"=",
"new_object",
"[",
"'name'",
"]",
"self",
".",
"data",
"=",
"new_object"
] | Reloads the network object to synchronize with cloud representation.
>>> clc.v2.Network("f58148729bd94b02ae8b652f5c5feba3").Refresh()
GET https://api.ctl.io/v2-experimental/networks/{accountAlias}/{dataCenter}/{Network}?ipAddresses=none|claimed|free|all | [
"Reloads",
"the",
"network",
"object",
"to",
"synchronize",
"with",
"cloud",
"representation",
"."
] | f4dba40c627cb08dd4b7d0d277e8d67578010b05 | https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/network.py#L154-L167 | train |
biocommons/uta-align | setup.py | install_build_requires | def install_build_requires(pkg_targets):
"""Iterate through build_requires list and pip install if package is not present
accounting for version"""
def pip_install(pkg_name, pkg_vers=None):
pkg_name_version = '%s==%s' % (pkg_name, pkg_vers) if pkg_vers else pkg_name
print '[WARNING] %s not found, attempting to install using a raw "pip install" call!' % pkg_name_version
subprocess.Popen('pip install %s' % pkg_name_version, shell=True).communicate()
def get_pkg_info(pkg):
"""Get package name and version given a build_requires element"""
pkg_name, pkg_vers = None, None
if '==' in pkg:
pkg_name, pkg_vers = pkg.split('==')
else:
pkg_name = pkg.replace('>', '').replace('<', '').split('=')[0]
return pkg_name, pkg_vers
for pkg in pkg_targets:
pkg_name, pkg_vers = get_pkg_info(pkg)
try:
pkg_name_version = '%s==%s' % (pkg_name, pkg_vers) if pkg_vers else pkg_name
if pkg_vers:
version = getattr(importlib.import_module(pkg_name), '__version__')
if version != pkg_vers:
pip_install(pkg_name, pkg_vers)
else:
importlib.import_module(pkg_name)
except ImportError:
pip_install(pkg_name, pkg_vers) | python | def install_build_requires(pkg_targets):
"""Iterate through build_requires list and pip install if package is not present
accounting for version"""
def pip_install(pkg_name, pkg_vers=None):
pkg_name_version = '%s==%s' % (pkg_name, pkg_vers) if pkg_vers else pkg_name
print '[WARNING] %s not found, attempting to install using a raw "pip install" call!' % pkg_name_version
subprocess.Popen('pip install %s' % pkg_name_version, shell=True).communicate()
def get_pkg_info(pkg):
"""Get package name and version given a build_requires element"""
pkg_name, pkg_vers = None, None
if '==' in pkg:
pkg_name, pkg_vers = pkg.split('==')
else:
pkg_name = pkg.replace('>', '').replace('<', '').split('=')[0]
return pkg_name, pkg_vers
for pkg in pkg_targets:
pkg_name, pkg_vers = get_pkg_info(pkg)
try:
pkg_name_version = '%s==%s' % (pkg_name, pkg_vers) if pkg_vers else pkg_name
if pkg_vers:
version = getattr(importlib.import_module(pkg_name), '__version__')
if version != pkg_vers:
pip_install(pkg_name, pkg_vers)
else:
importlib.import_module(pkg_name)
except ImportError:
pip_install(pkg_name, pkg_vers) | [
"def",
"install_build_requires",
"(",
"pkg_targets",
")",
":",
"def",
"pip_install",
"(",
"pkg_name",
",",
"pkg_vers",
"=",
"None",
")",
":",
"pkg_name_version",
"=",
"'%s==%s'",
"%",
"(",
"pkg_name",
",",
"pkg_vers",
")",
"if",
"pkg_vers",
"else",
"pkg_name",
"print",
"'[WARNING] %s not found, attempting to install using a raw \"pip install\" call!'",
"%",
"pkg_name_version",
"subprocess",
".",
"Popen",
"(",
"'pip install %s'",
"%",
"pkg_name_version",
",",
"shell",
"=",
"True",
")",
".",
"communicate",
"(",
")",
"def",
"get_pkg_info",
"(",
"pkg",
")",
":",
"\"\"\"Get package name and version given a build_requires element\"\"\"",
"pkg_name",
",",
"pkg_vers",
"=",
"None",
",",
"None",
"if",
"'=='",
"in",
"pkg",
":",
"pkg_name",
",",
"pkg_vers",
"=",
"pkg",
".",
"split",
"(",
"'=='",
")",
"else",
":",
"pkg_name",
"=",
"pkg",
".",
"replace",
"(",
"'>'",
",",
"''",
")",
".",
"replace",
"(",
"'<'",
",",
"''",
")",
".",
"split",
"(",
"'='",
")",
"[",
"0",
"]",
"return",
"pkg_name",
",",
"pkg_vers",
"for",
"pkg",
"in",
"pkg_targets",
":",
"pkg_name",
",",
"pkg_vers",
"=",
"get_pkg_info",
"(",
"pkg",
")",
"try",
":",
"pkg_name_version",
"=",
"'%s==%s'",
"%",
"(",
"pkg_name",
",",
"pkg_vers",
")",
"if",
"pkg_vers",
"else",
"pkg_name",
"if",
"pkg_vers",
":",
"version",
"=",
"getattr",
"(",
"importlib",
".",
"import_module",
"(",
"pkg_name",
")",
",",
"'__version__'",
")",
"if",
"version",
"!=",
"pkg_vers",
":",
"pip_install",
"(",
"pkg_name",
",",
"pkg_vers",
")",
"else",
":",
"importlib",
".",
"import_module",
"(",
"pkg_name",
")",
"except",
"ImportError",
":",
"pip_install",
"(",
"pkg_name",
",",
"pkg_vers",
")"
] | Iterate through build_requires list and pip install if package is not present
accounting for version | [
"Iterate",
"through",
"build_requires",
"list",
"and",
"pip",
"install",
"if",
"package",
"is",
"not",
"present",
"accounting",
"for",
"version"
] | c3af0a293bb6ce7d241a8ec9843e4a6bb18d751f | https://github.com/biocommons/uta-align/blob/c3af0a293bb6ce7d241a8ec9843e4a6bb18d751f/setup.py#L32-L61 | train |
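The nested get_pkg_info helper above does the version-spec parsing for install_build_requires. Because it is defined inside the function, the sketch re-implements it inline to show the behaviour on a pinned and an unpinned requirement; the package names are arbitrary.

    def get_pkg_info(pkg):
        # mirrors the nested helper in install_build_requires above
        if '==' in pkg:
            name, version = pkg.split('==')
            return name, version
        return pkg.replace('>', '').replace('<', '').split('=')[0], None

    assert get_pkg_info('cython==0.29.36') == ('cython', '0.29.36')
    assert get_pkg_info('numpy>=1.21') == ('numpy', None)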
nickpandolfi/Cyther | cyther/processing.py | initiateCompilation | def initiateCompilation(args, file):
"""
Starts the entire compilation procedure
"""
####commands = finalizeCommands(args, file)
commands = makeCommands(0, file)
if not args['concise'] and args['print_args']:
print_commands = bool(args['watch'])
response = multiCall(*commands, print_commands=print_commands)
return response | python | def initiateCompilation(args, file):
"""
Starts the entire compilation procedure
"""
####commands = finalizeCommands(args, file)
commands = makeCommands(0, file)
if not args['concise'] and args['print_args']:
print_commands = bool(args['watch'])
response = multiCall(*commands, print_commands=print_commands)
return response | [
"def",
"initiateCompilation",
"(",
"args",
",",
"file",
")",
":",
"####commands = finalizeCommands(args, file)",
"commands",
"=",
"makeCommands",
"(",
"0",
",",
"file",
")",
"if",
"not",
"args",
"[",
"'concise'",
"]",
"and",
"args",
"[",
"'print_args'",
"]",
":",
"print_commands",
"=",
"bool",
"(",
"args",
"[",
"'watch'",
"]",
")",
"response",
"=",
"multiCall",
"(",
"*",
"commands",
",",
"print_commands",
"=",
"print_commands",
")",
"return",
"response"
] | Starts the entire compilation procedure | [
"Starts",
"the",
"entire",
"compilation",
"procedure"
] | 9fb0bd77af594008aa6ee8af460aa8c953abf5bc | https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/processing.py#L24-L33 | train |
nickpandolfi/Cyther | cyther/processing.py | run | def run(path, timer=False, repeat=3, number=10000, precision=2):
"""
Extracts and runs the '@cyther' code from the file given by 'path'
"""
code = extractAtCyther(path)
if not code:
output = "There was no '@cyther' code collected from the " \
"file '{}'\n".format(path)
# TODO This should use a result, right?
return {'returncode': 0, 'output': output}
module_directory = os.path.dirname(path)
module_name = os.path.splitext(os.path.basename(path))[0]
setup_string = SETUP_TEMPLATE.format(module_directory, module_name, '{}')
if timer:
string = TIMER_TEMPLATE.format(setup_string, code, repeat,
number, precision, '{}')
else:
string = setup_string + code
script = os.path.join(os.path.dirname(__file__), 'script.py')
with open(script, 'w+') as file:
file.write(string)
response = call(['python', script])
return response | python | def run(path, timer=False, repeat=3, number=10000, precision=2):
"""
Extracts and runs the '@cyther' code from the file given by 'path'
"""
code = extractAtCyther(path)
if not code:
output = "There was no '@cyther' code collected from the " \
"file '{}'\n".format(path)
# TODO This should use a result, right?
return {'returncode': 0, 'output': output}
module_directory = os.path.dirname(path)
module_name = os.path.splitext(os.path.basename(path))[0]
setup_string = SETUP_TEMPLATE.format(module_directory, module_name, '{}')
if timer:
string = TIMER_TEMPLATE.format(setup_string, code, repeat,
number, precision, '{}')
else:
string = setup_string + code
script = os.path.join(os.path.dirname(__file__), 'script.py')
with open(script, 'w+') as file:
file.write(string)
response = call(['python', script])
return response | [
"def",
"run",
"(",
"path",
",",
"timer",
"=",
"False",
",",
"repeat",
"=",
"3",
",",
"number",
"=",
"10000",
",",
"precision",
"=",
"2",
")",
":",
"code",
"=",
"extractAtCyther",
"(",
"path",
")",
"if",
"not",
"code",
":",
"output",
"=",
"\"There was no '@cyther' code collected from the \"",
"\"file '{}'\\n\"",
".",
"format",
"(",
"path",
")",
"# TODO This should use a result, right?",
"return",
"{",
"'returncode'",
":",
"0",
",",
"'output'",
":",
"output",
"}",
"module_directory",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"path",
")",
"module_name",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"os",
".",
"path",
".",
"basename",
"(",
"path",
")",
")",
"[",
"0",
"]",
"setup_string",
"=",
"SETUP_TEMPLATE",
".",
"format",
"(",
"module_directory",
",",
"module_name",
",",
"'{}'",
")",
"if",
"timer",
":",
"string",
"=",
"TIMER_TEMPLATE",
".",
"format",
"(",
"setup_string",
",",
"code",
",",
"repeat",
",",
"number",
",",
"precision",
",",
"'{}'",
")",
"else",
":",
"string",
"=",
"setup_string",
"+",
"code",
"script",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
",",
"'script.py'",
")",
"with",
"open",
"(",
"script",
",",
"'w+'",
")",
"as",
"file",
":",
"file",
".",
"write",
"(",
"string",
")",
"response",
"=",
"call",
"(",
"[",
"'python'",
",",
"script",
"]",
")",
"return",
"response"
] | Extracts and runs the '@cyther' code from the file given by 'path' | [
"Extracts",
"and",
"runs",
"the"
] | 9fb0bd77af594008aa6ee8af460aa8c953abf5bc | https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/processing.py#L140-L166 | train |
nickpandolfi/Cyther | cyther/processing.py | core | def core(args):
"""
The heart of Cyther, this function controls the main loop, and can be
used to perform any Cyther action. You can call if using Cyther
from the module level
"""
args = furtherArgsProcessing(args)
numfiles = len(args['filenames'])
interval = INTERVAL / numfiles
files = processFiles(args)
while True:
for file in files:
cytherize(args, file)
if not args['watch']:
break
else:
time.sleep(interval) | python | def core(args):
"""
The heart of Cyther, this function controls the main loop, and can be
used to perform any Cyther action. You can call if using Cyther
from the module level
"""
args = furtherArgsProcessing(args)
numfiles = len(args['filenames'])
interval = INTERVAL / numfiles
files = processFiles(args)
while True:
for file in files:
cytherize(args, file)
if not args['watch']:
break
else:
time.sleep(interval) | [
"def",
"core",
"(",
"args",
")",
":",
"args",
"=",
"furtherArgsProcessing",
"(",
"args",
")",
"numfiles",
"=",
"len",
"(",
"args",
"[",
"'filenames'",
"]",
")",
"interval",
"=",
"INTERVAL",
"/",
"numfiles",
"files",
"=",
"processFiles",
"(",
"args",
")",
"while",
"True",
":",
"for",
"file",
"in",
"files",
":",
"cytherize",
"(",
"args",
",",
"file",
")",
"if",
"not",
"args",
"[",
"'watch'",
"]",
":",
"break",
"else",
":",
"time",
".",
"sleep",
"(",
"interval",
")"
] | The heart of Cyther, this function controls the main loop, and can be
used to perform any Cyther action. You can call if using Cyther
from the module level | [
"The",
"heart",
"of",
"Cyther",
"this",
"function",
"controls",
"the",
"main",
"loop",
"and",
"can",
"be",
"used",
"to",
"perform",
"any",
"Cyther",
"action",
".",
"You",
"can",
"call",
"if",
"using",
"Cyther",
"from",
"the",
"module",
"level"
] | 9fb0bd77af594008aa6ee8af460aa8c953abf5bc | https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/processing.py#L169-L186 | train |
cltl/KafNafParserPy | KafNafParserPy/header_data.py | Clp.set_timestamp | def set_timestamp(self,timestamp=None):
"""
Set the timestamp of the linguistic processor, set to None for the current time
@type timestamp:string
@param timestamp: version of the linguistic processor
"""
if timestamp is None:
import time
timestamp = time.strftime('%Y-%m-%dT%H:%M:%S%Z')
self.node.set('timestamp',timestamp) | python | def set_timestamp(self,timestamp=None):
"""
Set the timestamp of the linguistic processor, set to None for the current time
@type timestamp:string
@param timestamp: version of the linguistic processor
"""
if timestamp is None:
import time
timestamp = time.strftime('%Y-%m-%dT%H:%M:%S%Z')
self.node.set('timestamp',timestamp) | [
"def",
"set_timestamp",
"(",
"self",
",",
"timestamp",
"=",
"None",
")",
":",
"if",
"timestamp",
"is",
"None",
":",
"import",
"time",
"timestamp",
"=",
"time",
".",
"strftime",
"(",
"'%Y-%m-%dT%H:%M:%S%Z'",
")",
"self",
".",
"node",
".",
"set",
"(",
"'timestamp'",
",",
"timestamp",
")"
] | Set the timestamp of the linguistic processor, set to None for the current time
@type timestamp:string
@param timestamp: version of the linguistic processor | [
"Set",
"the",
"timestamp",
"of",
"the",
"linguistic",
"processor",
"set",
"to",
"None",
"for",
"the",
"current",
"time"
] | 9bc32e803c176404b255ba317479b8780ed5f569 | https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/header_data.py#L332-L341 | train |
cltl/KafNafParserPy | KafNafParserPy/header_data.py | Clp.set_beginTimestamp | def set_beginTimestamp(self,btimestamp=None):
"""
Set the begin timestamp of the linguistic processor, set to None for the current time
@type btimestamp: string
@param btimestamp: version of the linguistic processor
"""
if btimestamp is None:
import time
btimestamp = time.strftime('%Y-%m-%dT%H:%M:%S%Z')
self.node.set('beginTimestamp',btimestamp) | python | def set_beginTimestamp(self,btimestamp=None):
"""
Set the begin timestamp of the linguistic processor, set to None for the current time
@type btimestamp: string
@param btimestamp: version of the linguistic processor
"""
if btimestamp is None:
import time
btimestamp = time.strftime('%Y-%m-%dT%H:%M:%S%Z')
self.node.set('beginTimestamp',btimestamp) | [
"def",
"set_beginTimestamp",
"(",
"self",
",",
"btimestamp",
"=",
"None",
")",
":",
"if",
"btimestamp",
"is",
"None",
":",
"import",
"time",
"btimestamp",
"=",
"time",
".",
"strftime",
"(",
"'%Y-%m-%dT%H:%M:%S%Z'",
")",
"self",
".",
"node",
".",
"set",
"(",
"'beginTimestamp'",
",",
"btimestamp",
")"
] | Set the begin timestamp of the linguistic processor, set to None for the current time
@type btimestamp: string
@param btimestamp: version of the linguistic processor | [
"Set",
"the",
"begin",
"timestamp",
"of",
"the",
"linguistic",
"processor",
"set",
"to",
"None",
"for",
"the",
"current",
"time"
] | 9bc32e803c176404b255ba317479b8780ed5f569 | https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/header_data.py#L352-L361 | train |
cltl/KafNafParserPy | KafNafParserPy/header_data.py | Clp.set_endTimestamp | def set_endTimestamp(self,etimestamp=None):
"""
Set the end timestamp of the linguistic processor, set to None for the current time
@type etimestamp: string
@param etimestamp: version of the linguistic processor
"""
if etimestamp is None:
import time
etimestamp = time.strftime('%Y-%m-%dT%H:%M:%S%Z')
self.node.set('endTimestamp',etimestamp) | python | def set_endTimestamp(self,etimestamp=None):
"""
Set the end timestamp of the linguistic processor, set to None for the current time
@type etimestamp: string
@param etimestamp: version of the linguistic processor
"""
if etimestamp is None:
import time
etimestamp = time.strftime('%Y-%m-%dT%H:%M:%S%Z')
self.node.set('endTimestamp',etimestamp) | [
"def",
"set_endTimestamp",
"(",
"self",
",",
"etimestamp",
"=",
"None",
")",
":",
"if",
"etimestamp",
"is",
"None",
":",
"import",
"time",
"etimestamp",
"=",
"time",
".",
"strftime",
"(",
"'%Y-%m-%dT%H:%M:%S%Z'",
")",
"self",
".",
"node",
".",
"set",
"(",
"'endTimestamp'",
",",
"etimestamp",
")"
] | Set the end timestamp of the linguistic processor, set to None for the current time
@type etimestamp: string
@param etimestamp: version of the linguistic processor | [
"Set",
"the",
"end",
"timestamp",
"of",
"the",
"linguistic",
"processor",
"set",
"to",
"None",
"for",
"the",
"current",
"time"
] | 9bc32e803c176404b255ba317479b8780ed5f569 | https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/header_data.py#L372-L381 | train |
cltl/KafNafParserPy | KafNafParserPy/header_data.py | CHeader.set_publicId | def set_publicId(self,publicId):
'''
Sets the publicId to the public object
@param publicId: a publicId (title of article)
@type publicId: string
'''
publicObj = self.get_public()
if publicObj is not None:
publicObj.set_publicid(publicId)
else:
publicObj = Cpublic()
publicObj.set_publicid(publicId)
self.set_public(publicObj) | python | def set_publicId(self,publicId):
'''
Sets the publicId to the public object
@param publicId: a publicId (title of article)
@type publicId: string
'''
publicObj = self.get_public()
if publicObj is not None:
publicObj.set_publicid(publicId)
else:
publicObj = Cpublic()
publicObj.set_publicid(publicId)
self.set_public(publicObj) | [
"def",
"set_publicId",
"(",
"self",
",",
"publicId",
")",
":",
"publicObj",
"=",
"self",
".",
"get_public",
"(",
")",
"if",
"publicObj",
"is",
"not",
"None",
":",
"publicObj",
".",
"set_publicid",
"(",
"publicId",
")",
"else",
":",
"publicObj",
"=",
"Cpublic",
"(",
")",
"publicObj",
".",
"set_publicid",
"(",
"publicId",
")",
"self",
".",
"set_public",
"(",
"publicObj",
")"
] | Sets the publicId to the public object
@param publicId: a publicId (title of article)
@type publicId: string | [
"Sets",
"the",
"publicId",
"to",
"the",
"public",
"object"
] | 9bc32e803c176404b255ba317479b8780ed5f569 | https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/header_data.py#L559-L571 | train |
cltl/KafNafParserPy | KafNafParserPy/header_data.py | CHeader.set_uri | def set_uri(self, uri):
'''
Sets the uri to the public object
@param uri: a uri
@type uri: string
'''
publicObj = self.get_public()
if publicObj is not None:
publicObj.set_uri(uri)
else:
publicObj = Cpublic()
publicObj.set_uri(uri)
self.set_public(publicObj) | python | def set_uri(self, uri):
'''
Sets the uri to the public object
@param uri: a uri
@type uri: string
'''
publicObj = self.get_public()
if publicObj is not None:
publicObj.set_uri(uri)
else:
publicObj = Cpublic()
publicObj.set_uri(uri)
self.set_public(publicObj) | [
"def",
"set_uri",
"(",
"self",
",",
"uri",
")",
":",
"publicObj",
"=",
"self",
".",
"get_public",
"(",
")",
"if",
"publicObj",
"is",
"not",
"None",
":",
"publicObj",
".",
"set_uri",
"(",
"uri",
")",
"else",
":",
"publicObj",
"=",
"Cpublic",
"(",
")",
"publicObj",
".",
"set_uri",
"(",
"uri",
")",
"self",
".",
"set_public",
"(",
"publicObj",
")"
] | Sets the uri to the public object
@param uri: a uri
@type uri: string | [
"Sets",
"the",
"uri",
"to",
"the",
"public",
"object"
] | 9bc32e803c176404b255ba317479b8780ed5f569 | https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/header_data.py#L585-L597 | train |
cltl/KafNafParserPy | KafNafParserPy/header_data.py | CHeader.remove_lp | def remove_lp(self,layer):
"""
Removes the linguistic processors for a given layer
@type layer: string
@param layer: the name of the layer
"""
for this_node in self.node.findall('linguisticProcessors'):
if this_node.get('layer') == layer:
self.node.remove(this_node)
break | python | def remove_lp(self,layer):
"""
Removes the linguistic processors for a given layer
@type layer: string
@param layer: the name of the layer
"""
for this_node in self.node.findall('linguisticProcessors'):
if this_node.get('layer') == layer:
self.node.remove(this_node)
break | [
"def",
"remove_lp",
"(",
"self",
",",
"layer",
")",
":",
"for",
"this_node",
"in",
"self",
".",
"node",
".",
"findall",
"(",
"'linguisticProcessors'",
")",
":",
"if",
"this_node",
".",
"get",
"(",
"'layer'",
")",
"==",
"layer",
":",
"self",
".",
"node",
".",
"remove",
"(",
"this_node",
")",
"break"
] | Removes the linguistic processors for a given layer
@type layer: string
@param layer: the name of the layer | [
"Removes",
"the",
"linguistic",
"processors",
"for",
"a",
"given",
"layer"
] | 9bc32e803c176404b255ba317479b8780ed5f569 | https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/header_data.py#L620-L629 | train |
cltl/KafNafParserPy | KafNafParserPy/header_data.py | CHeader.add_linguistic_processor | def add_linguistic_processor(self, layer ,my_lp):
"""
Adds a linguistic processor to a certain layer
@type layer: string
@param layer: the name of the layer
@type my_lp: L{Clp}
@param my_lp: the linguistic processor
"""
## Locate the linguisticProcessor element for taht layer
found_lp_obj = None
for this_lp in self.node.findall('linguisticProcessors'):
lp_obj = ClinguisticProcessors(this_lp)
if lp_obj.get_layer() == layer:
found_lp_obj = lp_obj
break
if found_lp_obj is None: #Not found
found_lp_obj = ClinguisticProcessors()
found_lp_obj.set_layer(layer)
self.add_linguistic_processors(found_lp_obj)
found_lp_obj.add_linguistic_processor(my_lp) | python | def add_linguistic_processor(self, layer ,my_lp):
"""
Adds a linguistic processor to a certain layer
@type layer: string
@param layer: the name of the layer
@type my_lp: L{Clp}
@param my_lp: the linguistic processor
"""
## Locate the linguisticProcessor element for taht layer
found_lp_obj = None
for this_lp in self.node.findall('linguisticProcessors'):
lp_obj = ClinguisticProcessors(this_lp)
if lp_obj.get_layer() == layer:
found_lp_obj = lp_obj
break
if found_lp_obj is None: #Not found
found_lp_obj = ClinguisticProcessors()
found_lp_obj.set_layer(layer)
self.add_linguistic_processors(found_lp_obj)
found_lp_obj.add_linguistic_processor(my_lp) | [
"def",
"add_linguistic_processor",
"(",
"self",
",",
"layer",
",",
"my_lp",
")",
":",
"## Locate the linguisticProcessor element for taht layer",
"found_lp_obj",
"=",
"None",
"for",
"this_lp",
"in",
"self",
".",
"node",
".",
"findall",
"(",
"'linguisticProcessors'",
")",
":",
"lp_obj",
"=",
"ClinguisticProcessors",
"(",
"this_lp",
")",
"if",
"lp_obj",
".",
"get_layer",
"(",
")",
"==",
"layer",
":",
"found_lp_obj",
"=",
"lp_obj",
"break",
"if",
"found_lp_obj",
"is",
"None",
":",
"#Not found",
"found_lp_obj",
"=",
"ClinguisticProcessors",
"(",
")",
"found_lp_obj",
".",
"set_layer",
"(",
"layer",
")",
"self",
".",
"add_linguistic_processors",
"(",
"found_lp_obj",
")",
"found_lp_obj",
".",
"add_linguistic_processor",
"(",
"my_lp",
")"
] | Adds a linguistic processor to a certain layer
@type layer: string
@param layer: the name of the layer
@type my_lp: L{Clp}
@param my_lp: the linguistic processor | [
"Adds",
"a",
"linguistic",
"processor",
"to",
"a",
"certain",
"layer"
] | 9bc32e803c176404b255ba317479b8780ed5f569 | https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/header_data.py#L631-L652 | train |
cltl/KafNafParserPy | KafNafParserPy/header_data.py | CHeader.get_fileDesc | def get_fileDesc(self):
'''
Returns the fileDesc object or None if there is no such element
@return: the fileDesc object
@rtype: L{CfileDesc}
'''
node = self.node.find('fileDesc')
if node is not None:
return CfileDesc(node=node)
else:
return None | python | def get_fileDesc(self):
'''
Returns the fileDesc object or None if there is no such element
@return: the fileDesc object
@rtype: L{CfileDesc}
'''
node = self.node.find('fileDesc')
if node is not None:
return CfileDesc(node=node)
else:
return None | [
"def",
"get_fileDesc",
"(",
"self",
")",
":",
"node",
"=",
"self",
".",
"node",
".",
"find",
"(",
"'fileDesc'",
")",
"if",
"node",
"is",
"not",
"None",
":",
"return",
"CfileDesc",
"(",
"node",
"=",
"node",
")",
"else",
":",
"return",
"None"
] | Returns the fileDesc object or None if there is no such element
@return: the fileDesc object
@rtype: L{CfileDesc} | [
"Returns",
"the",
"fileDesc",
"object",
"or",
"None",
"if",
"there",
"is",
"no",
"such",
"element"
] | 9bc32e803c176404b255ba317479b8780ed5f569 | https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/header_data.py#L654-L664 | train |
cltl/KafNafParserPy | KafNafParserPy/header_data.py | CHeader.get_public | def get_public(self):
'''
Returns the public object or None if there is no such element
@return: the public object
@rtype: L{Cpublic}
'''
node = self.node.find('public')
if node is not None:
return Cpublic(node=node)
else:
return None | python | def get_public(self):
'''
Returns the public object or None if there is no such element
@return: the public object
@rtype: L{Cpublic}
'''
node = self.node.find('public')
if node is not None:
return Cpublic(node=node)
else:
return None | [
"def",
"get_public",
"(",
"self",
")",
":",
"node",
"=",
"self",
".",
"node",
".",
"find",
"(",
"'public'",
")",
"if",
"node",
"is",
"not",
"None",
":",
"return",
"Cpublic",
"(",
"node",
"=",
"node",
")",
"else",
":",
"return",
"None"
] | Returns the public object or None if there is no such element
@return: the public object
@rtype: L{Cpublic} | [
"Returns",
"the",
"public",
"object",
"or",
"None",
"if",
"there",
"is",
"no",
"such",
"element"
] | 9bc32e803c176404b255ba317479b8780ed5f569 | https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/header_data.py#L674-L684 | train |
CenturyLinkCloud/clc-python-sdk | src/clc/APIv1/blueprint.py | Blueprint.GetPackages | def GetPackages(classification,visibility):
"""Gets a list of Blueprint Packages filtered by classification and visibility.
https://t3n.zendesk.com/entries/20411357-Get-Packages
:param classification: package type filter (System, Script, Software)
:param visibility: package visibility filter (Public, Private, Shared)
"""
r = clc.v1.API.Call('post','Blueprint/GetPackages',
{'Classification': Blueprint.classification_stoi[classification],'Visibility': Blueprint.visibility_stoi[visibility]})
if int(r['StatusCode']) == 0: return(r['Packages']) | python | def GetPackages(classification,visibility):
"""Gets a list of Blueprint Packages filtered by classification and visibility.
https://t3n.zendesk.com/entries/20411357-Get-Packages
:param classification: package type filter (System, Script, Software)
:param visibility: package visibility filter (Public, Private, Shared)
"""
r = clc.v1.API.Call('post','Blueprint/GetPackages',
{'Classification': Blueprint.classification_stoi[classification],'Visibility': Blueprint.visibility_stoi[visibility]})
if int(r['StatusCode']) == 0: return(r['Packages']) | [
"def",
"GetPackages",
"(",
"classification",
",",
"visibility",
")",
":",
"r",
"=",
"clc",
".",
"v1",
".",
"API",
".",
"Call",
"(",
"'post'",
",",
"'Blueprint/GetPackages'",
",",
"{",
"'Classification'",
":",
"Blueprint",
".",
"classification_stoi",
"[",
"classification",
"]",
",",
"'Visibility'",
":",
"Blueprint",
".",
"visibility_stoi",
"[",
"visibility",
"]",
"}",
")",
"if",
"int",
"(",
"r",
"[",
"'StatusCode'",
"]",
")",
"==",
"0",
":",
"return",
"(",
"r",
"[",
"'Packages'",
"]",
")"
] | Gets a list of Blueprint Packages filtered by classification and visibility.
https://t3n.zendesk.com/entries/20411357-Get-Packages
:param classification: package type filter (System, Script, Software)
:param visibility: package visibility filter (Public, Private, Shared) | [
"Gets",
"a",
"list",
"of",
"Blueprint",
"Packages",
"filtered",
"by",
"classification",
"and",
"visibility",
"."
] | f4dba40c627cb08dd4b7d0d277e8d67578010b05 | https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/blueprint.py#L51-L61 | train |
CenturyLinkCloud/clc-python-sdk | src/clc/APIv1/blueprint.py | Blueprint.GetAllPackages | def GetAllPackages(classification):
"""Gets a list of all Blueprint Packages with a given classification.
https://t3n.zendesk.com/entries/20411357-Get-Packages
:param classification: package type filter (System, Script, Software)
"""
packages = []
for visibility in Blueprint.visibility_stoi.keys():
try:
for r in Blueprint.GetPackages(classification,visibility): packages.append(dict(r.items()+{'Visibility':visibility}.items()))
except:
pass
if len(packages): return(packages) | python | def GetAllPackages(classification):
"""Gets a list of all Blueprint Packages with a given classification.
https://t3n.zendesk.com/entries/20411357-Get-Packages
:param classification: package type filter (System, Script, Software)
"""
packages = []
for visibility in Blueprint.visibility_stoi.keys():
try:
for r in Blueprint.GetPackages(classification,visibility): packages.append(dict(r.items()+{'Visibility':visibility}.items()))
except:
pass
if len(packages): return(packages) | [
"def",
"GetAllPackages",
"(",
"classification",
")",
":",
"packages",
"=",
"[",
"]",
"for",
"visibility",
"in",
"Blueprint",
".",
"visibility_stoi",
".",
"keys",
"(",
")",
":",
"try",
":",
"for",
"r",
"in",
"Blueprint",
".",
"GetPackages",
"(",
"classification",
",",
"visibility",
")",
":",
"packages",
".",
"append",
"(",
"dict",
"(",
"r",
".",
"items",
"(",
")",
"+",
"{",
"'Visibility'",
":",
"visibility",
"}",
".",
"items",
"(",
")",
")",
")",
"except",
":",
"pass",
"if",
"len",
"(",
"packages",
")",
":",
"return",
"(",
"packages",
")"
] | Gets a list of all Blueprint Packages with a given classification.
https://t3n.zendesk.com/entries/20411357-Get-Packages
:param classification: package type filter (System, Script, Software) | [
"Gets",
"a",
"list",
"of",
"all",
"Blueprint",
"Packages",
"with",
"a",
"given",
"classification",
"."
] | f4dba40c627cb08dd4b7d0d277e8d67578010b05 | https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/blueprint.py#L65-L78 | train |
CenturyLinkCloud/clc-python-sdk | src/clc/APIv1/blueprint.py | Blueprint.PackageUpload | def PackageUpload(package,ftp_url):
"""Uploads specified zip package to cloud endpoint.
See URL below for description on how to properly create a package:
https://t3n.zendesk.com/entries/20348448-Blueprints-Script-and-Software-Package-Management
:param package: path to zip file containing package.manifest and supporting scripts
:param ftp_url: ftp URL including credentials of the form ftp://user:password@hostname
"""
#o = urlparse.urlparse(ftp_url)
# Very weak URL checking
#if o.scheme.lower() != "ftp":
# clc.v1.output.Status('ERROR',2,'Invalid FTP URL')
# return
# Confirm file exists
if not os.path.isfile(package):
clc.v1.output.Status('ERROR',2,'Package file (%s) not found' % (package))
return
m = re.search("ftp://(?P<user>.+?):(?P<passwd>.+?)@(?P<host>.+)",ftp_url)
try:
ftp = ftplib.FTP(m.group('host'),m.group('user'),m.group('passwd'))
file = open(package,'rb')
filename = re.sub(".*/","",package)
ftp.storbinary("STOR %s" % (filename),file)
file.close()
ftp.quit()
clc.v1.output.Status('SUCCESS',2,'Blueprint package %s Uploaded' % (filename))
except Exception as e:
clc.v1.output.Status('ERROR',2,'FTP error %s: %s' % (ftp_url,str(e)))
return({}) | python | def PackageUpload(package,ftp_url):
"""Uploads specified zip package to cloud endpoint.
See URL below for description on how to properly create a package:
https://t3n.zendesk.com/entries/20348448-Blueprints-Script-and-Software-Package-Management
:param package: path to zip file containing package.manifest and supporting scripts
:param ftp_url: ftp URL including credentials of the form ftp://user:password@hostname
"""
#o = urlparse.urlparse(ftp_url)
# Very weak URL checking
#if o.scheme.lower() != "ftp":
# clc.v1.output.Status('ERROR',2,'Invalid FTP URL')
# return
# Confirm file exists
if not os.path.isfile(package):
clc.v1.output.Status('ERROR',2,'Package file (%s) not found' % (package))
return
m = re.search("ftp://(?P<user>.+?):(?P<passwd>.+?)@(?P<host>.+)",ftp_url)
try:
ftp = ftplib.FTP(m.group('host'),m.group('user'),m.group('passwd'))
file = open(package,'rb')
filename = re.sub(".*/","",package)
ftp.storbinary("STOR %s" % (filename),file)
file.close()
ftp.quit()
clc.v1.output.Status('SUCCESS',2,'Blueprint package %s Uploaded' % (filename))
except Exception as e:
clc.v1.output.Status('ERROR',2,'FTP error %s: %s' % (ftp_url,str(e)))
return({}) | [
"def",
"PackageUpload",
"(",
"package",
",",
"ftp_url",
")",
":",
"#o = urlparse.urlparse(ftp_url)",
"# Very weak URL checking",
"#if o.scheme.lower() != \"ftp\": ",
"#\tclc.v1.output.Status('ERROR',2,'Invalid FTP URL')",
"#\treturn",
"# Confirm file exists ",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"package",
")",
":",
"clc",
".",
"v1",
".",
"output",
".",
"Status",
"(",
"'ERROR'",
",",
"2",
",",
"'Package file (%s) not found'",
"%",
"(",
"package",
")",
")",
"return",
"m",
"=",
"re",
".",
"search",
"(",
"\"ftp://(?P<user>.+?):(?P<passwd>.+?)@(?P<host>.+)\"",
",",
"ftp_url",
")",
"try",
":",
"ftp",
"=",
"ftplib",
".",
"FTP",
"(",
"m",
".",
"group",
"(",
"'host'",
")",
",",
"m",
".",
"group",
"(",
"'user'",
")",
",",
"m",
".",
"group",
"(",
"'passwd'",
")",
")",
"file",
"=",
"open",
"(",
"package",
",",
"'rb'",
")",
"filename",
"=",
"re",
".",
"sub",
"(",
"\".*/\"",
",",
"\"\"",
",",
"package",
")",
"ftp",
".",
"storbinary",
"(",
"\"STOR %s\"",
"%",
"(",
"filename",
")",
",",
"file",
")",
"file",
".",
"close",
"(",
")",
"ftp",
".",
"quit",
"(",
")",
"clc",
".",
"v1",
".",
"output",
".",
"Status",
"(",
"'SUCCESS'",
",",
"2",
",",
"'Blueprint package %s Uploaded'",
"%",
"(",
"filename",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"clc",
".",
"v1",
".",
"output",
".",
"Status",
"(",
"'ERROR'",
",",
"2",
",",
"'FTP error %s: %s'",
"%",
"(",
"ftp_url",
",",
"str",
"(",
"e",
")",
")",
")",
"return",
"(",
"{",
"}",
")"
] | Uploads specified zip package to cloud endpoint.
See URL below for description on how to properly create a package:
https://t3n.zendesk.com/entries/20348448-Blueprints-Script-and-Software-Package-Management
:param package: path to zip file containing package.manifest and supporting scripts
:param ftp_url: ftp URL including credentials of the form ftp://user:password@hostname | [
"Uploads",
"specified",
"zip",
"package",
"to",
"cloud",
"endpoint",
"."
] | f4dba40c627cb08dd4b7d0d277e8d67578010b05 | https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/blueprint.py#L107-L139 | train |
CenturyLinkCloud/clc-python-sdk | src/clc/APIv1/blueprint.py | Blueprint.PackagePublish | def PackagePublish(package,classification,visibility,os):
"""Publishes a Blueprint Package for use within the Blueprint Designer.
https://t3n.zendesk.com/entries/20426453-Publish-Package
:param package: path to zip file containing package.manifest and supporting scripts
:param classification: package type (System, Script, Software)
:param visibility: package visibility filter (Public, Private, Shared)
:param os: list of ints containing Operating System template IDs
"""
r = clc.v1.API.Call('post','Blueprint/PublishPackage',
{'Classification': Blueprint.classification_stoi[classification], 'Name': package, 'OperatingSystems': os,
'Visibility': Blueprint.visibility_stoi[visibility]})
if int(r['StatusCode']) == 0: return(r) | python | def PackagePublish(package,classification,visibility,os):
"""Publishes a Blueprint Package for use within the Blueprint Designer.
https://t3n.zendesk.com/entries/20426453-Publish-Package
:param package: path to zip file containing package.manifest and supporting scripts
:param classification: package type (System, Script, Software)
:param visibility: package visibility filter (Public, Private, Shared)
:param os: list of ints containing Operating System template IDs
"""
r = clc.v1.API.Call('post','Blueprint/PublishPackage',
{'Classification': Blueprint.classification_stoi[classification], 'Name': package, 'OperatingSystems': os,
'Visibility': Blueprint.visibility_stoi[visibility]})
if int(r['StatusCode']) == 0: return(r) | [
"def",
"PackagePublish",
"(",
"package",
",",
"classification",
",",
"visibility",
",",
"os",
")",
":",
"r",
"=",
"clc",
".",
"v1",
".",
"API",
".",
"Call",
"(",
"'post'",
",",
"'Blueprint/PublishPackage'",
",",
"{",
"'Classification'",
":",
"Blueprint",
".",
"classification_stoi",
"[",
"classification",
"]",
",",
"'Name'",
":",
"package",
",",
"'OperatingSystems'",
":",
"os",
",",
"'Visibility'",
":",
"Blueprint",
".",
"visibility_stoi",
"[",
"visibility",
"]",
"}",
")",
"if",
"int",
"(",
"r",
"[",
"'StatusCode'",
"]",
")",
"==",
"0",
":",
"return",
"(",
"r",
")"
] | Publishes a Blueprint Package for use within the Blueprint Designer.
https://t3n.zendesk.com/entries/20426453-Publish-Package
:param package: path to zip file containing package.manifest and supporting scripts
:param classification: package type (System, Script, Software)
:param visibility: package visibility filter (Public, Private, Shared)
:param os: list of ints containing Operating System template IDs | [
"Publishes",
"a",
"Blueprint",
"Package",
"for",
"use",
"within",
"the",
"Blueprint",
"Designer",
"."
] | f4dba40c627cb08dd4b7d0d277e8d67578010b05 | https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/blueprint.py#L143-L156 | train |
CenturyLinkCloud/clc-python-sdk | src/clc/APIv1/blueprint.py | Blueprint.PackagePublishUI | def PackagePublishUI(package,type,visibility):
"""Publishes a Blueprint Package for use within the Blueprint Designer after interactive OS selection.
Interactive selection of one or more operating systems by name.
:param package: path to zip file containing package.manifest and supporting scripts
:param classification: package type (System, Script, Software)
:param visibility: package visibility filter (Public, Private, Shared)
"""
# fetch OS list
linux_lst = {'L': {'selected': False, 'Description': 'All Linux'}}
windows_lst = {'W': {'selected': False, 'Description': 'All Windows'}}
for r in clc.v1.Server.GetTemplates():
r['selected'] = False
if re.search("Windows",r['Description']): windows_lst[str(r['OperatingSystem'])] = r
elif re.search("CentOS|RedHat|Ubuntu",r['Description']): linux_lst[str(r['OperatingSystem'])] = r
# Get selections
if os.name=='posix':
scr = curses.initscr()
curses.cbreak();
while True:
if os.name=='posix': c = Blueprint._DrawPublishPackageUIPosix(scr,linux_lst,windows_lst)
else: c = Blueprint._DrawPublishPackageUI(linux_lst,windows_lst)
if c.lower() == 'q': break
elif c.lower() == 'l':
for l in linux_lst: linux_lst[l]['selected'] = not linux_lst[l]['selected']
elif c.lower() == 'w':
for l in windows_lst: windows_lst[l]['selected'] = not windows_lst[l]['selected']
elif c in linux_lst: linux_lst[c]['selected'] = not linux_lst[c]['selected']
elif c in windows_lst: windows_lst[c]['selected'] = not windows_lst[c]['selected']
if os.name=='posix':
curses.nocbreak(); curses.echo(); curses.endwin()
# Extract selections
ids = []
for l in dict(linux_lst.items()+windows_lst.items()).values():
if l['selected'] and 'OperatingSystem' in l: ids.append(str(l['OperatingSystem']))
clc.v1.output.Status('SUCCESS',2,'Selected operating system IDs: %s' % (" ".join(ids)))
return(Blueprint.PackagePublish(package,type,visibility,ids)) | python | def PackagePublishUI(package,type,visibility):
"""Publishes a Blueprint Package for use within the Blueprint Designer after interactive OS selection.
Interactive selection of one or more operating systems by name.
:param package: path to zip file containing package.manifest and supporting scripts
:param classification: package type (System, Script, Software)
:param visibility: package visibility filter (Public, Private, Shared)
"""
# fetch OS list
linux_lst = {'L': {'selected': False, 'Description': 'All Linux'}}
windows_lst = {'W': {'selected': False, 'Description': 'All Windows'}}
for r in clc.v1.Server.GetTemplates():
r['selected'] = False
if re.search("Windows",r['Description']): windows_lst[str(r['OperatingSystem'])] = r
elif re.search("CentOS|RedHat|Ubuntu",r['Description']): linux_lst[str(r['OperatingSystem'])] = r
# Get selections
if os.name=='posix':
scr = curses.initscr()
curses.cbreak();
while True:
if os.name=='posix': c = Blueprint._DrawPublishPackageUIPosix(scr,linux_lst,windows_lst)
else: c = Blueprint._DrawPublishPackageUI(linux_lst,windows_lst)
if c.lower() == 'q': break
elif c.lower() == 'l':
for l in linux_lst: linux_lst[l]['selected'] = not linux_lst[l]['selected']
elif c.lower() == 'w':
for l in windows_lst: windows_lst[l]['selected'] = not windows_lst[l]['selected']
elif c in linux_lst: linux_lst[c]['selected'] = not linux_lst[c]['selected']
elif c in windows_lst: windows_lst[c]['selected'] = not windows_lst[c]['selected']
if os.name=='posix':
curses.nocbreak(); curses.echo(); curses.endwin()
# Extract selections
ids = []
for l in dict(linux_lst.items()+windows_lst.items()).values():
if l['selected'] and 'OperatingSystem' in l: ids.append(str(l['OperatingSystem']))
clc.v1.output.Status('SUCCESS',2,'Selected operating system IDs: %s' % (" ".join(ids)))
return(Blueprint.PackagePublish(package,type,visibility,ids)) | [
"def",
"PackagePublishUI",
"(",
"package",
",",
"type",
",",
"visibility",
")",
":",
"# fetch OS list",
"linux_lst",
"=",
"{",
"'L'",
":",
"{",
"'selected'",
":",
"False",
",",
"'Description'",
":",
"'All Linux'",
"}",
"}",
"windows_lst",
"=",
"{",
"'W'",
":",
"{",
"'selected'",
":",
"False",
",",
"'Description'",
":",
"'All Windows'",
"}",
"}",
"for",
"r",
"in",
"clc",
".",
"v1",
".",
"Server",
".",
"GetTemplates",
"(",
")",
":",
"r",
"[",
"'selected'",
"]",
"=",
"False",
"if",
"re",
".",
"search",
"(",
"\"Windows\"",
",",
"r",
"[",
"'Description'",
"]",
")",
":",
"windows_lst",
"[",
"str",
"(",
"r",
"[",
"'OperatingSystem'",
"]",
")",
"]",
"=",
"r",
"elif",
"re",
".",
"search",
"(",
"\"CentOS|RedHat|Ubuntu\"",
",",
"r",
"[",
"'Description'",
"]",
")",
":",
"linux_lst",
"[",
"str",
"(",
"r",
"[",
"'OperatingSystem'",
"]",
")",
"]",
"=",
"r",
"# Get selections",
"if",
"os",
".",
"name",
"==",
"'posix'",
":",
"scr",
"=",
"curses",
".",
"initscr",
"(",
")",
"curses",
".",
"cbreak",
"(",
")",
"while",
"True",
":",
"if",
"os",
".",
"name",
"==",
"'posix'",
":",
"c",
"=",
"Blueprint",
".",
"_DrawPublishPackageUIPosix",
"(",
"scr",
",",
"linux_lst",
",",
"windows_lst",
")",
"else",
":",
"c",
"=",
"Blueprint",
".",
"_DrawPublishPackageUI",
"(",
"linux_lst",
",",
"windows_lst",
")",
"if",
"c",
".",
"lower",
"(",
")",
"==",
"'q'",
":",
"break",
"elif",
"c",
".",
"lower",
"(",
")",
"==",
"'l'",
":",
"for",
"l",
"in",
"linux_lst",
":",
"linux_lst",
"[",
"l",
"]",
"[",
"'selected'",
"]",
"=",
"not",
"linux_lst",
"[",
"l",
"]",
"[",
"'selected'",
"]",
"elif",
"c",
".",
"lower",
"(",
")",
"==",
"'w'",
":",
"for",
"l",
"in",
"windows_lst",
":",
"windows_lst",
"[",
"l",
"]",
"[",
"'selected'",
"]",
"=",
"not",
"windows_lst",
"[",
"l",
"]",
"[",
"'selected'",
"]",
"elif",
"c",
"in",
"linux_lst",
":",
"linux_lst",
"[",
"c",
"]",
"[",
"'selected'",
"]",
"=",
"not",
"linux_lst",
"[",
"c",
"]",
"[",
"'selected'",
"]",
"elif",
"c",
"in",
"windows_lst",
":",
"windows_lst",
"[",
"c",
"]",
"[",
"'selected'",
"]",
"=",
"not",
"windows_lst",
"[",
"c",
"]",
"[",
"'selected'",
"]",
"if",
"os",
".",
"name",
"==",
"'posix'",
":",
"curses",
".",
"nocbreak",
"(",
")",
"curses",
".",
"echo",
"(",
")",
"curses",
".",
"endwin",
"(",
")",
"# Extract selections",
"ids",
"=",
"[",
"]",
"for",
"l",
"in",
"dict",
"(",
"linux_lst",
".",
"items",
"(",
")",
"+",
"windows_lst",
".",
"items",
"(",
")",
")",
".",
"values",
"(",
")",
":",
"if",
"l",
"[",
"'selected'",
"]",
"and",
"'OperatingSystem'",
"in",
"l",
":",
"ids",
".",
"append",
"(",
"str",
"(",
"l",
"[",
"'OperatingSystem'",
"]",
")",
")",
"clc",
".",
"v1",
".",
"output",
".",
"Status",
"(",
"'SUCCESS'",
",",
"2",
",",
"'Selected operating system IDs: %s'",
"%",
"(",
"\" \"",
".",
"join",
"(",
"ids",
")",
")",
")",
"return",
"(",
"Blueprint",
".",
"PackagePublish",
"(",
"package",
",",
"type",
",",
"visibility",
",",
"ids",
")",
")"
] | Publishes a Blueprint Package for use within the Blueprint Designer after interactive OS selection.
Interactive selection of one or more operating systems by name.
:param package: path to zip file containing package.manifest and supporting scripts
:param classification: package type (System, Script, Software)
:param visibility: package visibility filter (Public, Private, Shared) | [
"Publishes",
"a",
"Blueprint",
"Package",
"for",
"use",
"within",
"the",
"Blueprint",
"Designer",
"after",
"interactive",
"OS",
"selection",
"."
] | f4dba40c627cb08dd4b7d0d277e8d67578010b05 | https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/blueprint.py#L214-L255 | train |
srossross/rpmfile | rpmfile/__init__.py | RPMFile.getmembers | def getmembers(self):
'''
Return the members of the archive as a list of RPMInfo objects. The
list has the same order as the members in the archive.
'''
if self._members is None:
self._members = _members = []
g = self.data_file
magic = g.read(2)
while magic:
if magic == b'07':
magic += g.read(4)
member = RPMInfo._read(magic, g)
if member.name == 'TRAILER!!!':
break
if not member.isdir:
_members.append(member)
magic = g.read(2)
return _members
return self._members | python | def getmembers(self):
'''
Return the members of the archive as a list of RPMInfo objects. The
list has the same order as the members in the archive.
'''
if self._members is None:
self._members = _members = []
g = self.data_file
magic = g.read(2)
while magic:
if magic == b'07':
magic += g.read(4)
member = RPMInfo._read(magic, g)
if member.name == 'TRAILER!!!':
break
if not member.isdir:
_members.append(member)
magic = g.read(2)
return _members
return self._members | [
"def",
"getmembers",
"(",
"self",
")",
":",
"if",
"self",
".",
"_members",
"is",
"None",
":",
"self",
".",
"_members",
"=",
"_members",
"=",
"[",
"]",
"g",
"=",
"self",
".",
"data_file",
"magic",
"=",
"g",
".",
"read",
"(",
"2",
")",
"while",
"magic",
":",
"if",
"magic",
"==",
"b'07'",
":",
"magic",
"+=",
"g",
".",
"read",
"(",
"4",
")",
"member",
"=",
"RPMInfo",
".",
"_read",
"(",
"magic",
",",
"g",
")",
"if",
"member",
".",
"name",
"==",
"'TRAILER!!!'",
":",
"break",
"if",
"not",
"member",
".",
"isdir",
":",
"_members",
".",
"append",
"(",
"member",
")",
"magic",
"=",
"g",
".",
"read",
"(",
"2",
")",
"return",
"_members",
"return",
"self",
".",
"_members"
] | Return the members of the archive as a list of RPMInfo objects. The
list has the same order as the members in the archive. | [
"Return",
"the",
"members",
"of",
"the",
"archive",
"as",
"a",
"list",
"of",
"RPMInfo",
"objects",
".",
"The",
"list",
"has",
"the",
"same",
"order",
"as",
"the",
"members",
"in",
"the",
"archive",
"."
] | 3ab96f211da7b56f5e99d8cc248f714a6e542d31 | https://github.com/srossross/rpmfile/blob/3ab96f211da7b56f5e99d8cc248f714a6e542d31/rpmfile/__init__.py#L111-L133 | train |
srossross/rpmfile | rpmfile/__init__.py | RPMFile.getmember | def getmember(self, name):
'''
Return an RPMInfo object for member `name'. If `name' can not be
found in the archive, KeyError is raised. If a member occurs more
than once in the archive, its last occurrence is assumed to be the
most up-to-date version.
'''
members = self.getmembers()
for m in members[::-1]:
if m.name == name:
return m
raise KeyError("member %s could not be found" % name) | python | def getmember(self, name):
'''
Return an RPMInfo object for member `name'. If `name' can not be
found in the archive, KeyError is raised. If a member occurs more
than once in the archive, its last occurrence is assumed to be the
most up-to-date version.
'''
members = self.getmembers()
for m in members[::-1]:
if m.name == name:
return m
raise KeyError("member %s could not be found" % name) | [
"def",
"getmember",
"(",
"self",
",",
"name",
")",
":",
"members",
"=",
"self",
".",
"getmembers",
"(",
")",
"for",
"m",
"in",
"members",
"[",
":",
":",
"-",
"1",
"]",
":",
"if",
"m",
".",
"name",
"==",
"name",
":",
"return",
"m",
"raise",
"KeyError",
"(",
"\"member %s could not be found\"",
"%",
"name",
")"
] | Return an RPMInfo object for member `name'. If `name' can not be
found in the archive, KeyError is raised. If a member occurs more
than once in the archive, its last occurrence is assumed to be the
most up-to-date version. | [
"Return",
"an",
"RPMInfo",
"object",
"for",
"member",
"name",
".",
"If",
"name",
"can",
"not",
"be",
"found",
"in",
"the",
"archive",
"KeyError",
"is",
"raised",
".",
"If",
"a",
"member",
"occurs",
"more",
"than",
"once",
"in",
"the",
"archive",
"its",
"last",
"occurrence",
"is",
"assumed",
"to",
"be",
"the",
"most",
"up",
"-",
"to",
"-",
"date",
"version",
"."
] | 3ab96f211da7b56f5e99d8cc248f714a6e542d31 | https://github.com/srossross/rpmfile/blob/3ab96f211da7b56f5e99d8cc248f714a6e542d31/rpmfile/__init__.py#L135-L147 | train |
srossross/rpmfile | rpmfile/__init__.py | RPMFile.data_file | def data_file(self):
"""Return the uncompressed raw CPIO data of the RPM archive."""
if self._data_file is None:
fileobj = _SubFile(self._fileobj, self.data_offset)
if self.headers["archive_compression"] == b"xz":
if not getattr(sys.modules[__name__], 'lzma', False):
raise NoLZMAModuleError('lzma module not present')
self._data_file = lzma.LZMAFile(fileobj)
else:
self._data_file = gzip.GzipFile(fileobj=fileobj)
return self._data_file | python | def data_file(self):
"""Return the uncompressed raw CPIO data of the RPM archive."""
if self._data_file is None:
fileobj = _SubFile(self._fileobj, self.data_offset)
if self.headers["archive_compression"] == b"xz":
if not getattr(sys.modules[__name__], 'lzma', False):
raise NoLZMAModuleError('lzma module not present')
self._data_file = lzma.LZMAFile(fileobj)
else:
self._data_file = gzip.GzipFile(fileobj=fileobj)
return self._data_file | [
"def",
"data_file",
"(",
"self",
")",
":",
"if",
"self",
".",
"_data_file",
"is",
"None",
":",
"fileobj",
"=",
"_SubFile",
"(",
"self",
".",
"_fileobj",
",",
"self",
".",
"data_offset",
")",
"if",
"self",
".",
"headers",
"[",
"\"archive_compression\"",
"]",
"==",
"b\"xz\"",
":",
"if",
"not",
"getattr",
"(",
"sys",
".",
"modules",
"[",
"__name__",
"]",
",",
"'lzma'",
",",
"False",
")",
":",
"raise",
"NoLZMAModuleError",
"(",
"'lzma module not present'",
")",
"self",
".",
"_data_file",
"=",
"lzma",
".",
"LZMAFile",
"(",
"fileobj",
")",
"else",
":",
"self",
".",
"_data_file",
"=",
"gzip",
".",
"GzipFile",
"(",
"fileobj",
"=",
"fileobj",
")",
"return",
"self",
".",
"_data_file"
] | Return the uncompressed raw CPIO data of the RPM archive. | [
"Return",
"the",
"uncompressed",
"raw",
"CPIO",
"data",
"of",
"the",
"RPM",
"archive",
"."
] | 3ab96f211da7b56f5e99d8cc248f714a6e542d31 | https://github.com/srossross/rpmfile/blob/3ab96f211da7b56f5e99d8cc248f714a6e542d31/rpmfile/__init__.py#L163-L176 | train |
fridex/json2sql | json2sql/utils.py | load_input | def load_input(definition):
"""Load and parse input if needed.
:param definition: definition to use as an input (file, serialized JSON/YAML or dict)
:return: loaded input
:raises json2sql.ParsingInputError: when parsing fails
"""
if isinstance(definition, (str, io.TextIOWrapper)):
try:
definition = yaml.safe_load(definition)
except Exception as exc:
raise ParsingInputError("Unable to parse input: %s" % str(exc))
return definition | python | def load_input(definition):
"""Load and parse input if needed.
:param definition: definition to use as an input (file, serialized JSON/YAML or dict)
:return: loaded input
:raises json2sql.ParsingInputError: when parsing fails
"""
if isinstance(definition, (str, io.TextIOWrapper)):
try:
definition = yaml.safe_load(definition)
except Exception as exc:
raise ParsingInputError("Unable to parse input: %s" % str(exc))
return definition | [
"def",
"load_input",
"(",
"definition",
")",
":",
"if",
"isinstance",
"(",
"definition",
",",
"(",
"str",
",",
"io",
".",
"TextIOWrapper",
")",
")",
":",
"try",
":",
"definition",
"=",
"yaml",
".",
"safe_load",
"(",
"definition",
")",
"except",
"Exception",
"as",
"exc",
":",
"raise",
"ParsingInputError",
"(",
"\"Unable to parse input: %s\"",
"%",
"str",
"(",
"exc",
")",
")",
"return",
"definition"
] | Load and parse input if needed.
:param definition: definition to use as an input (file, serialized JSON/YAML or dict)
:return: loaded input
:raises json2sql.ParsingInputError: when parsing fails | [
"Load",
"and",
"parse",
"input",
"if",
"needed",
"."
] | a0851dd79827a684319b03fb899e129f81ff2d3a | https://github.com/fridex/json2sql/blob/a0851dd79827a684319b03fb899e129f81ff2d3a/json2sql/utils.py#L19-L32 | train |
fridex/json2sql | json2sql/utils.py | any2sql | def any2sql(func, definition_dict=None, **definition_kwargs):
"""Handle general to SQL conversion.
:param func: function to be called on the given definition
:param definition_dict: statement definition in dict
:param definition_kwargs: statement definition as kwargs
:return: raw SQL statement
"""
if definition_dict and definition_kwargs:
raise InputError("Cannot process dict and kwargs input at the same time")
definition = load_input(definition_dict or definition_kwargs)
if definition.get('returning', '') == '*':
definition['returning'] = mosql_raw('*')
try:
result = func(**definition)
except (TypeError, AttributeError) as exc:
raise ClauseError("Clause definition error: %s" % str(exc)) from exc
except Exception as exc:
import json2sql.errors as json2sql_errors
if exc.__class__.__name__ not in json2sql_errors.__dict__.keys():
raise json2sql_errors.Json2SqlInternalError("Unhandled error: %s" % str(exc)) from exc
raise
return result | python | def any2sql(func, definition_dict=None, **definition_kwargs):
"""Handle general to SQL conversion.
:param func: function to be called on the given definition
:param definition_dict: statement definition in dict
:param definition_kwargs: statement definition as kwargs
:return: raw SQL statement
"""
if definition_dict and definition_kwargs:
raise InputError("Cannot process dict and kwargs input at the same time")
definition = load_input(definition_dict or definition_kwargs)
if definition.get('returning', '') == '*':
definition['returning'] = mosql_raw('*')
try:
result = func(**definition)
except (TypeError, AttributeError) as exc:
raise ClauseError("Clause definition error: %s" % str(exc)) from exc
except Exception as exc:
import json2sql.errors as json2sql_errors
if exc.__class__.__name__ not in json2sql_errors.__dict__.keys():
raise json2sql_errors.Json2SqlInternalError("Unhandled error: %s" % str(exc)) from exc
raise
return result | [
"def",
"any2sql",
"(",
"func",
",",
"definition_dict",
"=",
"None",
",",
"*",
"*",
"definition_kwargs",
")",
":",
"if",
"definition_dict",
"and",
"definition_kwargs",
":",
"raise",
"InputError",
"(",
"\"Cannot process dict and kwargs input at the same time\"",
")",
"definition",
"=",
"load_input",
"(",
"definition_dict",
"or",
"definition_kwargs",
")",
"if",
"definition",
".",
"get",
"(",
"'returning'",
",",
"''",
")",
"==",
"'*'",
":",
"definition",
"[",
"'returning'",
"]",
"=",
"mosql_raw",
"(",
"'*'",
")",
"try",
":",
"result",
"=",
"func",
"(",
"*",
"*",
"definition",
")",
"except",
"(",
"TypeError",
",",
"AttributeError",
")",
"as",
"exc",
":",
"raise",
"ClauseError",
"(",
"\"Clause definition error: %s\"",
"%",
"str",
"(",
"exc",
")",
")",
"from",
"exc",
"except",
"Exception",
"as",
"exc",
":",
"import",
"json2sql",
".",
"errors",
"as",
"json2sql_errors",
"if",
"exc",
".",
"__class__",
".",
"__name__",
"not",
"in",
"json2sql_errors",
".",
"__dict__",
".",
"keys",
"(",
")",
":",
"raise",
"json2sql_errors",
".",
"Json2SqlInternalError",
"(",
"\"Unhandled error: %s\"",
"%",
"str",
"(",
"exc",
")",
")",
"from",
"exc",
"raise",
"return",
"result"
] | Handle general to SQL conversion.
:param func: function to be called on the given definition
:param definition_dict: statement definition in dict
:param definition_kwargs: statement definition as kwargs
:return: raw SQL statement | [
"Handle",
"general",
"to",
"SQL",
"conversion",
"."
] | a0851dd79827a684319b03fb899e129f81ff2d3a | https://github.com/fridex/json2sql/blob/a0851dd79827a684319b03fb899e129f81ff2d3a/json2sql/utils.py#L35-L62 | train |
fridex/json2sql | json2sql/select.py | _expand_join | def _expand_join(join_definition):
"""Expand join definition to `join' call.
:param join_definition: join definition
:return: expanded join definition
"""
join_table_name = join_definition.pop('table')
join_func = getattr(mosql_query, join_definition.pop('join_type', 'join'))
return join_func(join_table_name, **join_definition) | python | def _expand_join(join_definition):
"""Expand join definition to `join' call.
:param join_definition: join definition
:return: expanded join definition
"""
join_table_name = join_definition.pop('table')
join_func = getattr(mosql_query, join_definition.pop('join_type', 'join'))
return join_func(join_table_name, **join_definition) | [
"def",
"_expand_join",
"(",
"join_definition",
")",
":",
"join_table_name",
"=",
"join_definition",
".",
"pop",
"(",
"'table'",
")",
"join_func",
"=",
"getattr",
"(",
"mosql_query",
",",
"join_definition",
".",
"pop",
"(",
"'join_type'",
",",
"'join'",
")",
")",
"return",
"join_func",
"(",
"join_table_name",
",",
"*",
"*",
"join_definition",
")"
] | Expand join definition to `join' call.
:param join_definition: join definition
:return: expanded join definition | [
"Expand",
"join",
"definition",
"to",
"join",
"call",
"."
] | a0851dd79827a684319b03fb899e129f81ff2d3a | https://github.com/fridex/json2sql/blob/a0851dd79827a684319b03fb899e129f81ff2d3a/json2sql/select.py#L32-L40 | train |
fridex/json2sql | json2sql/select.py | _construct_select_query | def _construct_select_query(**filter_definition):
"""Return SELECT statement that will be used as a filter.
:param filter_definition: definition of a filter that should be used for SELECT construction
:return:
"""
table_name = filter_definition.pop('table')
distinct = filter_definition.pop('distinct', False)
select_count = filter_definition.pop('count', False)
if distinct and select_count:
raise UnsupportedDefinitionError('SELECT (DISTINCT ...) is not supported')
if select_count and 'select' in filter_definition:
raise UnsupportedDefinitionError('SELECT COUNT(columns) is not supported')
if 'joins' in filter_definition:
join_definitions = filter_definition.pop('joins')
if not isinstance(join_definitions, (tuple, list)):
join_definitions = (join_definitions,)
filter_definition['joins'] = []
for join_def in join_definitions:
filter_definition['joins'].append(_expand_join(join_def))
if 'where' in filter_definition:
for key, value in filter_definition['where'].items():
if is_filter_query(value):
# We can do it recursively here
sub_query = value.pop(DEFAULT_FILTER_KEY)
if value:
raise ParsingInputError("Unknown keys for sub-query provided: %s" % value)
filter_definition['where'][key] = mosql_raw('( {} )'.format(_construct_select_query(**sub_query)))
elif isinstance(value, str) and value.startswith('$') and QUERY_REFERENCE.fullmatch(value[1:]):
# Make sure we construct correct query with escaped table name and escaped column for sub-queries
filter_definition['where'][key] = mosql_raw('"{}"'.format('"."'.join(value[1:].split('.'))))
raw_select = select(table_name, **filter_definition)
if distinct:
# Note that we want to limit replace to the current SELECT, not affect nested ones
raw_select = raw_select.replace('SELECT', 'SELECT DISTINCT', 1)
if select_count:
# Note that we want to limit replace to the current SELECT, not affect nested ones
raw_select = raw_select.replace('SELECT *', 'SELECT COUNT(*)', 1)
return raw_select | python | def _construct_select_query(**filter_definition):
"""Return SELECT statement that will be used as a filter.
:param filter_definition: definition of a filter that should be used for SELECT construction
:return:
"""
table_name = filter_definition.pop('table')
distinct = filter_definition.pop('distinct', False)
select_count = filter_definition.pop('count', False)
if distinct and select_count:
raise UnsupportedDefinitionError('SELECT (DISTINCT ...) is not supported')
if select_count and 'select' in filter_definition:
raise UnsupportedDefinitionError('SELECT COUNT(columns) is not supported')
if 'joins' in filter_definition:
join_definitions = filter_definition.pop('joins')
if not isinstance(join_definitions, (tuple, list)):
join_definitions = (join_definitions,)
filter_definition['joins'] = []
for join_def in join_definitions:
filter_definition['joins'].append(_expand_join(join_def))
if 'where' in filter_definition:
for key, value in filter_definition['where'].items():
if is_filter_query(value):
# We can do it recursively here
sub_query = value.pop(DEFAULT_FILTER_KEY)
if value:
raise ParsingInputError("Unknown keys for sub-query provided: %s" % value)
filter_definition['where'][key] = mosql_raw('( {} )'.format(_construct_select_query(**sub_query)))
elif isinstance(value, str) and value.startswith('$') and QUERY_REFERENCE.fullmatch(value[1:]):
# Make sure we construct correct query with escaped table name and escaped column for sub-queries
filter_definition['where'][key] = mosql_raw('"{}"'.format('"."'.join(value[1:].split('.'))))
raw_select = select(table_name, **filter_definition)
if distinct:
# Note that we want to limit replace to the current SELECT, not affect nested ones
raw_select = raw_select.replace('SELECT', 'SELECT DISTINCT', 1)
if select_count:
# Note that we want to limit replace to the current SELECT, not affect nested ones
raw_select = raw_select.replace('SELECT *', 'SELECT COUNT(*)', 1)
return raw_select | [
"def",
"_construct_select_query",
"(",
"*",
"*",
"filter_definition",
")",
":",
"table_name",
"=",
"filter_definition",
".",
"pop",
"(",
"'table'",
")",
"distinct",
"=",
"filter_definition",
".",
"pop",
"(",
"'distinct'",
",",
"False",
")",
"select_count",
"=",
"filter_definition",
".",
"pop",
"(",
"'count'",
",",
"False",
")",
"if",
"distinct",
"and",
"select_count",
":",
"raise",
"UnsupportedDefinitionError",
"(",
"'SELECT (DISTINCT ...) is not supported'",
")",
"if",
"select_count",
"and",
"'select'",
"in",
"filter_definition",
":",
"raise",
"UnsupportedDefinitionError",
"(",
"'SELECT COUNT(columns) is not supported'",
")",
"if",
"'joins'",
"in",
"filter_definition",
":",
"join_definitions",
"=",
"filter_definition",
".",
"pop",
"(",
"'joins'",
")",
"if",
"not",
"isinstance",
"(",
"join_definitions",
",",
"(",
"tuple",
",",
"list",
")",
")",
":",
"join_definitions",
"=",
"(",
"join_definitions",
",",
")",
"filter_definition",
"[",
"'joins'",
"]",
"=",
"[",
"]",
"for",
"join_def",
"in",
"join_definitions",
":",
"filter_definition",
"[",
"'joins'",
"]",
".",
"append",
"(",
"_expand_join",
"(",
"join_def",
")",
")",
"if",
"'where'",
"in",
"filter_definition",
":",
"for",
"key",
",",
"value",
"in",
"filter_definition",
"[",
"'where'",
"]",
".",
"items",
"(",
")",
":",
"if",
"is_filter_query",
"(",
"value",
")",
":",
"# We can do it recursively here",
"sub_query",
"=",
"value",
".",
"pop",
"(",
"DEFAULT_FILTER_KEY",
")",
"if",
"value",
":",
"raise",
"ParsingInputError",
"(",
"\"Unknown keys for sub-query provided: %s\"",
"%",
"value",
")",
"filter_definition",
"[",
"'where'",
"]",
"[",
"key",
"]",
"=",
"mosql_raw",
"(",
"'( {} )'",
".",
"format",
"(",
"_construct_select_query",
"(",
"*",
"*",
"sub_query",
")",
")",
")",
"elif",
"isinstance",
"(",
"value",
",",
"str",
")",
"and",
"value",
".",
"startswith",
"(",
"'$'",
")",
"and",
"QUERY_REFERENCE",
".",
"fullmatch",
"(",
"value",
"[",
"1",
":",
"]",
")",
":",
"# Make sure we construct correct query with escaped table name and escaped column for sub-queries",
"filter_definition",
"[",
"'where'",
"]",
"[",
"key",
"]",
"=",
"mosql_raw",
"(",
"'\"{}\"'",
".",
"format",
"(",
"'\".\"'",
".",
"join",
"(",
"value",
"[",
"1",
":",
"]",
".",
"split",
"(",
"'.'",
")",
")",
")",
")",
"raw_select",
"=",
"select",
"(",
"table_name",
",",
"*",
"*",
"filter_definition",
")",
"if",
"distinct",
":",
"# Note that we want to limit replace to the current SELECT, not affect nested ones",
"raw_select",
"=",
"raw_select",
".",
"replace",
"(",
"'SELECT'",
",",
"'SELECT DISTINCT'",
",",
"1",
")",
"if",
"select_count",
":",
"# Note that we want to limit replace to the current SELECT, not affect nested ones",
"raw_select",
"=",
"raw_select",
".",
"replace",
"(",
"'SELECT *'",
",",
"'SELECT COUNT(*)'",
",",
"1",
")",
"return",
"raw_select"
] | Return SELECT statement that will be used as a filter.
:param filter_definition: definition of a filter that should be used for SELECT construction
:return: | [
"Return",
"SELECT",
"statement",
"that",
"will",
"be",
"used",
"as",
"a",
"filter",
"."
] | a0851dd79827a684319b03fb899e129f81ff2d3a | https://github.com/fridex/json2sql/blob/a0851dd79827a684319b03fb899e129f81ff2d3a/json2sql/select.py#L43-L90 | train |
teepark/greenhouse | greenhouse/io/sockets.py | Socket.connect | def connect(self, address):
"""initiate a new connection to a remote socket bound to an address
.. note:: this method will block until the connection has been made
:param address:
the address to which to initiate a connection, the format of which
depends on the socket's type; for TCP sockets, this is a
``(host, port``) two-tuple
"""
address = _dns_resolve(self, address)
with self._registered('we'):
while 1:
err = self._sock.connect_ex(address)
if not self._blocking or err not in _BLOCKING_OP:
if err not in (0, errno.EISCONN):
raise socket.error(err, errno.errorcode[err])
return
if self._writable.wait(self.gettimeout()):
raise socket.timeout("timed out")
if scheduler.state.interrupted:
raise IOError(errno.EINTR,
"interrupted system call") | python | def connect(self, address):
"""initiate a new connection to a remote socket bound to an address
.. note:: this method will block until the connection has been made
:param address:
the address to which to initiate a connection, the format of which
depends on the socket's type; for TCP sockets, this is a
``(host, port``) two-tuple
"""
address = _dns_resolve(self, address)
with self._registered('we'):
while 1:
err = self._sock.connect_ex(address)
if not self._blocking or err not in _BLOCKING_OP:
if err not in (0, errno.EISCONN):
raise socket.error(err, errno.errorcode[err])
return
if self._writable.wait(self.gettimeout()):
raise socket.timeout("timed out")
if scheduler.state.interrupted:
raise IOError(errno.EINTR,
"interrupted system call") | [
"def",
"connect",
"(",
"self",
",",
"address",
")",
":",
"address",
"=",
"_dns_resolve",
"(",
"self",
",",
"address",
")",
"with",
"self",
".",
"_registered",
"(",
"'we'",
")",
":",
"while",
"1",
":",
"err",
"=",
"self",
".",
"_sock",
".",
"connect_ex",
"(",
"address",
")",
"if",
"not",
"self",
".",
"_blocking",
"or",
"err",
"not",
"in",
"_BLOCKING_OP",
":",
"if",
"err",
"not",
"in",
"(",
"0",
",",
"errno",
".",
"EISCONN",
")",
":",
"raise",
"socket",
".",
"error",
"(",
"err",
",",
"errno",
".",
"errorcode",
"[",
"err",
"]",
")",
"return",
"if",
"self",
".",
"_writable",
".",
"wait",
"(",
"self",
".",
"gettimeout",
"(",
")",
")",
":",
"raise",
"socket",
".",
"timeout",
"(",
"\"timed out\"",
")",
"if",
"scheduler",
".",
"state",
".",
"interrupted",
":",
"raise",
"IOError",
"(",
"errno",
".",
"EINTR",
",",
"\"interrupted system call\"",
")"
] | initiate a new connection to a remote socket bound to an address
.. note:: this method will block until the connection has been made
:param address:
the address to which to initiate a connection, the format of which
depends on the socket's type; for TCP sockets, this is a
``(host, port``) two-tuple | [
"initiate",
"a",
"new",
"connection",
"to",
"a",
"remote",
"socket",
"bound",
"to",
"an",
"address"
] | 8fd1be4f5443ba090346b5ec82fdbeb0a060d956 | https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/io/sockets.py#L150-L172 | train |