repository_name
stringlengths
7
55
func_path_in_repository
stringlengths
4
223
func_name
stringlengths
1
134
whole_func_string
stringlengths
75
104k
language
stringclasses
1 value
func_code_string
stringlengths
75
104k
func_code_tokens
sequencelengths
19
28.4k
func_documentation_string
stringlengths
1
46.9k
func_documentation_tokens
sequencelengths
1
1.97k
split_name
stringclasses
1 value
func_code_url
stringlengths
87
315
xenadevel/PyXenaManager
xenamanager/xena_statistics_view.py
XenaStreamsStats.read_stats
def read_stats(self):
    """Read current statistics from chassis.

    :return: dictionary {stream: {tx: {stat name: stat value}}
        rx: {tpld: {stat group {stat name: value}}}}
    """
    # First collect TX statistics per stream over every port of the session.
    self.tx_statistics = TgnObjectsDict()
    for port in self.session.ports.values():
        for tx_stream in port.streams.values():
            self.tx_statistics[tx_stream] = tx_stream.read_stats()

    # RX side comes from the TPLD statistics view.
    rx_stats_by_tpld = XenaTpldsStats(self.session).read_stats()

    self.statistics = TgnObjectsDict()
    for tx_stream, tx_stats in self.tx_statistics.items():
        entry = OrderedDict()
        entry['tx'] = tx_stats
        entry['rx'] = OrderedDict()
        # presumably ps_tpldid links a stream to its test-payload ID on the
        # receive side — TODO confirm against the chassis API
        wanted_tpld = tx_stream.get_attribute('ps_tpldid')
        for tpld, tpld_stats in rx_stats_by_tpld.items():
            if tpld.id == wanted_tpld:
                entry['rx'][tpld] = tpld_stats
        self.statistics[tx_stream] = entry
    return self.statistics
python
def read_stats(self): """ Read current statistics from chassis. :return: dictionary {stream: {tx: {stat name: stat value}} rx: {tpld: {stat group {stat name: value}}}} """ self.tx_statistics = TgnObjectsDict() for port in self.session.ports.values(): for stream in port.streams.values(): self.tx_statistics[stream] = stream.read_stats() tpld_statistics = XenaTpldsStats(self.session).read_stats() self.statistics = TgnObjectsDict() for stream, stream_stats in self.tx_statistics.items(): self.statistics[stream] = OrderedDict() self.statistics[stream]['tx'] = stream_stats self.statistics[stream]['rx'] = OrderedDict() stream_tpld = stream.get_attribute('ps_tpldid') for tpld, tpld_stats in tpld_statistics.items(): if tpld.id == stream_tpld: self.statistics[stream]['rx'][tpld] = tpld_stats return self.statistics
[ "def", "read_stats", "(", "self", ")", ":", "self", ".", "tx_statistics", "=", "TgnObjectsDict", "(", ")", "for", "port", "in", "self", ".", "session", ".", "ports", ".", "values", "(", ")", ":", "for", "stream", "in", "port", ".", "streams", ".", "values", "(", ")", ":", "self", ".", "tx_statistics", "[", "stream", "]", "=", "stream", ".", "read_stats", "(", ")", "tpld_statistics", "=", "XenaTpldsStats", "(", "self", ".", "session", ")", ".", "read_stats", "(", ")", "self", ".", "statistics", "=", "TgnObjectsDict", "(", ")", "for", "stream", ",", "stream_stats", "in", "self", ".", "tx_statistics", ".", "items", "(", ")", ":", "self", ".", "statistics", "[", "stream", "]", "=", "OrderedDict", "(", ")", "self", ".", "statistics", "[", "stream", "]", "[", "'tx'", "]", "=", "stream_stats", "self", ".", "statistics", "[", "stream", "]", "[", "'rx'", "]", "=", "OrderedDict", "(", ")", "stream_tpld", "=", "stream", ".", "get_attribute", "(", "'ps_tpldid'", ")", "for", "tpld", ",", "tpld_stats", "in", "tpld_statistics", ".", "items", "(", ")", ":", "if", "tpld", ".", "id", "==", "stream_tpld", ":", "self", ".", "statistics", "[", "stream", "]", "[", "'rx'", "]", "[", "tpld", "]", "=", "tpld_stats", "return", "self", ".", "statistics" ]
Read current statistics from chassis. :return: dictionary {stream: {tx: {stat name: stat value}} rx: {tpld: {stat group {stat name: value}}}}
[ "Read", "current", "statistics", "from", "chassis", "." ]
train
https://github.com/xenadevel/PyXenaManager/blob/384ca265f73044b8a8b471f5dd7a6103fc54f4df/xenamanager/xena_statistics_view.py#L90-L112
xenadevel/PyXenaManager
xenamanager/xena_statistics_view.py
XenaTpldsStats.read_stats
def read_stats(self):
    """Read current statistics from chassis.

    :return: dictionary {tpld full index {group name {stat name: stat value}}}
    """
    # Lazily walk every TPLD of every port in the session.
    all_tplds = (tpld
                 for port in self.session.ports.values()
                 for tpld in port.tplds.values())
    self.statistics = TgnObjectsDict()
    for tpld in all_tplds:
        self.statistics[tpld] = tpld.read_stats()
    return self.statistics
python
def read_stats(self): """ Read current statistics from chassis. :return: dictionary {tpld full index {group name {stat name: stat value}}} """ self.statistics = TgnObjectsDict() for port in self.session.ports.values(): for tpld in port.tplds.values(): self.statistics[tpld] = tpld.read_stats() return self.statistics
[ "def", "read_stats", "(", "self", ")", ":", "self", ".", "statistics", "=", "TgnObjectsDict", "(", ")", "for", "port", "in", "self", ".", "session", ".", "ports", ".", "values", "(", ")", ":", "for", "tpld", "in", "port", ".", "tplds", ".", "values", "(", ")", ":", "self", ".", "statistics", "[", "tpld", "]", "=", "tpld", ".", "read_stats", "(", ")", "return", "self", ".", "statistics" ]
Read current statistics from chassis. :return: dictionary {tpld full index {group name {stat name: stat value}}}
[ "Read", "current", "statistics", "from", "chassis", "." ]
train
https://github.com/xenadevel/PyXenaManager/blob/384ca265f73044b8a8b471f5dd7a6103fc54f4df/xenamanager/xena_statistics_view.py#L134-L144
quizl/quizler
quizler/utils.py
get_user_sets
def get_user_sets(client_id, user_id):
    """Find all user sets."""
    endpoint = 'users/{}/sets'.format(user_id)
    raw_sets = api_call('get', endpoint, client_id=client_id)
    return [WordSet.from_dict(raw) for raw in raw_sets]
python
def get_user_sets(client_id, user_id): """Find all user sets.""" data = api_call('get', 'users/{}/sets'.format(user_id), client_id=client_id) return [WordSet.from_dict(wordset) for wordset in data]
[ "def", "get_user_sets", "(", "client_id", ",", "user_id", ")", ":", "data", "=", "api_call", "(", "'get'", ",", "'users/{}/sets'", ".", "format", "(", "user_id", ")", ",", "client_id", "=", "client_id", ")", "return", "[", "WordSet", ".", "from_dict", "(", "wordset", ")", "for", "wordset", "in", "data", "]" ]
Find all user sets.
[ "Find", "all", "user", "sets", "." ]
train
https://github.com/quizl/quizler/blob/44b3fd91f7074e7013ffde8147455f45ebdccc46/quizler/utils.py#L10-L13
quizl/quizler
quizler/utils.py
print_user_sets
def print_user_sets(wordsets, print_terms):
    """Print all user sets by title.

    If 'print_terms', also prints all terms of all user sets.

    :param wordsets: List of WordSet.
    :param print_terms: If True, also prints all terms of all user sets.
    """
    # Guard clause: nothing to report.
    if not wordsets:
        print('No sets found')
        return
    print('Found sets: {}'.format(len(wordsets)))
    for wordset in wordsets:
        print(' {}'.format(wordset))
        if not print_terms:
            continue
        for term in wordset.terms:
            print(' {}'.format(term))
python
def print_user_sets(wordsets, print_terms): """Print all user sets by title. If 'print_terms', also prints all terms of all user sets. :param wordsets: List of WordSet. :param print_terms: If True, also prints all terms of all user sets. """ if not wordsets: print('No sets found') else: print('Found sets: {}'.format(len(wordsets))) for wordset in wordsets: print(' {}'.format(wordset)) if print_terms: for term in wordset.terms: print(' {}'.format(term))
[ "def", "print_user_sets", "(", "wordsets", ",", "print_terms", ")", ":", "if", "not", "wordsets", ":", "print", "(", "'No sets found'", ")", "else", ":", "print", "(", "'Found sets: {}'", ".", "format", "(", "len", "(", "wordsets", ")", ")", ")", "for", "wordset", "in", "wordsets", ":", "print", "(", "' {}'", ".", "format", "(", "wordset", ")", ")", "if", "print_terms", ":", "for", "term", "in", "wordset", ".", "terms", ":", "print", "(", "' {}'", ".", "format", "(", "term", ")", ")" ]
Print all user sets by title. If 'print_terms', also prints all terms of all user sets. :param wordsets: List of WordSet. :param print_terms: If True, also prints all terms of all user sets.
[ "Print", "all", "user", "sets", "by", "title", ".", "If", "print_terms", "also", "prints", "all", "terms", "of", "all", "user", "sets", ".", ":", "param", "wordsets", ":", "List", "of", "WordSet", ".", ":", "param", "print_terms", ":", "If", "True", "also", "prints", "all", "terms", "of", "all", "user", "sets", "." ]
train
https://github.com/quizl/quizler/blob/44b3fd91f7074e7013ffde8147455f45ebdccc46/quizler/utils.py#L16-L29
quizl/quizler
quizler/utils.py
get_common_terms
def get_common_terms(*api_envs):
    """Get all term duplicates across all user word sets as a list of
    (title of first word set, title of second word set, set of terms) tuples."""
    # pylint: disable=no-value-for-parameter
    wordsets = get_user_sets(*api_envs)
    # pylint: enable=no-value-for-parameter
    duplicates = []
    for first, second in combinations(wordsets, 2):
        shared = first.has_common(second)
        if shared:
            duplicates.append((first.title, second.title, shared))
    return duplicates
python
def get_common_terms(*api_envs): """Get all term duplicates across all user word sets as a list of (title of first word set, title of second word set, set of terms) tuples.""" common_terms = [] # pylint: disable=no-value-for-parameter wordsets = get_user_sets(*api_envs) # pylint: enable=no-value-for-parameter for wordset1, wordset2 in combinations(wordsets, 2): common = wordset1.has_common(wordset2) if common: common_terms.append((wordset1.title, wordset2.title, common)) return common_terms
[ "def", "get_common_terms", "(", "*", "api_envs", ")", ":", "common_terms", "=", "[", "]", "# pylint: disable=no-value-for-parameter", "wordsets", "=", "get_user_sets", "(", "*", "api_envs", ")", "# pylint: enable=no-value-for-parameter", "for", "wordset1", ",", "wordset2", "in", "combinations", "(", "wordsets", ",", "2", ")", ":", "common", "=", "wordset1", ".", "has_common", "(", "wordset2", ")", "if", "common", ":", "common_terms", ".", "append", "(", "(", "wordset1", ".", "title", ",", "wordset2", ".", "title", ",", "common", ")", ")", "return", "common_terms" ]
Get all term duplicates across all user word sets as a list of (title of first word set, title of second word set, set of terms) tuples.
[ "Get", "all", "term", "duplicates", "across", "all", "user", "word", "sets", "as", "a", "list", "of", "(", "title", "of", "first", "word", "set", "title", "of", "second", "word", "set", "set", "of", "terms", ")", "tuples", "." ]
train
https://github.com/quizl/quizler/blob/44b3fd91f7074e7013ffde8147455f45ebdccc46/quizler/utils.py#L32-L44
quizl/quizler
quizler/utils.py
print_common_terms
def print_common_terms(common_terms):
    """Print common terms for each pair of word sets.

    :param common_terms: Output of get_common_terms().
    """
    # Guard clause: no duplicates found at all.
    if not common_terms:
        print('No duplicates')
        return
    for set1, set2, terms in common_terms:
        print('{} and {} have in common:'.format(set1, set2))
        for term in terms:
            print(' {}'.format(term))
python
def print_common_terms(common_terms): """Print common terms for each pair of word sets. :param common_terms: Output of get_common_terms(). """ if not common_terms: print('No duplicates') else: for set_pair in common_terms: set1, set2, terms = set_pair print('{} and {} have in common:'.format(set1, set2)) for term in terms: print(' {}'.format(term))
[ "def", "print_common_terms", "(", "common_terms", ")", ":", "if", "not", "common_terms", ":", "print", "(", "'No duplicates'", ")", "else", ":", "for", "set_pair", "in", "common_terms", ":", "set1", ",", "set2", ",", "terms", "=", "set_pair", "print", "(", "'{} and {} have in common:'", ".", "format", "(", "set1", ",", "set2", ")", ")", "for", "term", "in", "terms", ":", "print", "(", "' {}'", ".", "format", "(", "term", ")", ")" ]
Print common terms for each pair of word sets. :param common_terms: Output of get_common_terms().
[ "Print", "common", "terms", "for", "each", "pair", "of", "word", "sets", ".", ":", "param", "common_terms", ":", "Output", "of", "get_common_terms", "()", "." ]
train
https://github.com/quizl/quizler/blob/44b3fd91f7074e7013ffde8147455f45ebdccc46/quizler/utils.py#L47-L58
quizl/quizler
quizler/utils.py
delete_term
def delete_term(set_id, term_id, access_token):
    """Delete the given term."""
    endpoint = 'sets/{}/terms/{}'.format(set_id, term_id)
    api_call('delete', endpoint, access_token=access_token)
python
def delete_term(set_id, term_id, access_token): """Delete the given term.""" api_call('delete', 'sets/{}/terms/{}'.format(set_id, term_id), access_token=access_token)
[ "def", "delete_term", "(", "set_id", ",", "term_id", ",", "access_token", ")", ":", "api_call", "(", "'delete'", ",", "'sets/{}/terms/{}'", ".", "format", "(", "set_id", ",", "term_id", ")", ",", "access_token", "=", "access_token", ")" ]
Delete the given term.
[ "Delete", "the", "given", "term", "." ]
train
https://github.com/quizl/quizler/blob/44b3fd91f7074e7013ffde8147455f45ebdccc46/quizler/utils.py#L61-L63
quizl/quizler
quizler/utils.py
add_term
def add_term(set_id, term, access_token):
    """Add the given term to the given set.

    :param term: Instance of Term.
    """
    endpoint = 'sets/{}/terms'.format(set_id)
    api_call('post', endpoint, term.to_dict(), access_token=access_token)
python
def add_term(set_id, term, access_token): """Add the given term to the given set. :param term: Instance of Term. """ api_call('post', 'sets/{}/terms'.format(set_id), term.to_dict(), access_token=access_token)
[ "def", "add_term", "(", "set_id", ",", "term", ",", "access_token", ")", ":", "api_call", "(", "'post'", ",", "'sets/{}/terms'", ".", "format", "(", "set_id", ")", ",", "term", ".", "to_dict", "(", ")", ",", "access_token", "=", "access_token", ")" ]
Add the given term to the given set. :param term: Instance of Term.
[ "Add", "the", "given", "term", "to", "the", "given", "set", ".", ":", "param", "term", ":", "Instance", "of", "Term", "." ]
train
https://github.com/quizl/quizler/blob/44b3fd91f7074e7013ffde8147455f45ebdccc46/quizler/utils.py#L66-L70
quizl/quizler
quizler/utils.py
reset_term_stats
def reset_term_stats(set_id, term_id, client_id, user_id, access_token):
    """Reset the stats of a term by deleting and re-creating it."""
    # Locate the set; exactly one match is required.
    matching_sets = [s for s in get_user_sets(client_id, user_id)
                     if s.set_id == set_id]
    if len(matching_sets) != 1:
        raise ValueError('{} set(s) found with id {}'.format(len(matching_sets), set_id))
    # Locate the term within that set; exactly one match is required.
    matching_terms = [t for t in matching_sets[0].terms if t.term_id == term_id]
    if len(matching_terms) != 1:
        raise ValueError('{} term(s) found with id {}'.format(len(matching_terms), term_id))
    term = matching_terms[0]
    if term.image.url:
        # Creating a term with an image requires an "image identifier", which you get by uploading
        # an image via https://quizlet.com/api/2.0/docs/images , which can only be used by Quizlet
        # PLUS members.
        raise NotImplementedError('"{}" has an image and is thus not supported'.format(term))
    print('Deleting "{}"...'.format(term))
    delete_term(set_id, term_id, access_token)
    print('Re-creating "{}"...'.format(term))
    add_term(set_id, term, access_token)
    print('Done')
python
def reset_term_stats(set_id, term_id, client_id, user_id, access_token): """Reset the stats of a term by deleting and re-creating it.""" found_sets = [user_set for user_set in get_user_sets(client_id, user_id) if user_set.set_id == set_id] if len(found_sets) != 1: raise ValueError('{} set(s) found with id {}'.format(len(found_sets), set_id)) found_terms = [term for term in found_sets[0].terms if term.term_id == term_id] if len(found_terms) != 1: raise ValueError('{} term(s) found with id {}'.format(len(found_terms), term_id)) term = found_terms[0] if term.image.url: # Creating a term with an image requires an "image identifier", which you get by uploading # an image via https://quizlet.com/api/2.0/docs/images , which can only be used by Quizlet # PLUS members. raise NotImplementedError('"{}" has an image and is thus not supported'.format(term)) print('Deleting "{}"...'.format(term)) delete_term(set_id, term_id, access_token) print('Re-creating "{}"...'.format(term)) add_term(set_id, term, access_token) print('Done')
[ "def", "reset_term_stats", "(", "set_id", ",", "term_id", ",", "client_id", ",", "user_id", ",", "access_token", ")", ":", "found_sets", "=", "[", "user_set", "for", "user_set", "in", "get_user_sets", "(", "client_id", ",", "user_id", ")", "if", "user_set", ".", "set_id", "==", "set_id", "]", "if", "len", "(", "found_sets", ")", "!=", "1", ":", "raise", "ValueError", "(", "'{} set(s) found with id {}'", ".", "format", "(", "len", "(", "found_sets", ")", ",", "set_id", ")", ")", "found_terms", "=", "[", "term", "for", "term", "in", "found_sets", "[", "0", "]", ".", "terms", "if", "term", ".", "term_id", "==", "term_id", "]", "if", "len", "(", "found_terms", ")", "!=", "1", ":", "raise", "ValueError", "(", "'{} term(s) found with id {}'", ".", "format", "(", "len", "(", "found_terms", ")", ",", "term_id", ")", ")", "term", "=", "found_terms", "[", "0", "]", "if", "term", ".", "image", ".", "url", ":", "# Creating a term with an image requires an \"image identifier\", which you get by uploading", "# an image via https://quizlet.com/api/2.0/docs/images , which can only be used by Quizlet", "# PLUS members.", "raise", "NotImplementedError", "(", "'\"{}\" has an image and is thus not supported'", ".", "format", "(", "term", ")", ")", "print", "(", "'Deleting \"{}\"...'", ".", "format", "(", "term", ")", ")", "delete_term", "(", "set_id", ",", "term_id", ",", "access_token", ")", "print", "(", "'Re-creating \"{}\"...'", ".", "format", "(", "term", ")", ")", "add_term", "(", "set_id", ",", "term", ",", "access_token", ")", "print", "(", "'Done'", ")" ]
Reset the stats of a term by deleting and re-creating it.
[ "Reset", "the", "stats", "of", "a", "term", "by", "deleting", "and", "re", "-", "creating", "it", "." ]
train
https://github.com/quizl/quizler/blob/44b3fd91f7074e7013ffde8147455f45ebdccc46/quizler/utils.py#L73-L94
davidblaisonneau-orange/foreman
foreman/hosts.py
Hosts.createController
def createController(self, key, attributes, ipmi, printer=False):
    """ Function createController
    Create a controller node

    @param key: The host name or ID
    @param attributes: The payload of the host creation
    @param ipmi: IPMI interface definition appended to the host's interfaces
    @param printer: - False for no creation progression message
                    - True to get creation progression printed on STDOUT
                    - Printer class containing a status method for enhanced
                      print. def printer.status(status, msg, eol=eol)
    @return RETURN: The created host entry, or None if the key already exists
    """
    if key not in self:
        self.printer = printer
        # 'async' became a reserved keyword in Python 3.7, so
        # `self.async = False` and `async=...` are syntax errors there.
        # setattr()/dict-unpacking keep the exact same runtime attribute
        # and API parameter names for backward compatibility.
        setattr(self, 'async', False)
        # Create the VM in foreman
        self.__printProgression__('In progress',
                                  key + ' creation: push in Foreman',
                                  eol='\r')
        self.api.create('hosts', attributes,
                        **{'async': getattr(self, 'async')})
        self[key]['interfaces'].append(ipmi)
        # Wait for puppet catalog to be applied
        # self.waitPuppetCatalogToBeApplied(key)
        self.reload()
        self[key]['build'] = 'true'
        self[key]['boot'] = 'pxe'
        self[key]['power'] = 'cycle'
        return self[key]
python
def createController(self, key, attributes, ipmi, printer=False): """ Function createController Create a controller node @param key: The host name or ID @param attributes:The payload of the host creation @param printer: - False for no creation progression message - True to get creation progression printed on STDOUT - Printer class containig a status method for enhanced print. def printer.status(status, msg, eol=eol) @return RETURN: The API result """ if key not in self: self.printer = printer self.async = False # Create the VM in foreman self.__printProgression__('In progress', key + ' creation: push in Foreman', eol='\r') self.api.create('hosts', attributes, async=self.async) self[key]['interfaces'].append(ipmi) # Wait for puppet catalog to be applied # self.waitPuppetCatalogToBeApplied(key) self.reload() self[key]['build'] = 'true' self[key]['boot'] = 'pxe' self[key]['power'] = 'cycle' return self[key]
[ "def", "createController", "(", "self", ",", "key", ",", "attributes", ",", "ipmi", ",", "printer", "=", "False", ")", ":", "if", "key", "not", "in", "self", ":", "self", ".", "printer", "=", "printer", "self", ".", "async", "=", "False", "# Create the VM in foreman", "self", ".", "__printProgression__", "(", "'In progress'", ",", "key", "+", "' creation: push in Foreman'", ",", "eol", "=", "'\\r'", ")", "self", ".", "api", ".", "create", "(", "'hosts'", ",", "attributes", ",", "async", "=", "self", ".", "async", ")", "self", "[", "key", "]", "[", "'interfaces'", "]", ".", "append", "(", "ipmi", ")", "# Wait for puppet catalog to be applied", "# self.waitPuppetCatalogToBeApplied(key)", "self", ".", "reload", "(", ")", "self", "[", "key", "]", "[", "'build'", "]", "=", "'true'", "self", "[", "key", "]", "[", "'boot'", "]", "=", "'pxe'", "self", "[", "key", "]", "[", "'power'", "]", "=", "'cycle'", "return", "self", "[", "key", "]" ]
Function createController Create a controller node @param key: The host name or ID @param attributes:The payload of the host creation @param printer: - False for no creation progression message - True to get creation progression printed on STDOUT - Printer class containig a status method for enhanced print. def printer.status(status, msg, eol=eol) @return RETURN: The API result
[ "Function", "createController", "Create", "a", "controller", "node" ]
train
https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/hosts.py#L50-L77
davidblaisonneau-orange/foreman
foreman/hosts.py
Hosts.waitPuppetCatalogToBeApplied
def waitPuppetCatalogToBeApplied(self, key, sleepTime=5):
    """ Function waitPuppetCatalogToBeApplied
    Wait for puppet catalog to be applied

    @param key: The host name or ID
    @param sleepTime: Seconds to sleep between two status polls
    @return RETURN: None on success, False on provisioning error
    """
    # Poll the host status until provisioning settles.
    while True:
        status = self[key].getStatus()
        if status in ('No Changes', 'Active'):
            self.__printProgression__(True,
                                      key + ' creation: provisioning OK')
            return
        if status == 'Error':
            self.__printProgression__(False,
                                      key + ' creation: Error - '
                                      'Error during provisioning')
            return False
        self.__printProgression__('In progress',
                                  key + ' creation: provisioning ({})'
                                  .format(status),
                                  eol='\r')
        time.sleep(sleepTime)
python
def waitPuppetCatalogToBeApplied(self, key, sleepTime=5): """ Function waitPuppetCatalogToBeApplied Wait for puppet catalog to be applied @param key: The host name or ID @return RETURN: None """ # Wait for puppet catalog to be applied loop_stop = False while not loop_stop: status = self[key].getStatus() if status == 'No Changes' or status == 'Active': self.__printProgression__(True, key + ' creation: provisioning OK') loop_stop = True elif status == 'Error': self.__printProgression__(False, key + ' creation: Error - ' 'Error during provisioning') loop_stop = True return False else: self.__printProgression__('In progress', key + ' creation: provisioning ({})' .format(status), eol='\r') time.sleep(sleepTime)
[ "def", "waitPuppetCatalogToBeApplied", "(", "self", ",", "key", ",", "sleepTime", "=", "5", ")", ":", "# Wait for puppet catalog to be applied", "loop_stop", "=", "False", "while", "not", "loop_stop", ":", "status", "=", "self", "[", "key", "]", ".", "getStatus", "(", ")", "if", "status", "==", "'No Changes'", "or", "status", "==", "'Active'", ":", "self", ".", "__printProgression__", "(", "True", ",", "key", "+", "' creation: provisioning OK'", ")", "loop_stop", "=", "True", "elif", "status", "==", "'Error'", ":", "self", ".", "__printProgression__", "(", "False", ",", "key", "+", "' creation: Error - '", "'Error during provisioning'", ")", "loop_stop", "=", "True", "return", "False", "else", ":", "self", ".", "__printProgression__", "(", "'In progress'", ",", "key", "+", "' creation: provisioning ({})'", ".", "format", "(", "status", ")", ",", "eol", "=", "'\\r'", ")", "time", ".", "sleep", "(", "sleepTime", ")" ]
Function waitPuppetCatalogToBeApplied Wait for puppet catalog to be applied @param key: The host name or ID @return RETURN: None
[ "Function", "waitPuppetCatalogToBeApplied", "Wait", "for", "puppet", "catalog", "to", "be", "applied" ]
train
https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/hosts.py#L79-L105
davidblaisonneau-orange/foreman
foreman/hosts.py
Hosts.createVM
def createVM(self, key, attributes, printer=False):
    """ Function createVM
    Create a Virtual Machine

    The creation of a VM with libVirt is a bit complexe.
    We first create the element in foreman, the ask to start before
    the result of the creation.
    To do so, we make async calls to the API and check the results

    @param key: The host name or ID
    @param attributes: The payload of the host creation
    @param printer: - False for no creation progression message
                    - True to get creation progression printed on STDOUT
                    - Printer class containing a status method for enhanced
                      print. def printer.status(status, msg, eol=eol)
    @return RETURN: The new host id, or False if power-on failed
    """
    self.printer = printer
    # 'async' became a reserved keyword in Python 3.7, so
    # `self.async = False` and `async=...` are syntax errors there.
    # setattr()/dict-unpacking keep the exact same runtime attribute
    # and API parameter names for backward compatibility.
    setattr(self, 'async', False)
    # Create the VM in foreman
    # NOTA: with 1.8 it will return 422 'Failed to login via SSH'
    self.__printProgression__('In progress',
                              key + ' creation: push in Foreman',
                              eol='\r')
    self.api.create('hosts', attributes,
                    **{'async': getattr(self, 'async')})
    # Power on the VM
    self.__printProgression__('In progress',
                              key + ' creation: starting',
                              eol='\r')
    powerOn = self[key].powerOn()
    # Show Power on result
    if powerOn['power']:
        self.__printProgression__('In progress',
                                  key + ' creation: wait for end of boot',
                                  eol='\r')
    else:
        self.__printProgression__(False,
                                  key + ' creation: Error - ' + str(powerOn))
        return False
    # Wait for puppet catalog to be applied
    self.waitPuppetCatalogToBeApplied(key)
    return self[key]['id']
python
def createVM(self, key, attributes, printer=False): """ Function createVM Create a Virtual Machine The creation of a VM with libVirt is a bit complexe. We first create the element in foreman, the ask to start before the result of the creation. To do so, we make async calls to the API and check the results @param key: The host name or ID @param attributes:The payload of the host creation @param printer: - False for no creation progression message - True to get creation progression printed on STDOUT - Printer class containig a status method for enhanced print. def printer.status(status, msg, eol=eol) @return RETURN: The API result """ self.printer = printer self.async = False # Create the VM in foreman # NOTA: with 1.8 it will return 422 'Failed to login via SSH' self.__printProgression__('In progress', key + ' creation: push in Foreman', eol='\r') asyncCreation = self.api.create('hosts', attributes, async=self.async) # Wait before asking to power on the VM # sleep = 5 # for i in range(0, sleep): # time.sleep(1) # self.__printProgression__('In progress', # key + ' creation: start in {0}s' # .format(sleep - i), # eol='\r') # Power on the VM self.__printProgression__('In progress', key + ' creation: starting', eol='\r') powerOn = self[key].powerOn() # Show Power on result if powerOn['power']: self.__printProgression__('In progress', key + ' creation: wait for end of boot', eol='\r') else: self.__printProgression__(False, key + ' creation: Error - ' + str(powerOn)) return False # Show creation result # NOTA: with 1.8 it will return 422 'Failed to login via SSH' # if asyncCreation.result().status_code is 200: # self.__printProgression__('In progress', # key + ' creation: created', # eol='\r') # else: # self.__printProgression__(False, # key + ' creation: Error - ' + # str(asyncCreation.result() # .status_code) + ' - ' + # str(asyncCreation.result().text)) # return False # Wait for puppet catalog to be applied self.waitPuppetCatalogToBeApplied(key) return self[key]['id']
[ "def", "createVM", "(", "self", ",", "key", ",", "attributes", ",", "printer", "=", "False", ")", ":", "self", ".", "printer", "=", "printer", "self", ".", "async", "=", "False", "# Create the VM in foreman", "# NOTA: with 1.8 it will return 422 'Failed to login via SSH'", "self", ".", "__printProgression__", "(", "'In progress'", ",", "key", "+", "' creation: push in Foreman'", ",", "eol", "=", "'\\r'", ")", "asyncCreation", "=", "self", ".", "api", ".", "create", "(", "'hosts'", ",", "attributes", ",", "async", "=", "self", ".", "async", ")", "# Wait before asking to power on the VM", "# sleep = 5", "# for i in range(0, sleep):", "# time.sleep(1)", "# self.__printProgression__('In progress',", "# key + ' creation: start in {0}s'", "# .format(sleep - i),", "# eol='\\r')", "# Power on the VM", "self", ".", "__printProgression__", "(", "'In progress'", ",", "key", "+", "' creation: starting'", ",", "eol", "=", "'\\r'", ")", "powerOn", "=", "self", "[", "key", "]", ".", "powerOn", "(", ")", "# Show Power on result", "if", "powerOn", "[", "'power'", "]", ":", "self", ".", "__printProgression__", "(", "'In progress'", ",", "key", "+", "' creation: wait for end of boot'", ",", "eol", "=", "'\\r'", ")", "else", ":", "self", ".", "__printProgression__", "(", "False", ",", "key", "+", "' creation: Error - '", "+", "str", "(", "powerOn", ")", ")", "return", "False", "# Show creation result", "# NOTA: with 1.8 it will return 422 'Failed to login via SSH'", "# if asyncCreation.result().status_code is 200:", "# self.__printProgression__('In progress',", "# key + ' creation: created',", "# eol='\\r')", "# else:", "# self.__printProgression__(False,", "# key + ' creation: Error - ' +", "# str(asyncCreation.result()", "# .status_code) + ' - ' +", "# str(asyncCreation.result().text))", "# return False", "# Wait for puppet catalog to be applied", "self", ".", "waitPuppetCatalogToBeApplied", "(", "key", ")", "return", "self", "[", "key", "]", "[", "'id'", "]" ]
Function createVM Create a Virtual Machine The creation of a VM with libVirt is a bit complexe. We first create the element in foreman, the ask to start before the result of the creation. To do so, we make async calls to the API and check the results @param key: The host name or ID @param attributes:The payload of the host creation @param printer: - False for no creation progression message - True to get creation progression printed on STDOUT - Printer class containig a status method for enhanced print. def printer.status(status, msg, eol=eol) @return RETURN: The API result
[ "Function", "createVM", "Create", "a", "Virtual", "Machine" ]
train
https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/hosts.py#L107-L174
qubell/contrib-python-qubell-client
qubell/api/private/user.py
User.get
def get(router, organization, email):
    """
    :rtype: User
    """
    log.info("Getting user: %s" % email)
    users = router.get_users(org_id=organization.id).json()
    matching_ids = [u['id'] for u in users if u['email'] == email]
    if not matching_ids:
        raise exceptions.NotFoundError('User with email: %s not found' % email)
    # Bind the first matching user id to this router before returning it.
    return User(organization, matching_ids[0]).init_router(router)
python
def get(router, organization, email): """ :rtype: User """ log.info("Getting user: %s" % email) resp = router.get_users(org_id=organization.id).json() ids = [x['id'] for x in resp if x['email'] == email] if len(ids): user = User(organization, ids[0]).init_router(router) return user else: raise exceptions.NotFoundError('User with email: %s not found' % email)
[ "def", "get", "(", "router", ",", "organization", ",", "email", ")", ":", "log", ".", "info", "(", "\"Getting user: %s\"", "%", "email", ")", "resp", "=", "router", ".", "get_users", "(", "org_id", "=", "organization", ".", "id", ")", ".", "json", "(", ")", "ids", "=", "[", "x", "[", "'id'", "]", "for", "x", "in", "resp", "if", "x", "[", "'email'", "]", "==", "email", "]", "if", "len", "(", "ids", ")", ":", "user", "=", "User", "(", "organization", ",", "ids", "[", "0", "]", ")", ".", "init_router", "(", "router", ")", "return", "user", "else", ":", "raise", "exceptions", ".", "NotFoundError", "(", "'User with email: %s not found'", "%", "email", ")" ]
:rtype: User
[ ":", "rtype", ":", "User" ]
train
https://github.com/qubell/contrib-python-qubell-client/blob/4586ea11d5103c2ff9607d3ed922b5a0991b8845/qubell/api/private/user.py#L37-L48
MatterMiners/cobald
cobald/daemon/config/mapping.py
Translator.construct
def construct(self, mapping: dict, **kwargs): """ Construct an object from a mapping :param mapping: the constructor definition, with ``__type__`` name and keyword arguments :param kwargs: additional keyword arguments to pass to the constructor """ assert '__type__' not in kwargs and '__args__' not in kwargs mapping = {**mapping, **kwargs} factory_fqdn = mapping.pop('__type__') factory = self.load_name(factory_fqdn) args = mapping.pop('__args__', []) return factory(*args, **mapping)
python
def construct(self, mapping: dict, **kwargs): """ Construct an object from a mapping :param mapping: the constructor definition, with ``__type__`` name and keyword arguments :param kwargs: additional keyword arguments to pass to the constructor """ assert '__type__' not in kwargs and '__args__' not in kwargs mapping = {**mapping, **kwargs} factory_fqdn = mapping.pop('__type__') factory = self.load_name(factory_fqdn) args = mapping.pop('__args__', []) return factory(*args, **mapping)
[ "def", "construct", "(", "self", ",", "mapping", ":", "dict", ",", "*", "*", "kwargs", ")", ":", "assert", "'__type__'", "not", "in", "kwargs", "and", "'__args__'", "not", "in", "kwargs", "mapping", "=", "{", "*", "*", "mapping", ",", "*", "*", "kwargs", "}", "factory_fqdn", "=", "mapping", ".", "pop", "(", "'__type__'", ")", "factory", "=", "self", ".", "load_name", "(", "factory_fqdn", ")", "args", "=", "mapping", ".", "pop", "(", "'__args__'", ",", "[", "]", ")", "return", "factory", "(", "*", "args", ",", "*", "*", "mapping", ")" ]
Construct an object from a mapping :param mapping: the constructor definition, with ``__type__`` name and keyword arguments :param kwargs: additional keyword arguments to pass to the constructor
[ "Construct", "an", "object", "from", "a", "mapping" ]
train
https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/daemon/config/mapping.py#L55-L67
MatterMiners/cobald
cobald/daemon/config/mapping.py
Translator.load_name
def load_name(absolute_name: str): """Load an object based on an absolute, dotted name""" path = absolute_name.split('.') try: __import__(absolute_name) except ImportError: try: obj = sys.modules[path[0]] except KeyError: raise ModuleNotFoundError('No module named %r' % path[0]) else: for component in path[1:]: try: obj = getattr(obj, component) except AttributeError as err: raise ConfigurationError(what='no such object %r: %s' % (absolute_name, err)) return obj else: # ImportError is not raised if ``absolute_name`` points to a valid module return sys.modules[absolute_name]
python
def load_name(absolute_name: str): """Load an object based on an absolute, dotted name""" path = absolute_name.split('.') try: __import__(absolute_name) except ImportError: try: obj = sys.modules[path[0]] except KeyError: raise ModuleNotFoundError('No module named %r' % path[0]) else: for component in path[1:]: try: obj = getattr(obj, component) except AttributeError as err: raise ConfigurationError(what='no such object %r: %s' % (absolute_name, err)) return obj else: # ImportError is not raised if ``absolute_name`` points to a valid module return sys.modules[absolute_name]
[ "def", "load_name", "(", "absolute_name", ":", "str", ")", ":", "path", "=", "absolute_name", ".", "split", "(", "'.'", ")", "try", ":", "__import__", "(", "absolute_name", ")", "except", "ImportError", ":", "try", ":", "obj", "=", "sys", ".", "modules", "[", "path", "[", "0", "]", "]", "except", "KeyError", ":", "raise", "ModuleNotFoundError", "(", "'No module named %r'", "%", "path", "[", "0", "]", ")", "else", ":", "for", "component", "in", "path", "[", "1", ":", "]", ":", "try", ":", "obj", "=", "getattr", "(", "obj", ",", "component", ")", "except", "AttributeError", "as", "err", ":", "raise", "ConfigurationError", "(", "what", "=", "'no such object %r: %s'", "%", "(", "absolute_name", ",", "err", ")", ")", "return", "obj", "else", ":", "# ImportError is not raised if ``absolute_name`` points to a valid module", "return", "sys", ".", "modules", "[", "absolute_name", "]" ]
Load an object based on an absolute, dotted name
[ "Load", "an", "object", "based", "on", "an", "absolute", "dotted", "name" ]
train
https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/daemon/config/mapping.py#L70-L88
qubell/contrib-python-qubell-client
qubell/api/private/manifest.py
Manifest.patch
def patch(self, path, value=None): """ Set specified value to yaml path. Example: patch('application/components/child/configuration/__locator.application-id','777') Will change child app ID to 777 """ # noinspection PyShadowingNames def pathGet(dictionary, path): for item in path.split("/"): dictionary = dictionary[item] return dictionary # noinspection PyShadowingNames def pathSet(dictionary, path, value): path = path.split("/") key = path[-1] dictionary = pathGet(dictionary, "/".join(path[:-1])) dictionary[key] = value # noinspection PyShadowingNames def pathRm(dictionary, path): path = path.split("/") key = path[-1] dictionary = pathGet(dictionary, "/".join(path[:-1])) del dictionary[key] src = yaml.load(self.content) if value: pathSet(src, path, value) else: pathRm(src, path) self._raw_content = yaml.safe_dump(src, default_flow_style=False) return True
python
def patch(self, path, value=None): """ Set specified value to yaml path. Example: patch('application/components/child/configuration/__locator.application-id','777') Will change child app ID to 777 """ # noinspection PyShadowingNames def pathGet(dictionary, path): for item in path.split("/"): dictionary = dictionary[item] return dictionary # noinspection PyShadowingNames def pathSet(dictionary, path, value): path = path.split("/") key = path[-1] dictionary = pathGet(dictionary, "/".join(path[:-1])) dictionary[key] = value # noinspection PyShadowingNames def pathRm(dictionary, path): path = path.split("/") key = path[-1] dictionary = pathGet(dictionary, "/".join(path[:-1])) del dictionary[key] src = yaml.load(self.content) if value: pathSet(src, path, value) else: pathRm(src, path) self._raw_content = yaml.safe_dump(src, default_flow_style=False) return True
[ "def", "patch", "(", "self", ",", "path", ",", "value", "=", "None", ")", ":", "# noinspection PyShadowingNames", "def", "pathGet", "(", "dictionary", ",", "path", ")", ":", "for", "item", "in", "path", ".", "split", "(", "\"/\"", ")", ":", "dictionary", "=", "dictionary", "[", "item", "]", "return", "dictionary", "# noinspection PyShadowingNames", "def", "pathSet", "(", "dictionary", ",", "path", ",", "value", ")", ":", "path", "=", "path", ".", "split", "(", "\"/\"", ")", "key", "=", "path", "[", "-", "1", "]", "dictionary", "=", "pathGet", "(", "dictionary", ",", "\"/\"", ".", "join", "(", "path", "[", ":", "-", "1", "]", ")", ")", "dictionary", "[", "key", "]", "=", "value", "# noinspection PyShadowingNames", "def", "pathRm", "(", "dictionary", ",", "path", ")", ":", "path", "=", "path", ".", "split", "(", "\"/\"", ")", "key", "=", "path", "[", "-", "1", "]", "dictionary", "=", "pathGet", "(", "dictionary", ",", "\"/\"", ".", "join", "(", "path", "[", ":", "-", "1", "]", ")", ")", "del", "dictionary", "[", "key", "]", "src", "=", "yaml", ".", "load", "(", "self", ".", "content", ")", "if", "value", ":", "pathSet", "(", "src", ",", "path", ",", "value", ")", "else", ":", "pathRm", "(", "src", ",", "path", ")", "self", ".", "_raw_content", "=", "yaml", ".", "safe_dump", "(", "src", ",", "default_flow_style", "=", "False", ")", "return", "True" ]
Set specified value to yaml path. Example: patch('application/components/child/configuration/__locator.application-id','777') Will change child app ID to 777
[ "Set", "specified", "value", "to", "yaml", "path", ".", "Example", ":", "patch", "(", "application", "/", "components", "/", "child", "/", "configuration", "/", "__locator", ".", "application", "-", "id", "777", ")", "Will", "change", "child", "app", "ID", "to", "777" ]
train
https://github.com/qubell/contrib-python-qubell-client/blob/4586ea11d5103c2ff9607d3ed922b5a0991b8845/qubell/api/private/manifest.py#L63-L95
MatterMiners/cobald
cobald/daemon/runners/trio_runner.py
TrioRunner._await_all
async def _await_all(self): """Async component of _run""" delay = 0.0 # we run a top-level nursery that automatically reaps/cancels for us async with trio.open_nursery() as nursery: while self.running.is_set(): await self._start_payloads(nursery=nursery) await trio.sleep(delay) delay = min(delay + 0.1, 1.0) # cancel the scope to cancel all payloads nursery.cancel_scope.cancel()
python
async def _await_all(self): """Async component of _run""" delay = 0.0 # we run a top-level nursery that automatically reaps/cancels for us async with trio.open_nursery() as nursery: while self.running.is_set(): await self._start_payloads(nursery=nursery) await trio.sleep(delay) delay = min(delay + 0.1, 1.0) # cancel the scope to cancel all payloads nursery.cancel_scope.cancel()
[ "async", "def", "_await_all", "(", "self", ")", ":", "delay", "=", "0.0", "# we run a top-level nursery that automatically reaps/cancels for us", "async", "with", "trio", ".", "open_nursery", "(", ")", "as", "nursery", ":", "while", "self", ".", "running", ".", "is_set", "(", ")", ":", "await", "self", ".", "_start_payloads", "(", "nursery", "=", "nursery", ")", "await", "trio", ".", "sleep", "(", "delay", ")", "delay", "=", "min", "(", "delay", "+", "0.1", ",", "1.0", ")", "# cancel the scope to cancel all payloads", "nursery", ".", "cancel_scope", ".", "cancel", "(", ")" ]
Async component of _run
[ "Async", "component", "of", "_run" ]
train
https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/daemon/runners/trio_runner.py#L28-L38
MatterMiners/cobald
cobald/daemon/runners/trio_runner.py
TrioRunner._start_payloads
async def _start_payloads(self, nursery): """Start all queued payloads""" with self._lock: for coroutine in self._payloads: nursery.start_soon(coroutine) self._payloads.clear() await trio.sleep(0)
python
async def _start_payloads(self, nursery): """Start all queued payloads""" with self._lock: for coroutine in self._payloads: nursery.start_soon(coroutine) self._payloads.clear() await trio.sleep(0)
[ "async", "def", "_start_payloads", "(", "self", ",", "nursery", ")", ":", "with", "self", ".", "_lock", ":", "for", "coroutine", "in", "self", ".", "_payloads", ":", "nursery", ".", "start_soon", "(", "coroutine", ")", "self", ".", "_payloads", ".", "clear", "(", ")", "await", "trio", ".", "sleep", "(", "0", ")" ]
Start all queued payloads
[ "Start", "all", "queued", "payloads" ]
train
https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/daemon/runners/trio_runner.py#L40-L46
frostming/atoml
atoml/decoder.py
contains_list
def contains_list(longer, shorter): """Check if longer list starts with shorter list""" if len(longer) <= len(shorter): return False for a, b in zip(shorter, longer): if a != b: return False return True
python
def contains_list(longer, shorter): """Check if longer list starts with shorter list""" if len(longer) <= len(shorter): return False for a, b in zip(shorter, longer): if a != b: return False return True
[ "def", "contains_list", "(", "longer", ",", "shorter", ")", ":", "if", "len", "(", "longer", ")", "<=", "len", "(", "shorter", ")", ":", "return", "False", "for", "a", ",", "b", "in", "zip", "(", "shorter", ",", "longer", ")", ":", "if", "a", "!=", "b", ":", "return", "False", "return", "True" ]
Check if longer list starts with shorter list
[ "Check", "if", "longer", "list", "starts", "with", "shorter", "list" ]
train
https://github.com/frostming/atoml/blob/85414ef77777366887a819a05b496d5279296cd2/atoml/decoder.py#L28-L35
frostming/atoml
atoml/decoder.py
load
def load(f, dict_=dict): """Load and parse toml from a file object An additional argument `dict_` is used to specify the output type """ if not f.read: raise ValueError('The first parameter needs to be a file object, ', '%r is passed' % type(f)) return loads(f.read(), dict_)
python
def load(f, dict_=dict): """Load and parse toml from a file object An additional argument `dict_` is used to specify the output type """ if not f.read: raise ValueError('The first parameter needs to be a file object, ', '%r is passed' % type(f)) return loads(f.read(), dict_)
[ "def", "load", "(", "f", ",", "dict_", "=", "dict", ")", ":", "if", "not", "f", ".", "read", ":", "raise", "ValueError", "(", "'The first parameter needs to be a file object, '", ",", "'%r is passed'", "%", "type", "(", "f", ")", ")", "return", "loads", "(", "f", ".", "read", "(", ")", ",", "dict_", ")" ]
Load and parse toml from a file object An additional argument `dict_` is used to specify the output type
[ "Load", "and", "parse", "toml", "from", "a", "file", "object", "An", "additional", "argument", "dict_", "is", "used", "to", "specify", "the", "output", "type" ]
train
https://github.com/frostming/atoml/blob/85414ef77777366887a819a05b496d5279296cd2/atoml/decoder.py#L466-L473
frostming/atoml
atoml/decoder.py
loads
def loads(content, dict_=dict): """Parse a toml string An additional argument `dict_` is used to specify the output type """ if not isinstance(content, basestring): raise ValueError('The first parameter needs to be a string object, ', '%r is passed' % type(content)) decoder = Decoder(content, dict_) decoder.parse() return decoder.data
python
def loads(content, dict_=dict): """Parse a toml string An additional argument `dict_` is used to specify the output type """ if not isinstance(content, basestring): raise ValueError('The first parameter needs to be a string object, ', '%r is passed' % type(content)) decoder = Decoder(content, dict_) decoder.parse() return decoder.data
[ "def", "loads", "(", "content", ",", "dict_", "=", "dict", ")", ":", "if", "not", "isinstance", "(", "content", ",", "basestring", ")", ":", "raise", "ValueError", "(", "'The first parameter needs to be a string object, '", ",", "'%r is passed'", "%", "type", "(", "content", ")", ")", "decoder", "=", "Decoder", "(", "content", ",", "dict_", ")", "decoder", ".", "parse", "(", ")", "return", "decoder", ".", "data" ]
Parse a toml string An additional argument `dict_` is used to specify the output type
[ "Parse", "a", "toml", "string", "An", "additional", "argument", "dict_", "is", "used", "to", "specify", "the", "output", "type" ]
train
https://github.com/frostming/atoml/blob/85414ef77777366887a819a05b496d5279296cd2/atoml/decoder.py#L476-L485
frostming/atoml
atoml/decoder.py
Converter.convert
def convert(self, line=None, is_end=True): """Read the line content and return the converted value :param line: the line to feed to converter :param is_end: if set to True, will raise an error if the line has something remaining. """ if line is not None: self.line = line if not self.line: raise TomlDecodeError(self.parser.lineno, 'EOF is hit!') token = None self.line = self.line.lstrip() for key, pattern in self.patterns: m = pattern.match(self.line) if m: self.line = self.line[m.end():] handler = getattr(self, 'convert_%s' % key) token = handler(m) break else: raise TomlDecodeError(self.parser.lineno, 'Parsing error: %r' % self.line) if is_end and not BLANK_RE.match(self.line): raise TomlDecodeError(self.parser.lineno, 'Something is remained: %r' % self.line) return token
python
def convert(self, line=None, is_end=True): """Read the line content and return the converted value :param line: the line to feed to converter :param is_end: if set to True, will raise an error if the line has something remaining. """ if line is not None: self.line = line if not self.line: raise TomlDecodeError(self.parser.lineno, 'EOF is hit!') token = None self.line = self.line.lstrip() for key, pattern in self.patterns: m = pattern.match(self.line) if m: self.line = self.line[m.end():] handler = getattr(self, 'convert_%s' % key) token = handler(m) break else: raise TomlDecodeError(self.parser.lineno, 'Parsing error: %r' % self.line) if is_end and not BLANK_RE.match(self.line): raise TomlDecodeError(self.parser.lineno, 'Something is remained: %r' % self.line) return token
[ "def", "convert", "(", "self", ",", "line", "=", "None", ",", "is_end", "=", "True", ")", ":", "if", "line", "is", "not", "None", ":", "self", ".", "line", "=", "line", "if", "not", "self", ".", "line", ":", "raise", "TomlDecodeError", "(", "self", ".", "parser", ".", "lineno", ",", "'EOF is hit!'", ")", "token", "=", "None", "self", ".", "line", "=", "self", ".", "line", ".", "lstrip", "(", ")", "for", "key", ",", "pattern", "in", "self", ".", "patterns", ":", "m", "=", "pattern", ".", "match", "(", "self", ".", "line", ")", "if", "m", ":", "self", ".", "line", "=", "self", ".", "line", "[", "m", ".", "end", "(", ")", ":", "]", "handler", "=", "getattr", "(", "self", ",", "'convert_%s'", "%", "key", ")", "token", "=", "handler", "(", "m", ")", "break", "else", ":", "raise", "TomlDecodeError", "(", "self", ".", "parser", ".", "lineno", ",", "'Parsing error: %r'", "%", "self", ".", "line", ")", "if", "is_end", "and", "not", "BLANK_RE", ".", "match", "(", "self", ".", "line", ")", ":", "raise", "TomlDecodeError", "(", "self", ".", "parser", ".", "lineno", ",", "'Something is remained: %r'", "%", "self", ".", "line", ")", "return", "token" ]
Read the line content and return the converted value :param line: the line to feed to converter :param is_end: if set to True, will raise an error if the line has something remaining.
[ "Read", "the", "line", "content", "and", "return", "the", "converted", "value" ]
train
https://github.com/frostming/atoml/blob/85414ef77777366887a819a05b496d5279296cd2/atoml/decoder.py#L130-L157
frostming/atoml
atoml/decoder.py
Decoder.parse
def parse(self, data=None, table_name=None): """Parse the lines from index i :param data: optional, store the parsed result to it when specified :param table_name: when inside a table array, it is the table name """ temp = self.dict_() sub_table = None is_array = False line = '' while True: line = self._readline() if not line: self._store_table(sub_table, temp, is_array, data=data) break # EOF if BLANK_RE.match(line): continue if TABLE_RE.match(line): next_table = self.split_string( TABLE_RE.match(line).group(1), '.', False) if table_name and not contains_list(next_table, table_name): self._store_table(sub_table, temp, is_array, data=data) break table = cut_list(next_table, table_name) if sub_table == table: raise TomlDecodeError(self.lineno, 'Duplicate table name' 'in origin: %r' % sub_table) else: # different table name self._store_table(sub_table, temp, is_array, data=data) sub_table = table is_array = False elif TABLE_ARRAY_RE.match(line): next_table = self.split_string( TABLE_ARRAY_RE.match(line).group(1), '.', False) if table_name and not contains_list(next_table, table_name): # Out of current loop # write current data dict to table dict self._store_table(sub_table, temp, is_array, data=data) break table = cut_list(next_table, table_name) if sub_table == table and not is_array: raise TomlDecodeError(self.lineno, 'Duplicate name of ' 'table and array of table: %r' % sub_table) else: # Begin a nested loop # Write any temp data to table dict self._store_table(sub_table, temp, is_array, data=data) sub_table = table is_array = True self.parse(temp, next_table) elif KEY_RE.match(line): m = KEY_RE.match(line) keys = self.split_string(m.group(1), '.') value = self.converter.convert(line[m.end():]) if value is None: raise TomlDecodeError(self.lineno, 'Value is missing') self._store_table(keys[:-1], {keys[-1]: value}, data=temp) else: raise TomlDecodeError(self.lineno, 'Pattern is not recognized: %r' % line) # Rollback to the last line for next parse # This will do 
nothing if EOF is hit self.instream.seek(self.instream.tell() - len(line)) self.lineno -= 1
python
def parse(self, data=None, table_name=None): """Parse the lines from index i :param data: optional, store the parsed result to it when specified :param table_name: when inside a table array, it is the table name """ temp = self.dict_() sub_table = None is_array = False line = '' while True: line = self._readline() if not line: self._store_table(sub_table, temp, is_array, data=data) break # EOF if BLANK_RE.match(line): continue if TABLE_RE.match(line): next_table = self.split_string( TABLE_RE.match(line).group(1), '.', False) if table_name and not contains_list(next_table, table_name): self._store_table(sub_table, temp, is_array, data=data) break table = cut_list(next_table, table_name) if sub_table == table: raise TomlDecodeError(self.lineno, 'Duplicate table name' 'in origin: %r' % sub_table) else: # different table name self._store_table(sub_table, temp, is_array, data=data) sub_table = table is_array = False elif TABLE_ARRAY_RE.match(line): next_table = self.split_string( TABLE_ARRAY_RE.match(line).group(1), '.', False) if table_name and not contains_list(next_table, table_name): # Out of current loop # write current data dict to table dict self._store_table(sub_table, temp, is_array, data=data) break table = cut_list(next_table, table_name) if sub_table == table and not is_array: raise TomlDecodeError(self.lineno, 'Duplicate name of ' 'table and array of table: %r' % sub_table) else: # Begin a nested loop # Write any temp data to table dict self._store_table(sub_table, temp, is_array, data=data) sub_table = table is_array = True self.parse(temp, next_table) elif KEY_RE.match(line): m = KEY_RE.match(line) keys = self.split_string(m.group(1), '.') value = self.converter.convert(line[m.end():]) if value is None: raise TomlDecodeError(self.lineno, 'Value is missing') self._store_table(keys[:-1], {keys[-1]: value}, data=temp) else: raise TomlDecodeError(self.lineno, 'Pattern is not recognized: %r' % line) # Rollback to the last line for next parse # This will do 
nothing if EOF is hit self.instream.seek(self.instream.tell() - len(line)) self.lineno -= 1
[ "def", "parse", "(", "self", ",", "data", "=", "None", ",", "table_name", "=", "None", ")", ":", "temp", "=", "self", ".", "dict_", "(", ")", "sub_table", "=", "None", "is_array", "=", "False", "line", "=", "''", "while", "True", ":", "line", "=", "self", ".", "_readline", "(", ")", "if", "not", "line", ":", "self", ".", "_store_table", "(", "sub_table", ",", "temp", ",", "is_array", ",", "data", "=", "data", ")", "break", "# EOF", "if", "BLANK_RE", ".", "match", "(", "line", ")", ":", "continue", "if", "TABLE_RE", ".", "match", "(", "line", ")", ":", "next_table", "=", "self", ".", "split_string", "(", "TABLE_RE", ".", "match", "(", "line", ")", ".", "group", "(", "1", ")", ",", "'.'", ",", "False", ")", "if", "table_name", "and", "not", "contains_list", "(", "next_table", ",", "table_name", ")", ":", "self", ".", "_store_table", "(", "sub_table", ",", "temp", ",", "is_array", ",", "data", "=", "data", ")", "break", "table", "=", "cut_list", "(", "next_table", ",", "table_name", ")", "if", "sub_table", "==", "table", ":", "raise", "TomlDecodeError", "(", "self", ".", "lineno", ",", "'Duplicate table name'", "'in origin: %r'", "%", "sub_table", ")", "else", ":", "# different table name", "self", ".", "_store_table", "(", "sub_table", ",", "temp", ",", "is_array", ",", "data", "=", "data", ")", "sub_table", "=", "table", "is_array", "=", "False", "elif", "TABLE_ARRAY_RE", ".", "match", "(", "line", ")", ":", "next_table", "=", "self", ".", "split_string", "(", "TABLE_ARRAY_RE", ".", "match", "(", "line", ")", ".", "group", "(", "1", ")", ",", "'.'", ",", "False", ")", "if", "table_name", "and", "not", "contains_list", "(", "next_table", ",", "table_name", ")", ":", "# Out of current loop", "# write current data dict to table dict", "self", ".", "_store_table", "(", "sub_table", ",", "temp", ",", "is_array", ",", "data", "=", "data", ")", "break", "table", "=", "cut_list", "(", "next_table", ",", "table_name", ")", "if", "sub_table", "==", "table", "and", "not", 
"is_array", ":", "raise", "TomlDecodeError", "(", "self", ".", "lineno", ",", "'Duplicate name of '", "'table and array of table: %r'", "%", "sub_table", ")", "else", ":", "# Begin a nested loop", "# Write any temp data to table dict", "self", ".", "_store_table", "(", "sub_table", ",", "temp", ",", "is_array", ",", "data", "=", "data", ")", "sub_table", "=", "table", "is_array", "=", "True", "self", ".", "parse", "(", "temp", ",", "next_table", ")", "elif", "KEY_RE", ".", "match", "(", "line", ")", ":", "m", "=", "KEY_RE", ".", "match", "(", "line", ")", "keys", "=", "self", ".", "split_string", "(", "m", ".", "group", "(", "1", ")", ",", "'.'", ")", "value", "=", "self", ".", "converter", ".", "convert", "(", "line", "[", "m", ".", "end", "(", ")", ":", "]", ")", "if", "value", "is", "None", ":", "raise", "TomlDecodeError", "(", "self", ".", "lineno", ",", "'Value is missing'", ")", "self", ".", "_store_table", "(", "keys", "[", ":", "-", "1", "]", ",", "{", "keys", "[", "-", "1", "]", ":", "value", "}", ",", "data", "=", "temp", ")", "else", ":", "raise", "TomlDecodeError", "(", "self", ".", "lineno", ",", "'Pattern is not recognized: %r'", "%", "line", ")", "# Rollback to the last line for next parse", "# This will do nothing if EOF is hit", "self", ".", "instream", ".", "seek", "(", "self", ".", "instream", ".", "tell", "(", ")", "-", "len", "(", "line", ")", ")", "self", ".", "lineno", "-=", "1" ]
Parse the lines from index i :param data: optional, store the parsed result to it when specified :param table_name: when inside a table array, it is the table name
[ "Parse", "the", "lines", "from", "index", "i" ]
train
https://github.com/frostming/atoml/blob/85414ef77777366887a819a05b496d5279296cd2/atoml/decoder.py#L323-L386
frostming/atoml
atoml/decoder.py
Decoder.split_string
def split_string(self, string, splitter='.', allow_empty=True): """Split the string with respect of quotes""" i = 0 rv = [] need_split = False while i < len(string): m = re.compile(_KEY_NAME).match(string, i) if not need_split and m: i = m.end() body = m.group(1) if body[:3] == '"""': body = self.converter.unescape(body[3:-3]) elif body[:3] == "'''": body = body[3:-3] elif body[0] == '"': body = self.converter.unescape(body[1:-1]) elif body[0] == "'": body = body[1:-1] if not allow_empty and not body: raise TomlDecodeError( self.lineno, 'Empty section name is not allowed: %r' % string) rv.append(body) need_split = True elif need_split and string[i] == splitter: need_split = False i += 1 continue else: raise TomlDecodeError(self.lineno, 'Illegal section name: %r' % string) if not need_split: raise TomlDecodeError( self.lineno, 'Empty section name is not allowed: %r' % string) return rv
python
def split_string(self, string, splitter='.', allow_empty=True): """Split the string with respect of quotes""" i = 0 rv = [] need_split = False while i < len(string): m = re.compile(_KEY_NAME).match(string, i) if not need_split and m: i = m.end() body = m.group(1) if body[:3] == '"""': body = self.converter.unescape(body[3:-3]) elif body[:3] == "'''": body = body[3:-3] elif body[0] == '"': body = self.converter.unescape(body[1:-1]) elif body[0] == "'": body = body[1:-1] if not allow_empty and not body: raise TomlDecodeError( self.lineno, 'Empty section name is not allowed: %r' % string) rv.append(body) need_split = True elif need_split and string[i] == splitter: need_split = False i += 1 continue else: raise TomlDecodeError(self.lineno, 'Illegal section name: %r' % string) if not need_split: raise TomlDecodeError( self.lineno, 'Empty section name is not allowed: %r' % string) return rv
[ "def", "split_string", "(", "self", ",", "string", ",", "splitter", "=", "'.'", ",", "allow_empty", "=", "True", ")", ":", "i", "=", "0", "rv", "=", "[", "]", "need_split", "=", "False", "while", "i", "<", "len", "(", "string", ")", ":", "m", "=", "re", ".", "compile", "(", "_KEY_NAME", ")", ".", "match", "(", "string", ",", "i", ")", "if", "not", "need_split", "and", "m", ":", "i", "=", "m", ".", "end", "(", ")", "body", "=", "m", ".", "group", "(", "1", ")", "if", "body", "[", ":", "3", "]", "==", "'\"\"\"'", ":", "body", "=", "self", ".", "converter", ".", "unescape", "(", "body", "[", "3", ":", "-", "3", "]", ")", "elif", "body", "[", ":", "3", "]", "==", "\"'''\"", ":", "body", "=", "body", "[", "3", ":", "-", "3", "]", "elif", "body", "[", "0", "]", "==", "'\"'", ":", "body", "=", "self", ".", "converter", ".", "unescape", "(", "body", "[", "1", ":", "-", "1", "]", ")", "elif", "body", "[", "0", "]", "==", "\"'\"", ":", "body", "=", "body", "[", "1", ":", "-", "1", "]", "if", "not", "allow_empty", "and", "not", "body", ":", "raise", "TomlDecodeError", "(", "self", ".", "lineno", ",", "'Empty section name is not allowed: %r'", "%", "string", ")", "rv", ".", "append", "(", "body", ")", "need_split", "=", "True", "elif", "need_split", "and", "string", "[", "i", "]", "==", "splitter", ":", "need_split", "=", "False", "i", "+=", "1", "continue", "else", ":", "raise", "TomlDecodeError", "(", "self", ".", "lineno", ",", "'Illegal section name: %r'", "%", "string", ")", "if", "not", "need_split", ":", "raise", "TomlDecodeError", "(", "self", ".", "lineno", ",", "'Empty section name is not allowed: %r'", "%", "string", ")", "return", "rv" ]
Split the string with respect of quotes
[ "Split", "the", "string", "with", "respect", "of", "quotes" ]
train
https://github.com/frostming/atoml/blob/85414ef77777366887a819a05b496d5279296cd2/atoml/decoder.py#L428-L463
aroberge/experimental
experimental/transformers/where_clause.py
transform_source
def transform_source(text): '''removes a "where" clause which is identified by the use of "where" as an identifier and ends at the first DEDENT (i.e. decrease in indentation)''' toks = tokenize.generate_tokens(StringIO(text).readline) result = [] where_clause = False for toktype, tokvalue, _, _, _ in toks: if toktype == tokenize.NAME and tokvalue == "where": where_clause = True elif where_clause and toktype == tokenize.DEDENT: where_clause = False continue if not where_clause: result.append((toktype, tokvalue)) return tokenize.untokenize(result)
python
def transform_source(text): '''removes a "where" clause which is identified by the use of "where" as an identifier and ends at the first DEDENT (i.e. decrease in indentation)''' toks = tokenize.generate_tokens(StringIO(text).readline) result = [] where_clause = False for toktype, tokvalue, _, _, _ in toks: if toktype == tokenize.NAME and tokvalue == "where": where_clause = True elif where_clause and toktype == tokenize.DEDENT: where_clause = False continue if not where_clause: result.append((toktype, tokvalue)) return tokenize.untokenize(result)
[ "def", "transform_source", "(", "text", ")", ":", "toks", "=", "tokenize", ".", "generate_tokens", "(", "StringIO", "(", "text", ")", ".", "readline", ")", "result", "=", "[", "]", "where_clause", "=", "False", "for", "toktype", ",", "tokvalue", ",", "_", ",", "_", ",", "_", "in", "toks", ":", "if", "toktype", "==", "tokenize", ".", "NAME", "and", "tokvalue", "==", "\"where\"", ":", "where_clause", "=", "True", "elif", "where_clause", "and", "toktype", "==", "tokenize", ".", "DEDENT", ":", "where_clause", "=", "False", "continue", "if", "not", "where_clause", ":", "result", ".", "append", "(", "(", "toktype", ",", "tokvalue", ")", ")", "return", "tokenize", ".", "untokenize", "(", "result", ")" ]
removes a "where" clause which is identified by the use of "where" as an identifier and ends at the first DEDENT (i.e. decrease in indentation)
[ "removes", "a", "where", "clause", "which", "is", "identified", "by", "the", "use", "of", "where", "as", "an", "identifier", "and", "ends", "at", "the", "first", "DEDENT", "(", "i", ".", "e", ".", "decrease", "in", "indentation", ")" ]
train
https://github.com/aroberge/experimental/blob/031a9be10698b429998436da748b8fdb86f18b47/experimental/transformers/where_clause.py#L31-L46
developersociety/django-glitter
glitter/pages/models.py
Page.is_visible
def is_visible(self): """ Return a boolean if the page is visible in navigation. Pages must have show in navigation set. Regular pages must be published (published and have a current version - checked with `is_published`), pages with a glitter app associated don't need any page versions. """ if self.glitter_app_name: visible = self.show_in_navigation else: visible = self.show_in_navigation and self.is_published return visible
python
def is_visible(self): """ Return a boolean if the page is visible in navigation. Pages must have show in navigation set. Regular pages must be published (published and have a current version - checked with `is_published`), pages with a glitter app associated don't need any page versions. """ if self.glitter_app_name: visible = self.show_in_navigation else: visible = self.show_in_navigation and self.is_published return visible
[ "def", "is_visible", "(", "self", ")", ":", "if", "self", ".", "glitter_app_name", ":", "visible", "=", "self", ".", "show_in_navigation", "else", ":", "visible", "=", "self", ".", "show_in_navigation", "and", "self", ".", "is_published", "return", "visible" ]
Return a boolean if the page is visible in navigation. Pages must have show in navigation set. Regular pages must be published (published and have a current version - checked with `is_published`), pages with a glitter app associated don't need any page versions.
[ "Return", "a", "boolean", "if", "the", "page", "is", "visible", "in", "navigation", "." ]
train
https://github.com/developersociety/django-glitter/blob/2c0280ec83afee80deee94ee3934fc54239c2e87/glitter/pages/models.py#L66-L79
DataMedSci/mcpartools
mcpartools/generatemc.py
main
def main(args=sys.argv[1:]): """ Main function, called from CLI script :return: """ import mcpartools parser = argparse.ArgumentParser() parser.add_argument('-V', '--version', action='version', version=mcpartools.__version__) parser.add_argument('-v', '--verbose', action='count', default=0, help='Give more output. Option is additive, ' 'and can be used up to 3 times') parser.add_argument('-q', '--quiet', action='count', default=0, help='Be silent') parser.add_argument('-w', '--workspace', type=str, help='workspace directory') parser.add_argument('-m', '--mc_run_template', type=str, default=None, help='path to optional MC run script') parser.add_argument('-s', '--scheduler_options', type=str, default=None, help='optional scheduler options: path to a file or list of options in square brackets') parser.add_argument('-e', '--mc_engine_options', type=str, default=None, help='optional MC engine options: path to a file or list of options in square brackets') parser.add_argument('-x', '--external_files', nargs='+', # list may be empty type=str, help='list of external files to be copied into each job working directory') parser.add_argument('-b', '--batch', type=str, default=None, choices=[b.id for b in SchedulerDiscover.supported], help='Available batch systems: {}'.format([b.id for b in SchedulerDiscover.supported])) parser.add_argument('-c', '--collect', type=str, default='mv', choices=Options.collect_methods, help='Available collect methods') parser.add_argument('-p', '--particle_no', dest='particle_no', metavar='particle_no', type=int, required=True, help='number of primary particles per job') parser.add_argument('-j', '--jobs_no', type=int, required=True, help='number of parallel jobs') parser.add_argument('input', type=str, help='path to input configuration') # TODO add grouping of options args = parser.parse_args(args) if args.quiet: if args.quiet == 1: level = "WARNING" elif args.quiet == 2: level = "ERROR" else: level = "CRITICAL" elif args.verbose: level = "DEBUG" 
else: level = "INFO" logging.basicConfig(level=level) opt = Options(args) generator = Generator(options=opt) ret_code = generator.run() return ret_code
python
def main(args=sys.argv[1:]): """ Main function, called from CLI script :return: """ import mcpartools parser = argparse.ArgumentParser() parser.add_argument('-V', '--version', action='version', version=mcpartools.__version__) parser.add_argument('-v', '--verbose', action='count', default=0, help='Give more output. Option is additive, ' 'and can be used up to 3 times') parser.add_argument('-q', '--quiet', action='count', default=0, help='Be silent') parser.add_argument('-w', '--workspace', type=str, help='workspace directory') parser.add_argument('-m', '--mc_run_template', type=str, default=None, help='path to optional MC run script') parser.add_argument('-s', '--scheduler_options', type=str, default=None, help='optional scheduler options: path to a file or list of options in square brackets') parser.add_argument('-e', '--mc_engine_options', type=str, default=None, help='optional MC engine options: path to a file or list of options in square brackets') parser.add_argument('-x', '--external_files', nargs='+', # list may be empty type=str, help='list of external files to be copied into each job working directory') parser.add_argument('-b', '--batch', type=str, default=None, choices=[b.id for b in SchedulerDiscover.supported], help='Available batch systems: {}'.format([b.id for b in SchedulerDiscover.supported])) parser.add_argument('-c', '--collect', type=str, default='mv', choices=Options.collect_methods, help='Available collect methods') parser.add_argument('-p', '--particle_no', dest='particle_no', metavar='particle_no', type=int, required=True, help='number of primary particles per job') parser.add_argument('-j', '--jobs_no', type=int, required=True, help='number of parallel jobs') parser.add_argument('input', type=str, help='path to input configuration') # TODO add grouping of options args = parser.parse_args(args) if args.quiet: if args.quiet == 1: level = "WARNING" elif args.quiet == 2: level = "ERROR" else: level = "CRITICAL" elif args.verbose: level = "DEBUG" 
else: level = "INFO" logging.basicConfig(level=level) opt = Options(args) generator = Generator(options=opt) ret_code = generator.run() return ret_code
[ "def", "main", "(", "args", "=", "sys", ".", "argv", "[", "1", ":", "]", ")", ":", "import", "mcpartools", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "parser", ".", "add_argument", "(", "'-V'", ",", "'--version'", ",", "action", "=", "'version'", ",", "version", "=", "mcpartools", ".", "__version__", ")", "parser", ".", "add_argument", "(", "'-v'", ",", "'--verbose'", ",", "action", "=", "'count'", ",", "default", "=", "0", ",", "help", "=", "'Give more output. Option is additive, '", "'and can be used up to 3 times'", ")", "parser", ".", "add_argument", "(", "'-q'", ",", "'--quiet'", ",", "action", "=", "'count'", ",", "default", "=", "0", ",", "help", "=", "'Be silent'", ")", "parser", ".", "add_argument", "(", "'-w'", ",", "'--workspace'", ",", "type", "=", "str", ",", "help", "=", "'workspace directory'", ")", "parser", ".", "add_argument", "(", "'-m'", ",", "'--mc_run_template'", ",", "type", "=", "str", ",", "default", "=", "None", ",", "help", "=", "'path to optional MC run script'", ")", "parser", ".", "add_argument", "(", "'-s'", ",", "'--scheduler_options'", ",", "type", "=", "str", ",", "default", "=", "None", ",", "help", "=", "'optional scheduler options: path to a file or list of options in square brackets'", ")", "parser", ".", "add_argument", "(", "'-e'", ",", "'--mc_engine_options'", ",", "type", "=", "str", ",", "default", "=", "None", ",", "help", "=", "'optional MC engine options: path to a file or list of options in square brackets'", ")", "parser", ".", "add_argument", "(", "'-x'", ",", "'--external_files'", ",", "nargs", "=", "'+'", ",", "# list may be empty", "type", "=", "str", ",", "help", "=", "'list of external files to be copied into each job working directory'", ")", "parser", ".", "add_argument", "(", "'-b'", ",", "'--batch'", ",", "type", "=", "str", ",", "default", "=", "None", ",", "choices", "=", "[", "b", ".", "id", "for", "b", "in", "SchedulerDiscover", ".", "supported", "]", ",", "help", "=", "'Available 
batch systems: {}'", ".", "format", "(", "[", "b", ".", "id", "for", "b", "in", "SchedulerDiscover", ".", "supported", "]", ")", ")", "parser", ".", "add_argument", "(", "'-c'", ",", "'--collect'", ",", "type", "=", "str", ",", "default", "=", "'mv'", ",", "choices", "=", "Options", ".", "collect_methods", ",", "help", "=", "'Available collect methods'", ")", "parser", ".", "add_argument", "(", "'-p'", ",", "'--particle_no'", ",", "dest", "=", "'particle_no'", ",", "metavar", "=", "'particle_no'", ",", "type", "=", "int", ",", "required", "=", "True", ",", "help", "=", "'number of primary particles per job'", ")", "parser", ".", "add_argument", "(", "'-j'", ",", "'--jobs_no'", ",", "type", "=", "int", ",", "required", "=", "True", ",", "help", "=", "'number of parallel jobs'", ")", "parser", ".", "add_argument", "(", "'input'", ",", "type", "=", "str", ",", "help", "=", "'path to input configuration'", ")", "# TODO add grouping of options", "args", "=", "parser", ".", "parse_args", "(", "args", ")", "if", "args", ".", "quiet", ":", "if", "args", ".", "quiet", "==", "1", ":", "level", "=", "\"WARNING\"", "elif", "args", ".", "quiet", "==", "2", ":", "level", "=", "\"ERROR\"", "else", ":", "level", "=", "\"CRITICAL\"", "elif", "args", ".", "verbose", ":", "level", "=", "\"DEBUG\"", "else", ":", "level", "=", "\"INFO\"", "logging", ".", "basicConfig", "(", "level", "=", "level", ")", "opt", "=", "Options", "(", "args", ")", "generator", "=", "Generator", "(", "options", "=", "opt", ")", "ret_code", "=", "generator", ".", "run", "(", ")", "return", "ret_code" ]
Main function, called from CLI script :return:
[ "Main", "function", "called", "from", "CLI", "script", ":", "return", ":" ]
train
https://github.com/DataMedSci/mcpartools/blob/84f869094d05bf70f09e8aaeca671ddaa1c56ec4/mcpartools/generatemc.py#L10-L92
josiah-wolf-oberholtzer/uqbar
uqbar/apis/APIBuilder.py
APIBuilder.build_node_tree
def build_node_tree(self, source_paths): """ Build a node tree. """ import uqbar.apis root = PackageNode() # Build node tree, top-down for source_path in sorted( source_paths, key=lambda x: uqbar.apis.source_path_to_package_path(x) ): package_path = uqbar.apis.source_path_to_package_path(source_path) parts = package_path.split(".") if not self.document_private_modules and any( part.startswith("_") for part in parts ): continue # Find parent node. parent_node = root if len(parts) > 1: parent_package_path = ".".join(parts[:-1]) try: parent_node = root[parent_package_path] except KeyError: parent_node = root try: if parent_node is root: # Backfill missing parent node. grandparent_node = root if len(parts) > 2: grandparent_node = root[ parent_package_path.rpartition(".")[0] ] parent_node = PackageNode(name=parent_package_path) grandparent_node.append(parent_node) grandparent_node[:] = sorted( grandparent_node, key=lambda x: x.package_path ) except KeyError: parent_node = root # Create or update child node. node_class = ModuleNode if source_path.name == "__init__.py": node_class = PackageNode try: # If the child exists, it was previously backfilled. child_node = root[package_path] child_node.source_path = source_path except KeyError: # Otherwise it needs to be created and appended to the parent. child_node = node_class(name=package_path, source_path=source_path) parent_node.append(child_node) parent_node[:] = sorted(parent_node, key=lambda x: x.package_path) # Build documenters, bottom-up. # This allows parent documenters to easily aggregate their children. for node in root.depth_first(top_down=False): kwargs = dict( document_private_members=self.document_private_members, member_documenter_classes=self.member_documenter_classes, ) if isinstance(node, ModuleNode): node.documenter = self.module_documenter_class( node.package_path, **kwargs ) else: # Collect references to child modules and packages. 
node.documenter = self.module_documenter_class( node.package_path, module_documenters=[ child.documenter for child in node if child.documenter is not None ], **kwargs, ) if ( not self.document_empty_modules and not node.documenter.module_documenters and not node.documenter.member_documenters ): node.parent.remove(node) return root
python
def build_node_tree(self, source_paths): """ Build a node tree. """ import uqbar.apis root = PackageNode() # Build node tree, top-down for source_path in sorted( source_paths, key=lambda x: uqbar.apis.source_path_to_package_path(x) ): package_path = uqbar.apis.source_path_to_package_path(source_path) parts = package_path.split(".") if not self.document_private_modules and any( part.startswith("_") for part in parts ): continue # Find parent node. parent_node = root if len(parts) > 1: parent_package_path = ".".join(parts[:-1]) try: parent_node = root[parent_package_path] except KeyError: parent_node = root try: if parent_node is root: # Backfill missing parent node. grandparent_node = root if len(parts) > 2: grandparent_node = root[ parent_package_path.rpartition(".")[0] ] parent_node = PackageNode(name=parent_package_path) grandparent_node.append(parent_node) grandparent_node[:] = sorted( grandparent_node, key=lambda x: x.package_path ) except KeyError: parent_node = root # Create or update child node. node_class = ModuleNode if source_path.name == "__init__.py": node_class = PackageNode try: # If the child exists, it was previously backfilled. child_node = root[package_path] child_node.source_path = source_path except KeyError: # Otherwise it needs to be created and appended to the parent. child_node = node_class(name=package_path, source_path=source_path) parent_node.append(child_node) parent_node[:] = sorted(parent_node, key=lambda x: x.package_path) # Build documenters, bottom-up. # This allows parent documenters to easily aggregate their children. for node in root.depth_first(top_down=False): kwargs = dict( document_private_members=self.document_private_members, member_documenter_classes=self.member_documenter_classes, ) if isinstance(node, ModuleNode): node.documenter = self.module_documenter_class( node.package_path, **kwargs ) else: # Collect references to child modules and packages. 
node.documenter = self.module_documenter_class( node.package_path, module_documenters=[ child.documenter for child in node if child.documenter is not None ], **kwargs, ) if ( not self.document_empty_modules and not node.documenter.module_documenters and not node.documenter.member_documenters ): node.parent.remove(node) return root
[ "def", "build_node_tree", "(", "self", ",", "source_paths", ")", ":", "import", "uqbar", ".", "apis", "root", "=", "PackageNode", "(", ")", "# Build node tree, top-down", "for", "source_path", "in", "sorted", "(", "source_paths", ",", "key", "=", "lambda", "x", ":", "uqbar", ".", "apis", ".", "source_path_to_package_path", "(", "x", ")", ")", ":", "package_path", "=", "uqbar", ".", "apis", ".", "source_path_to_package_path", "(", "source_path", ")", "parts", "=", "package_path", ".", "split", "(", "\".\"", ")", "if", "not", "self", ".", "document_private_modules", "and", "any", "(", "part", ".", "startswith", "(", "\"_\"", ")", "for", "part", "in", "parts", ")", ":", "continue", "# Find parent node.", "parent_node", "=", "root", "if", "len", "(", "parts", ")", ">", "1", ":", "parent_package_path", "=", "\".\"", ".", "join", "(", "parts", "[", ":", "-", "1", "]", ")", "try", ":", "parent_node", "=", "root", "[", "parent_package_path", "]", "except", "KeyError", ":", "parent_node", "=", "root", "try", ":", "if", "parent_node", "is", "root", ":", "# Backfill missing parent node.", "grandparent_node", "=", "root", "if", "len", "(", "parts", ")", ">", "2", ":", "grandparent_node", "=", "root", "[", "parent_package_path", ".", "rpartition", "(", "\".\"", ")", "[", "0", "]", "]", "parent_node", "=", "PackageNode", "(", "name", "=", "parent_package_path", ")", "grandparent_node", ".", "append", "(", "parent_node", ")", "grandparent_node", "[", ":", "]", "=", "sorted", "(", "grandparent_node", ",", "key", "=", "lambda", "x", ":", "x", ".", "package_path", ")", "except", "KeyError", ":", "parent_node", "=", "root", "# Create or update child node.", "node_class", "=", "ModuleNode", "if", "source_path", ".", "name", "==", "\"__init__.py\"", ":", "node_class", "=", "PackageNode", "try", ":", "# If the child exists, it was previously backfilled.", "child_node", "=", "root", "[", "package_path", "]", "child_node", ".", "source_path", "=", "source_path", "except", "KeyError", 
":", "# Otherwise it needs to be created and appended to the parent.", "child_node", "=", "node_class", "(", "name", "=", "package_path", ",", "source_path", "=", "source_path", ")", "parent_node", ".", "append", "(", "child_node", ")", "parent_node", "[", ":", "]", "=", "sorted", "(", "parent_node", ",", "key", "=", "lambda", "x", ":", "x", ".", "package_path", ")", "# Build documenters, bottom-up.", "# This allows parent documenters to easily aggregate their children.", "for", "node", "in", "root", ".", "depth_first", "(", "top_down", "=", "False", ")", ":", "kwargs", "=", "dict", "(", "document_private_members", "=", "self", ".", "document_private_members", ",", "member_documenter_classes", "=", "self", ".", "member_documenter_classes", ",", ")", "if", "isinstance", "(", "node", ",", "ModuleNode", ")", ":", "node", ".", "documenter", "=", "self", ".", "module_documenter_class", "(", "node", ".", "package_path", ",", "*", "*", "kwargs", ")", "else", ":", "# Collect references to child modules and packages.", "node", ".", "documenter", "=", "self", ".", "module_documenter_class", "(", "node", ".", "package_path", ",", "module_documenters", "=", "[", "child", ".", "documenter", "for", "child", "in", "node", "if", "child", ".", "documenter", "is", "not", "None", "]", ",", "*", "*", "kwargs", ",", ")", "if", "(", "not", "self", ".", "document_empty_modules", "and", "not", "node", ".", "documenter", ".", "module_documenters", "and", "not", "node", ".", "documenter", ".", "member_documenters", ")", ":", "node", ".", "parent", ".", "remove", "(", "node", ")", "return", "root" ]
Build a node tree.
[ "Build", "a", "node", "tree", "." ]
train
https://github.com/josiah-wolf-oberholtzer/uqbar/blob/eca7fefebbbee1e2ae13bf5d6baa838be66b1db6/uqbar/apis/APIBuilder.py#L134-L215
developersociety/django-glitter
glitter/reminders/forms.py
ReminderInlineAdminForm.validate_unique
def validate_unique(self): """ Add this method because django doesn't validate correctly because required fields are excluded. """ unique_checks, date_checks = self.instance._get_unique_checks(exclude=[]) errors = self.instance._perform_unique_checks(unique_checks) if errors: self.add_error(None, errors)
python
def validate_unique(self): """ Add this method because django doesn't validate correctly because required fields are excluded. """ unique_checks, date_checks = self.instance._get_unique_checks(exclude=[]) errors = self.instance._perform_unique_checks(unique_checks) if errors: self.add_error(None, errors)
[ "def", "validate_unique", "(", "self", ")", ":", "unique_checks", ",", "date_checks", "=", "self", ".", "instance", ".", "_get_unique_checks", "(", "exclude", "=", "[", "]", ")", "errors", "=", "self", ".", "instance", ".", "_perform_unique_checks", "(", "unique_checks", ")", "if", "errors", ":", "self", ".", "add_error", "(", "None", ",", "errors", ")" ]
Add this method because django doesn't validate correctly because required fields are excluded.
[ "Add", "this", "method", "because", "django", "doesn", "t", "validate", "correctly", "because", "required", "fields", "are", "excluded", "." ]
train
https://github.com/developersociety/django-glitter/blob/2c0280ec83afee80deee94ee3934fc54239c2e87/glitter/reminders/forms.py#L20-L28
qubell/contrib-python-qubell-client
qubell/monitor/monitor.py
prepare_monitor
def prepare_monitor(tenant=tenant, user=user, password=password, organization=organization, zone_name=zone_name): """ :param tenant: tenant url :param user: user's email :param password: user's password :param zone_name: (optional) zone_name :return: """ router = PrivatePath(tenant, verify_codes=False) payload = { "firstName": "AllSeeingEye", "lastName": "Monitor", "email": user, "password": password, "accept": "true" } try: router.post_quick_sign_up(data=payload) except exceptions.ApiUnauthorizedError: pass platform = QubellPlatform.connect(tenant=tenant, user=user, password=password) org = platform.organization(name=organization) if zone_name: zone = org.zones[zone_name] else: zone = org.zone env = org.environment(name="Monitor for "+zone.name, zone=zone.id) env.init_common_services(with_cloud_account=False, zone_name=zone_name) # todo: move to env policy_name = lambda policy: "{}.{}".format(policy.get('action'), policy.get('parameter')) env_data = env.json() key_id = [p for p in env_data['policies'] if 'provisionVms.publicKeyId' == policy_name(p)][0].get('value') with env as envbulk: envbulk.add_marker('monitor') envbulk.add_property('publicKeyId', 'string', key_id) monitor = Manifest(file=os.path.join(os.path.dirname(__file__), './monitor_manifests/monitor.yml')) monitor_child = Manifest(file=os.path.join(os.path.dirname(__file__), './monitor_manifests/monitor_child.yml')) org.application(manifest=monitor_child, name='monitor-child') app = org.application(manifest=monitor, name='monitor') return platform, org.id, app.id, env.id
python
def prepare_monitor(tenant=tenant, user=user, password=password, organization=organization, zone_name=zone_name): """ :param tenant: tenant url :param user: user's email :param password: user's password :param zone_name: (optional) zone_name :return: """ router = PrivatePath(tenant, verify_codes=False) payload = { "firstName": "AllSeeingEye", "lastName": "Monitor", "email": user, "password": password, "accept": "true" } try: router.post_quick_sign_up(data=payload) except exceptions.ApiUnauthorizedError: pass platform = QubellPlatform.connect(tenant=tenant, user=user, password=password) org = platform.organization(name=organization) if zone_name: zone = org.zones[zone_name] else: zone = org.zone env = org.environment(name="Monitor for "+zone.name, zone=zone.id) env.init_common_services(with_cloud_account=False, zone_name=zone_name) # todo: move to env policy_name = lambda policy: "{}.{}".format(policy.get('action'), policy.get('parameter')) env_data = env.json() key_id = [p for p in env_data['policies'] if 'provisionVms.publicKeyId' == policy_name(p)][0].get('value') with env as envbulk: envbulk.add_marker('monitor') envbulk.add_property('publicKeyId', 'string', key_id) monitor = Manifest(file=os.path.join(os.path.dirname(__file__), './monitor_manifests/monitor.yml')) monitor_child = Manifest(file=os.path.join(os.path.dirname(__file__), './monitor_manifests/monitor_child.yml')) org.application(manifest=monitor_child, name='monitor-child') app = org.application(manifest=monitor, name='monitor') return platform, org.id, app.id, env.id
[ "def", "prepare_monitor", "(", "tenant", "=", "tenant", ",", "user", "=", "user", ",", "password", "=", "password", ",", "organization", "=", "organization", ",", "zone_name", "=", "zone_name", ")", ":", "router", "=", "PrivatePath", "(", "tenant", ",", "verify_codes", "=", "False", ")", "payload", "=", "{", "\"firstName\"", ":", "\"AllSeeingEye\"", ",", "\"lastName\"", ":", "\"Monitor\"", ",", "\"email\"", ":", "user", ",", "\"password\"", ":", "password", ",", "\"accept\"", ":", "\"true\"", "}", "try", ":", "router", ".", "post_quick_sign_up", "(", "data", "=", "payload", ")", "except", "exceptions", ".", "ApiUnauthorizedError", ":", "pass", "platform", "=", "QubellPlatform", ".", "connect", "(", "tenant", "=", "tenant", ",", "user", "=", "user", ",", "password", "=", "password", ")", "org", "=", "platform", ".", "organization", "(", "name", "=", "organization", ")", "if", "zone_name", ":", "zone", "=", "org", ".", "zones", "[", "zone_name", "]", "else", ":", "zone", "=", "org", ".", "zone", "env", "=", "org", ".", "environment", "(", "name", "=", "\"Monitor for \"", "+", "zone", ".", "name", ",", "zone", "=", "zone", ".", "id", ")", "env", ".", "init_common_services", "(", "with_cloud_account", "=", "False", ",", "zone_name", "=", "zone_name", ")", "# todo: move to env", "policy_name", "=", "lambda", "policy", ":", "\"{}.{}\"", ".", "format", "(", "policy", ".", "get", "(", "'action'", ")", ",", "policy", ".", "get", "(", "'parameter'", ")", ")", "env_data", "=", "env", ".", "json", "(", ")", "key_id", "=", "[", "p", "for", "p", "in", "env_data", "[", "'policies'", "]", "if", "'provisionVms.publicKeyId'", "==", "policy_name", "(", "p", ")", "]", "[", "0", "]", ".", "get", "(", "'value'", ")", "with", "env", "as", "envbulk", ":", "envbulk", ".", "add_marker", "(", "'monitor'", ")", "envbulk", ".", "add_property", "(", "'publicKeyId'", ",", "'string'", ",", "key_id", ")", "monitor", "=", "Manifest", "(", "file", "=", "os", ".", "path", ".", "join", "(", "os", 
".", "path", ".", "dirname", "(", "__file__", ")", ",", "'./monitor_manifests/monitor.yml'", ")", ")", "monitor_child", "=", "Manifest", "(", "file", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "'./monitor_manifests/monitor_child.yml'", ")", ")", "org", ".", "application", "(", "manifest", "=", "monitor_child", ",", "name", "=", "'monitor-child'", ")", "app", "=", "org", ".", "application", "(", "manifest", "=", "monitor", ",", "name", "=", "'monitor'", ")", "return", "platform", ",", "org", ".", "id", ",", "app", ".", "id", ",", "env", ".", "id" ]
:param tenant: tenant url :param user: user's email :param password: user's password :param zone_name: (optional) zone_name :return:
[ ":", "param", "tenant", ":", "tenant", "url", ":", "param", "user", ":", "user", "s", "email", ":", "param", "password", ":", "user", "s", "password", ":", "param", "zone_name", ":", "(", "optional", ")", "zone_name", ":", "return", ":" ]
train
https://github.com/qubell/contrib-python-qubell-client/blob/4586ea11d5103c2ff9607d3ed922b5a0991b8845/qubell/monitor/monitor.py#L67-L113
qubell/contrib-python-qubell-client
qubell/monitor/monitor.py
Monitor.launch
def launch(self, timeout=2): """ Hierapp instance, with environment dependencies: - can be launched within short timeout - auto-destroys shortly """ self.start_time = time.time() self.end_time = time.time() instance = self.app.launch(environment=self.env) time.sleep(2) # Instance need time to appear in ui assert instance.running(timeout=timeout), "Monitor didn't get Active state" launched = instance.status == 'Active' instance.reschedule_workflow(workflow_name='destroy', timestamp=self.destroy_interval) assert instance.destroyed(timeout=timeout), "Monitor didn't get Destroyed after short time" stopped = instance.status == 'Destroyed' instance.force_remove() self.end_time = time.time() self.status = launched and stopped
python
def launch(self, timeout=2): """ Hierapp instance, with environment dependencies: - can be launched within short timeout - auto-destroys shortly """ self.start_time = time.time() self.end_time = time.time() instance = self.app.launch(environment=self.env) time.sleep(2) # Instance need time to appear in ui assert instance.running(timeout=timeout), "Monitor didn't get Active state" launched = instance.status == 'Active' instance.reschedule_workflow(workflow_name='destroy', timestamp=self.destroy_interval) assert instance.destroyed(timeout=timeout), "Monitor didn't get Destroyed after short time" stopped = instance.status == 'Destroyed' instance.force_remove() self.end_time = time.time() self.status = launched and stopped
[ "def", "launch", "(", "self", ",", "timeout", "=", "2", ")", ":", "self", ".", "start_time", "=", "time", ".", "time", "(", ")", "self", ".", "end_time", "=", "time", ".", "time", "(", ")", "instance", "=", "self", ".", "app", ".", "launch", "(", "environment", "=", "self", ".", "env", ")", "time", ".", "sleep", "(", "2", ")", "# Instance need time to appear in ui", "assert", "instance", ".", "running", "(", "timeout", "=", "timeout", ")", ",", "\"Monitor didn't get Active state\"", "launched", "=", "instance", ".", "status", "==", "'Active'", "instance", ".", "reschedule_workflow", "(", "workflow_name", "=", "'destroy'", ",", "timestamp", "=", "self", ".", "destroy_interval", ")", "assert", "instance", ".", "destroyed", "(", "timeout", "=", "timeout", ")", ",", "\"Monitor didn't get Destroyed after short time\"", "stopped", "=", "instance", ".", "status", "==", "'Destroyed'", "instance", ".", "force_remove", "(", ")", "self", ".", "end_time", "=", "time", ".", "time", "(", ")", "self", ".", "status", "=", "launched", "and", "stopped" ]
Hierapp instance, with environment dependencies: - can be launched within short timeout - auto-destroys shortly
[ "Hierapp", "instance", "with", "environment", "dependencies", ":", "-", "can", "be", "launched", "within", "short", "timeout", "-", "auto", "-", "destroys", "shortly" ]
train
https://github.com/qubell/contrib-python-qubell-client/blob/4586ea11d5103c2ff9607d3ed922b5a0991b8845/qubell/monitor/monitor.py#L138-L156
qubell/contrib-python-qubell-client
qubell/monitor/monitor.py
Monitor.clone
def clone(self): """ Do not initialize again since everything is ready to launch app. :return: Initialized monitor instance """ return Monitor(org=self.org, app=self.app, env=self.env)
python
def clone(self): """ Do not initialize again since everything is ready to launch app. :return: Initialized monitor instance """ return Monitor(org=self.org, app=self.app, env=self.env)
[ "def", "clone", "(", "self", ")", ":", "return", "Monitor", "(", "org", "=", "self", ".", "org", ",", "app", "=", "self", ".", "app", ",", "env", "=", "self", ".", "env", ")" ]
Do not initialize again since everything is ready to launch app. :return: Initialized monitor instance
[ "Do", "not", "initialize", "again", "since", "everything", "is", "ready", "to", "launch", "app", ".", ":", "return", ":", "Initialized", "monitor", "instance" ]
train
https://github.com/qubell/contrib-python-qubell-client/blob/4586ea11d5103c2ff9607d3ed922b5a0991b8845/qubell/monitor/monitor.py#L168-L173
quizl/quizler
quizler/models.py
Image.from_dict
def from_dict(raw_data): """Create Image from raw dictionary data.""" url = None width = None height = None try: url = raw_data['url'] width = raw_data['width'] height = raw_data['height'] except KeyError: raise ValueError('Unexpected image json structure') except TypeError: # Happens when raw_data is None, i.e. when a term has no image: pass return Image(url, width, height)
python
def from_dict(raw_data): """Create Image from raw dictionary data.""" url = None width = None height = None try: url = raw_data['url'] width = raw_data['width'] height = raw_data['height'] except KeyError: raise ValueError('Unexpected image json structure') except TypeError: # Happens when raw_data is None, i.e. when a term has no image: pass return Image(url, width, height)
[ "def", "from_dict", "(", "raw_data", ")", ":", "url", "=", "None", "width", "=", "None", "height", "=", "None", "try", ":", "url", "=", "raw_data", "[", "'url'", "]", "width", "=", "raw_data", "[", "'width'", "]", "height", "=", "raw_data", "[", "'height'", "]", "except", "KeyError", ":", "raise", "ValueError", "(", "'Unexpected image json structure'", ")", "except", "TypeError", ":", "# Happens when raw_data is None, i.e. when a term has no image:", "pass", "return", "Image", "(", "url", ",", "width", ",", "height", ")" ]
Create Image from raw dictionary data.
[ "Create", "Image", "from", "raw", "dictionary", "data", "." ]
train
https://github.com/quizl/quizler/blob/44b3fd91f7074e7013ffde8147455f45ebdccc46/quizler/models.py#L13-L27
quizl/quizler
quizler/models.py
Image.to_dict
def to_dict(self): """Convert Image into raw dictionary data.""" if not self.url: return None return { 'url': self.url, 'width': self.width, 'height': self.height }
python
def to_dict(self): """Convert Image into raw dictionary data.""" if not self.url: return None return { 'url': self.url, 'width': self.width, 'height': self.height }
[ "def", "to_dict", "(", "self", ")", ":", "if", "not", "self", ".", "url", ":", "return", "None", "return", "{", "'url'", ":", "self", ".", "url", ",", "'width'", ":", "self", ".", "width", ",", "'height'", ":", "self", ".", "height", "}" ]
Convert Image into raw dictionary data.
[ "Convert", "Image", "into", "raw", "dictionary", "data", "." ]
train
https://github.com/quizl/quizler/blob/44b3fd91f7074e7013ffde8147455f45ebdccc46/quizler/models.py#L29-L37
quizl/quizler
quizler/models.py
Term.from_dict
def from_dict(raw_data): """Create Term from raw dictionary data.""" try: definition = raw_data['definition'] term_id = raw_data['id'] image = Image.from_dict(raw_data['image']) rank = raw_data['rank'] term = raw_data['term'] return Term(definition, term_id, image, rank, term) except KeyError: raise ValueError('Unexpected term json structure')
python
def from_dict(raw_data): """Create Term from raw dictionary data.""" try: definition = raw_data['definition'] term_id = raw_data['id'] image = Image.from_dict(raw_data['image']) rank = raw_data['rank'] term = raw_data['term'] return Term(definition, term_id, image, rank, term) except KeyError: raise ValueError('Unexpected term json structure')
[ "def", "from_dict", "(", "raw_data", ")", ":", "try", ":", "definition", "=", "raw_data", "[", "'definition'", "]", "term_id", "=", "raw_data", "[", "'id'", "]", "image", "=", "Image", ".", "from_dict", "(", "raw_data", "[", "'image'", "]", ")", "rank", "=", "raw_data", "[", "'rank'", "]", "term", "=", "raw_data", "[", "'term'", "]", "return", "Term", "(", "definition", ",", "term_id", ",", "image", ",", "rank", ",", "term", ")", "except", "KeyError", ":", "raise", "ValueError", "(", "'Unexpected term json structure'", ")" ]
Create Term from raw dictionary data.
[ "Create", "Term", "from", "raw", "dictionary", "data", "." ]
train
https://github.com/quizl/quizler/blob/44b3fd91f7074e7013ffde8147455f45ebdccc46/quizler/models.py#L60-L70
quizl/quizler
quizler/models.py
Term.to_dict
def to_dict(self): """Convert Term into raw dictionary data.""" return { 'definition': self.definition, 'id': self.term_id, 'image': self.image.to_dict(), 'rank': self.rank, 'term': self.term }
python
def to_dict(self): """Convert Term into raw dictionary data.""" return { 'definition': self.definition, 'id': self.term_id, 'image': self.image.to_dict(), 'rank': self.rank, 'term': self.term }
[ "def", "to_dict", "(", "self", ")", ":", "return", "{", "'definition'", ":", "self", ".", "definition", ",", "'id'", ":", "self", ".", "term_id", ",", "'image'", ":", "self", ".", "image", ".", "to_dict", "(", ")", ",", "'rank'", ":", "self", ".", "rank", ",", "'term'", ":", "self", ".", "term", "}" ]
Convert Term into raw dictionary data.
[ "Convert", "Term", "into", "raw", "dictionary", "data", "." ]
train
https://github.com/quizl/quizler/blob/44b3fd91f7074e7013ffde8147455f45ebdccc46/quizler/models.py#L72-L80
quizl/quizler
quizler/models.py
WordSet.has_common
def has_common(self, other): """Return set of common words between two word sets.""" if not isinstance(other, WordSet): raise ValueError('Can compare only WordSets') return self.term_set & other.term_set
python
def has_common(self, other): """Return set of common words between two word sets.""" if not isinstance(other, WordSet): raise ValueError('Can compare only WordSets') return self.term_set & other.term_set
[ "def", "has_common", "(", "self", ",", "other", ")", ":", "if", "not", "isinstance", "(", "other", ",", "WordSet", ")", ":", "raise", "ValueError", "(", "'Can compare only WordSets'", ")", "return", "self", ".", "term_set", "&", "other", ".", "term_set" ]
Return set of common words between two word sets.
[ "Return", "set", "of", "common", "words", "between", "two", "word", "sets", "." ]
train
https://github.com/quizl/quizler/blob/44b3fd91f7074e7013ffde8147455f45ebdccc46/quizler/models.py#L105-L109
quizl/quizler
quizler/models.py
WordSet.from_dict
def from_dict(raw_data): """Create WordSet from raw dictionary data.""" try: set_id = raw_data['id'] title = raw_data['title'] terms = [Term.from_dict(term) for term in raw_data['terms']] return WordSet(set_id, title, terms) except KeyError: raise ValueError('Unexpected set json structure')
python
def from_dict(raw_data): """Create WordSet from raw dictionary data.""" try: set_id = raw_data['id'] title = raw_data['title'] terms = [Term.from_dict(term) for term in raw_data['terms']] return WordSet(set_id, title, terms) except KeyError: raise ValueError('Unexpected set json structure')
[ "def", "from_dict", "(", "raw_data", ")", ":", "try", ":", "set_id", "=", "raw_data", "[", "'id'", "]", "title", "=", "raw_data", "[", "'title'", "]", "terms", "=", "[", "Term", ".", "from_dict", "(", "term", ")", "for", "term", "in", "raw_data", "[", "'terms'", "]", "]", "return", "WordSet", "(", "set_id", ",", "title", ",", "terms", ")", "except", "KeyError", ":", "raise", "ValueError", "(", "'Unexpected set json structure'", ")" ]
Create WordSet from raw dictionary data.
[ "Create", "WordSet", "from", "raw", "dictionary", "data", "." ]
train
https://github.com/quizl/quizler/blob/44b3fd91f7074e7013ffde8147455f45ebdccc46/quizler/models.py#L117-L125
quizl/quizler
quizler/models.py
WordSet.to_dict
def to_dict(self): """Convert WordSet into raw dictionary data.""" return { 'id': self.set_id, 'title': self.title, 'terms': [term.to_dict() for term in self.terms] }
python
def to_dict(self): """Convert WordSet into raw dictionary data.""" return { 'id': self.set_id, 'title': self.title, 'terms': [term.to_dict() for term in self.terms] }
[ "def", "to_dict", "(", "self", ")", ":", "return", "{", "'id'", ":", "self", ".", "set_id", ",", "'title'", ":", "self", ".", "title", ",", "'terms'", ":", "[", "term", ".", "to_dict", "(", ")", "for", "term", "in", "self", ".", "terms", "]", "}" ]
Convert WordSet into raw dictionary data.
[ "Convert", "WordSet", "into", "raw", "dictionary", "data", "." ]
train
https://github.com/quizl/quizler/blob/44b3fd91f7074e7013ffde8147455f45ebdccc46/quizler/models.py#L127-L133
quantmind/agile-toolkit
agiletoolkit/github/release.py
release
def release(ctx, yes, latest): """Create a new release in github """ m = RepoManager(ctx.obj['agile']) api = m.github_repo() if latest: latest = api.releases.latest() if latest: click.echo(latest['tag_name']) elif m.can_release('sandbox'): branch = m.info['branch'] version = m.validate_version() name = 'v%s' % version body = ['Release %s from agiletoolkit' % name] data = dict( tag_name=name, target_commitish=branch, name=name, body='\n\n'.join(body), draft=False, prerelease=False ) if yes: data = api.releases.create(data=data) m.message('Successfully created a new Github release') click.echo(niceJson(data)) else: click.echo('skipped')
python
def release(ctx, yes, latest): """Create a new release in github """ m = RepoManager(ctx.obj['agile']) api = m.github_repo() if latest: latest = api.releases.latest() if latest: click.echo(latest['tag_name']) elif m.can_release('sandbox'): branch = m.info['branch'] version = m.validate_version() name = 'v%s' % version body = ['Release %s from agiletoolkit' % name] data = dict( tag_name=name, target_commitish=branch, name=name, body='\n\n'.join(body), draft=False, prerelease=False ) if yes: data = api.releases.create(data=data) m.message('Successfully created a new Github release') click.echo(niceJson(data)) else: click.echo('skipped')
[ "def", "release", "(", "ctx", ",", "yes", ",", "latest", ")", ":", "m", "=", "RepoManager", "(", "ctx", ".", "obj", "[", "'agile'", "]", ")", "api", "=", "m", ".", "github_repo", "(", ")", "if", "latest", ":", "latest", "=", "api", ".", "releases", ".", "latest", "(", ")", "if", "latest", ":", "click", ".", "echo", "(", "latest", "[", "'tag_name'", "]", ")", "elif", "m", ".", "can_release", "(", "'sandbox'", ")", ":", "branch", "=", "m", ".", "info", "[", "'branch'", "]", "version", "=", "m", ".", "validate_version", "(", ")", "name", "=", "'v%s'", "%", "version", "body", "=", "[", "'Release %s from agiletoolkit'", "%", "name", "]", "data", "=", "dict", "(", "tag_name", "=", "name", ",", "target_commitish", "=", "branch", ",", "name", "=", "name", ",", "body", "=", "'\\n\\n'", ".", "join", "(", "body", ")", ",", "draft", "=", "False", ",", "prerelease", "=", "False", ")", "if", "yes", ":", "data", "=", "api", ".", "releases", ".", "create", "(", "data", "=", "data", ")", "m", ".", "message", "(", "'Successfully created a new Github release'", ")", "click", ".", "echo", "(", "niceJson", "(", "data", ")", ")", "else", ":", "click", ".", "echo", "(", "'skipped'", ")" ]
Create a new release in github
[ "Create", "a", "new", "release", "in", "github" ]
train
https://github.com/quantmind/agile-toolkit/blob/96028e36a842c57b171907c20583a60d1045fec1/agiletoolkit/github/release.py#L15-L42
josiah-wolf-oberholtzer/uqbar
uqbar/sphinx/book.py
on_builder_inited
def on_builder_inited(app): """ Hooks into Sphinx's ``builder-inited`` event. """ app.cache_db_path = ":memory:" if app.config["uqbar_book_use_cache"]: logger.info(bold("[uqbar-book]"), nonl=True) logger.info(" initializing cache db") app.connection = uqbar.book.sphinx.create_cache_db(app.cache_db_path)
python
def on_builder_inited(app): """ Hooks into Sphinx's ``builder-inited`` event. """ app.cache_db_path = ":memory:" if app.config["uqbar_book_use_cache"]: logger.info(bold("[uqbar-book]"), nonl=True) logger.info(" initializing cache db") app.connection = uqbar.book.sphinx.create_cache_db(app.cache_db_path)
[ "def", "on_builder_inited", "(", "app", ")", ":", "app", ".", "cache_db_path", "=", "\":memory:\"", "if", "app", ".", "config", "[", "\"uqbar_book_use_cache\"", "]", ":", "logger", ".", "info", "(", "bold", "(", "\"[uqbar-book]\"", ")", ",", "nonl", "=", "True", ")", "logger", ".", "info", "(", "\" initializing cache db\"", ")", "app", ".", "connection", "=", "uqbar", ".", "book", ".", "sphinx", ".", "create_cache_db", "(", "app", ".", "cache_db_path", ")" ]
Hooks into Sphinx's ``builder-inited`` event.
[ "Hooks", "into", "Sphinx", "s", "builder", "-", "inited", "event", "." ]
train
https://github.com/josiah-wolf-oberholtzer/uqbar/blob/eca7fefebbbee1e2ae13bf5d6baa838be66b1db6/uqbar/sphinx/book.py#L55-L63
josiah-wolf-oberholtzer/uqbar
uqbar/sphinx/book.py
on_config_inited
def on_config_inited(app, config): """ Hooks into Sphinx's ``config-inited`` event. """ extension_paths = config["uqbar_book_extensions"] or [ "uqbar.book.extensions.GraphExtension" ] app.uqbar_book_extensions = [] for extension_path in extension_paths: module_name, _, class_name = extension_path.rpartition(".") module = importlib.import_module(module_name) extension_class = getattr(module, class_name) extension_class.setup_sphinx(app) app.uqbar_book_extensions.append(extension_class)
python
def on_config_inited(app, config): """ Hooks into Sphinx's ``config-inited`` event. """ extension_paths = config["uqbar_book_extensions"] or [ "uqbar.book.extensions.GraphExtension" ] app.uqbar_book_extensions = [] for extension_path in extension_paths: module_name, _, class_name = extension_path.rpartition(".") module = importlib.import_module(module_name) extension_class = getattr(module, class_name) extension_class.setup_sphinx(app) app.uqbar_book_extensions.append(extension_class)
[ "def", "on_config_inited", "(", "app", ",", "config", ")", ":", "extension_paths", "=", "config", "[", "\"uqbar_book_extensions\"", "]", "or", "[", "\"uqbar.book.extensions.GraphExtension\"", "]", "app", ".", "uqbar_book_extensions", "=", "[", "]", "for", "extension_path", "in", "extension_paths", ":", "module_name", ",", "_", ",", "class_name", "=", "extension_path", ".", "rpartition", "(", "\".\"", ")", "module", "=", "importlib", ".", "import_module", "(", "module_name", ")", "extension_class", "=", "getattr", "(", "module", ",", "class_name", ")", "extension_class", ".", "setup_sphinx", "(", "app", ")", "app", ".", "uqbar_book_extensions", ".", "append", "(", "extension_class", ")" ]
Hooks into Sphinx's ``config-inited`` event.
[ "Hooks", "into", "Sphinx", "s", "config", "-", "inited", "event", "." ]
train
https://github.com/josiah-wolf-oberholtzer/uqbar/blob/eca7fefebbbee1e2ae13bf5d6baa838be66b1db6/uqbar/sphinx/book.py#L66-L79
josiah-wolf-oberholtzer/uqbar
uqbar/sphinx/book.py
on_doctree_read
def on_doctree_read(app, document): """ Hooks into Sphinx's ``doctree-read`` event. """ literal_blocks = uqbar.book.sphinx.collect_literal_blocks(document) cache_mapping = uqbar.book.sphinx.group_literal_blocks_by_cache_path(literal_blocks) node_mapping = {} use_cache = bool(app.config["uqbar_book_use_cache"]) for cache_path, literal_block_groups in cache_mapping.items(): kwargs = dict( extensions=app.uqbar_book_extensions, setup_lines=app.config["uqbar_book_console_setup"], teardown_lines=app.config["uqbar_book_console_teardown"], use_black=bool(app.config["uqbar_book_use_black"]), ) for literal_blocks in literal_block_groups: try: if use_cache: local_node_mapping = uqbar.book.sphinx.interpret_code_blocks_with_cache( literal_blocks, cache_path, app.connection, **kwargs ) else: local_node_mapping = uqbar.book.sphinx.interpret_code_blocks( literal_blocks, **kwargs ) node_mapping.update(local_node_mapping) except ConsoleError as exception: message = exception.args[0].splitlines()[-1] logger.warning(message, location=exception.args[1]) if app.config["uqbar_book_strict"]: raise uqbar.book.sphinx.rebuild_document(document, node_mapping)
python
def on_doctree_read(app, document): """ Hooks into Sphinx's ``doctree-read`` event. """ literal_blocks = uqbar.book.sphinx.collect_literal_blocks(document) cache_mapping = uqbar.book.sphinx.group_literal_blocks_by_cache_path(literal_blocks) node_mapping = {} use_cache = bool(app.config["uqbar_book_use_cache"]) for cache_path, literal_block_groups in cache_mapping.items(): kwargs = dict( extensions=app.uqbar_book_extensions, setup_lines=app.config["uqbar_book_console_setup"], teardown_lines=app.config["uqbar_book_console_teardown"], use_black=bool(app.config["uqbar_book_use_black"]), ) for literal_blocks in literal_block_groups: try: if use_cache: local_node_mapping = uqbar.book.sphinx.interpret_code_blocks_with_cache( literal_blocks, cache_path, app.connection, **kwargs ) else: local_node_mapping = uqbar.book.sphinx.interpret_code_blocks( literal_blocks, **kwargs ) node_mapping.update(local_node_mapping) except ConsoleError as exception: message = exception.args[0].splitlines()[-1] logger.warning(message, location=exception.args[1]) if app.config["uqbar_book_strict"]: raise uqbar.book.sphinx.rebuild_document(document, node_mapping)
[ "def", "on_doctree_read", "(", "app", ",", "document", ")", ":", "literal_blocks", "=", "uqbar", ".", "book", ".", "sphinx", ".", "collect_literal_blocks", "(", "document", ")", "cache_mapping", "=", "uqbar", ".", "book", ".", "sphinx", ".", "group_literal_blocks_by_cache_path", "(", "literal_blocks", ")", "node_mapping", "=", "{", "}", "use_cache", "=", "bool", "(", "app", ".", "config", "[", "\"uqbar_book_use_cache\"", "]", ")", "for", "cache_path", ",", "literal_block_groups", "in", "cache_mapping", ".", "items", "(", ")", ":", "kwargs", "=", "dict", "(", "extensions", "=", "app", ".", "uqbar_book_extensions", ",", "setup_lines", "=", "app", ".", "config", "[", "\"uqbar_book_console_setup\"", "]", ",", "teardown_lines", "=", "app", ".", "config", "[", "\"uqbar_book_console_teardown\"", "]", ",", "use_black", "=", "bool", "(", "app", ".", "config", "[", "\"uqbar_book_use_black\"", "]", ")", ",", ")", "for", "literal_blocks", "in", "literal_block_groups", ":", "try", ":", "if", "use_cache", ":", "local_node_mapping", "=", "uqbar", ".", "book", ".", "sphinx", ".", "interpret_code_blocks_with_cache", "(", "literal_blocks", ",", "cache_path", ",", "app", ".", "connection", ",", "*", "*", "kwargs", ")", "else", ":", "local_node_mapping", "=", "uqbar", ".", "book", ".", "sphinx", ".", "interpret_code_blocks", "(", "literal_blocks", ",", "*", "*", "kwargs", ")", "node_mapping", ".", "update", "(", "local_node_mapping", ")", "except", "ConsoleError", "as", "exception", ":", "message", "=", "exception", ".", "args", "[", "0", "]", ".", "splitlines", "(", ")", "[", "-", "1", "]", "logger", ".", "warning", "(", "message", ",", "location", "=", "exception", ".", "args", "[", "1", "]", ")", "if", "app", ".", "config", "[", "\"uqbar_book_strict\"", "]", ":", "raise", "uqbar", ".", "book", ".", "sphinx", ".", "rebuild_document", "(", "document", ",", "node_mapping", ")" ]
Hooks into Sphinx's ``doctree-read`` event.
[ "Hooks", "into", "Sphinx", "s", "doctree", "-", "read", "event", "." ]
train
https://github.com/josiah-wolf-oberholtzer/uqbar/blob/eca7fefebbbee1e2ae13bf5d6baa838be66b1db6/uqbar/sphinx/book.py#L82-L113
josiah-wolf-oberholtzer/uqbar
uqbar/sphinx/book.py
on_build_finished
def on_build_finished(app, exception): """ Hooks into Sphinx's ``build-finished`` event. """ if not app.config["uqbar_book_use_cache"]: return logger.info("") for row in app.connection.execute("SELECT path, hits FROM cache ORDER BY path"): path, hits = row if not hits: continue logger.info(bold("[uqbar-book]"), nonl=True) logger.info(" Cache hits for {}: {}".format(path, hits))
python
def on_build_finished(app, exception): """ Hooks into Sphinx's ``build-finished`` event. """ if not app.config["uqbar_book_use_cache"]: return logger.info("") for row in app.connection.execute("SELECT path, hits FROM cache ORDER BY path"): path, hits = row if not hits: continue logger.info(bold("[uqbar-book]"), nonl=True) logger.info(" Cache hits for {}: {}".format(path, hits))
[ "def", "on_build_finished", "(", "app", ",", "exception", ")", ":", "if", "not", "app", ".", "config", "[", "\"uqbar_book_use_cache\"", "]", ":", "return", "logger", ".", "info", "(", "\"\"", ")", "for", "row", "in", "app", ".", "connection", ".", "execute", "(", "\"SELECT path, hits FROM cache ORDER BY path\"", ")", ":", "path", ",", "hits", "=", "row", "if", "not", "hits", ":", "continue", "logger", ".", "info", "(", "bold", "(", "\"[uqbar-book]\"", ")", ",", "nonl", "=", "True", ")", "logger", ".", "info", "(", "\" Cache hits for {}: {}\"", ".", "format", "(", "path", ",", "hits", ")", ")" ]
Hooks into Sphinx's ``build-finished`` event.
[ "Hooks", "into", "Sphinx", "s", "build", "-", "finished", "event", "." ]
train
https://github.com/josiah-wolf-oberholtzer/uqbar/blob/eca7fefebbbee1e2ae13bf5d6baa838be66b1db6/uqbar/sphinx/book.py#L116-L128
josiah-wolf-oberholtzer/uqbar
uqbar/sphinx/style.py
handle_class
def handle_class(signature_node, module, object_name, cache): """ Styles ``autoclass`` entries. Adds ``abstract`` prefix to abstract classes. """ class_ = getattr(module, object_name, None) if class_ is None: return if class_ not in cache: cache[class_] = {} attributes = inspect.classify_class_attrs(class_) for attribute in attributes: cache[class_][attribute.name] = attribute if inspect.isabstract(class_): emphasis = nodes.emphasis("abstract ", "abstract ", classes=["property"]) signature_node.insert(0, emphasis)
python
def handle_class(signature_node, module, object_name, cache): """ Styles ``autoclass`` entries. Adds ``abstract`` prefix to abstract classes. """ class_ = getattr(module, object_name, None) if class_ is None: return if class_ not in cache: cache[class_] = {} attributes = inspect.classify_class_attrs(class_) for attribute in attributes: cache[class_][attribute.name] = attribute if inspect.isabstract(class_): emphasis = nodes.emphasis("abstract ", "abstract ", classes=["property"]) signature_node.insert(0, emphasis)
[ "def", "handle_class", "(", "signature_node", ",", "module", ",", "object_name", ",", "cache", ")", ":", "class_", "=", "getattr", "(", "module", ",", "object_name", ",", "None", ")", "if", "class_", "is", "None", ":", "return", "if", "class_", "not", "in", "cache", ":", "cache", "[", "class_", "]", "=", "{", "}", "attributes", "=", "inspect", ".", "classify_class_attrs", "(", "class_", ")", "for", "attribute", "in", "attributes", ":", "cache", "[", "class_", "]", "[", "attribute", ".", "name", "]", "=", "attribute", "if", "inspect", ".", "isabstract", "(", "class_", ")", ":", "emphasis", "=", "nodes", ".", "emphasis", "(", "\"abstract \"", ",", "\"abstract \"", ",", "classes", "=", "[", "\"property\"", "]", ")", "signature_node", ".", "insert", "(", "0", ",", "emphasis", ")" ]
Styles ``autoclass`` entries. Adds ``abstract`` prefix to abstract classes.
[ "Styles", "autoclass", "entries", "." ]
train
https://github.com/josiah-wolf-oberholtzer/uqbar/blob/eca7fefebbbee1e2ae13bf5d6baa838be66b1db6/uqbar/sphinx/style.py#L22-L38
josiah-wolf-oberholtzer/uqbar
uqbar/sphinx/style.py
handle_method
def handle_method(signature_node, module, object_name, cache): """ Styles ``automethod`` entries. Adds ``abstract`` prefix to abstract methods. Adds link to originating class for inherited methods. """ *class_names, attr_name = object_name.split(".") # Handle nested classes class_ = module for class_name in class_names: class_ = getattr(class_, class_name, None) if class_ is None: return attr = getattr(class_, attr_name) try: inspected_attr = cache[class_][attr_name] defining_class = inspected_attr.defining_class except KeyError: # TODO: This is a hack to handle bad interaction between enum and inspect defining_class = class_ if defining_class is not class_: reftarget = "{}.{}".format(defining_class.__module__, defining_class.__name__) xref_node = addnodes.pending_xref( "", refdomain="py", refexplicit=True, reftype="class", reftarget=reftarget ) name_node = nodes.literal( "", "{}".format(defining_class.__name__), classes=["descclassname"] ) xref_node.append(name_node) desc_annotation = list(signature_node.traverse(addnodes.desc_annotation)) index = len(desc_annotation) class_annotation = addnodes.desc_addname() class_annotation.extend([nodes.Text("("), xref_node, nodes.Text(").")]) class_annotation["xml:space"] = "preserve" signature_node.insert(index, class_annotation) else: is_overridden = False for class_ in defining_class.__mro__[1:]: if hasattr(class_, attr_name): is_overridden = True if is_overridden: emphasis = nodes.emphasis( "overridden ", "overridden ", classes=["property"] ) signature_node.insert(0, emphasis) if getattr(attr, "__isabstractmethod__", False): emphasis = nodes.emphasis("abstract", "abstract", classes=["property"]) signature_node.insert(0, emphasis)
python
def handle_method(signature_node, module, object_name, cache): """ Styles ``automethod`` entries. Adds ``abstract`` prefix to abstract methods. Adds link to originating class for inherited methods. """ *class_names, attr_name = object_name.split(".") # Handle nested classes class_ = module for class_name in class_names: class_ = getattr(class_, class_name, None) if class_ is None: return attr = getattr(class_, attr_name) try: inspected_attr = cache[class_][attr_name] defining_class = inspected_attr.defining_class except KeyError: # TODO: This is a hack to handle bad interaction between enum and inspect defining_class = class_ if defining_class is not class_: reftarget = "{}.{}".format(defining_class.__module__, defining_class.__name__) xref_node = addnodes.pending_xref( "", refdomain="py", refexplicit=True, reftype="class", reftarget=reftarget ) name_node = nodes.literal( "", "{}".format(defining_class.__name__), classes=["descclassname"] ) xref_node.append(name_node) desc_annotation = list(signature_node.traverse(addnodes.desc_annotation)) index = len(desc_annotation) class_annotation = addnodes.desc_addname() class_annotation.extend([nodes.Text("("), xref_node, nodes.Text(").")]) class_annotation["xml:space"] = "preserve" signature_node.insert(index, class_annotation) else: is_overridden = False for class_ in defining_class.__mro__[1:]: if hasattr(class_, attr_name): is_overridden = True if is_overridden: emphasis = nodes.emphasis( "overridden ", "overridden ", classes=["property"] ) signature_node.insert(0, emphasis) if getattr(attr, "__isabstractmethod__", False): emphasis = nodes.emphasis("abstract", "abstract", classes=["property"]) signature_node.insert(0, emphasis)
[ "def", "handle_method", "(", "signature_node", ",", "module", ",", "object_name", ",", "cache", ")", ":", "*", "class_names", ",", "attr_name", "=", "object_name", ".", "split", "(", "\".\"", ")", "# Handle nested classes", "class_", "=", "module", "for", "class_name", "in", "class_names", ":", "class_", "=", "getattr", "(", "class_", ",", "class_name", ",", "None", ")", "if", "class_", "is", "None", ":", "return", "attr", "=", "getattr", "(", "class_", ",", "attr_name", ")", "try", ":", "inspected_attr", "=", "cache", "[", "class_", "]", "[", "attr_name", "]", "defining_class", "=", "inspected_attr", ".", "defining_class", "except", "KeyError", ":", "# TODO: This is a hack to handle bad interaction between enum and inspect", "defining_class", "=", "class_", "if", "defining_class", "is", "not", "class_", ":", "reftarget", "=", "\"{}.{}\"", ".", "format", "(", "defining_class", ".", "__module__", ",", "defining_class", ".", "__name__", ")", "xref_node", "=", "addnodes", ".", "pending_xref", "(", "\"\"", ",", "refdomain", "=", "\"py\"", ",", "refexplicit", "=", "True", ",", "reftype", "=", "\"class\"", ",", "reftarget", "=", "reftarget", ")", "name_node", "=", "nodes", ".", "literal", "(", "\"\"", ",", "\"{}\"", ".", "format", "(", "defining_class", ".", "__name__", ")", ",", "classes", "=", "[", "\"descclassname\"", "]", ")", "xref_node", ".", "append", "(", "name_node", ")", "desc_annotation", "=", "list", "(", "signature_node", ".", "traverse", "(", "addnodes", ".", "desc_annotation", ")", ")", "index", "=", "len", "(", "desc_annotation", ")", "class_annotation", "=", "addnodes", ".", "desc_addname", "(", ")", "class_annotation", ".", "extend", "(", "[", "nodes", ".", "Text", "(", "\"(\"", ")", ",", "xref_node", ",", "nodes", ".", "Text", "(", "\").\"", ")", "]", ")", "class_annotation", "[", "\"xml:space\"", "]", "=", "\"preserve\"", "signature_node", ".", "insert", "(", "index", ",", "class_annotation", ")", "else", ":", "is_overridden", "=", "False", "for", 
"class_", "in", "defining_class", ".", "__mro__", "[", "1", ":", "]", ":", "if", "hasattr", "(", "class_", ",", "attr_name", ")", ":", "is_overridden", "=", "True", "if", "is_overridden", ":", "emphasis", "=", "nodes", ".", "emphasis", "(", "\"overridden \"", ",", "\"overridden \"", ",", "classes", "=", "[", "\"property\"", "]", ")", "signature_node", ".", "insert", "(", "0", ",", "emphasis", ")", "if", "getattr", "(", "attr", ",", "\"__isabstractmethod__\"", ",", "False", ")", ":", "emphasis", "=", "nodes", ".", "emphasis", "(", "\"abstract\"", ",", "\"abstract\"", ",", "classes", "=", "[", "\"property\"", "]", ")", "signature_node", ".", "insert", "(", "0", ",", "emphasis", ")" ]
Styles ``automethod`` entries. Adds ``abstract`` prefix to abstract methods. Adds link to originating class for inherited methods.
[ "Styles", "automethod", "entries", "." ]
train
https://github.com/josiah-wolf-oberholtzer/uqbar/blob/eca7fefebbbee1e2ae13bf5d6baa838be66b1db6/uqbar/sphinx/style.py#L41-L89
josiah-wolf-oberholtzer/uqbar
uqbar/sphinx/style.py
on_doctree_read
def on_doctree_read(app, document): """ Hooks into Sphinx's ``doctree-read`` event. """ cache: Dict[type, Dict[str, object]] = {} for desc_node in document.traverse(addnodes.desc): if desc_node.get("domain") != "py": continue signature_node = desc_node.traverse(addnodes.desc_signature)[0] module_name = signature_node.get("module") object_name = signature_node.get("fullname") object_type = desc_node.get("objtype") module = importlib.import_module(module_name) if object_type == "class": handle_class(signature_node, module, object_name, cache) elif object_type in ("method", "attribute", "staticmethod", "classmethod"): handle_method(signature_node, module, object_name, cache)
python
def on_doctree_read(app, document): """ Hooks into Sphinx's ``doctree-read`` event. """ cache: Dict[type, Dict[str, object]] = {} for desc_node in document.traverse(addnodes.desc): if desc_node.get("domain") != "py": continue signature_node = desc_node.traverse(addnodes.desc_signature)[0] module_name = signature_node.get("module") object_name = signature_node.get("fullname") object_type = desc_node.get("objtype") module = importlib.import_module(module_name) if object_type == "class": handle_class(signature_node, module, object_name, cache) elif object_type in ("method", "attribute", "staticmethod", "classmethod"): handle_method(signature_node, module, object_name, cache)
[ "def", "on_doctree_read", "(", "app", ",", "document", ")", ":", "cache", ":", "Dict", "[", "type", ",", "Dict", "[", "str", ",", "object", "]", "]", "=", "{", "}", "for", "desc_node", "in", "document", ".", "traverse", "(", "addnodes", ".", "desc", ")", ":", "if", "desc_node", ".", "get", "(", "\"domain\"", ")", "!=", "\"py\"", ":", "continue", "signature_node", "=", "desc_node", ".", "traverse", "(", "addnodes", ".", "desc_signature", ")", "[", "0", "]", "module_name", "=", "signature_node", ".", "get", "(", "\"module\"", ")", "object_name", "=", "signature_node", ".", "get", "(", "\"fullname\"", ")", "object_type", "=", "desc_node", ".", "get", "(", "\"objtype\"", ")", "module", "=", "importlib", ".", "import_module", "(", "module_name", ")", "if", "object_type", "==", "\"class\"", ":", "handle_class", "(", "signature_node", ",", "module", ",", "object_name", ",", "cache", ")", "elif", "object_type", "in", "(", "\"method\"", ",", "\"attribute\"", ",", "\"staticmethod\"", ",", "\"classmethod\"", ")", ":", "handle_method", "(", "signature_node", ",", "module", ",", "object_name", ",", "cache", ")" ]
Hooks into Sphinx's ``doctree-read`` event.
[ "Hooks", "into", "Sphinx", "s", "doctree", "-", "read", "event", "." ]
train
https://github.com/josiah-wolf-oberholtzer/uqbar/blob/eca7fefebbbee1e2ae13bf5d6baa838be66b1db6/uqbar/sphinx/style.py#L92-L108
josiah-wolf-oberholtzer/uqbar
uqbar/sphinx/style.py
on_builder_inited
def on_builder_inited(app): """ Hooks into Sphinx's ``builder-inited`` event. Used for copying over CSS files to theme directory. """ local_css_path = pathlib.Path(__file__).parent / "uqbar.css" theme_css_path = ( pathlib.Path(app.srcdir) / app.config.html_static_path[0] / "uqbar.css" ) with local_css_path.open("r") as file_pointer: local_css_contents = file_pointer.read() uqbar.io.write(local_css_contents, theme_css_path)
python
def on_builder_inited(app): """ Hooks into Sphinx's ``builder-inited`` event. Used for copying over CSS files to theme directory. """ local_css_path = pathlib.Path(__file__).parent / "uqbar.css" theme_css_path = ( pathlib.Path(app.srcdir) / app.config.html_static_path[0] / "uqbar.css" ) with local_css_path.open("r") as file_pointer: local_css_contents = file_pointer.read() uqbar.io.write(local_css_contents, theme_css_path)
[ "def", "on_builder_inited", "(", "app", ")", ":", "local_css_path", "=", "pathlib", ".", "Path", "(", "__file__", ")", ".", "parent", "/", "\"uqbar.css\"", "theme_css_path", "=", "(", "pathlib", ".", "Path", "(", "app", ".", "srcdir", ")", "/", "app", ".", "config", ".", "html_static_path", "[", "0", "]", "/", "\"uqbar.css\"", ")", "with", "local_css_path", ".", "open", "(", "\"r\"", ")", "as", "file_pointer", ":", "local_css_contents", "=", "file_pointer", ".", "read", "(", ")", "uqbar", ".", "io", ".", "write", "(", "local_css_contents", ",", "theme_css_path", ")" ]
Hooks into Sphinx's ``builder-inited`` event. Used for copying over CSS files to theme directory.
[ "Hooks", "into", "Sphinx", "s", "builder", "-", "inited", "event", "." ]
train
https://github.com/josiah-wolf-oberholtzer/uqbar/blob/eca7fefebbbee1e2ae13bf5d6baa838be66b1db6/uqbar/sphinx/style.py#L111-L123
josiah-wolf-oberholtzer/uqbar
uqbar/sphinx/style.py
setup
def setup(app) -> Dict[str, Any]: """ Sets up Sphinx extension. """ app.connect("doctree-read", on_doctree_read) app.connect("builder-inited", on_builder_inited) app.add_css_file("uqbar.css") app.add_node( nodes.classifier, override=True, html=(visit_classifier, depart_classifier) ) app.add_node( nodes.definition, override=True, html=(visit_definition, depart_definition) ) app.add_node(nodes.term, override=True, html=(visit_term, depart_term)) return { "version": uqbar.__version__, "parallel_read_safe": True, "parallel_write_safe": True, }
python
def setup(app) -> Dict[str, Any]: """ Sets up Sphinx extension. """ app.connect("doctree-read", on_doctree_read) app.connect("builder-inited", on_builder_inited) app.add_css_file("uqbar.css") app.add_node( nodes.classifier, override=True, html=(visit_classifier, depart_classifier) ) app.add_node( nodes.definition, override=True, html=(visit_definition, depart_definition) ) app.add_node(nodes.term, override=True, html=(visit_term, depart_term)) return { "version": uqbar.__version__, "parallel_read_safe": True, "parallel_write_safe": True, }
[ "def", "setup", "(", "app", ")", "->", "Dict", "[", "str", ",", "Any", "]", ":", "app", ".", "connect", "(", "\"doctree-read\"", ",", "on_doctree_read", ")", "app", ".", "connect", "(", "\"builder-inited\"", ",", "on_builder_inited", ")", "app", ".", "add_css_file", "(", "\"uqbar.css\"", ")", "app", ".", "add_node", "(", "nodes", ".", "classifier", ",", "override", "=", "True", ",", "html", "=", "(", "visit_classifier", ",", "depart_classifier", ")", ")", "app", ".", "add_node", "(", "nodes", ".", "definition", ",", "override", "=", "True", ",", "html", "=", "(", "visit_definition", ",", "depart_definition", ")", ")", "app", ".", "add_node", "(", "nodes", ".", "term", ",", "override", "=", "True", ",", "html", "=", "(", "visit_term", ",", "depart_term", ")", ")", "return", "{", "\"version\"", ":", "uqbar", ".", "__version__", ",", "\"parallel_read_safe\"", ":", "True", ",", "\"parallel_write_safe\"", ":", "True", ",", "}" ]
Sets up Sphinx extension.
[ "Sets", "up", "Sphinx", "extension", "." ]
train
https://github.com/josiah-wolf-oberholtzer/uqbar/blob/eca7fefebbbee1e2ae13bf5d6baa838be66b1db6/uqbar/sphinx/style.py#L164-L182
xenadevel/PyXenaManager
xenamanager/xena_app.py
init_xena
def init_xena(api, logger, owner, ip=None, port=57911): """ Create XenaManager object. :param api: cli/rest :param logger: python logger :param owner: owner of the scripting session :param ip: rest server IP :param port: rest server TCP port :return: Xena object :rtype: XenaApp """ if api == ApiType.socket: api_wrapper = XenaCliWrapper(logger) elif api == ApiType.rest: api_wrapper = XenaRestWrapper(logger, ip, port) return XenaApp(logger, owner, api_wrapper)
python
def init_xena(api, logger, owner, ip=None, port=57911): """ Create XenaManager object. :param api: cli/rest :param logger: python logger :param owner: owner of the scripting session :param ip: rest server IP :param port: rest server TCP port :return: Xena object :rtype: XenaApp """ if api == ApiType.socket: api_wrapper = XenaCliWrapper(logger) elif api == ApiType.rest: api_wrapper = XenaRestWrapper(logger, ip, port) return XenaApp(logger, owner, api_wrapper)
[ "def", "init_xena", "(", "api", ",", "logger", ",", "owner", ",", "ip", "=", "None", ",", "port", "=", "57911", ")", ":", "if", "api", "==", "ApiType", ".", "socket", ":", "api_wrapper", "=", "XenaCliWrapper", "(", "logger", ")", "elif", "api", "==", "ApiType", ".", "rest", ":", "api_wrapper", "=", "XenaRestWrapper", "(", "logger", ",", "ip", ",", "port", ")", "return", "XenaApp", "(", "logger", ",", "owner", ",", "api_wrapper", ")" ]
Create XenaManager object. :param api: cli/rest :param logger: python logger :param owner: owner of the scripting session :param ip: rest server IP :param port: rest server TCP port :return: Xena object :rtype: XenaApp
[ "Create", "XenaManager", "object", "." ]
train
https://github.com/xenadevel/PyXenaManager/blob/384ca265f73044b8a8b471f5dd7a6103fc54f4df/xenamanager/xena_app.py#L17-L33
xenadevel/PyXenaManager
xenamanager/xena_app.py
XenaSession.add_chassis
def add_chassis(self, chassis, port=22611, password='xena'): """ Add chassis. XenaManager-2G -> Add Chassis. :param chassis: chassis IP address :param port: chassis port number :param password: chassis password :return: newly created chassis :rtype: xenamanager.xena_app.XenaChassis """ if chassis not in self.chassis_list: try: XenaChassis(self, chassis, port, password) except Exception as error: self.objects.pop('{}/{}'.format(self.owner, chassis)) raise error return self.chassis_list[chassis]
python
def add_chassis(self, chassis, port=22611, password='xena'): """ Add chassis. XenaManager-2G -> Add Chassis. :param chassis: chassis IP address :param port: chassis port number :param password: chassis password :return: newly created chassis :rtype: xenamanager.xena_app.XenaChassis """ if chassis not in self.chassis_list: try: XenaChassis(self, chassis, port, password) except Exception as error: self.objects.pop('{}/{}'.format(self.owner, chassis)) raise error return self.chassis_list[chassis]
[ "def", "add_chassis", "(", "self", ",", "chassis", ",", "port", "=", "22611", ",", "password", "=", "'xena'", ")", ":", "if", "chassis", "not", "in", "self", ".", "chassis_list", ":", "try", ":", "XenaChassis", "(", "self", ",", "chassis", ",", "port", ",", "password", ")", "except", "Exception", "as", "error", ":", "self", ".", "objects", ".", "pop", "(", "'{}/{}'", ".", "format", "(", "self", ".", "owner", ",", "chassis", ")", ")", "raise", "error", "return", "self", ".", "chassis_list", "[", "chassis", "]" ]
Add chassis. XenaManager-2G -> Add Chassis. :param chassis: chassis IP address :param port: chassis port number :param password: chassis password :return: newly created chassis :rtype: xenamanager.xena_app.XenaChassis
[ "Add", "chassis", "." ]
train
https://github.com/xenadevel/PyXenaManager/blob/384ca265f73044b8a8b471f5dd7a6103fc54f4df/xenamanager/xena_app.py#L74-L92
xenadevel/PyXenaManager
xenamanager/xena_app.py
XenaSession.inventory
def inventory(self): """ Get inventory for all chassis. """ for chassis in self.chassis_list.values(): chassis.inventory(modules_inventory=True)
python
def inventory(self): """ Get inventory for all chassis. """ for chassis in self.chassis_list.values(): chassis.inventory(modules_inventory=True)
[ "def", "inventory", "(", "self", ")", ":", "for", "chassis", "in", "self", ".", "chassis_list", ".", "values", "(", ")", ":", "chassis", ".", "inventory", "(", "modules_inventory", "=", "True", ")" ]
Get inventory for all chassis.
[ "Get", "inventory", "for", "all", "chassis", "." ]
train
https://github.com/xenadevel/PyXenaManager/blob/384ca265f73044b8a8b471f5dd7a6103fc54f4df/xenamanager/xena_app.py#L100-L104
xenadevel/PyXenaManager
xenamanager/xena_app.py
XenaSession.reserve_ports
def reserve_ports(self, locations, force=False, reset=True): """ Reserve ports and reset factory defaults. XenaManager-2G -> Reserve/Relinquish Port. XenaManager-2G -> Reserve Port. :param locations: list of ports locations in the form <ip/slot/port> to reserve :param force: True - take forcefully. False - fail if port is reserved by other user :param reset: True - reset port, False - leave port configuration :return: ports dictionary (index: object) """ for location in locations: ip, module, port = location.split('/') self.chassis_list[ip].reserve_ports(['{}/{}'.format(module, port)], force, reset) return self.ports
python
def reserve_ports(self, locations, force=False, reset=True): """ Reserve ports and reset factory defaults. XenaManager-2G -> Reserve/Relinquish Port. XenaManager-2G -> Reserve Port. :param locations: list of ports locations in the form <ip/slot/port> to reserve :param force: True - take forcefully. False - fail if port is reserved by other user :param reset: True - reset port, False - leave port configuration :return: ports dictionary (index: object) """ for location in locations: ip, module, port = location.split('/') self.chassis_list[ip].reserve_ports(['{}/{}'.format(module, port)], force, reset) return self.ports
[ "def", "reserve_ports", "(", "self", ",", "locations", ",", "force", "=", "False", ",", "reset", "=", "True", ")", ":", "for", "location", "in", "locations", ":", "ip", ",", "module", ",", "port", "=", "location", ".", "split", "(", "'/'", ")", "self", ".", "chassis_list", "[", "ip", "]", ".", "reserve_ports", "(", "[", "'{}/{}'", ".", "format", "(", "module", ",", "port", ")", "]", ",", "force", ",", "reset", ")", "return", "self", ".", "ports" ]
Reserve ports and reset factory defaults. XenaManager-2G -> Reserve/Relinquish Port. XenaManager-2G -> Reserve Port. :param locations: list of ports locations in the form <ip/slot/port> to reserve :param force: True - take forcefully. False - fail if port is reserved by other user :param reset: True - reset port, False - leave port configuration :return: ports dictionary (index: object)
[ "Reserve", "ports", "and", "reset", "factory", "defaults", "." ]
train
https://github.com/xenadevel/PyXenaManager/blob/384ca265f73044b8a8b471f5dd7a6103fc54f4df/xenamanager/xena_app.py#L106-L122
xenadevel/PyXenaManager
xenamanager/xena_app.py
XenaSession.start_traffic
def start_traffic(self, blocking=False, *ports): """ Start traffic on list of ports. :param blocking: True - start traffic and wait until traffic ends, False - start traffic and return. :param ports: list of ports to start traffic on. Default - all session ports. """ for chassis, chassis_ports in self._per_chassis_ports(*self._get_operation_ports(*ports)).items(): chassis.start_traffic(False, *chassis_ports) if blocking: for chassis, chassis_ports in self._per_chassis_ports(*self._get_operation_ports(*ports)).items(): chassis.wait_traffic(*chassis_ports)
python
def start_traffic(self, blocking=False, *ports): """ Start traffic on list of ports. :param blocking: True - start traffic and wait until traffic ends, False - start traffic and return. :param ports: list of ports to start traffic on. Default - all session ports. """ for chassis, chassis_ports in self._per_chassis_ports(*self._get_operation_ports(*ports)).items(): chassis.start_traffic(False, *chassis_ports) if blocking: for chassis, chassis_ports in self._per_chassis_ports(*self._get_operation_ports(*ports)).items(): chassis.wait_traffic(*chassis_ports)
[ "def", "start_traffic", "(", "self", ",", "blocking", "=", "False", ",", "*", "ports", ")", ":", "for", "chassis", ",", "chassis_ports", "in", "self", ".", "_per_chassis_ports", "(", "*", "self", ".", "_get_operation_ports", "(", "*", "ports", ")", ")", ".", "items", "(", ")", ":", "chassis", ".", "start_traffic", "(", "False", ",", "*", "chassis_ports", ")", "if", "blocking", ":", "for", "chassis", ",", "chassis_ports", "in", "self", ".", "_per_chassis_ports", "(", "*", "self", ".", "_get_operation_ports", "(", "*", "ports", ")", ")", ".", "items", "(", ")", ":", "chassis", ".", "wait_traffic", "(", "*", "chassis_ports", ")" ]
Start traffic on list of ports. :param blocking: True - start traffic and wait until traffic ends, False - start traffic and return. :param ports: list of ports to start traffic on. Default - all session ports.
[ "Start", "traffic", "on", "list", "of", "ports", "." ]
train
https://github.com/xenadevel/PyXenaManager/blob/384ca265f73044b8a8b471f5dd7a6103fc54f4df/xenamanager/xena_app.py#L133-L144
xenadevel/PyXenaManager
xenamanager/xena_app.py
XenaSession.stop_traffic
def stop_traffic(self, *ports): """ Stop traffic on list of ports. :param ports: list of ports to stop traffic on. Default - all session ports. """ for chassis, chassis_ports in self._per_chassis_ports(*self._get_operation_ports(*ports)).items(): chassis.stop_traffic(*chassis_ports)
python
def stop_traffic(self, *ports): """ Stop traffic on list of ports. :param ports: list of ports to stop traffic on. Default - all session ports. """ for chassis, chassis_ports in self._per_chassis_ports(*self._get_operation_ports(*ports)).items(): chassis.stop_traffic(*chassis_ports)
[ "def", "stop_traffic", "(", "self", ",", "*", "ports", ")", ":", "for", "chassis", ",", "chassis_ports", "in", "self", ".", "_per_chassis_ports", "(", "*", "self", ".", "_get_operation_ports", "(", "*", "ports", ")", ")", ".", "items", "(", ")", ":", "chassis", ".", "stop_traffic", "(", "*", "chassis_ports", ")" ]
Stop traffic on list of ports. :param ports: list of ports to stop traffic on. Default - all session ports.
[ "Stop", "traffic", "on", "list", "of", "ports", "." ]
train
https://github.com/xenadevel/PyXenaManager/blob/384ca265f73044b8a8b471f5dd7a6103fc54f4df/xenamanager/xena_app.py#L146-L153
xenadevel/PyXenaManager
xenamanager/xena_app.py
XenaSession.ports
def ports(self): """ :return: dictionary {name: object} of all ports. """ ports = {} for chassis in self.chassis_list.values(): ports.update({str(p): p for p in chassis.get_objects_by_type('port')}) return ports
python
def ports(self): """ :return: dictionary {name: object} of all ports. """ ports = {} for chassis in self.chassis_list.values(): ports.update({str(p): p for p in chassis.get_objects_by_type('port')}) return ports
[ "def", "ports", "(", "self", ")", ":", "ports", "=", "{", "}", "for", "chassis", "in", "self", ".", "chassis_list", ".", "values", "(", ")", ":", "ports", ".", "update", "(", "{", "str", "(", "p", ")", ":", "p", "for", "p", "in", "chassis", ".", "get_objects_by_type", "(", "'port'", ")", "}", ")", "return", "ports" ]
:return: dictionary {name: object} of all ports.
[ ":", "return", ":", "dictionary", "{", "name", ":", "object", "}", "of", "all", "ports", "." ]
train
https://github.com/xenadevel/PyXenaManager/blob/384ca265f73044b8a8b471f5dd7a6103fc54f4df/xenamanager/xena_app.py#L195-L203
xenadevel/PyXenaManager
xenamanager/xena_app.py
XenaChassis.inventory
def inventory(self, modules_inventory=False): """ Get chassis inventory. :param modules_inventory: True - read modules inventory, false - don't read. """ self.c_info = self.get_attributes() for m_index, m_portcounts in enumerate(self.c_info['c_portcounts'].split()): if int(m_portcounts): module = XenaModule(parent=self, index=m_index) if modules_inventory: module.inventory()
python
def inventory(self, modules_inventory=False): """ Get chassis inventory. :param modules_inventory: True - read modules inventory, false - don't read. """ self.c_info = self.get_attributes() for m_index, m_portcounts in enumerate(self.c_info['c_portcounts'].split()): if int(m_portcounts): module = XenaModule(parent=self, index=m_index) if modules_inventory: module.inventory()
[ "def", "inventory", "(", "self", ",", "modules_inventory", "=", "False", ")", ":", "self", ".", "c_info", "=", "self", ".", "get_attributes", "(", ")", "for", "m_index", ",", "m_portcounts", "in", "enumerate", "(", "self", ".", "c_info", "[", "'c_portcounts'", "]", ".", "split", "(", ")", ")", ":", "if", "int", "(", "m_portcounts", ")", ":", "module", "=", "XenaModule", "(", "parent", "=", "self", ",", "index", "=", "m_index", ")", "if", "modules_inventory", ":", "module", ".", "inventory", "(", ")" ]
Get chassis inventory. :param modules_inventory: True - read modules inventory, false - don't read.
[ "Get", "chassis", "inventory", "." ]
train
https://github.com/xenadevel/PyXenaManager/blob/384ca265f73044b8a8b471f5dd7a6103fc54f4df/xenamanager/xena_app.py#L258-L269
xenadevel/PyXenaManager
xenamanager/xena_app.py
XenaChassis.reserve_ports
def reserve_ports(self, locations, force=False, reset=True): """ Reserve ports and reset factory defaults. XenaManager-2G -> Reserve/Relinquish Port. XenaManager-2G -> Reset port. :param locations: list of ports locations in the form <module/port> to reserve :param force: True - take forcefully, False - fail if port is reserved by other user :param reset: True - reset port, False - leave port configuration :return: ports dictionary (index: object) """ for location in locations: port = XenaPort(parent=self, index=location) port.reserve(force) if reset: port.reset() return self.ports
python
def reserve_ports(self, locations, force=False, reset=True): """ Reserve ports and reset factory defaults. XenaManager-2G -> Reserve/Relinquish Port. XenaManager-2G -> Reset port. :param locations: list of ports locations in the form <module/port> to reserve :param force: True - take forcefully, False - fail if port is reserved by other user :param reset: True - reset port, False - leave port configuration :return: ports dictionary (index: object) """ for location in locations: port = XenaPort(parent=self, index=location) port.reserve(force) if reset: port.reset() return self.ports
[ "def", "reserve_ports", "(", "self", ",", "locations", ",", "force", "=", "False", ",", "reset", "=", "True", ")", ":", "for", "location", "in", "locations", ":", "port", "=", "XenaPort", "(", "parent", "=", "self", ",", "index", "=", "location", ")", "port", ".", "reserve", "(", "force", ")", "if", "reset", ":", "port", ".", "reset", "(", ")", "return", "self", ".", "ports" ]
Reserve ports and reset factory defaults. XenaManager-2G -> Reserve/Relinquish Port. XenaManager-2G -> Reset port. :param locations: list of ports locations in the form <module/port> to reserve :param force: True - take forcefully, False - fail if port is reserved by other user :param reset: True - reset port, False - leave port configuration :return: ports dictionary (index: object)
[ "Reserve", "ports", "and", "reset", "factory", "defaults", "." ]
train
https://github.com/xenadevel/PyXenaManager/blob/384ca265f73044b8a8b471f5dd7a6103fc54f4df/xenamanager/xena_app.py#L271-L289
xenadevel/PyXenaManager
xenamanager/xena_app.py
XenaChassis.start_traffic
def start_traffic(self, blocking=False, *ports): """ Start traffic on list of ports. :param blocking: True - start traffic and wait until traffic ends, False - start traffic and return. :param ports: list of ports to start traffic on. Default - all session ports. """ self._traffic_command('on', *ports) if blocking: self.wait_traffic(*ports)
python
def start_traffic(self, blocking=False, *ports): """ Start traffic on list of ports. :param blocking: True - start traffic and wait until traffic ends, False - start traffic and return. :param ports: list of ports to start traffic on. Default - all session ports. """ self._traffic_command('on', *ports) if blocking: self.wait_traffic(*ports)
[ "def", "start_traffic", "(", "self", ",", "blocking", "=", "False", ",", "*", "ports", ")", ":", "self", ".", "_traffic_command", "(", "'on'", ",", "*", "ports", ")", "if", "blocking", ":", "self", ".", "wait_traffic", "(", "*", "ports", ")" ]
Start traffic on list of ports. :param blocking: True - start traffic and wait until traffic ends, False - start traffic and return. :param ports: list of ports to start traffic on. Default - all session ports.
[ "Start", "traffic", "on", "list", "of", "ports", "." ]
train
https://github.com/xenadevel/PyXenaManager/blob/384ca265f73044b8a8b471f5dd7a6103fc54f4df/xenamanager/xena_app.py#L300-L309
xenadevel/PyXenaManager
xenamanager/xena_app.py
XenaChassis.modules
def modules(self): """ :return: dictionary {index: object} of all modules. """ if not self.get_objects_by_type('module'): self.inventory() return {int(c.index): c for c in self.get_objects_by_type('module')}
python
def modules(self): """ :return: dictionary {index: object} of all modules. """ if not self.get_objects_by_type('module'): self.inventory() return {int(c.index): c for c in self.get_objects_by_type('module')}
[ "def", "modules", "(", "self", ")", ":", "if", "not", "self", ".", "get_objects_by_type", "(", "'module'", ")", ":", "self", ".", "inventory", "(", ")", "return", "{", "int", "(", "c", ".", "index", ")", ":", "c", "for", "c", "in", "self", ".", "get_objects_by_type", "(", "'module'", ")", "}" ]
:return: dictionary {index: object} of all modules.
[ ":", "return", ":", "dictionary", "{", "index", ":", "object", "}", "of", "all", "modules", "." ]
train
https://github.com/xenadevel/PyXenaManager/blob/384ca265f73044b8a8b471f5dd7a6103fc54f4df/xenamanager/xena_app.py#L339-L346
xenadevel/PyXenaManager
xenamanager/xena_app.py
XenaModule.inventory
def inventory(self): """ Get module inventory. """ self.m_info = self.get_attributes() if 'NOTCFP' in self.m_info['m_cfptype']: a = self.get_attribute('m_portcount') m_portcount = int(a) else: m_portcount = int(self.get_attribute('m_cfpconfig').split()[0]) for p_index in range(m_portcount): XenaPort(parent=self, index='{}/{}'.format(self.index, p_index)).inventory()
python
def inventory(self): """ Get module inventory. """ self.m_info = self.get_attributes() if 'NOTCFP' in self.m_info['m_cfptype']: a = self.get_attribute('m_portcount') m_portcount = int(a) else: m_portcount = int(self.get_attribute('m_cfpconfig').split()[0]) for p_index in range(m_portcount): XenaPort(parent=self, index='{}/{}'.format(self.index, p_index)).inventory()
[ "def", "inventory", "(", "self", ")", ":", "self", ".", "m_info", "=", "self", ".", "get_attributes", "(", ")", "if", "'NOTCFP'", "in", "self", ".", "m_info", "[", "'m_cfptype'", "]", ":", "a", "=", "self", ".", "get_attribute", "(", "'m_portcount'", ")", "m_portcount", "=", "int", "(", "a", ")", "else", ":", "m_portcount", "=", "int", "(", "self", ".", "get_attribute", "(", "'m_cfpconfig'", ")", ".", "split", "(", ")", "[", "0", "]", ")", "for", "p_index", "in", "range", "(", "m_portcount", ")", ":", "XenaPort", "(", "parent", "=", "self", ",", "index", "=", "'{}/{}'", ".", "format", "(", "self", ".", "index", ",", "p_index", ")", ")", ".", "inventory", "(", ")" ]
Get module inventory.
[ "Get", "module", "inventory", "." ]
train
https://github.com/xenadevel/PyXenaManager/blob/384ca265f73044b8a8b471f5dd7a6103fc54f4df/xenamanager/xena_app.py#L386-L396
xenadevel/PyXenaManager
xenamanager/xena_app.py
XenaModule.ports
def ports(self): """ :return: dictionary {index: object} of all ports. """ if not self.get_objects_by_type('port'): self.inventory() return {int(p.index.split('/')[1]): p for p in self.get_objects_by_type('port')}
python
def ports(self): """ :return: dictionary {index: object} of all ports. """ if not self.get_objects_by_type('port'): self.inventory() return {int(p.index.split('/')[1]): p for p in self.get_objects_by_type('port')}
[ "def", "ports", "(", "self", ")", ":", "if", "not", "self", ".", "get_objects_by_type", "(", "'port'", ")", ":", "self", ".", "inventory", "(", ")", "return", "{", "int", "(", "p", ".", "index", ".", "split", "(", "'/'", ")", "[", "1", "]", ")", ":", "p", "for", "p", "in", "self", ".", "get_objects_by_type", "(", "'port'", ")", "}" ]
:return: dictionary {index: object} of all ports.
[ ":", "return", ":", "dictionary", "{", "index", ":", "object", "}", "of", "all", "ports", "." ]
train
https://github.com/xenadevel/PyXenaManager/blob/384ca265f73044b8a8b471f5dd7a6103fc54f4df/xenamanager/xena_app.py#L403-L410
MainRo/cyclotron-aio
cyclotron_aio/runner.py
run
def run(entry_point, drivers, loop = None): ''' This is a runner wrapping the cyclotron "run" implementation. It takes an additional parameter to provide a custom asyncio mainloop. ''' program = setup(entry_point, drivers) dispose = program.run() if loop == None: loop = asyncio.get_event_loop() loop.run_forever() dispose()
python
def run(entry_point, drivers, loop = None): ''' This is a runner wrapping the cyclotron "run" implementation. It takes an additional parameter to provide a custom asyncio mainloop. ''' program = setup(entry_point, drivers) dispose = program.run() if loop == None: loop = asyncio.get_event_loop() loop.run_forever() dispose()
[ "def", "run", "(", "entry_point", ",", "drivers", ",", "loop", "=", "None", ")", ":", "program", "=", "setup", "(", "entry_point", ",", "drivers", ")", "dispose", "=", "program", ".", "run", "(", ")", "if", "loop", "==", "None", ":", "loop", "=", "asyncio", ".", "get_event_loop", "(", ")", "loop", ".", "run_forever", "(", ")", "dispose", "(", ")" ]
This is a runner wrapping the cyclotron "run" implementation. It takes an additional parameter to provide a custom asyncio mainloop.
[ "This", "is", "a", "runner", "wrapping", "the", "cyclotron", "run", "implementation", ".", "It", "takes", "an", "additional", "parameter", "to", "provide", "a", "custom", "asyncio", "mainloop", "." ]
train
https://github.com/MainRo/cyclotron-aio/blob/4401076aafe4a72de1d3c4ad6bb7ffa648506f7e/cyclotron_aio/runner.py#L4-L14
developersociety/django-glitter
glitter/blockadmin/blocks.py
register
def register(model, admin=None, category=None): """ Decorator to registering you Admin class. """ def _model_admin_wrapper(admin_class): site.register(model, admin_class=admin_class) if category: site.register_block(model, category) return admin_class return _model_admin_wrapper
python
def register(model, admin=None, category=None): """ Decorator to registering you Admin class. """ def _model_admin_wrapper(admin_class): site.register(model, admin_class=admin_class) if category: site.register_block(model, category) return admin_class return _model_admin_wrapper
[ "def", "register", "(", "model", ",", "admin", "=", "None", ",", "category", "=", "None", ")", ":", "def", "_model_admin_wrapper", "(", "admin_class", ")", ":", "site", ".", "register", "(", "model", ",", "admin_class", "=", "admin_class", ")", "if", "category", ":", "site", ".", "register_block", "(", "model", ",", "category", ")", "return", "admin_class", "return", "_model_admin_wrapper" ]
Decorator to registering you Admin class.
[ "Decorator", "to", "registering", "you", "Admin", "class", "." ]
train
https://github.com/developersociety/django-glitter/blob/2c0280ec83afee80deee94ee3934fc54239c2e87/glitter/blockadmin/blocks.py#L390-L400
developersociety/django-glitter
glitter/blockadmin/blocks.py
BlockAdmin.has_glitter_edit_permission
def has_glitter_edit_permission(self, request, obj): """ Return a boolean if a user has edit access to the glitter object/page this object is on. """ # We're testing for the edit permission here with the glitter object - not the current # object, not the change permission. Once a user has edit access to an object they can edit # all content on it. permission_name = '{}.edit_{}'.format( obj._meta.app_label, obj._meta.model_name, ) has_permission = ( request.user.has_perm(permission_name) or request.user.has_perm(permission_name, obj=obj) ) return has_permission
python
def has_glitter_edit_permission(self, request, obj): """ Return a boolean if a user has edit access to the glitter object/page this object is on. """ # We're testing for the edit permission here with the glitter object - not the current # object, not the change permission. Once a user has edit access to an object they can edit # all content on it. permission_name = '{}.edit_{}'.format( obj._meta.app_label, obj._meta.model_name, ) has_permission = ( request.user.has_perm(permission_name) or request.user.has_perm(permission_name, obj=obj) ) return has_permission
[ "def", "has_glitter_edit_permission", "(", "self", ",", "request", ",", "obj", ")", ":", "# We're testing for the edit permission here with the glitter object - not the current", "# object, not the change permission. Once a user has edit access to an object they can edit", "# all content on it.", "permission_name", "=", "'{}.edit_{}'", ".", "format", "(", "obj", ".", "_meta", ".", "app_label", ",", "obj", ".", "_meta", ".", "model_name", ",", ")", "has_permission", "=", "(", "request", ".", "user", ".", "has_perm", "(", "permission_name", ")", "or", "request", ".", "user", ".", "has_perm", "(", "permission_name", ",", "obj", "=", "obj", ")", ")", "return", "has_permission" ]
Return a boolean if a user has edit access to the glitter object/page this object is on.
[ "Return", "a", "boolean", "if", "a", "user", "has", "edit", "access", "to", "the", "glitter", "object", "/", "page", "this", "object", "is", "on", "." ]
train
https://github.com/developersociety/django-glitter/blob/2c0280ec83afee80deee94ee3934fc54239c2e87/glitter/blockadmin/blocks.py#L201-L216
developersociety/django-glitter
glitter/blockadmin/blocks.py
BlockAdmin.change_view
def change_view(self, request, object_id, form_url='', extra_context=None): """The 'change' admin view for this model.""" obj = self.get_object(request, unquote(object_id)) if obj is None: raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % { 'name': force_text(self.opts.verbose_name), 'key': escape(object_id), }) if not self.has_change_permission(request, obj): raise PermissionDenied content_block = obj.content_block version = content_block.obj_version # Version must not be saved, and must belong to this user if version.version_number or version.owner != request.user: raise PermissionDenied return super().change_view(request, object_id, form_url, extra_context)
python
def change_view(self, request, object_id, form_url='', extra_context=None): """The 'change' admin view for this model.""" obj = self.get_object(request, unquote(object_id)) if obj is None: raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % { 'name': force_text(self.opts.verbose_name), 'key': escape(object_id), }) if not self.has_change_permission(request, obj): raise PermissionDenied content_block = obj.content_block version = content_block.obj_version # Version must not be saved, and must belong to this user if version.version_number or version.owner != request.user: raise PermissionDenied return super().change_view(request, object_id, form_url, extra_context)
[ "def", "change_view", "(", "self", ",", "request", ",", "object_id", ",", "form_url", "=", "''", ",", "extra_context", "=", "None", ")", ":", "obj", "=", "self", ".", "get_object", "(", "request", ",", "unquote", "(", "object_id", ")", ")", "if", "obj", "is", "None", ":", "raise", "Http404", "(", "_", "(", "'%(name)s object with primary key %(key)r does not exist.'", ")", "%", "{", "'name'", ":", "force_text", "(", "self", ".", "opts", ".", "verbose_name", ")", ",", "'key'", ":", "escape", "(", "object_id", ")", ",", "}", ")", "if", "not", "self", ".", "has_change_permission", "(", "request", ",", "obj", ")", ":", "raise", "PermissionDenied", "content_block", "=", "obj", ".", "content_block", "version", "=", "content_block", ".", "obj_version", "# Version must not be saved, and must belong to this user", "if", "version", ".", "version_number", "or", "version", ".", "owner", "!=", "request", ".", "user", ":", "raise", "PermissionDenied", "return", "super", "(", ")", ".", "change_view", "(", "request", ",", "object_id", ",", "form_url", ",", "extra_context", ")" ]
The 'change' admin view for this model.
[ "The", "change", "admin", "view", "for", "this", "model", "." ]
train
https://github.com/developersociety/django-glitter/blob/2c0280ec83afee80deee94ee3934fc54239c2e87/glitter/blockadmin/blocks.py#L264-L285
developersociety/django-glitter
glitter/blockadmin/blocks.py
BlockAdmin.response_change
def response_change(self, request, obj): """Determine the HttpResponse for the change_view stage.""" opts = self.opts.app_label, self.opts.model_name pk_value = obj._get_pk_val() if '_continue' in request.POST: msg = _( 'The %(name)s block was changed successfully. You may edit it again below.' ) % {'name': force_text(self.opts.verbose_name)} self.message_user(request, msg, messages.SUCCESS) # We redirect to the save and continue page, which updates the # parent window in javascript and redirects back to the edit page # in javascript. return HttpResponseRedirect(reverse( 'admin:%s_%s_continue' % opts, args=(pk_value,), current_app=self.admin_site.name )) # Update column and close popup - don't bother with a message as they won't see it return self.response_rerender(request, obj, 'admin/glitter/update_column.html')
python
def response_change(self, request, obj): """Determine the HttpResponse for the change_view stage.""" opts = self.opts.app_label, self.opts.model_name pk_value = obj._get_pk_val() if '_continue' in request.POST: msg = _( 'The %(name)s block was changed successfully. You may edit it again below.' ) % {'name': force_text(self.opts.verbose_name)} self.message_user(request, msg, messages.SUCCESS) # We redirect to the save and continue page, which updates the # parent window in javascript and redirects back to the edit page # in javascript. return HttpResponseRedirect(reverse( 'admin:%s_%s_continue' % opts, args=(pk_value,), current_app=self.admin_site.name )) # Update column and close popup - don't bother with a message as they won't see it return self.response_rerender(request, obj, 'admin/glitter/update_column.html')
[ "def", "response_change", "(", "self", ",", "request", ",", "obj", ")", ":", "opts", "=", "self", ".", "opts", ".", "app_label", ",", "self", ".", "opts", ".", "model_name", "pk_value", "=", "obj", ".", "_get_pk_val", "(", ")", "if", "'_continue'", "in", "request", ".", "POST", ":", "msg", "=", "_", "(", "'The %(name)s block was changed successfully. You may edit it again below.'", ")", "%", "{", "'name'", ":", "force_text", "(", "self", ".", "opts", ".", "verbose_name", ")", "}", "self", ".", "message_user", "(", "request", ",", "msg", ",", "messages", ".", "SUCCESS", ")", "# We redirect to the save and continue page, which updates the", "# parent window in javascript and redirects back to the edit page", "# in javascript.", "return", "HttpResponseRedirect", "(", "reverse", "(", "'admin:%s_%s_continue'", "%", "opts", ",", "args", "=", "(", "pk_value", ",", ")", ",", "current_app", "=", "self", ".", "admin_site", ".", "name", ")", ")", "# Update column and close popup - don't bother with a message as they won't see it", "return", "self", ".", "response_rerender", "(", "request", ",", "obj", ",", "'admin/glitter/update_column.html'", ")" ]
Determine the HttpResponse for the change_view stage.
[ "Determine", "the", "HttpResponse", "for", "the", "change_view", "stage", "." ]
train
https://github.com/developersociety/django-glitter/blob/2c0280ec83afee80deee94ee3934fc54239c2e87/glitter/blockadmin/blocks.py#L287-L309
Karaage-Cluster/python-tldap
tldap/query.py
get_filter_item
def get_filter_item(name: str, operation: bytes, value: bytes) -> bytes: """ A field could be found for this term, try to get filter string for it. """ assert isinstance(name, str) assert isinstance(value, bytes) if operation is None: return filter_format(b"(%s=%s)", [name, value]) elif operation == "contains": assert value != "" return filter_format(b"(%s=*%s*)", [name, value]) else: raise ValueError("Unknown search operation %s" % operation)
python
def get_filter_item(name: str, operation: bytes, value: bytes) -> bytes: """ A field could be found for this term, try to get filter string for it. """ assert isinstance(name, str) assert isinstance(value, bytes) if operation is None: return filter_format(b"(%s=%s)", [name, value]) elif operation == "contains": assert value != "" return filter_format(b"(%s=*%s*)", [name, value]) else: raise ValueError("Unknown search operation %s" % operation)
[ "def", "get_filter_item", "(", "name", ":", "str", ",", "operation", ":", "bytes", ",", "value", ":", "bytes", ")", "->", "bytes", ":", "assert", "isinstance", "(", "name", ",", "str", ")", "assert", "isinstance", "(", "value", ",", "bytes", ")", "if", "operation", "is", "None", ":", "return", "filter_format", "(", "b\"(%s=%s)\"", ",", "[", "name", ",", "value", "]", ")", "elif", "operation", "==", "\"contains\"", ":", "assert", "value", "!=", "\"\"", "return", "filter_format", "(", "b\"(%s=*%s*)\"", ",", "[", "name", ",", "value", "]", ")", "else", ":", "raise", "ValueError", "(", "\"Unknown search operation %s\"", "%", "operation", ")" ]
A field could be found for this term, try to get filter string for it.
[ "A", "field", "could", "be", "found", "for", "this", "term", "try", "to", "get", "filter", "string", "for", "it", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/query.py#L28-L40
Karaage-Cluster/python-tldap
tldap/query.py
get_filter
def get_filter(q: tldap.Q, fields: Dict[str, tldap.fields.Field], pk: str): """ Translate the Q tree into a filter string to search for, or None if no results possible. """ # check the details are valid if q.negated and len(q.children) == 1: op = b"!" elif q.connector == tldap.Q.AND: op = b"&" elif q.connector == tldap.Q.OR: op = b"|" else: raise ValueError("Invalid value of op found") # scan through every child search = [] for child in q.children: # if this child is a node, then descend into it if isinstance(child, tldap.Q): search.append(get_filter(child, fields, pk)) else: # otherwise get the values in this node name, value = child # split the name if possible name, _, operation = name.rpartition("__") if name == "": name, operation = operation, None # replace pk with the real attribute if name == "pk": name = pk # DN is a special case if name == "dn": dn_name = "entryDN:" if isinstance(value, list): s = [] for v in value: assert isinstance(v, str) v = v.encode('utf_8') s.append(get_filter_item(dn_name, operation, v)) search.append("(&".join(search) + ")") # or process just the single value else: assert isinstance(value, str) v = value.encode('utf_8') search.append(get_filter_item(dn_name, operation, v)) continue # try to find field associated with name field = fields[name] if isinstance(value, list) and len(value) == 1: value = value[0] assert isinstance(value, str) # process as list if isinstance(value, list): s = [] for v in value: v = field.value_to_filter(v) s.append(get_filter_item(name, operation, v)) search.append(b"(&".join(search) + b")") # or process just the single value else: value = field.value_to_filter(value) search.append(get_filter_item(name, operation, value)) # output the results if len(search) == 1 and not q.negated: # just one non-negative term, return it return search[0] else: # multiple terms return b"(" + op + b"".join(search) + b")"
python
def get_filter(q: tldap.Q, fields: Dict[str, tldap.fields.Field], pk: str): """ Translate the Q tree into a filter string to search for, or None if no results possible. """ # check the details are valid if q.negated and len(q.children) == 1: op = b"!" elif q.connector == tldap.Q.AND: op = b"&" elif q.connector == tldap.Q.OR: op = b"|" else: raise ValueError("Invalid value of op found") # scan through every child search = [] for child in q.children: # if this child is a node, then descend into it if isinstance(child, tldap.Q): search.append(get_filter(child, fields, pk)) else: # otherwise get the values in this node name, value = child # split the name if possible name, _, operation = name.rpartition("__") if name == "": name, operation = operation, None # replace pk with the real attribute if name == "pk": name = pk # DN is a special case if name == "dn": dn_name = "entryDN:" if isinstance(value, list): s = [] for v in value: assert isinstance(v, str) v = v.encode('utf_8') s.append(get_filter_item(dn_name, operation, v)) search.append("(&".join(search) + ")") # or process just the single value else: assert isinstance(value, str) v = value.encode('utf_8') search.append(get_filter_item(dn_name, operation, v)) continue # try to find field associated with name field = fields[name] if isinstance(value, list) and len(value) == 1: value = value[0] assert isinstance(value, str) # process as list if isinstance(value, list): s = [] for v in value: v = field.value_to_filter(v) s.append(get_filter_item(name, operation, v)) search.append(b"(&".join(search) + b")") # or process just the single value else: value = field.value_to_filter(value) search.append(get_filter_item(name, operation, value)) # output the results if len(search) == 1 and not q.negated: # just one non-negative term, return it return search[0] else: # multiple terms return b"(" + op + b"".join(search) + b")"
[ "def", "get_filter", "(", "q", ":", "tldap", ".", "Q", ",", "fields", ":", "Dict", "[", "str", ",", "tldap", ".", "fields", ".", "Field", "]", ",", "pk", ":", "str", ")", ":", "# check the details are valid", "if", "q", ".", "negated", "and", "len", "(", "q", ".", "children", ")", "==", "1", ":", "op", "=", "b\"!\"", "elif", "q", ".", "connector", "==", "tldap", ".", "Q", ".", "AND", ":", "op", "=", "b\"&\"", "elif", "q", ".", "connector", "==", "tldap", ".", "Q", ".", "OR", ":", "op", "=", "b\"|\"", "else", ":", "raise", "ValueError", "(", "\"Invalid value of op found\"", ")", "# scan through every child", "search", "=", "[", "]", "for", "child", "in", "q", ".", "children", ":", "# if this child is a node, then descend into it", "if", "isinstance", "(", "child", ",", "tldap", ".", "Q", ")", ":", "search", ".", "append", "(", "get_filter", "(", "child", ",", "fields", ",", "pk", ")", ")", "else", ":", "# otherwise get the values in this node", "name", ",", "value", "=", "child", "# split the name if possible", "name", ",", "_", ",", "operation", "=", "name", ".", "rpartition", "(", "\"__\"", ")", "if", "name", "==", "\"\"", ":", "name", ",", "operation", "=", "operation", ",", "None", "# replace pk with the real attribute", "if", "name", "==", "\"pk\"", ":", "name", "=", "pk", "# DN is a special case", "if", "name", "==", "\"dn\"", ":", "dn_name", "=", "\"entryDN:\"", "if", "isinstance", "(", "value", ",", "list", ")", ":", "s", "=", "[", "]", "for", "v", "in", "value", ":", "assert", "isinstance", "(", "v", ",", "str", ")", "v", "=", "v", ".", "encode", "(", "'utf_8'", ")", "s", ".", "append", "(", "get_filter_item", "(", "dn_name", ",", "operation", ",", "v", ")", ")", "search", ".", "append", "(", "\"(&\"", ".", "join", "(", "search", ")", "+", "\")\"", ")", "# or process just the single value", "else", ":", "assert", "isinstance", "(", "value", ",", "str", ")", "v", "=", "value", ".", "encode", "(", "'utf_8'", ")", "search", ".", "append", "(", 
"get_filter_item", "(", "dn_name", ",", "operation", ",", "v", ")", ")", "continue", "# try to find field associated with name", "field", "=", "fields", "[", "name", "]", "if", "isinstance", "(", "value", ",", "list", ")", "and", "len", "(", "value", ")", "==", "1", ":", "value", "=", "value", "[", "0", "]", "assert", "isinstance", "(", "value", ",", "str", ")", "# process as list", "if", "isinstance", "(", "value", ",", "list", ")", ":", "s", "=", "[", "]", "for", "v", "in", "value", ":", "v", "=", "field", ".", "value_to_filter", "(", "v", ")", "s", ".", "append", "(", "get_filter_item", "(", "name", ",", "operation", ",", "v", ")", ")", "search", ".", "append", "(", "b\"(&\"", ".", "join", "(", "search", ")", "+", "b\")\"", ")", "# or process just the single value", "else", ":", "value", "=", "field", ".", "value_to_filter", "(", "value", ")", "search", ".", "append", "(", "get_filter_item", "(", "name", ",", "operation", ",", "value", ")", ")", "# output the results", "if", "len", "(", "search", ")", "==", "1", "and", "not", "q", ".", "negated", ":", "# just one non-negative term, return it", "return", "search", "[", "0", "]", "else", ":", "# multiple terms", "return", "b\"(\"", "+", "op", "+", "b\"\"", ".", "join", "(", "search", ")", "+", "b\")\"" ]
Translate the Q tree into a filter string to search for, or None if no results possible.
[ "Translate", "the", "Q", "tree", "into", "a", "filter", "string", "to", "search", "for", "or", "None", "if", "no", "results", "possible", "." ]
train
https://github.com/Karaage-Cluster/python-tldap/blob/61f1af74a3648cb6491e7eeb1ee2eb395d67bf59/tldap/query.py#L43-L120
josiah-wolf-oberholtzer/uqbar
uqbar/cli/CLI.py
CLI.program_name
def program_name(self): r"""The name of the script, callable from the command line. """ name = "-".join( word.lower() for word in uqbar.strings.delimit_words(type(self).__name__) ) return name
python
def program_name(self): r"""The name of the script, callable from the command line. """ name = "-".join( word.lower() for word in uqbar.strings.delimit_words(type(self).__name__) ) return name
[ "def", "program_name", "(", "self", ")", ":", "name", "=", "\"-\"", ".", "join", "(", "word", ".", "lower", "(", ")", "for", "word", "in", "uqbar", ".", "strings", ".", "delimit_words", "(", "type", "(", "self", ")", ".", "__name__", ")", ")", "return", "name" ]
r"""The name of the script, callable from the command line.
[ "r", "The", "name", "of", "the", "script", "callable", "from", "the", "command", "line", "." ]
train
https://github.com/josiah-wolf-oberholtzer/uqbar/blob/eca7fefebbbee1e2ae13bf5d6baa838be66b1db6/uqbar/cli/CLI.py#L137-L143
jamescooke/flake8-aaa
src/flake8_aaa/helpers.py
node_is_result_assignment
def node_is_result_assignment(node: ast.AST) -> bool: """ Args: node: An ``ast`` node. Returns: bool: ``node`` corresponds to the code ``result =``, assignment to the ``result `` variable. Note: Performs a very weak test that the line starts with 'result =' rather than testing the tokens. """ # `.first_token` is added by asttokens token = node.first_token # type: ignore return token.line.strip().startswith('result =')
python
def node_is_result_assignment(node: ast.AST) -> bool: """ Args: node: An ``ast`` node. Returns: bool: ``node`` corresponds to the code ``result =``, assignment to the ``result `` variable. Note: Performs a very weak test that the line starts with 'result =' rather than testing the tokens. """ # `.first_token` is added by asttokens token = node.first_token # type: ignore return token.line.strip().startswith('result =')
[ "def", "node_is_result_assignment", "(", "node", ":", "ast", ".", "AST", ")", "->", "bool", ":", "# `.first_token` is added by asttokens", "token", "=", "node", ".", "first_token", "# type: ignore", "return", "token", ".", "line", ".", "strip", "(", ")", ".", "startswith", "(", "'result ='", ")" ]
Args: node: An ``ast`` node. Returns: bool: ``node`` corresponds to the code ``result =``, assignment to the ``result `` variable. Note: Performs a very weak test that the line starts with 'result =' rather than testing the tokens.
[ "Args", ":", "node", ":", "An", "ast", "node", "." ]
train
https://github.com/jamescooke/flake8-aaa/blob/29938b96845fe32ced4358ba66af3b3be2a37794/src/flake8_aaa/helpers.py#L63-L78
jamescooke/flake8-aaa
src/flake8_aaa/helpers.py
node_is_noop
def node_is_noop(node: ast.AST) -> bool: """ Node does nothing. """ return isinstance(node.value, ast.Str) if isinstance(node, ast.Expr) else isinstance(node, ast.Pass)
python
def node_is_noop(node: ast.AST) -> bool: """ Node does nothing. """ return isinstance(node.value, ast.Str) if isinstance(node, ast.Expr) else isinstance(node, ast.Pass)
[ "def", "node_is_noop", "(", "node", ":", "ast", ".", "AST", ")", "->", "bool", ":", "return", "isinstance", "(", "node", ".", "value", ",", "ast", ".", "Str", ")", "if", "isinstance", "(", "node", ",", "ast", ".", "Expr", ")", "else", "isinstance", "(", "node", ",", "ast", ".", "Pass", ")" ]
Node does nothing.
[ "Node", "does", "nothing", "." ]
train
https://github.com/jamescooke/flake8-aaa/blob/29938b96845fe32ced4358ba66af3b3be2a37794/src/flake8_aaa/helpers.py#L105-L109
jamescooke/flake8-aaa
src/flake8_aaa/helpers.py
function_is_noop
def function_is_noop(function_node: ast.FunctionDef) -> bool: """ Function does nothing - is just ``pass`` or docstring. """ return all(node_is_noop(n) for n in function_node.body)
python
def function_is_noop(function_node: ast.FunctionDef) -> bool: """ Function does nothing - is just ``pass`` or docstring. """ return all(node_is_noop(n) for n in function_node.body)
[ "def", "function_is_noop", "(", "function_node", ":", "ast", ".", "FunctionDef", ")", "->", "bool", ":", "return", "all", "(", "node_is_noop", "(", "n", ")", "for", "n", "in", "function_node", ".", "body", ")" ]
Function does nothing - is just ``pass`` or docstring.
[ "Function", "does", "nothing", "-", "is", "just", "pass", "or", "docstring", "." ]
train
https://github.com/jamescooke/flake8-aaa/blob/29938b96845fe32ced4358ba66af3b3be2a37794/src/flake8_aaa/helpers.py#L112-L116
jamescooke/flake8-aaa
src/flake8_aaa/helpers.py
add_node_parents
def add_node_parents(root: ast.AST) -> None: """ Adds "parent" attribute to all child nodes of passed node. Code taken from https://stackoverflow.com/a/43311383/1286705 """ for node in ast.walk(root): for child in ast.iter_child_nodes(node): child.parent = node
python
def add_node_parents(root: ast.AST) -> None: """ Adds "parent" attribute to all child nodes of passed node. Code taken from https://stackoverflow.com/a/43311383/1286705 """ for node in ast.walk(root): for child in ast.iter_child_nodes(node): child.parent = node
[ "def", "add_node_parents", "(", "root", ":", "ast", ".", "AST", ")", "->", "None", ":", "for", "node", "in", "ast", ".", "walk", "(", "root", ")", ":", "for", "child", "in", "ast", ".", "iter_child_nodes", "(", "node", ")", ":", "child", ".", "parent", "=", "node" ]
Adds "parent" attribute to all child nodes of passed node. Code taken from https://stackoverflow.com/a/43311383/1286705
[ "Adds", "parent", "attribute", "to", "all", "child", "nodes", "of", "passed", "node", "." ]
train
https://github.com/jamescooke/flake8-aaa/blob/29938b96845fe32ced4358ba66af3b3be2a37794/src/flake8_aaa/helpers.py#L146-L154
jamescooke/flake8-aaa
src/flake8_aaa/helpers.py
build_footprint
def build_footprint(node: ast.AST, first_line_no: int) -> Set[int]: """ Generates a list of lines that the passed node covers, relative to the marked lines list - i.e. start of function is line 0. """ return set( range( get_first_token(node).start[0] - first_line_no, get_last_token(node).end[0] - first_line_no + 1, ) )
python
def build_footprint(node: ast.AST, first_line_no: int) -> Set[int]: """ Generates a list of lines that the passed node covers, relative to the marked lines list - i.e. start of function is line 0. """ return set( range( get_first_token(node).start[0] - first_line_no, get_last_token(node).end[0] - first_line_no + 1, ) )
[ "def", "build_footprint", "(", "node", ":", "ast", ".", "AST", ",", "first_line_no", ":", "int", ")", "->", "Set", "[", "int", "]", ":", "return", "set", "(", "range", "(", "get_first_token", "(", "node", ")", ".", "start", "[", "0", "]", "-", "first_line_no", ",", "get_last_token", "(", "node", ")", ".", "end", "[", "0", "]", "-", "first_line_no", "+", "1", ",", ")", ")" ]
Generates a list of lines that the passed node covers, relative to the marked lines list - i.e. start of function is line 0.
[ "Generates", "a", "list", "of", "lines", "that", "the", "passed", "node", "covers", "relative", "to", "the", "marked", "lines", "list", "-", "i", ".", "e", ".", "start", "of", "function", "is", "line", "0", "." ]
train
https://github.com/jamescooke/flake8-aaa/blob/29938b96845fe32ced4358ba66af3b3be2a37794/src/flake8_aaa/helpers.py#L157-L167
jamescooke/flake8-aaa
src/flake8_aaa/helpers.py
filter_arrange_nodes
def filter_arrange_nodes(nodes: List[ast.stmt], max_line_number: int) -> List[ast.stmt]: """ Finds all nodes that are before the ``max_line_number`` and are not docstrings or ``pass``. """ return [ node for node in nodes if node.lineno < max_line_number and not isinstance(node, ast.Pass) and not (isinstance(node, ast.Expr) and isinstance(node.value, ast.Str)) ]
python
def filter_arrange_nodes(nodes: List[ast.stmt], max_line_number: int) -> List[ast.stmt]: """ Finds all nodes that are before the ``max_line_number`` and are not docstrings or ``pass``. """ return [ node for node in nodes if node.lineno < max_line_number and not isinstance(node, ast.Pass) and not (isinstance(node, ast.Expr) and isinstance(node.value, ast.Str)) ]
[ "def", "filter_arrange_nodes", "(", "nodes", ":", "List", "[", "ast", ".", "stmt", "]", ",", "max_line_number", ":", "int", ")", "->", "List", "[", "ast", ".", "stmt", "]", ":", "return", "[", "node", "for", "node", "in", "nodes", "if", "node", ".", "lineno", "<", "max_line_number", "and", "not", "isinstance", "(", "node", ",", "ast", ".", "Pass", ")", "and", "not", "(", "isinstance", "(", "node", ",", "ast", ".", "Expr", ")", "and", "isinstance", "(", "node", ".", "value", ",", "ast", ".", "Str", ")", ")", "]" ]
Finds all nodes that are before the ``max_line_number`` and are not docstrings or ``pass``.
[ "Finds", "all", "nodes", "that", "are", "before", "the", "max_line_number", "and", "are", "not", "docstrings", "or", "pass", "." ]
train
https://github.com/jamescooke/flake8-aaa/blob/29938b96845fe32ced4358ba66af3b3be2a37794/src/flake8_aaa/helpers.py#L170-L178
jamescooke/flake8-aaa
src/flake8_aaa/helpers.py
filter_assert_nodes
def filter_assert_nodes(nodes: List[ast.stmt], min_line_number: int) -> List[ast.stmt]: """ Finds all nodes that are after the ``min_line_number`` """ return [node for node in nodes if node.lineno > min_line_number]
python
def filter_assert_nodes(nodes: List[ast.stmt], min_line_number: int) -> List[ast.stmt]: """ Finds all nodes that are after the ``min_line_number`` """ return [node for node in nodes if node.lineno > min_line_number]
[ "def", "filter_assert_nodes", "(", "nodes", ":", "List", "[", "ast", ".", "stmt", "]", ",", "min_line_number", ":", "int", ")", "->", "List", "[", "ast", ".", "stmt", "]", ":", "return", "[", "node", "for", "node", "in", "nodes", "if", "node", ".", "lineno", ">", "min_line_number", "]" ]
Finds all nodes that are after the ``min_line_number``
[ "Finds", "all", "nodes", "that", "are", "after", "the", "min_line_number" ]
train
https://github.com/jamescooke/flake8-aaa/blob/29938b96845fe32ced4358ba66af3b3be2a37794/src/flake8_aaa/helpers.py#L181-L185
jamescooke/flake8-aaa
src/flake8_aaa/helpers.py
find_stringy_lines
def find_stringy_lines(tree: ast.AST, first_line_no: int) -> Set[int]: """ Finds all lines that contain a string in a tree, usually a function. These lines will be ignored when searching for blank lines. """ str_footprints = set() for node in ast.walk(tree): if isinstance(node, ast.Str): str_footprints.update(build_footprint(node, first_line_no)) return str_footprints
python
def find_stringy_lines(tree: ast.AST, first_line_no: int) -> Set[int]: """ Finds all lines that contain a string in a tree, usually a function. These lines will be ignored when searching for blank lines. """ str_footprints = set() for node in ast.walk(tree): if isinstance(node, ast.Str): str_footprints.update(build_footprint(node, first_line_no)) return str_footprints
[ "def", "find_stringy_lines", "(", "tree", ":", "ast", ".", "AST", ",", "first_line_no", ":", "int", ")", "->", "Set", "[", "int", "]", ":", "str_footprints", "=", "set", "(", ")", "for", "node", "in", "ast", ".", "walk", "(", "tree", ")", ":", "if", "isinstance", "(", "node", ",", "ast", ".", "Str", ")", ":", "str_footprints", ".", "update", "(", "build_footprint", "(", "node", ",", "first_line_no", ")", ")", "return", "str_footprints" ]
Finds all lines that contain a string in a tree, usually a function. These lines will be ignored when searching for blank lines.
[ "Finds", "all", "lines", "that", "contain", "a", "string", "in", "a", "tree", "usually", "a", "function", ".", "These", "lines", "will", "be", "ignored", "when", "searching", "for", "blank", "lines", "." ]
train
https://github.com/jamescooke/flake8-aaa/blob/29938b96845fe32ced4358ba66af3b3be2a37794/src/flake8_aaa/helpers.py#L188-L197
jamescooke/flake8-aaa
src/flake8_aaa/function.py
Function.check_all
def check_all(self) -> Generator[AAAError, None, None]: """ Run everything required for checking this function. Returns: A generator of errors. Raises: ValidationError: A non-recoverable linting error is found. """ # Function def if function_is_noop(self.node): return self.mark_bl() self.mark_def() # ACT # Load act block and kick out when none is found self.act_node = self.load_act_node() self.act_block = Block.build_act(self.act_node.node, self.node) act_block_first_line_no, act_block_last_line_no = self.act_block.get_span(0) # ARRANGE self.arrange_block = Block.build_arrange(self.node.body, act_block_first_line_no) # ASSERT assert self.act_node self.assert_block = Block.build_assert(self.node.body, act_block_last_line_no) # SPACING for block in ['arrange', 'act', 'assert']: self_block = getattr(self, '{}_block'.format(block)) try: span = self_block.get_span(self.first_line_no) except EmptyBlock: continue self.line_markers.update(span, self_block.line_type) yield from self.line_markers.check_arrange_act_spacing() yield from self.line_markers.check_act_assert_spacing() yield from self.line_markers.check_blank_lines()
python
def check_all(self) -> Generator[AAAError, None, None]: """ Run everything required for checking this function. Returns: A generator of errors. Raises: ValidationError: A non-recoverable linting error is found. """ # Function def if function_is_noop(self.node): return self.mark_bl() self.mark_def() # ACT # Load act block and kick out when none is found self.act_node = self.load_act_node() self.act_block = Block.build_act(self.act_node.node, self.node) act_block_first_line_no, act_block_last_line_no = self.act_block.get_span(0) # ARRANGE self.arrange_block = Block.build_arrange(self.node.body, act_block_first_line_no) # ASSERT assert self.act_node self.assert_block = Block.build_assert(self.node.body, act_block_last_line_no) # SPACING for block in ['arrange', 'act', 'assert']: self_block = getattr(self, '{}_block'.format(block)) try: span = self_block.get_span(self.first_line_no) except EmptyBlock: continue self.line_markers.update(span, self_block.line_type) yield from self.line_markers.check_arrange_act_spacing() yield from self.line_markers.check_act_assert_spacing() yield from self.line_markers.check_blank_lines()
[ "def", "check_all", "(", "self", ")", "->", "Generator", "[", "AAAError", ",", "None", ",", "None", "]", ":", "# Function def", "if", "function_is_noop", "(", "self", ".", "node", ")", ":", "return", "self", ".", "mark_bl", "(", ")", "self", ".", "mark_def", "(", ")", "# ACT", "# Load act block and kick out when none is found", "self", ".", "act_node", "=", "self", ".", "load_act_node", "(", ")", "self", ".", "act_block", "=", "Block", ".", "build_act", "(", "self", ".", "act_node", ".", "node", ",", "self", ".", "node", ")", "act_block_first_line_no", ",", "act_block_last_line_no", "=", "self", ".", "act_block", ".", "get_span", "(", "0", ")", "# ARRANGE", "self", ".", "arrange_block", "=", "Block", ".", "build_arrange", "(", "self", ".", "node", ".", "body", ",", "act_block_first_line_no", ")", "# ASSERT", "assert", "self", ".", "act_node", "self", ".", "assert_block", "=", "Block", ".", "build_assert", "(", "self", ".", "node", ".", "body", ",", "act_block_last_line_no", ")", "# SPACING", "for", "block", "in", "[", "'arrange'", ",", "'act'", ",", "'assert'", "]", ":", "self_block", "=", "getattr", "(", "self", ",", "'{}_block'", ".", "format", "(", "block", ")", ")", "try", ":", "span", "=", "self_block", ".", "get_span", "(", "self", ".", "first_line_no", ")", "except", "EmptyBlock", ":", "continue", "self", ".", "line_markers", ".", "update", "(", "span", ",", "self_block", ".", "line_type", ")", "yield", "from", "self", ".", "line_markers", ".", "check_arrange_act_spacing", "(", ")", "yield", "from", "self", ".", "line_markers", ".", "check_act_assert_spacing", "(", ")", "yield", "from", "self", ".", "line_markers", ".", "check_blank_lines", "(", ")" ]
Run everything required for checking this function. Returns: A generator of errors. Raises: ValidationError: A non-recoverable linting error is found.
[ "Run", "everything", "required", "for", "checking", "this", "function", "." ]
train
https://github.com/jamescooke/flake8-aaa/blob/29938b96845fe32ced4358ba66af3b3be2a37794/src/flake8_aaa/function.py#L64-L99
jamescooke/flake8-aaa
src/flake8_aaa/function.py
Function.load_act_node
def load_act_node(self) -> ActNode: """ Raises: ValidationError: AAA01 when no act block is found and AAA02 when multiple act blocks are found. """ act_nodes = ActNode.build_body(self.node.body) if not act_nodes: raise ValidationError(self.first_line_no, self.node.col_offset, 'AAA01 no Act block found in test') # Allow `pytest.raises` and `self.assertRaises()` in assert nodes - if # any of the additional nodes are `pytest.raises`, then raise for a_n in act_nodes[1:]: if a_n.block_type in [ActNodeType.marked_act, ActNodeType.result_assignment]: raise ValidationError( self.first_line_no, self.node.col_offset, 'AAA02 multiple Act blocks found in test', ) return act_nodes[0]
python
def load_act_node(self) -> ActNode: """ Raises: ValidationError: AAA01 when no act block is found and AAA02 when multiple act blocks are found. """ act_nodes = ActNode.build_body(self.node.body) if not act_nodes: raise ValidationError(self.first_line_no, self.node.col_offset, 'AAA01 no Act block found in test') # Allow `pytest.raises` and `self.assertRaises()` in assert nodes - if # any of the additional nodes are `pytest.raises`, then raise for a_n in act_nodes[1:]: if a_n.block_type in [ActNodeType.marked_act, ActNodeType.result_assignment]: raise ValidationError( self.first_line_no, self.node.col_offset, 'AAA02 multiple Act blocks found in test', ) return act_nodes[0]
[ "def", "load_act_node", "(", "self", ")", "->", "ActNode", ":", "act_nodes", "=", "ActNode", ".", "build_body", "(", "self", ".", "node", ".", "body", ")", "if", "not", "act_nodes", ":", "raise", "ValidationError", "(", "self", ".", "first_line_no", ",", "self", ".", "node", ".", "col_offset", ",", "'AAA01 no Act block found in test'", ")", "# Allow `pytest.raises` and `self.assertRaises()` in assert nodes - if", "# any of the additional nodes are `pytest.raises`, then raise", "for", "a_n", "in", "act_nodes", "[", "1", ":", "]", ":", "if", "a_n", ".", "block_type", "in", "[", "ActNodeType", ".", "marked_act", ",", "ActNodeType", ".", "result_assignment", "]", ":", "raise", "ValidationError", "(", "self", ".", "first_line_no", ",", "self", ".", "node", ".", "col_offset", ",", "'AAA02 multiple Act blocks found in test'", ",", ")", "return", "act_nodes", "[", "0", "]" ]
Raises: ValidationError: AAA01 when no act block is found and AAA02 when multiple act blocks are found.
[ "Raises", ":", "ValidationError", ":", "AAA01", "when", "no", "act", "block", "is", "found", "and", "AAA02", "when", "multiple", "act", "blocks", "are", "found", "." ]
train
https://github.com/jamescooke/flake8-aaa/blob/29938b96845fe32ced4358ba66af3b3be2a37794/src/flake8_aaa/function.py#L101-L122
jamescooke/flake8-aaa
src/flake8_aaa/function.py
Function.get_line_relative_to_node
def get_line_relative_to_node(self, target_node: ast.AST, offset: int) -> str: """ Raises: IndexError: when ``offset`` takes the request out of bounds of this Function's lines. """ return self.lines[target_node.lineno - self.node.lineno + offset]
python
def get_line_relative_to_node(self, target_node: ast.AST, offset: int) -> str: """ Raises: IndexError: when ``offset`` takes the request out of bounds of this Function's lines. """ return self.lines[target_node.lineno - self.node.lineno + offset]
[ "def", "get_line_relative_to_node", "(", "self", ",", "target_node", ":", "ast", ".", "AST", ",", "offset", ":", "int", ")", "->", "str", ":", "return", "self", ".", "lines", "[", "target_node", ".", "lineno", "-", "self", ".", "node", ".", "lineno", "+", "offset", "]" ]
Raises: IndexError: when ``offset`` takes the request out of bounds of this Function's lines.
[ "Raises", ":", "IndexError", ":", "when", "offset", "takes", "the", "request", "out", "of", "bounds", "of", "this", "Function", "s", "lines", "." ]
train
https://github.com/jamescooke/flake8-aaa/blob/29938b96845fe32ced4358ba66af3b3be2a37794/src/flake8_aaa/function.py#L124-L130
jamescooke/flake8-aaa
src/flake8_aaa/function.py
Function.mark_def
def mark_def(self) -> int: """ Marks up this Function's definition lines (including decorators) into the ``line_markers`` attribute. Returns: Number of lines found for the definition. Note: Does not spot the closing ``):`` of a function when it occurs on its own line. Note: Can not use ``helpers.build_footprint()`` because function nodes cover the whole function. In this case, just the def lines are wanted with any decorators. """ first_line = get_first_token(self.node).start[0] - self.first_line_no # Should usually be 0 try: end_token = get_last_token(self.node.args.args[-1]) except IndexError: # Fn has no args, so end of function is the fn def itself... end_token = get_first_token(self.node) last_line = end_token.end[0] - self.first_line_no self.line_markers.update((first_line, last_line), LineType.func_def) return last_line - first_line + 1
python
def mark_def(self) -> int: """ Marks up this Function's definition lines (including decorators) into the ``line_markers`` attribute. Returns: Number of lines found for the definition. Note: Does not spot the closing ``):`` of a function when it occurs on its own line. Note: Can not use ``helpers.build_footprint()`` because function nodes cover the whole function. In this case, just the def lines are wanted with any decorators. """ first_line = get_first_token(self.node).start[0] - self.first_line_no # Should usually be 0 try: end_token = get_last_token(self.node.args.args[-1]) except IndexError: # Fn has no args, so end of function is the fn def itself... end_token = get_first_token(self.node) last_line = end_token.end[0] - self.first_line_no self.line_markers.update((first_line, last_line), LineType.func_def) return last_line - first_line + 1
[ "def", "mark_def", "(", "self", ")", "->", "int", ":", "first_line", "=", "get_first_token", "(", "self", ".", "node", ")", ".", "start", "[", "0", "]", "-", "self", ".", "first_line_no", "# Should usually be 0", "try", ":", "end_token", "=", "get_last_token", "(", "self", ".", "node", ".", "args", ".", "args", "[", "-", "1", "]", ")", "except", "IndexError", ":", "# Fn has no args, so end of function is the fn def itself...", "end_token", "=", "get_first_token", "(", "self", ".", "node", ")", "last_line", "=", "end_token", ".", "end", "[", "0", "]", "-", "self", ".", "first_line_no", "self", ".", "line_markers", ".", "update", "(", "(", "first_line", ",", "last_line", ")", ",", "LineType", ".", "func_def", ")", "return", "last_line", "-", "first_line", "+", "1" ]
Marks up this Function's definition lines (including decorators) into the ``line_markers`` attribute. Returns: Number of lines found for the definition. Note: Does not spot the closing ``):`` of a function when it occurs on its own line. Note: Can not use ``helpers.build_footprint()`` because function nodes cover the whole function. In this case, just the def lines are wanted with any decorators.
[ "Marks", "up", "this", "Function", "s", "definition", "lines", "(", "including", "decorators", ")", "into", "the", "line_markers", "attribute", "." ]
train
https://github.com/jamescooke/flake8-aaa/blob/29938b96845fe32ced4358ba66af3b3be2a37794/src/flake8_aaa/function.py#L132-L157
jamescooke/flake8-aaa
src/flake8_aaa/function.py
Function.mark_bl
def mark_bl(self) -> int: """ Mark unprocessed lines that have no content and no string nodes covering them as blank line BL. Returns: Number of blank lines found with no stringy parent node. """ counter = 0 stringy_lines = find_stringy_lines(self.node, self.first_line_no) for relative_line_number, line in enumerate(self.lines): if relative_line_number not in stringy_lines and line.strip() == '': counter += 1 self.line_markers[relative_line_number] = LineType.blank_line return counter
python
def mark_bl(self) -> int: """ Mark unprocessed lines that have no content and no string nodes covering them as blank line BL. Returns: Number of blank lines found with no stringy parent node. """ counter = 0 stringy_lines = find_stringy_lines(self.node, self.first_line_no) for relative_line_number, line in enumerate(self.lines): if relative_line_number not in stringy_lines and line.strip() == '': counter += 1 self.line_markers[relative_line_number] = LineType.blank_line return counter
[ "def", "mark_bl", "(", "self", ")", "->", "int", ":", "counter", "=", "0", "stringy_lines", "=", "find_stringy_lines", "(", "self", ".", "node", ",", "self", ".", "first_line_no", ")", "for", "relative_line_number", ",", "line", "in", "enumerate", "(", "self", ".", "lines", ")", ":", "if", "relative_line_number", "not", "in", "stringy_lines", "and", "line", ".", "strip", "(", ")", "==", "''", ":", "counter", "+=", "1", "self", ".", "line_markers", "[", "relative_line_number", "]", "=", "LineType", ".", "blank_line", "return", "counter" ]
Mark unprocessed lines that have no content and no string nodes covering them as blank line BL. Returns: Number of blank lines found with no stringy parent node.
[ "Mark", "unprocessed", "lines", "that", "have", "no", "content", "and", "no", "string", "nodes", "covering", "them", "as", "blank", "line", "BL", "." ]
train
https://github.com/jamescooke/flake8-aaa/blob/29938b96845fe32ced4358ba66af3b3be2a37794/src/flake8_aaa/function.py#L159-L174
davidblaisonneau-orange/foreman
foreman/itemHost.py
ItemHost.enhance
def enhance(self): """ Function enhance Enhance the object with new item or enhanced items """ self.update({'puppetclasses': SubDict(self.api, self.objName, self.payloadObj, self.key, SubItemPuppetClasses)}) self.update({'parameters': SubDict(self.api, self.objName, self.payloadObj, self.key, SubItemParameter)}) self.update({'interfaces': SubDict(self.api, self.objName, self.payloadObj, self.key, SubItemInterface)}) self.update({'smart_class_parameters': SubDict(self.api, self.objName, self.payloadObj, self.key, SubItemSmartClassParameter)})
python
def enhance(self): """ Function enhance Enhance the object with new item or enhanced items """ self.update({'puppetclasses': SubDict(self.api, self.objName, self.payloadObj, self.key, SubItemPuppetClasses)}) self.update({'parameters': SubDict(self.api, self.objName, self.payloadObj, self.key, SubItemParameter)}) self.update({'interfaces': SubDict(self.api, self.objName, self.payloadObj, self.key, SubItemInterface)}) self.update({'smart_class_parameters': SubDict(self.api, self.objName, self.payloadObj, self.key, SubItemSmartClassParameter)})
[ "def", "enhance", "(", "self", ")", ":", "self", ".", "update", "(", "{", "'puppetclasses'", ":", "SubDict", "(", "self", ".", "api", ",", "self", ".", "objName", ",", "self", ".", "payloadObj", ",", "self", ".", "key", ",", "SubItemPuppetClasses", ")", "}", ")", "self", ".", "update", "(", "{", "'parameters'", ":", "SubDict", "(", "self", ".", "api", ",", "self", ".", "objName", ",", "self", ".", "payloadObj", ",", "self", ".", "key", ",", "SubItemParameter", ")", "}", ")", "self", ".", "update", "(", "{", "'interfaces'", ":", "SubDict", "(", "self", ".", "api", ",", "self", ".", "objName", ",", "self", ".", "payloadObj", ",", "self", ".", "key", ",", "SubItemInterface", ")", "}", ")", "self", ".", "update", "(", "{", "'smart_class_parameters'", ":", "SubDict", "(", "self", ".", "api", ",", "self", ".", "objName", ",", "self", ".", "payloadObj", ",", "self", ".", "key", ",", "SubItemSmartClassParameter", ")", "}", ")" ]
Function enhance Enhance the object with new item or enhanced items
[ "Function", "enhance", "Enhance", "the", "object", "with", "new", "item", "or", "enhanced", "items" ]
train
https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/itemHost.py#L39-L58
davidblaisonneau-orange/foreman
foreman/itemHost.py
ItemHost.getParamFromEnv
def getParamFromEnv(self, var, default=''): """ Function getParamFromEnv Search a parameter in the host environment @param var: the var name @param hostgroup: the hostgroup item linked to this host @param default: default value @return RETURN: the value """ if self.getParam(var): return self.getParam(var) if self.hostgroup: if self.hostgroup.getParam(var): return self.hostgroup.getParam(var) if self.domain.getParam('password'): return self.domain.getParam('password') else: return default
python
def getParamFromEnv(self, var, default=''): """ Function getParamFromEnv Search a parameter in the host environment @param var: the var name @param hostgroup: the hostgroup item linked to this host @param default: default value @return RETURN: the value """ if self.getParam(var): return self.getParam(var) if self.hostgroup: if self.hostgroup.getParam(var): return self.hostgroup.getParam(var) if self.domain.getParam('password'): return self.domain.getParam('password') else: return default
[ "def", "getParamFromEnv", "(", "self", ",", "var", ",", "default", "=", "''", ")", ":", "if", "self", ".", "getParam", "(", "var", ")", ":", "return", "self", ".", "getParam", "(", "var", ")", "if", "self", ".", "hostgroup", ":", "if", "self", ".", "hostgroup", ".", "getParam", "(", "var", ")", ":", "return", "self", ".", "hostgroup", ".", "getParam", "(", "var", ")", "if", "self", ".", "domain", ".", "getParam", "(", "'password'", ")", ":", "return", "self", ".", "domain", ".", "getParam", "(", "'password'", ")", "else", ":", "return", "default" ]
Function getParamFromEnv Search a parameter in the host environment @param var: the var name @param hostgroup: the hostgroup item linked to this host @param default: default value @return RETURN: the value
[ "Function", "getParamFromEnv", "Search", "a", "parameter", "in", "the", "host", "environment" ]
train
https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/itemHost.py#L114-L131
davidblaisonneau-orange/foreman
foreman/itemHost.py
ItemHost.getUserData
def getUserData(self, hostgroup, domain, defaultPwd='', defaultSshKey='', proxyHostname='', tplFolder='metadata/templates/'): """ Function getUserData Generate a userdata script for metadata server from Foreman API @param domain: the domain item linked to this host @param hostgroup: the hostgroup item linked to this host @param defaultPwd: the default password if no password is specified in the host>hostgroup>domain params @param defaultSshKey: the default ssh key if no password is specified in the host>hostgroup>domain params @param proxyHostname: hostname of the smartproxy @param tplFolder: the templates folder @return RETURN: the user data """ if 'user-data' in self.keys(): return self['user-data'] else: self.hostgroup = hostgroup self.domain = domain if proxyHostname == '': proxyHostname = 'foreman.' + domain['name'] password = self.getParamFromEnv('password', defaultPwd) sshauthkeys = self.getParamFromEnv('global_sshkey', defaultSshKey) with open(tplFolder+'puppet.conf', 'r') as puppet_file: p = MyTemplate(puppet_file.read()) content = p.substitute(foremanHostname=proxyHostname) enc_puppet_file = base64.b64encode(bytes(content, 'utf-8')) with open(tplFolder+'cloud-init.tpl', 'r') as content_file: s = MyTemplate(content_file.read()) if sshauthkeys: sshauthkeys = ' - '+sshauthkeys self.userdata = s.substitute( password=password, fqdn=self['name'], sshauthkeys=sshauthkeys, foremanurlbuilt="http://{}/unattended/built" .format(proxyHostname), puppet_conf_content=enc_puppet_file.decode('utf-8')) return self.userdata
python
def getUserData(self, hostgroup, domain, defaultPwd='', defaultSshKey='', proxyHostname='', tplFolder='metadata/templates/'): """ Function getUserData Generate a userdata script for metadata server from Foreman API @param domain: the domain item linked to this host @param hostgroup: the hostgroup item linked to this host @param defaultPwd: the default password if no password is specified in the host>hostgroup>domain params @param defaultSshKey: the default ssh key if no password is specified in the host>hostgroup>domain params @param proxyHostname: hostname of the smartproxy @param tplFolder: the templates folder @return RETURN: the user data """ if 'user-data' in self.keys(): return self['user-data'] else: self.hostgroup = hostgroup self.domain = domain if proxyHostname == '': proxyHostname = 'foreman.' + domain['name'] password = self.getParamFromEnv('password', defaultPwd) sshauthkeys = self.getParamFromEnv('global_sshkey', defaultSshKey) with open(tplFolder+'puppet.conf', 'r') as puppet_file: p = MyTemplate(puppet_file.read()) content = p.substitute(foremanHostname=proxyHostname) enc_puppet_file = base64.b64encode(bytes(content, 'utf-8')) with open(tplFolder+'cloud-init.tpl', 'r') as content_file: s = MyTemplate(content_file.read()) if sshauthkeys: sshauthkeys = ' - '+sshauthkeys self.userdata = s.substitute( password=password, fqdn=self['name'], sshauthkeys=sshauthkeys, foremanurlbuilt="http://{}/unattended/built" .format(proxyHostname), puppet_conf_content=enc_puppet_file.decode('utf-8')) return self.userdata
[ "def", "getUserData", "(", "self", ",", "hostgroup", ",", "domain", ",", "defaultPwd", "=", "''", ",", "defaultSshKey", "=", "''", ",", "proxyHostname", "=", "''", ",", "tplFolder", "=", "'metadata/templates/'", ")", ":", "if", "'user-data'", "in", "self", ".", "keys", "(", ")", ":", "return", "self", "[", "'user-data'", "]", "else", ":", "self", ".", "hostgroup", "=", "hostgroup", "self", ".", "domain", "=", "domain", "if", "proxyHostname", "==", "''", ":", "proxyHostname", "=", "'foreman.'", "+", "domain", "[", "'name'", "]", "password", "=", "self", ".", "getParamFromEnv", "(", "'password'", ",", "defaultPwd", ")", "sshauthkeys", "=", "self", ".", "getParamFromEnv", "(", "'global_sshkey'", ",", "defaultSshKey", ")", "with", "open", "(", "tplFolder", "+", "'puppet.conf'", ",", "'r'", ")", "as", "puppet_file", ":", "p", "=", "MyTemplate", "(", "puppet_file", ".", "read", "(", ")", ")", "content", "=", "p", ".", "substitute", "(", "foremanHostname", "=", "proxyHostname", ")", "enc_puppet_file", "=", "base64", ".", "b64encode", "(", "bytes", "(", "content", ",", "'utf-8'", ")", ")", "with", "open", "(", "tplFolder", "+", "'cloud-init.tpl'", ",", "'r'", ")", "as", "content_file", ":", "s", "=", "MyTemplate", "(", "content_file", ".", "read", "(", ")", ")", "if", "sshauthkeys", ":", "sshauthkeys", "=", "' - '", "+", "sshauthkeys", "self", ".", "userdata", "=", "s", ".", "substitute", "(", "password", "=", "password", ",", "fqdn", "=", "self", "[", "'name'", "]", ",", "sshauthkeys", "=", "sshauthkeys", ",", "foremanurlbuilt", "=", "\"http://{}/unattended/built\"", ".", "format", "(", "proxyHostname", ")", ",", "puppet_conf_content", "=", "enc_puppet_file", ".", "decode", "(", "'utf-8'", ")", ")", "return", "self", ".", "userdata" ]
Function getUserData Generate a userdata script for metadata server from Foreman API @param domain: the domain item linked to this host @param hostgroup: the hostgroup item linked to this host @param defaultPwd: the default password if no password is specified in the host>hostgroup>domain params @param defaultSshKey: the default ssh key if no password is specified in the host>hostgroup>domain params @param proxyHostname: hostname of the smartproxy @param tplFolder: the templates folder @return RETURN: the user data
[ "Function", "getUserData", "Generate", "a", "userdata", "script", "for", "metadata", "server", "from", "Foreman", "API" ]
train
https://github.com/davidblaisonneau-orange/foreman/blob/acb8fd8d74657cfac3b25c82e9c6028b93eb6c92/foreman/itemHost.py#L133-L177
MatterMiners/cobald
cobald/daemon/runners/meta_runner.py
MetaRunner.register_payload
def register_payload(self, *payloads, flavour: ModuleType): """Queue one or more payload for execution after its runner is started""" for payload in payloads: self._logger.debug('registering payload %s (%s)', NameRepr(payload), NameRepr(flavour)) self.runners[flavour].register_payload(payload)
python
def register_payload(self, *payloads, flavour: ModuleType): """Queue one or more payload for execution after its runner is started""" for payload in payloads: self._logger.debug('registering payload %s (%s)', NameRepr(payload), NameRepr(flavour)) self.runners[flavour].register_payload(payload)
[ "def", "register_payload", "(", "self", ",", "*", "payloads", ",", "flavour", ":", "ModuleType", ")", ":", "for", "payload", "in", "payloads", ":", "self", ".", "_logger", ".", "debug", "(", "'registering payload %s (%s)'", ",", "NameRepr", "(", "payload", ")", ",", "NameRepr", "(", "flavour", ")", ")", "self", ".", "runners", "[", "flavour", "]", ".", "register_payload", "(", "payload", ")" ]
Queue one or more payload for execution after its runner is started
[ "Queue", "one", "or", "more", "payload", "for", "execution", "after", "its", "runner", "is", "started" ]
train
https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/daemon/runners/meta_runner.py#L35-L39
MatterMiners/cobald
cobald/daemon/runners/meta_runner.py
MetaRunner.run_payload
def run_payload(self, payload, *, flavour: ModuleType): """Execute one payload after its runner is started and return its output""" return self.runners[flavour].run_payload(payload)
python
def run_payload(self, payload, *, flavour: ModuleType): """Execute one payload after its runner is started and return its output""" return self.runners[flavour].run_payload(payload)
[ "def", "run_payload", "(", "self", ",", "payload", ",", "*", ",", "flavour", ":", "ModuleType", ")", ":", "return", "self", ".", "runners", "[", "flavour", "]", ".", "run_payload", "(", "payload", ")" ]
Execute one payload after its runner is started and return its output
[ "Execute", "one", "payload", "after", "its", "runner", "is", "started", "and", "return", "its", "output" ]
train
https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/daemon/runners/meta_runner.py#L41-L43
MatterMiners/cobald
cobald/daemon/runners/meta_runner.py
MetaRunner.run
def run(self): """Run all runners, blocking until completion or error""" self._logger.info('starting all runners') try: with self._lock: assert not self.running.set(), 'cannot re-run: %s' % self self.running.set() thread_runner = self.runners[threading] for runner in self.runners.values(): if runner is not thread_runner: thread_runner.register_payload(runner.run) if threading.current_thread() == threading.main_thread(): asyncio_main_run(root_runner=thread_runner) else: thread_runner.run() except Exception as err: self._logger.exception('runner terminated: %s', err) raise RuntimeError from err finally: self._stop_runners() self._logger.info('stopped all runners') self.running.clear()
python
def run(self): """Run all runners, blocking until completion or error""" self._logger.info('starting all runners') try: with self._lock: assert not self.running.set(), 'cannot re-run: %s' % self self.running.set() thread_runner = self.runners[threading] for runner in self.runners.values(): if runner is not thread_runner: thread_runner.register_payload(runner.run) if threading.current_thread() == threading.main_thread(): asyncio_main_run(root_runner=thread_runner) else: thread_runner.run() except Exception as err: self._logger.exception('runner terminated: %s', err) raise RuntimeError from err finally: self._stop_runners() self._logger.info('stopped all runners') self.running.clear()
[ "def", "run", "(", "self", ")", ":", "self", ".", "_logger", ".", "info", "(", "'starting all runners'", ")", "try", ":", "with", "self", ".", "_lock", ":", "assert", "not", "self", ".", "running", ".", "set", "(", ")", ",", "'cannot re-run: %s'", "%", "self", "self", ".", "running", ".", "set", "(", ")", "thread_runner", "=", "self", ".", "runners", "[", "threading", "]", "for", "runner", "in", "self", ".", "runners", ".", "values", "(", ")", ":", "if", "runner", "is", "not", "thread_runner", ":", "thread_runner", ".", "register_payload", "(", "runner", ".", "run", ")", "if", "threading", ".", "current_thread", "(", ")", "==", "threading", ".", "main_thread", "(", ")", ":", "asyncio_main_run", "(", "root_runner", "=", "thread_runner", ")", "else", ":", "thread_runner", ".", "run", "(", ")", "except", "Exception", "as", "err", ":", "self", ".", "_logger", ".", "exception", "(", "'runner terminated: %s'", ",", "err", ")", "raise", "RuntimeError", "from", "err", "finally", ":", "self", ".", "_stop_runners", "(", ")", "self", ".", "_logger", ".", "info", "(", "'stopped all runners'", ")", "self", ".", "running", ".", "clear", "(", ")" ]
Run all runners, blocking until completion or error
[ "Run", "all", "runners", "blocking", "until", "completion", "or", "error" ]
train
https://github.com/MatterMiners/cobald/blob/264138de4382d1c9b53fabcbc6660e10b33a914d/cobald/daemon/runners/meta_runner.py#L45-L66
developersociety/django-glitter
glitter/blocks/image/admin.py
ImageBlockAdmin.formfield_for_dbfield
def formfield_for_dbfield(self, db_field, **kwargs): """ Hook for specifying the form Field instance for a given database Field instance. If kwargs are given, they're passed to the form Field's constructor. """ formfield = super().formfield_for_dbfield(db_field, **kwargs) if db_field.name == 'image': formfield.widget = ImageRelatedFieldWidgetWrapper( ImageSelect(), db_field.rel, self.admin_site, can_add_related=True, can_change_related=True, ) return formfield
python
def formfield_for_dbfield(self, db_field, **kwargs): """ Hook for specifying the form Field instance for a given database Field instance. If kwargs are given, they're passed to the form Field's constructor. """ formfield = super().formfield_for_dbfield(db_field, **kwargs) if db_field.name == 'image': formfield.widget = ImageRelatedFieldWidgetWrapper( ImageSelect(), db_field.rel, self.admin_site, can_add_related=True, can_change_related=True, ) return formfield
[ "def", "formfield_for_dbfield", "(", "self", ",", "db_field", ",", "*", "*", "kwargs", ")", ":", "formfield", "=", "super", "(", ")", ".", "formfield_for_dbfield", "(", "db_field", ",", "*", "*", "kwargs", ")", "if", "db_field", ".", "name", "==", "'image'", ":", "formfield", ".", "widget", "=", "ImageRelatedFieldWidgetWrapper", "(", "ImageSelect", "(", ")", ",", "db_field", ".", "rel", ",", "self", ".", "admin_site", ",", "can_add_related", "=", "True", ",", "can_change_related", "=", "True", ",", ")", "return", "formfield" ]
Hook for specifying the form Field instance for a given database Field instance. If kwargs are given, they're passed to the form Field's constructor.
[ "Hook", "for", "specifying", "the", "form", "Field", "instance", "for", "a", "given", "database", "Field", "instance", "." ]
train
https://github.com/developersociety/django-glitter/blob/2c0280ec83afee80deee94ee3934fc54239c2e87/glitter/blocks/image/admin.py#L21-L34
beregond/jsonmodels
jsonmodels/utilities.py
compare_schemas
def compare_schemas(one, two): """Compare two structures that represents JSON schemas. For comparison you can't use normal comparison, because in JSON schema lists DO NOT keep order (and Python lists do), so this must be taken into account during comparison. Note this wont check all configurations, only first one that seems to match, which can lead to wrong results. :param one: First schema to compare. :param two: Second schema to compare. :rtype: `bool` """ one = _normalize_string_type(one) two = _normalize_string_type(two) _assert_same_types(one, two) if isinstance(one, list): return _compare_lists(one, two) elif isinstance(one, dict): return _compare_dicts(one, two) elif isinstance(one, SCALAR_TYPES): return one == two elif one is None: return one is two else: raise RuntimeError('Not allowed type "{type}"'.format( type=type(one).__name__))
python
def compare_schemas(one, two): """Compare two structures that represents JSON schemas. For comparison you can't use normal comparison, because in JSON schema lists DO NOT keep order (and Python lists do), so this must be taken into account during comparison. Note this wont check all configurations, only first one that seems to match, which can lead to wrong results. :param one: First schema to compare. :param two: Second schema to compare. :rtype: `bool` """ one = _normalize_string_type(one) two = _normalize_string_type(two) _assert_same_types(one, two) if isinstance(one, list): return _compare_lists(one, two) elif isinstance(one, dict): return _compare_dicts(one, two) elif isinstance(one, SCALAR_TYPES): return one == two elif one is None: return one is two else: raise RuntimeError('Not allowed type "{type}"'.format( type=type(one).__name__))
[ "def", "compare_schemas", "(", "one", ",", "two", ")", ":", "one", "=", "_normalize_string_type", "(", "one", ")", "two", "=", "_normalize_string_type", "(", "two", ")", "_assert_same_types", "(", "one", ",", "two", ")", "if", "isinstance", "(", "one", ",", "list", ")", ":", "return", "_compare_lists", "(", "one", ",", "two", ")", "elif", "isinstance", "(", "one", ",", "dict", ")", ":", "return", "_compare_dicts", "(", "one", ",", "two", ")", "elif", "isinstance", "(", "one", ",", "SCALAR_TYPES", ")", ":", "return", "one", "==", "two", "elif", "one", "is", "None", ":", "return", "one", "is", "two", "else", ":", "raise", "RuntimeError", "(", "'Not allowed type \"{type}\"'", ".", "format", "(", "type", "=", "type", "(", "one", ")", ".", "__name__", ")", ")" ]
Compare two structures that represents JSON schemas. For comparison you can't use normal comparison, because in JSON schema lists DO NOT keep order (and Python lists do), so this must be taken into account during comparison. Note this wont check all configurations, only first one that seems to match, which can lead to wrong results. :param one: First schema to compare. :param two: Second schema to compare. :rtype: `bool`
[ "Compare", "two", "structures", "that", "represents", "JSON", "schemas", "." ]
train
https://github.com/beregond/jsonmodels/blob/97a1a6b90a49490fc5a6078f49027055d2e13541/jsonmodels/utilities.py#L60-L90
beregond/jsonmodels
jsonmodels/utilities.py
is_ecma_regex
def is_ecma_regex(regex): """Check if given regex is of type ECMA 262 or not. :rtype: bool """ parts = regex.split('/') if len(parts) == 1: return False if len(parts) < 3: raise ValueError('Given regex isn\'t ECMA regex nor Python regex.') parts.pop() parts.append('') raw_regex = '/'.join(parts) if raw_regex.startswith('/') and raw_regex.endswith('/'): return True return False
python
def is_ecma_regex(regex): """Check if given regex is of type ECMA 262 or not. :rtype: bool """ parts = regex.split('/') if len(parts) == 1: return False if len(parts) < 3: raise ValueError('Given regex isn\'t ECMA regex nor Python regex.') parts.pop() parts.append('') raw_regex = '/'.join(parts) if raw_regex.startswith('/') and raw_regex.endswith('/'): return True return False
[ "def", "is_ecma_regex", "(", "regex", ")", ":", "parts", "=", "regex", ".", "split", "(", "'/'", ")", "if", "len", "(", "parts", ")", "==", "1", ":", "return", "False", "if", "len", "(", "parts", ")", "<", "3", ":", "raise", "ValueError", "(", "'Given regex isn\\'t ECMA regex nor Python regex.'", ")", "parts", ".", "pop", "(", ")", "parts", ".", "append", "(", "''", ")", "raw_regex", "=", "'/'", ".", "join", "(", "parts", ")", "if", "raw_regex", ".", "startswith", "(", "'/'", ")", "and", "raw_regex", ".", "endswith", "(", "'/'", ")", ":", "return", "True", "return", "False" ]
Check if given regex is of type ECMA 262 or not. :rtype: bool
[ "Check", "if", "given", "regex", "is", "of", "type", "ECMA", "262", "or", "not", "." ]
train
https://github.com/beregond/jsonmodels/blob/97a1a6b90a49490fc5a6078f49027055d2e13541/jsonmodels/utilities.py#L93-L112
beregond/jsonmodels
jsonmodels/utilities.py
convert_ecma_regex_to_python
def convert_ecma_regex_to_python(value): """Convert ECMA 262 regex to Python tuple with regex and flags. If given value is already Python regex it will be returned unchanged. :param string value: ECMA regex. :return: 2-tuple with `regex` and `flags` :rtype: namedtuple """ if not is_ecma_regex(value): return PythonRegex(value, []) parts = value.split('/') flags = parts.pop() try: result_flags = [ECMA_TO_PYTHON_FLAGS[f] for f in flags] except KeyError: raise ValueError('Wrong flags "{}".'.format(flags)) return PythonRegex('/'.join(parts[1:]), result_flags)
python
def convert_ecma_regex_to_python(value): """Convert ECMA 262 regex to Python tuple with regex and flags. If given value is already Python regex it will be returned unchanged. :param string value: ECMA regex. :return: 2-tuple with `regex` and `flags` :rtype: namedtuple """ if not is_ecma_regex(value): return PythonRegex(value, []) parts = value.split('/') flags = parts.pop() try: result_flags = [ECMA_TO_PYTHON_FLAGS[f] for f in flags] except KeyError: raise ValueError('Wrong flags "{}".'.format(flags)) return PythonRegex('/'.join(parts[1:]), result_flags)
[ "def", "convert_ecma_regex_to_python", "(", "value", ")", ":", "if", "not", "is_ecma_regex", "(", "value", ")", ":", "return", "PythonRegex", "(", "value", ",", "[", "]", ")", "parts", "=", "value", ".", "split", "(", "'/'", ")", "flags", "=", "parts", ".", "pop", "(", ")", "try", ":", "result_flags", "=", "[", "ECMA_TO_PYTHON_FLAGS", "[", "f", "]", "for", "f", "in", "flags", "]", "except", "KeyError", ":", "raise", "ValueError", "(", "'Wrong flags \"{}\".'", ".", "format", "(", "flags", ")", ")", "return", "PythonRegex", "(", "'/'", ".", "join", "(", "parts", "[", "1", ":", "]", ")", ",", "result_flags", ")" ]
Convert ECMA 262 regex to Python tuple with regex and flags. If given value is already Python regex it will be returned unchanged. :param string value: ECMA regex. :return: 2-tuple with `regex` and `flags` :rtype: namedtuple
[ "Convert", "ECMA", "262", "regex", "to", "Python", "tuple", "with", "regex", "and", "flags", "." ]
train
https://github.com/beregond/jsonmodels/blob/97a1a6b90a49490fc5a6078f49027055d2e13541/jsonmodels/utilities.py#L115-L136
beregond/jsonmodels
jsonmodels/utilities.py
convert_python_regex_to_ecma
def convert_python_regex_to_ecma(value, flags=[]): """Convert Python regex to ECMA 262 regex. If given value is already ECMA regex it will be returned unchanged. :param string value: Python regex. :param list flags: List of flags (allowed flags: `re.I`, `re.M`) :return: ECMA 262 regex :rtype: str """ if is_ecma_regex(value): return value result_flags = [PYTHON_TO_ECMA_FLAGS[f] for f in flags] result_flags = ''.join(result_flags) return '/{value}/{flags}'.format(value=value, flags=result_flags)
python
def convert_python_regex_to_ecma(value, flags=[]): """Convert Python regex to ECMA 262 regex. If given value is already ECMA regex it will be returned unchanged. :param string value: Python regex. :param list flags: List of flags (allowed flags: `re.I`, `re.M`) :return: ECMA 262 regex :rtype: str """ if is_ecma_regex(value): return value result_flags = [PYTHON_TO_ECMA_FLAGS[f] for f in flags] result_flags = ''.join(result_flags) return '/{value}/{flags}'.format(value=value, flags=result_flags)
[ "def", "convert_python_regex_to_ecma", "(", "value", ",", "flags", "=", "[", "]", ")", ":", "if", "is_ecma_regex", "(", "value", ")", ":", "return", "value", "result_flags", "=", "[", "PYTHON_TO_ECMA_FLAGS", "[", "f", "]", "for", "f", "in", "flags", "]", "result_flags", "=", "''", ".", "join", "(", "result_flags", ")", "return", "'/{value}/{flags}'", ".", "format", "(", "value", "=", "value", ",", "flags", "=", "result_flags", ")" ]
Convert Python regex to ECMA 262 regex. If given value is already ECMA regex it will be returned unchanged. :param string value: Python regex. :param list flags: List of flags (allowed flags: `re.I`, `re.M`) :return: ECMA 262 regex :rtype: str
[ "Convert", "Python", "regex", "to", "ECMA", "262", "regex", "." ]
train
https://github.com/beregond/jsonmodels/blob/97a1a6b90a49490fc5a6078f49027055d2e13541/jsonmodels/utilities.py#L139-L156
beregond/jsonmodels
jsonmodels/models.py
Base.populate
def populate(self, **values): """Populate values to fields. Skip non-existing.""" values = values.copy() fields = list(self.iterate_with_name()) for _, structure_name, field in fields: if structure_name in values: field.__set__(self, values.pop(structure_name)) for name, _, field in fields: if name in values: field.__set__(self, values.pop(name))
python
def populate(self, **values): """Populate values to fields. Skip non-existing.""" values = values.copy() fields = list(self.iterate_with_name()) for _, structure_name, field in fields: if structure_name in values: field.__set__(self, values.pop(structure_name)) for name, _, field in fields: if name in values: field.__set__(self, values.pop(name))
[ "def", "populate", "(", "self", ",", "*", "*", "values", ")", ":", "values", "=", "values", ".", "copy", "(", ")", "fields", "=", "list", "(", "self", ".", "iterate_with_name", "(", ")", ")", "for", "_", ",", "structure_name", ",", "field", "in", "fields", ":", "if", "structure_name", "in", "values", ":", "field", ".", "__set__", "(", "self", ",", "values", ".", "pop", "(", "structure_name", ")", ")", "for", "name", ",", "_", ",", "field", "in", "fields", ":", "if", "name", "in", "values", ":", "field", ".", "__set__", "(", "self", ",", "values", ".", "pop", "(", "name", ")", ")" ]
Populate values to fields. Skip non-existing.
[ "Populate", "values", "to", "fields", ".", "Skip", "non", "-", "existing", "." ]
train
https://github.com/beregond/jsonmodels/blob/97a1a6b90a49490fc5a6078f49027055d2e13541/jsonmodels/models.py#L36-L45
beregond/jsonmodels
jsonmodels/models.py
Base.get_field
def get_field(self, field_name): """Get field associated with given attribute.""" for attr_name, field in self: if field_name == attr_name: return field raise errors.FieldNotFound('Field not found', field_name)
python
def get_field(self, field_name): """Get field associated with given attribute.""" for attr_name, field in self: if field_name == attr_name: return field raise errors.FieldNotFound('Field not found', field_name)
[ "def", "get_field", "(", "self", ",", "field_name", ")", ":", "for", "attr_name", ",", "field", "in", "self", ":", "if", "field_name", "==", "attr_name", ":", "return", "field", "raise", "errors", ".", "FieldNotFound", "(", "'Field not found'", ",", "field_name", ")" ]
Get field associated with given attribute.
[ "Get", "field", "associated", "with", "given", "attribute", "." ]
train
https://github.com/beregond/jsonmodels/blob/97a1a6b90a49490fc5a6078f49027055d2e13541/jsonmodels/models.py#L47-L53
beregond/jsonmodels
jsonmodels/models.py
Base.validate
def validate(self): """Explicitly validate all the fields.""" for name, field in self: try: field.validate_for_object(self) except ValidationError as error: raise ValidationError( "Error for field '{name}'.".format(name=name), error, )
python
def validate(self): """Explicitly validate all the fields.""" for name, field in self: try: field.validate_for_object(self) except ValidationError as error: raise ValidationError( "Error for field '{name}'.".format(name=name), error, )
[ "def", "validate", "(", "self", ")", ":", "for", "name", ",", "field", "in", "self", ":", "try", ":", "field", ".", "validate_for_object", "(", "self", ")", "except", "ValidationError", "as", "error", ":", "raise", "ValidationError", "(", "\"Error for field '{name}'.\"", ".", "format", "(", "name", "=", "name", ")", ",", "error", ",", ")" ]
Explicitly validate all the fields.
[ "Explicitly", "validate", "all", "the", "fields", "." ]
train
https://github.com/beregond/jsonmodels/blob/97a1a6b90a49490fc5a6078f49027055d2e13541/jsonmodels/models.py#L60-L69
beregond/jsonmodels
jsonmodels/models.py
Base.iterate_over_fields
def iterate_over_fields(cls): """Iterate through fields as `(attribute_name, field_instance)`.""" for attr in dir(cls): clsattr = getattr(cls, attr) if isinstance(clsattr, BaseField): yield attr, clsattr
python
def iterate_over_fields(cls): """Iterate through fields as `(attribute_name, field_instance)`.""" for attr in dir(cls): clsattr = getattr(cls, attr) if isinstance(clsattr, BaseField): yield attr, clsattr
[ "def", "iterate_over_fields", "(", "cls", ")", ":", "for", "attr", "in", "dir", "(", "cls", ")", ":", "clsattr", "=", "getattr", "(", "cls", ",", "attr", ")", "if", "isinstance", "(", "clsattr", ",", "BaseField", ")", ":", "yield", "attr", ",", "clsattr" ]
Iterate through fields as `(attribute_name, field_instance)`.
[ "Iterate", "through", "fields", "as", "(", "attribute_name", "field_instance", ")", "." ]
train
https://github.com/beregond/jsonmodels/blob/97a1a6b90a49490fc5a6078f49027055d2e13541/jsonmodels/models.py#L72-L77
beregond/jsonmodels
jsonmodels/models.py
Base.iterate_with_name
def iterate_with_name(cls): """Iterate over fields, but also give `structure_name`. Format is `(attribute_name, structue_name, field_instance)`. Structure name is name under which value is seen in structure and schema (in primitives) and only there. """ for attr_name, field in cls.iterate_over_fields(): structure_name = field.structue_name(attr_name) yield attr_name, structure_name, field
python
def iterate_with_name(cls): """Iterate over fields, but also give `structure_name`. Format is `(attribute_name, structue_name, field_instance)`. Structure name is name under which value is seen in structure and schema (in primitives) and only there. """ for attr_name, field in cls.iterate_over_fields(): structure_name = field.structue_name(attr_name) yield attr_name, structure_name, field
[ "def", "iterate_with_name", "(", "cls", ")", ":", "for", "attr_name", ",", "field", "in", "cls", ".", "iterate_over_fields", "(", ")", ":", "structure_name", "=", "field", ".", "structue_name", "(", "attr_name", ")", "yield", "attr_name", ",", "structure_name", ",", "field" ]
Iterate over fields, but also give `structure_name`. Format is `(attribute_name, structue_name, field_instance)`. Structure name is name under which value is seen in structure and schema (in primitives) and only there.
[ "Iterate", "over", "fields", "but", "also", "give", "structure_name", "." ]
train
https://github.com/beregond/jsonmodels/blob/97a1a6b90a49490fc5a6078f49027055d2e13541/jsonmodels/models.py#L80-L89
beregond/jsonmodels
jsonmodels/fields.py
IntField.parse_value
def parse_value(self, value): """Cast value to `int`, e.g. from string or long""" parsed = super(IntField, self).parse_value(value) if parsed is None: return parsed return int(parsed)
python
def parse_value(self, value): """Cast value to `int`, e.g. from string or long""" parsed = super(IntField, self).parse_value(value) if parsed is None: return parsed return int(parsed)
[ "def", "parse_value", "(", "self", ",", "value", ")", ":", "parsed", "=", "super", "(", "IntField", ",", "self", ")", ".", "parse_value", "(", "value", ")", "if", "parsed", "is", "None", ":", "return", "parsed", "return", "int", "(", "parsed", ")" ]
Cast value to `int`, e.g. from string or long
[ "Cast", "value", "to", "int", "e", ".", "g", ".", "from", "string", "or", "long" ]
train
https://github.com/beregond/jsonmodels/blob/97a1a6b90a49490fc5a6078f49027055d2e13541/jsonmodels/fields.py#L157-L162