Dataset schema (one record per extracted Python function):

repo              string, 7-55 chars
path              string, 4-127 chars
func_name         string, 1-88 chars
original_string   string, 75-19.8k chars
language          string, 1 distinct value ("python")
code              string, 75-19.8k chars
code_tokens       list
docstring         string, 3-17.3k chars
docstring_tokens  list
sha               string, 40 chars
url               string, 87-242 chars
partition         string, 1 distinct value ("train")
peterbe/gg
gg/builtins/bugzilla.py
login
def login(config, api_key=""):
    """Store your Bugzilla API Key"""
    if not api_key:
        info_out(
            "If you don't have an API Key, go to:\n"
            "https://bugzilla.mozilla.org/userprefs.cgi?tab=apikey\n"
        )
        api_key = getpass.getpass("API Key: ")

    # Before we store it, let's test it.
    url = urllib.parse.urljoin(config.bugzilla_url, "/rest/whoami")
    assert url.startswith("https://"), url
    response = requests.get(url, params={"api_key": api_key})
    if response.status_code == 200:
        if response.json().get("error"):
            error_out("Failed - {}".format(response.json()))
        else:
            update(
                config.configfile,
                {
                    "BUGZILLA": {
                        "bugzilla_url": config.bugzilla_url,
                        "api_key": api_key,
                        # "login": login,
                    }
                },
            )
            success_out("Yay! It worked!")
    else:
        error_out("Failed - {} ({})".format(response.status_code, response.json()))
python
Store your Bugzilla API Key
[ "Store", "your", "Bugzilla", "API", "Key" ]
2aace5bdb4a9b1cb65bea717784edf54c63b7bad
https://github.com/peterbe/gg/blob/2aace5bdb4a9b1cb65bea717784edf54c63b7bad/gg/builtins/bugzilla.py#L35-L64
train
peterbe/gg
gg/builtins/bugzilla.py
logout
def logout(config):
    """Remove and forget your Bugzilla credentials"""
    state = read(config.configfile)
    if state.get("BUGZILLA"):
        remove(config.configfile, "BUGZILLA")
        success_out("Forgotten")
    else:
        error_out("No stored Bugzilla credentials")
python
Remove and forget your Bugzilla credentials
[ "Remove", "and", "forget", "your", "Bugzilla", "credentials" ]
2aace5bdb4a9b1cb65bea717784edf54c63b7bad
https://github.com/peterbe/gg/blob/2aace5bdb4a9b1cb65bea717784edf54c63b7bad/gg/builtins/bugzilla.py#L69-L76
train
flo-compbio/xlmhg
xlmhg/visualize.py
get_hypergeometric_stats
def get_hypergeometric_stats(N, indices):
    """Calculates hypergeom. p-values and fold enrichments for all cutoffs.

    Parameters
    ----------
    N: int
        The length of the list
    indices: `numpy.ndarray` with ``dtype=np.uint16``
        The (sorted) indices of the "1's" in the list.
    """
    assert isinstance(N, (int, np.integer))
    assert isinstance(indices, np.ndarray) and \
        np.issubdtype(indices.dtype, np.uint16)

    K = indices.size

    pvals = np.empty(N+1, dtype=np.float64)
    folds = np.empty(N+1, dtype=np.float64)
    pvals[0] = 1.0
    folds[0] = 1.0

    n = 0
    k = 0
    p = 1.0
    while n < N:
        if k < K and indices[k] == n:
            # "add one"
            # calculate f(k+1; N,K,n+1) from f(k; N,K,n)
            p *= (float((n+1) * (K-k)) /
                  float((N-n) * (k+1)))
            k += 1
        else:
            # "add zero"
            # calculate f(k; N,K,n+1) from f(k; N,K,n)
            p *= (float((n+1) * (N-K-n+k)) /
                  float((N-n) * (n-k+1)))
        n += 1
        # calculate hypergeometric p-value
        pvals[n] = get_hgp(p, k, N, K, n)
        # calculate fold enrichment
        folds[n] = k / (K*(n/float(N)))

    return pvals, folds
python
Calculates hypergeom. p-values and fold enrichments for all cutoffs.

Parameters
----------
N: int
    The length of the list
indices: `numpy.ndarray` with ``dtype=np.uint16``
    The (sorted) indices of the "1's" in the list.
[ "Calculates", "hypergeom", ".", "p", "-", "values", "and", "fold", "enrichments", "for", "all", "cutoffs", "." ]
8e5929ee1dc91b95e343b7a2b1b1d6664c4540a1
https://github.com/flo-compbio/xlmhg/blob/8e5929ee1dc91b95e343b7a2b1b1d6664c4540a1/xlmhg/visualize.py#L35-L77
train
gofed/gofedlib
gofedlib/providers/upstreamprovider.py
UpstreamProvider.parse
def parse(self, prefix):
    """Parse import path into provider, project, repository
    and other recognizable parts.

    :param prefix: import path prefix to parse
    :type prefix: str
    :return: self
    """
    # reset to default values
    self._prefix = ""

    url = re.sub(r'http://', '', prefix)
    url = re.sub(r'https://', '', url)

    # any prefix customization before parsing?
    custom_prefix = self.detectCustomImportPaths(url)
    if custom_prefix != {}:
        url = custom_prefix["provider_prefix"]

    info = self._parsePrefix(url)
    self._signature = info["signature"]
    self._prefix = info["prefix"]

    return self
python
Parse import path into provider, project, repository and other recognizable parts.

:param prefix: import path prefix to parse
:type prefix: str
:return: self
[ "Parse", "import", "path", "into", "provider", "project", "repository", "and", "other", "recognizable", "parts" ]
0674c248fe3d8706f98f912996b65af469f96b10
https://github.com/gofed/gofedlib/blob/0674c248fe3d8706f98f912996b65af469f96b10/gofedlib/providers/upstreamprovider.py#L20-L44
train
gofed/gofedlib
gofedlib/providers/upstreamprovider.py
UpstreamProvider.detectKnownRepo
def detectKnownRepo(self, url):
    """For a given import path, detect the provider."""
    if url.startswith('github.com'):
        return GITHUB
    if url.startswith('code.google.com/p'):
        return GOOGLECODE
    if url.startswith('golang.org/x'):
        return GOLANGORG
    if url.startswith('gopkg.in'):
        return GOPKG
    if url.startswith('bitbucket.org'):
        return BITBUCKET
    if url.startswith('google.golang.org'):
        return GOOGLEGOLANGORG
    return UNKNOWN
python
For a given import path, detect the provider.
[ "For", "given", "import", "path", "detect", "provider", "." ]
0674c248fe3d8706f98f912996b65af469f96b10
https://github.com/gofed/gofedlib/blob/0674c248fe3d8706f98f912996b65af469f96b10/gofedlib/providers/upstreamprovider.py#L83-L100
train
unt-libraries/pyuntl
pyuntl/form_logic.py
get_qualifier_dict
def get_qualifier_dict(vocabularies, qualifier_vocab):
    """Get the qualifier dictionary based on the
    element's qualifier vocabulary.
    """
    # Raise exception if the vocabulary can't be found.
    if vocabularies.get(qualifier_vocab, None) is None:
        raise UNTLFormException(
            'Could not retrieve qualifier vocabulary "%s" for the form.'
            % (qualifier_vocab)
        )
    else:
        # Return the sorted vocabulary.
        return vocabularies.get(qualifier_vocab)
python
Get the qualifier dictionary based on the element's qualifier vocabulary.
[ "Get", "the", "qualifier", "dictionary", "based", "on", "the", "element", "s", "qualifier", "vocabulary", "." ]
f92413302897dab948aac18ee9e482ace0187bd4
https://github.com/unt-libraries/pyuntl/blob/f92413302897dab948aac18ee9e482ace0187bd4/pyuntl/form_logic.py#L30-L42
train
unt-libraries/pyuntl
pyuntl/form_logic.py
get_content_dict
def get_content_dict(vocabularies, content_vocab):
    """Get the content dictionary based on the
    element's content vocabulary.
    """
    # Raise exception if the vocabulary can't be found.
    if vocabularies.get(content_vocab, None) is None:
        raise UNTLFormException(
            'Could not retrieve content vocabulary "%s" for the form.'
            % (content_vocab)
        )
    else:
        # Return the sorted vocabulary.
        return vocabularies.get(content_vocab)
python
Get the content dictionary based on the element's content vocabulary.
[ "Get", "the", "content", "dictionary", "based", "on", "the", "element", "s", "content", "vocabulary", "." ]
f92413302897dab948aac18ee9e482ace0187bd4
https://github.com/unt-libraries/pyuntl/blob/f92413302897dab948aac18ee9e482ace0187bd4/pyuntl/form_logic.py#L45-L57
train
unt-libraries/pyuntl
pyuntl/form_logic.py
FormGroup.get_group_usage_link
def get_group_usage_link(self):
    """Get the usage link for the group element."""
    first_element = self.group_list[0]
    usage_link = getattr(first_element.form, 'usage_link', None)
    return usage_link
python
Get the usage link for the group element.
[ "Get", "the", "usage", "link", "for", "the", "group", "element", "." ]
f92413302897dab948aac18ee9e482ace0187bd4
https://github.com/unt-libraries/pyuntl/blob/f92413302897dab948aac18ee9e482ace0187bd4/pyuntl/form_logic.py#L101-L105
train
unt-libraries/pyuntl
pyuntl/form_logic.py
FormGroup.get_adjustable_form
def get_adjustable_form(self, element_dispatch):
    """Create an adjustable form from an element dispatch table."""
    adjustable_form = {}
    # Loop through the qualifiers to create the adjustable form.
    for key in element_dispatch.keys():
        adjustable_form[key] = element_dispatch[key]()
    return adjustable_form
python
Create an adjustable form from an element dispatch table.
[ "Create", "an", "adjustable", "form", "from", "an", "element", "dispatch", "table", "." ]
f92413302897dab948aac18ee9e482ace0187bd4
https://github.com/unt-libraries/pyuntl/blob/f92413302897dab948aac18ee9e482ace0187bd4/pyuntl/form_logic.py#L107-L113
train
unt-libraries/pyuntl
pyuntl/form_logic.py
CoverageGroup.set_coverage_placeName
def set_coverage_placeName(self):
    """Determine the properties for the placeName coverage field."""
    if (self.solr_response and self.solr_response != 'error'
            and self.solr_response.response != 'error'):
        location_list = self.solr_response.get_location_list_facet().facet_list
    else:
        location_list = []
    form_dict = {
        'view_type': 'prefill',
        'value_json': json.dumps(location_list, ensure_ascii=False),
        'value_py': location_list,
    }
    return form_dict
python
Determine the properties for the placeName coverage field.
[ "Determine", "the", "properties", "for", "the", "placeName", "coverage", "field", "." ]
f92413302897dab948aac18ee9e482ace0187bd4
https://github.com/unt-libraries/pyuntl/blob/f92413302897dab948aac18ee9e482ace0187bd4/pyuntl/form_logic.py#L143-L156
train
unt-libraries/pyuntl
pyuntl/form_logic.py
Meta.get_meta_attributes
def get_meta_attributes(self, **kwargs):
    """Determine the form attributes for the meta field."""
    superuser = kwargs.get('superuser', False)
    if (self.untl_object.qualifier == 'recordStatus'
            or self.untl_object.qualifier == 'system'):
        if superuser:
            self.editable = True
            self.repeatable = True
        else:
            self.editable = False
            self.view_type = 'qualified-input'
    elif self.untl_object.qualifier == 'hidden':
        self.label = 'Object Hidden'
        self.view_type = 'radio'
    else:
        self.editable = False
        self.view_type = 'qualified-input'
python
Determine the form attributes for the meta field.
[ "Determine", "the", "form", "attributes", "for", "the", "meta", "field", "." ]
f92413302897dab948aac18ee9e482ace0187bd4
https://github.com/unt-libraries/pyuntl/blob/f92413302897dab948aac18ee9e482ace0187bd4/pyuntl/form_logic.py#L746-L762
train
zeldamods/evfl
evfl/dic.py
_bit_mismatch
def _bit_mismatch(int1: int, int2: int) -> int:
    """Returns the index of the first different bit or -1 if the values are the same."""
    for i in range(max(int1.bit_length(), int2.bit_length())):
        if (int1 >> i) & 1 != (int2 >> i) & 1:
            return i
    return -1
python
Returns the index of the first different bit or -1 if the values are the same.
[ "Returns", "the", "index", "of", "the", "first", "different", "bit", "or", "-", "1", "if", "the", "values", "are", "the", "same", "." ]
208b39ab817de5bbef419cdae4606255695e83ac
https://github.com/zeldamods/evfl/blob/208b39ab817de5bbef419cdae4606255695e83ac/evfl/dic.py#L5-L10
train
Nic30/hwtGraph
hwtGraph/elk/fromHwt/flattenTrees.py
searchRootOfTree
def searchRootOfTree(reducibleChildren: Set[LNode], nodeFromTree: LNode):
    """
    Walk tree of nodes to root

    :param reducibleChildren: nodes which are part of tree
    :param nodeFromTree: node where to start the search
    """
    while True:
        out_e = nodeFromTree.east[0].outgoingEdges
        # node has no successors
        if not out_e:
            return nodeFromTree

        nextNode = out_e[0].dsts[0].parentNode
        if nextNode in reducibleChildren:
            # can reduce node, walk the tree to root
            nodeFromTree = nextNode
        else:
            # can not reduce, return last root of tree
            return nodeFromTree
python
Walk tree of nodes to root

:param reducibleChildren: nodes which are part of tree
:param nodeFromTree: node where to start the search
[ "Walk", "tree", "of", "nodes", "to", "root" ]
6b7d4fdd759f263a0fdd2736f02f123e44e4354f
https://github.com/Nic30/hwtGraph/blob/6b7d4fdd759f263a0fdd2736f02f123e44e4354f/hwtGraph/elk/fromHwt/flattenTrees.py#L7-L27
train
Nic30/hwtGraph
hwtGraph/elk/fromHwt/flattenTrees.py
collectNodesInTree
def collectNodesInTree(treeRoot: LNode, reducibleChildren: Set[LNode]):
    """
    Collect nodes which will be reduced and input nodes of tree for tree of nodes.

    :param treeRoot: root node of tree
    :param reducibleChildren: members of tree
    :return: Tuple[reducedNodes, inputEdges] where reducedNodes is List[LNode]
        and inputEdges is List[Tuple[LNode, LPort, LEdge]]
    """
    # List[Tuple[LNode, LPort, LEdge]]
    inputEdges = []
    # List[LNode]
    reducedNodes = []
    # Set[LNode]
    reducedNodesSet = set()

    # An iterative preorder traversal of the tree
    # List[Tuple[LNode, LPort, LEdge]]
    nodeStack = []
    nodeStack.append((treeRoot, None, None))

    # collect nodes in tree and input edges
    while nodeStack:
        # pop the node from stack and try to find its children
        node, p, e = nodeStack.pop()
        if node in reducibleChildren and node not in reducedNodesSet:
            reducedNodes.append(node)
            reducedNodesSet.add(node)
            # walk inputs and add child nodes to stack
            for _p in node.west:
                for _e in _p.iterEdges():
                    # assert len(e.srcs) == 1 and len(e.dsts) == 1
                    nodeStack.append((_e.srcs[0].parentNode, _p, _e))
        else:
            inputEdges.append((node, p, e))

    return reducedNodes, inputEdges
python
Collect nodes which will be reduced and input nodes of tree for tree of nodes.

:param treeRoot: root node of tree
:param reducibleChildren: members of tree
:return: Tuple[reducedNodes, inputEdges] where reducedNodes is List[LNode]
    and inputEdges is List[Tuple[LNode, LPort, LEdge]]
[ "Collect", "nodes", "which", "will", "be", "reduced", "and", "input", "nodes", "of", "tree", "for", "tree", "of", "nodes", "." ]
6b7d4fdd759f263a0fdd2736f02f123e44e4354f
https://github.com/Nic30/hwtGraph/blob/6b7d4fdd759f263a0fdd2736f02f123e44e4354f/hwtGraph/elk/fromHwt/flattenTrees.py#L30-L65
train
pszafer/epson_projector
epson_projector/main.py
Projector.__initLock
def __initLock(self):
    """Init lock for sending request to projector when it is busy."""
    self._isLocked = False
    self._timer = 0
    self._operation = False
python
Init lock for sending request to projector when it is busy.
[ "Init", "lock", "for", "sending", "request", "to", "projector", "when", "it", "is", "busy", "." ]
b8a10ace56e0a5cf858546041819c0e7ebca208f
https://github.com/pszafer/epson_projector/blob/b8a10ace56e0a5cf858546041819c0e7ebca208f/epson_projector/main.py#L52-L56
train
pszafer/epson_projector
epson_projector/main.py
Projector.__setLock
def __setLock(self, command):
    """Set lock on requests."""
    if command in (TURN_ON, TURN_OFF):
        self._operation = command
    elif command in INV_SOURCES:
        self._operation = SOURCE
    else:
        self._operation = ALL
    self._isLocked = True
    self._timer = time.time()
python
Set lock on requests.
[ "Set", "lock", "on", "requests", "." ]
b8a10ace56e0a5cf858546041819c0e7ebca208f
https://github.com/pszafer/epson_projector/blob/b8a10ace56e0a5cf858546041819c0e7ebca208f/epson_projector/main.py#L58-L67
train
pszafer/epson_projector
epson_projector/main.py
Projector.__unLock
def __unLock(self):
    """Unlock sending requests to projector."""
    self._operation = False
    self._timer = 0
    self._isLocked = False
python
Unlock sending requests to projector.
[ "Unlock", "sending", "requests", "to", "projector", "." ]
b8a10ace56e0a5cf858546041819c0e7ebca208f
https://github.com/pszafer/epson_projector/blob/b8a10ace56e0a5cf858546041819c0e7ebca208f/epson_projector/main.py#L69-L73
train
pszafer/epson_projector
epson_projector/main.py
Projector.__checkLock
def __checkLock(self):
    """
    Lock checking.

    Check if there is a lock pending and whether enough time
    has passed so that requests can be unlocked.
    """
    if self._isLocked:
        if (time.time() - self._timer) > TIMEOUT_TIMES[self._operation]:
            self.__unLock()
            return False
        return True
    return False
python
Lock checking. Check if there is a lock pending and whether enough time has passed so that requests can be unlocked.
[ "Lock", "checking", "." ]
b8a10ace56e0a5cf858546041819c0e7ebca208f
https://github.com/pszafer/epson_projector/blob/b8a10ace56e0a5cf858546041819c0e7ebca208f/epson_projector/main.py#L75-L87
train
pszafer/epson_projector
epson_projector/main.py
Projector.get_property
async def get_property(self, command):
    """Get property state from device."""
    _LOGGER.debug("Getting property %s", command)
    if self.__checkLock():
        return BUSY
    timeout = self.__get_timeout(command)
    response = await self.send_request(
        timeout=timeout,
        params=EPSON_KEY_COMMANDS[command],
        type='json_query')
    if not response:
        return False
    try:
        return response['projector']['feature']['reply']
    except KeyError:
        return BUSY
python
Get property state from device.
[ "Get", "property", "state", "from", "device", "." ]
b8a10ace56e0a5cf858546041819c0e7ebca208f
https://github.com/pszafer/epson_projector/blob/b8a10ace56e0a5cf858546041819c0e7ebca208f/epson_projector/main.py#L89-L104
train
pszafer/epson_projector
epson_projector/main.py
Projector.send_command
async def send_command(self, command):
    """Send command to Epson."""
    _LOGGER.debug("Sending command to projector %s", command)
    if self.__checkLock():
        return False
    self.__setLock(command)
    response = await self.send_request(
        timeout=self.__get_timeout(command),
        params=EPSON_KEY_COMMANDS[command],
        type='directsend',
        command=command)
    return response
python
Send command to Epson.
[ "Send", "command", "to", "Epson", "." ]
b8a10ace56e0a5cf858546041819c0e7ebca208f
https://github.com/pszafer/epson_projector/blob/b8a10ace56e0a5cf858546041819c0e7ebca208f/epson_projector/main.py#L106-L117
train
pszafer/epson_projector
epson_projector/main.py
Projector.send_request
async def send_request(self, params, timeout,
                       type='json_query', command=False):
    """Send request to Epson."""
    try:
        with async_timeout.timeout(timeout):
            url = '{url}{type}'.format(
                url=self._http_url,
                type=type)
            async with self.websession.get(
                    url=url, params=params,
                    headers=self._headers) as response:
                if response.status != HTTP_OK:
                    _LOGGER.warning(
                        "Error message %d from Epson.", response.status)
                    return False
                if command == TURN_ON and self._powering_on:
                    self._powering_on = False
                if type == 'json_query':
                    return await response.json()
                return response
    except (aiohttp.ClientError, aiohttp.ClientConnectionError):
        _LOGGER.error("Error request")
        return False
python
Send request to Epson.
[ "Send", "request", "to", "Epson", "." ]
b8a10ace56e0a5cf858546041819c0e7ebca208f
https://github.com/pszafer/epson_projector/blob/b8a10ace56e0a5cf858546041819c0e7ebca208f/epson_projector/main.py#L119-L141
train
redhat-openstack/python-tripleo-helper
tripleohelper/provisioners/openstack/utils.py
remove_instances_by_prefix
def remove_instances_by_prefix(nova_api, prefix):
    """Remove all instances whose name starts with the given prefix."""
    for server in nova_api.servers.list():
        if server.name.startswith(prefix):
            LOG.info("Remove instance '%s'" % server.name)
            server.delete()
python
Remove all instances whose name starts with the given prefix.
[ "Remove", "all", "the", "instances", "on", "which", "their", "name", "start", "by", "a", "prefix", "." ]
bfa165538335edb1088170c7a92f097167225c81
https://github.com/redhat-openstack/python-tripleo-helper/blob/bfa165538335edb1088170c7a92f097167225c81/tripleohelper/provisioners/openstack/utils.py#L106-L111
train
redhat-openstack/python-tripleo-helper
tripleohelper/ovb_shell.py
purge_existing_ovb
def purge_existing_ovb(nova_api, neutron):
    """Purge any trace of an existing OVB deployment."""
    LOG.info('Cleaning up OVB environment from the tenant.')
    for server in nova_api.servers.list():
        if server.name in ('bmc', 'undercloud'):
            server.delete()
        if server.name.startswith('baremetal_'):
            server.delete()

    for router in neutron.list_routers().get('routers'):
        if router['name'] not in ('router', 'bmc_router'):
            continue
        for subnet in neutron.list_subnets().get('subnets'):
            if not (subnet['name'].startswith('bmc_eth')
                    or subnet['name'] == 'rdo-m-subnet'):
                continue
            try:
                neutron.remove_interface_router(
                    router['id'], {'subnet_id': subnet['id']})
            except neutronclient.common.exceptions.NotFound:
                pass

    try:
        bmc_router = neutron.list_routers(name='bmc_router').get('routers')[0]
        for port in neutron.list_ports(device_id=bmc_router['id'])['ports']:
            if port.get('device_owner') == 'network:router_gateway':
                continue
            info = {'id': router['id'],
                    'port_id': port['id'],
                    'tenant_id': bmc_router.get('tenant_id'),
                    }
            neutron.remove_interface_router(bmc_router['id'], info)
        neutron.delete_router(bmc_router['id'])
    except IndexError:
        # already doesn't exist
        pass

    for _ in range(0, 5):
        try:
            for port in neutron.list_ports()['ports']:
                if port['name'].endswith('_provision'):
                    neutron.delete_port(port['id'])
            for net in neutron.list_networks().get('networks'):
                if not net['name'].startswith('provision_'):
                    continue
                for port in neutron.list_ports(network_id=net['id'])['ports']:
                    if port.get('device_owner') == 'network:router_interface':
                        continue
                    try:
                        neutron.delete_port(port['id'])
                    except neutronclient.common.exceptions.PortNotFoundClient:
                        pass
                for subnet in neutron.list_subnets(network_id=net['id'])['subnets']:
                    neutron.delete_subnet(subnet['id'])
                neutron.delete_network(net['id'])
        except neutronclient.common.exceptions.Conflict:
            LOG.debug('waiting for all the ports to be freed...')
            time.sleep(5)
        else:
            return
python
Purge any trace of an existing OVB deployment.
[ "Purge", "any", "trace", "of", "an", "existing", "OVB", "deployment", "." ]
bfa165538335edb1088170c7a92f097167225c81
https://github.com/redhat-openstack/python-tripleo-helper/blob/bfa165538335edb1088170c7a92f097167225c81/tripleohelper/ovb_shell.py#L39-L94
train
redhat-openstack/python-tripleo-helper
tripleohelper/ovb_shell.py
initialize_network
def initialize_network(neutron):
    """Initialize an OVB network called provision_bob."""
    body_sample = {
        "network": {
            "name": 'provision_bob',
            "admin_state_up": True,
        }
    }
    netw = neutron.create_network(body=body_sample)['network']
    body_create_subnet = {
        'subnets': [{
            'name': 'rdo-m-subnet',
            'cidr': '192.0.2.0/24',
            'ip_version': 4,
            'network_id': netw['id'],
            'host_routes': [{
                'destination': '169.254.169.254/32',
                'nexthop': '192.0.2.240'
            }],
            'gateway_ip': '192.0.2.1',
            'dns_nameservers': ['8.8.8.8', '8.8.4.4'],
            'allocation_pools': [{'start': '192.0.2.30', 'end': '192.0.2.199'}]}]}
    response = neutron.create_subnet(body=body_create_subnet)
    subnet_id = response['subnets'][0]['id']
    router = neutron.list_routers(name='router').get('routers')[0]
    response = neutron.add_interface_router(router['id'],
                                            {'subnet_id': subnet_id})
python
Initialize an OVB network called provision_bob.
[ "Initialize", "an", "OVB", "network", "called", "provision_bob", "." ]
bfa165538335edb1088170c7a92f097167225c81
https://github.com/redhat-openstack/python-tripleo-helper/blob/bfa165538335edb1088170c7a92f097167225c81/tripleohelper/ovb_shell.py#L105-L131
train
unt-libraries/pyuntl
pyuntl/dc_structure.py
description_director
def description_director(**kwargs):
    """Direct which class should be used based on the
    description qualifier.
    """
    description_type = {'physical': DCFormat}
    qualifier = kwargs.get('qualifier')
    # Determine the type of element needed, based on the qualifier.
    element_class = description_type.get(qualifier, DCDescription)
    # Create the element object of that element type.
    element = element_class(
        qualifier=qualifier,
        content=kwargs.get('content'),
    )
    return element
python
Direct which class should be used based on the description qualifier.
[ "Direct", "which", "class", "should", "be", "used", "based", "on", "the", "director", "qualifier", "." ]
f92413302897dab948aac18ee9e482ace0187bd4
https://github.com/unt-libraries/pyuntl/blob/f92413302897dab948aac18ee9e482ace0187bd4/pyuntl/dc_structure.py#L220-L233
train
unt-libraries/pyuntl
pyuntl/dc_structure.py
DCElement.add_child
def add_child(self, child):
    """This adds a child object to the current one. It will
    check the contained_children list to make sure that the
    object is allowable, and throw an exception if not.
    """
    # Make sure the child exists before adding it.
    if child:
        # Append child if it is allowed to exist under the parent.
        if child.tag in self.contained_children:
            self.children.append(child)
        else:
            raise DC_StructureException(
                'Invalid child "%s" for parent "%s"'
                % (child.tag, self.tag)
            )
python
This adds a child object to the current one. It will check the contained_children list to make sure that the object is allowable, and throw an exception if not.
[ "This", "adds", "a", "child", "object", "to", "the", "current", "one", ".", "It", "will", "check", "the", "contained_children", "list", "to", "make", "sure", "that", "the", "object", "is", "allowable", "and", "throw", "an", "exception", "if", "not", "." ]
f92413302897dab948aac18ee9e482ace0187bd4
https://github.com/unt-libraries/pyuntl/blob/f92413302897dab948aac18ee9e482ace0187bd4/pyuntl/dc_structure.py#L66-L80
train
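To see the add_child contract without pyuntl installed, here is a self-contained toy reproducing the same rule; MiniElement and its tag names are invented for illustration and are not pyuntl's real base class.

class MiniElement:
    # Toy stand-in: only the attributes add_child consults.
    tag = 'parent'
    contained_children = ['allowed']

    def __init__(self, tag=None):
        if tag:
            self.tag = tag
        self.children = []

    def add_child(self, child):
        # Same rule as DCElement.add_child above.
        if child:
            if child.tag in self.contained_children:
                self.children.append(child)
            else:
                raise ValueError('Invalid child "%s" for parent "%s"'
                                 % (child.tag, self.tag))

parent = MiniElement()
parent.add_child(MiniElement(tag='allowed'))        # appended
try:
    parent.add_child(MiniElement(tag='forbidden'))  # rejected
except ValueError as err:
    print(err)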
unt-libraries/pyuntl
pyuntl/dc_structure.py
DCElement.determine_vocab
def determine_vocab(self, qualifier):
    """Determine the vocab from the qualifier."""
    vocab_value = VOCAB_INDEX.get(self.tag, None)
    if isinstance(vocab_value, dict):
        if qualifier is None:
            qualifier = 'None'
        # Find the value based on the qualifier.
        return vocab_value.get(qualifier, None)
    elif vocab_value is not None:
        return vocab_value
    else:
        return None
python
def determine_vocab(self, qualifier):
    """Determine the vocab from the qualifier."""
    vocab_value = VOCAB_INDEX.get(self.tag, None)
    if isinstance(vocab_value, dict):
        if qualifier is None:
            qualifier = 'None'
        # Find the value based on the qualifier.
        return vocab_value.get(qualifier, None)
    elif vocab_value is not None:
        return vocab_value
    else:
        return None
[ "def", "determine_vocab", "(", "self", ",", "qualifier", ")", ":", "vocab_value", "=", "VOCAB_INDEX", ".", "get", "(", "self", ".", "tag", ",", "None", ")", "if", "isinstance", "(", "vocab_value", ",", "dict", ")", ":", "if", "qualifier", "is", "None", ":", "qualifier", "=", "'None'", "# Find the value based on the qualifier.", "return", "vocab_value", ".", "get", "(", "qualifier", ",", "None", ")", "elif", "vocab_value", "is", "not", "None", ":", "return", "vocab_value", "else", ":", "return", "None" ]
Determine the vocab from the qualifier.
[ "Determine", "the", "vocab", "from", "the", "qualifier", "." ]
f92413302897dab948aac18ee9e482ace0187bd4
https://github.com/unt-libraries/pyuntl/blob/f92413302897dab948aac18ee9e482ace0187bd4/pyuntl/dc_structure.py#L91-L102
train
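The VOCAB_INDEX branching, mirrored in a standalone function with an invented index (pyuntl's real VOCAB_INDEX is a module-level mapping; the entries below are made up):

# Invented stand-in for pyuntl's VOCAB_INDEX.
VOCAB_INDEX = {
    'language': 'languages',                       # plain value: qualifier ignored
    'subject': {'KWD': 'keywords', 'None': None},  # dict: resolved per qualifier
}

def determine_vocab(tag, qualifier):
    # Same logic as the method above, minus `self`.
    vocab_value = VOCAB_INDEX.get(tag, None)
    if isinstance(vocab_value, dict):
        if qualifier is None:
            qualifier = 'None'
        return vocab_value.get(qualifier, None)
    return vocab_value

assert determine_vocab('language', None) == 'languages'
assert determine_vocab('subject', 'KWD') == 'keywords'
assert determine_vocab('subject', None) is None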
unt-libraries/pyuntl
pyuntl/dc_structure.py
DCElement.resolver
def resolver(self, vocab_data, attribute):
    """Pull the requested attribute based on the given
    vocabulary and content.
    """
    term_list = vocab_data.get(self.content_vocab, [])
    # Loop through the terms from the vocabulary.
    for term_dict in term_list:
        # Match the name to the current content.
        if term_dict['name'] == self.content:
            return term_dict[attribute]
    return self.content
python
def resolver(self, vocab_data, attribute):
    """Pull the requested attribute based on the given
    vocabulary and content.
    """
    term_list = vocab_data.get(self.content_vocab, [])
    # Loop through the terms from the vocabulary.
    for term_dict in term_list:
        # Match the name to the current content.
        if term_dict['name'] == self.content:
            return term_dict[attribute]
    return self.content
[ "def", "resolver", "(", "self", ",", "vocab_data", ",", "attribute", ")", ":", "term_list", "=", "vocab_data", ".", "get", "(", "self", ".", "content_vocab", ",", "[", "]", ")", "# Loop through the terms from the vocabulary.", "for", "term_dict", "in", "term_list", ":", "# Match the name to the current content.", "if", "term_dict", "[", "'name'", "]", "==", "self", ".", "content", ":", "return", "term_dict", "[", "attribute", "]", "return", "self", ".", "content" ]
Pull the requested attribute based on the given vocabulary and content.
[ "Pull", "the", "requested", "attribute", "based", "on", "the", "given", "vocabulary", "and", "content", "." ]
f92413302897dab948aac18ee9e482ace0187bd4
https://github.com/unt-libraries/pyuntl/blob/f92413302897dab948aac18ee9e482ace0187bd4/pyuntl/dc_structure.py#L104-L114
train
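The same lookup in isolation, with invented vocabulary data (vocab_data's shape -- vocab name to a list of term dicts -- is taken from the method's own accesses):

vocab_data = {
    'languages': [
        {'name': 'eng', 'label': 'English'},
        {'name': 'spa', 'label': 'Spanish'},
    ],
}

def resolve(vocab_data, content_vocab, content, attribute):
    # Mirrors DCElement.resolver, with content_vocab/content as arguments.
    for term_dict in vocab_data.get(content_vocab, []):
        if term_dict['name'] == content:
            return term_dict[attribute]
    return content  # fall back to the raw content on no match

assert resolve(vocab_data, 'languages', 'eng', 'label') == 'English'
assert resolve(vocab_data, 'languages', 'xxx', 'label') == 'xxx'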
grundprinzip/pyxplorer
pyxplorer/loader.py
Loader.check_separator
def check_separator(self, data):
    """
    This method evaluates a list of separators on the input data to check
    which one is correct. This is done by first splitting the input by
    newline and then checking if the split by separator is equal for each
    input row except the last, which might be incomplete due to the
    limited input data

    :param data: input data to check
    :return: the matching separator and its field count, or None
    """
    sep_list = [r'\t', r';', r',', r'\|', r'\s+']
    data_copy = data
    for sep in sep_list:
        # Check if the count matches each line
        splitted = data_copy.split("\n")
        parts = [len(re.split(sep, line)) for line in splitted]

        # If we did not split anything continue
        if sum(parts) == len(splitted):
            continue

        diff = 0
        for i in range(len(parts[1:-1])):
            diff += abs(parts[i] - parts[i + 1])

        if diff == 0:
            return sep, parts[0]

    # If we reach this point we did not find a separator
    return None
python
def check_separator(self, data):
    """
    This method evaluates a list of separators on the input data to check
    which one is correct. This is done by first splitting the input by
    newline and then checking if the split by separator is equal for each
    input row except the last, which might be incomplete due to the
    limited input data

    :param data: input data to check
    :return: the matching separator and its field count, or None
    """
    sep_list = [r'\t', r';', r',', r'\|', r'\s+']
    data_copy = data
    for sep in sep_list:
        # Check if the count matches each line
        splitted = data_copy.split("\n")
        parts = [len(re.split(sep, line)) for line in splitted]

        # If we did not split anything continue
        if sum(parts) == len(splitted):
            continue

        diff = 0
        for i in range(len(parts[1:-1])):
            diff += abs(parts[i] - parts[i + 1])

        if diff == 0:
            return sep, parts[0]

    # If we reach this point we did not find a separator
    return None
[ "def", "check_separator", "(", "self", ",", "data", ")", ":", "sep_list", "=", "[", "r'\\t'", ",", "r';'", ",", "r','", ",", "r'|'", ",", "r'\\s+'", "]", "data_copy", "=", "data", "for", "sep", "in", "sep_list", ":", "# Check if the count matches each line", "splitted", "=", "data_copy", ".", "split", "(", "\"\\n\"", ")", "parts", "=", "[", "len", "(", "re", ".", "split", "(", "sep", ",", "line", ")", ")", "for", "line", "in", "splitted", "]", "# If we did not split anything continue", "if", "sum", "(", "parts", ")", "==", "len", "(", "splitted", ")", ":", "continue", "diff", "=", "0", "for", "i", "in", "range", "(", "len", "(", "parts", "[", "1", ":", "-", "1", "]", ")", ")", ":", "diff", "+=", "abs", "(", "parts", "[", "i", "]", "-", "parts", "[", "i", "+", "1", "]", ")", "if", "diff", "==", "0", ":", "return", "sep", ",", "parts", "[", "0", "]", "# If we reach this point we did not find a separator", "return", "None" ]
This method evaluates a list of separators on the input data to check
which one is correct. This is done by first splitting the input by
newline and then checking if the split by separator is equal for each
input row except the last, which might be incomplete due to the
limited input data

:param data: input data to check
:return: the matching separator and its field count, or None
[ "THis", "method", "evaluates", "a", "list", "of", "separators", "on", "the", "input", "data", "to", "check", "which", "one", "is", "correct", ".", "This", "is", "done", "by", "first", "splitting", "the", "input", "by", "newline", "and", "then", "checking", "if", "the", "split", "by", "separator", "is", "equal", "for", "each", "input", "row", "except", "the", "last", "that", "might", "be", "incomplete", "due", "to", "the", "limited", "input", "data" ]
34c1d166cfef4a94aeb6d5fcb3cbb726d48146e2
https://github.com/grundprinzip/pyxplorer/blob/34c1d166cfef4a94aeb6d5fcb3cbb726d48146e2/pyxplorer/loader.py#L67-L99
train
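A quick check of the heuristic on toy input, assuming some constructed Loader instance `loader` (the method only reads its argument):

data = "id;name;city\n1;Ada;London\n2;Alan;Manchester"

# Tab yields one field per line and is skipped; ';' splits every line
# into the same three fields, so it wins.
sep, n_fields = loader.check_separator(data)
assert (sep, n_fields) == (';', 3)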
grundprinzip/pyxplorer
pyxplorer/loader.py
Loader.head
def head(self, file_path):
    """
    Only read the first packets that come, try to max out at 1024kb

    :return: up to 1024b of the first block of the file
    """
    processor = lambda path, node, tail_only=True, append=False: self._handle_head(
        path, node)

    # Find items and go
    for item in self._client._find_items([file_path], processor,
                                         include_toplevel=True,
                                         include_children=False,
                                         recurse=False):
        if item:
            return item
python
def head(self, file_path):
    """
    Only read the first packets that come, try to max out at 1024kb

    :return: up to 1024b of the first block of the file
    """
    processor = lambda path, node, tail_only=True, append=False: self._handle_head(
        path, node)

    # Find items and go
    for item in self._client._find_items([file_path], processor,
                                         include_toplevel=True,
                                         include_children=False,
                                         recurse=False):
        if item:
            return item
[ "def", "head", "(", "self", ",", "file_path", ")", ":", "processor", "=", "lambda", "path", ",", "node", ",", "tail_only", "=", "True", ",", "append", "=", "False", ":", "self", ".", "_handle_head", "(", "path", ",", "node", ")", "# Find items and go", "for", "item", "in", "self", ".", "_client", ".", "_find_items", "(", "[", "file_path", "]", ",", "processor", ",", "include_toplevel", "=", "True", ",", "include_children", "=", "False", ",", "recurse", "=", "False", ")", ":", "if", "item", ":", "return", "item" ]
Only read the first packets that come, try to max out at 1024kb

:return: up to 1024b of the first block of the file
[ "Onlye", "read", "the", "first", "packets", "that", "come", "try", "to", "max", "out", "at", "1024kb" ]
34c1d166cfef4a94aeb6d5fcb3cbb726d48146e2
https://github.com/grundprinzip/pyxplorer/blob/34c1d166cfef4a94aeb6d5fcb3cbb726d48146e2/pyxplorer/loader.py#L102-L116
train
gofed/gofedlib
gofedlib/distribution/clients/pkgdb/client.py
PkgDBClient.packageExists
def packageExists(self, package):
    """Check if the package already exists

    :param package: package name
    :type package: string
    """
    url = "%s/packages" % self.base_url
    params = {"pattern": package}
    response = requests.get(url, params=params)
    if response.status_code != requests.codes.ok:
        return False

    return True
python
def packageExists(self, package):
    """Check if the package already exists

    :param package: package name
    :type package: string
    """
    url = "%s/packages" % self.base_url
    params = {"pattern": package}
    response = requests.get(url, params=params)
    if response.status_code != requests.codes.ok:
        return False

    return True
[ "def", "packageExists", "(", "self", ",", "package", ")", ":", "url", "=", "\"%s/packages\"", "%", "self", ".", "base_url", "params", "=", "{", "\"pattern\"", ":", "package", "}", "response", "=", "requests", ".", "get", "(", "url", ",", "params", "=", "params", ")", "if", "response", ".", "status_code", "!=", "requests", ".", "codes", ".", "ok", ":", "return", "False", "return", "True" ]
Check if the package already exists

:param package: package name
:type package: string
[ "Check", "if", "the", "package", "already", "exists" ]
0674c248fe3d8706f98f912996b65af469f96b10
https://github.com/gofed/gofedlib/blob/0674c248fe3d8706f98f912996b65af469f96b10/gofedlib/distribution/clients/pkgdb/client.py#L27-L39
train
gofed/gofedlib
gofedlib/distribution/clients/pkgdb/client.py
PkgDBClient.getGolangPackages
def getGolangPackages(self):
    """Get a list of all golang packages for all available branches
    """
    packages = {}

    # get all packages
    url = "%s/packages" % self.base_url
    params = {"pattern": "golang-*", "limit": 200}
    response = requests.get(url, params=params)
    if response.status_code != requests.codes.ok:
        return {}

    data = response.json()
    for package in data["packages"]:
        packages[package["name"]] = self._processPackageData(package)

    # accumulate packages from all pages
    for page in range(2, data["page_total"] + 1):
        params = {"pattern": "golang-*", "limit": 200, "page": page}
        response = requests.get(url, params=params)
        if response.status_code != requests.codes.ok:
            continue

        data = response.json()
        for package in data["packages"]:
            packages[package["name"]] = self._processPackageData(package)

    # get branches of all packages
    MAX_LEN = 30
    # break the list of packages into lists of at most MAX_LEN packages
    package_names = packages.keys()
    packages_total = len(package_names)
    packages_counter = 0
    logger.info("%s packages to process" % packages_total)
    for i in range(0, packages_total, MAX_LEN):
        sublist = package_names[i:i+MAX_LEN]
        branches = self._getPackageBranches(sublist)
        for package in sublist:
            packages[package]["branches"] = branches[package]
        packages_counter = packages_counter + len(branches)
        logger.info("%s/%s packages processed" % (packages_counter, packages_total))

    return packages
python
def getGolangPackages(self):
    """Get a list of all golang packages for all available branches
    """
    packages = {}

    # get all packages
    url = "%s/packages" % self.base_url
    params = {"pattern": "golang-*", "limit": 200}
    response = requests.get(url, params=params)
    if response.status_code != requests.codes.ok:
        return {}

    data = response.json()
    for package in data["packages"]:
        packages[package["name"]] = self._processPackageData(package)

    # accumulate packages from all pages
    for page in range(2, data["page_total"] + 1):
        params = {"pattern": "golang-*", "limit": 200, "page": page}
        response = requests.get(url, params=params)
        if response.status_code != requests.codes.ok:
            continue

        data = response.json()
        for package in data["packages"]:
            packages[package["name"]] = self._processPackageData(package)

    # get branches of all packages
    MAX_LEN = 30
    # break the list of packages into lists of at most MAX_LEN packages
    package_names = packages.keys()
    packages_total = len(package_names)
    packages_counter = 0
    logger.info("%s packages to process" % packages_total)
    for i in range(0, packages_total, MAX_LEN):
        sublist = package_names[i:i+MAX_LEN]
        branches = self._getPackageBranches(sublist)
        for package in sublist:
            packages[package]["branches"] = branches[package]
        packages_counter = packages_counter + len(branches)
        logger.info("%s/%s packages processed" % (packages_counter, packages_total))

    return packages
[ "def", "getGolangPackages", "(", "self", ")", ":", "packages", "=", "{", "}", "# get all packages", "url", "=", "\"%s/packages\"", "%", "self", ".", "base_url", "params", "=", "{", "\"pattern\"", ":", "\"golang-*\"", ",", "\"limit\"", ":", "200", "}", "response", "=", "requests", ".", "get", "(", "url", ",", "params", "=", "params", ")", "if", "response", ".", "status_code", "!=", "requests", ".", "codes", ".", "ok", ":", "return", "{", "}", "data", "=", "response", ".", "json", "(", ")", "for", "package", "in", "data", "[", "\"packages\"", "]", ":", "packages", "[", "package", "[", "\"name\"", "]", "]", "=", "self", ".", "_processPackageData", "(", "package", ")", "# accumulate packages from all pages", "for", "page", "in", "range", "(", "2", ",", "data", "[", "\"page_total\"", "]", "+", "1", ")", ":", "params", "=", "{", "\"pattern\"", ":", "\"golang-*\"", ",", "\"limit\"", ":", "200", ",", "\"page\"", ":", "page", "}", "response", "=", "requests", ".", "get", "(", "url", ",", "params", "=", "params", ")", "if", "response", ".", "status_code", "!=", "requests", ".", "codes", ".", "ok", ":", "continue", "data", "=", "response", ".", "json", "(", ")", "for", "package", "in", "data", "[", "\"packages\"", "]", ":", "packages", "[", "package", "[", "\"name\"", "]", "]", "=", "self", ".", "_processPackageData", "(", "package", ")", "# get branches of all packages", "MAX_LEN", "=", "30", "# break the list of packages into lists of at most 50 packages", "package_names", "=", "packages", ".", "keys", "(", ")", "packages_total", "=", "len", "(", "package_names", ")", "packages_counter", "=", "0", "logger", ".", "info", "(", "\"%s packages to process\"", "%", "packages_total", ")", "for", "i", "in", "range", "(", "0", ",", "packages_total", ",", "MAX_LEN", ")", ":", "sublist", "=", "package_names", "[", "i", ":", "i", "+", "MAX_LEN", "]", "branches", "=", "self", ".", "_getPackageBranches", "(", "sublist", ")", "for", "package", "in", "sublist", ":", "packages", "[", "package", "]", "[", "\"branches\"", "]", "=", "branches", "[", "package", "]", "packages_counter", "=", "packages_counter", "+", "len", "(", "branches", ")", "logger", ".", "info", "(", "\"%s/%s packages processed\"", "%", "(", "packages_counter", ",", "packages_total", ")", ")", "return", "packages" ]
Get a list of all golang packages for all available branches
[ "Get", "a", "list", "of", "all", "golang", "packages", "for", "all", "available", "branches" ]
0674c248fe3d8706f98f912996b65af469f96b10
https://github.com/gofed/gofedlib/blob/0674c248fe3d8706f98f912996b65af469f96b10/gofedlib/distribution/clients/pkgdb/client.py#L98-L144
train
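The page-walking pattern above, reduced to a sketch (the URL layout and the page_total field are as used in the method; fetch_all_pages is an illustrative name, not a tested client):

import requests

def fetch_all_pages(base_url, pattern="golang-*", limit=200):
    url = "%s/packages" % base_url
    params = {"pattern": pattern, "limit": limit}
    response = requests.get(url, params=params)
    if response.status_code != requests.codes.ok:
        return {}
    data = response.json()
    packages = {p["name"]: p for p in data["packages"]}
    # Page 1 reports the page count; walk the remaining pages.
    for page in range(2, data["page_total"] + 1):
        params["page"] = page
        response = requests.get(url, params=params)
        if response.status_code != requests.codes.ok:
            continue
        for p in response.json()["packages"]:
            packages[p["name"]] = p
    return packages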
eventifyio/eventify
eventify/drivers/crossbar.py
Component.onClose
def onClose(self, wasClean):
    """
    Disconnect when connection to message broker is lost
    """
    self.log.error('lost connection to crossbar on session ' + str(self.session_id))
    for task in asyncio.Task.all_tasks():
        task.cancel()
    asyncio.get_event_loop().stop()
python
def onClose(self, wasClean):
    """
    Disconnect when connection to message broker is lost
    """
    self.log.error('lost connection to crossbar on session ' + str(self.session_id))
    for task in asyncio.Task.all_tasks():
        task.cancel()
    asyncio.get_event_loop().stop()
[ "def", "onClose", "(", "self", ",", "wasClean", ")", ":", "self", ".", "log", ".", "error", "(", "'lost connection to crossbar on session %'", "+", "str", "(", "self", ".", "session_id", ")", ")", "for", "task", "in", "asyncio", ".", "Task", ".", "all_tasks", "(", ")", ":", "task", ".", "cancel", "(", ")", "asyncio", ".", "get_event_loop", "(", ")", ".", "stop", "(", ")" ]
Disconnect when connection to message broker is lost
[ "Disconnect", "when", "connection", "to", "message", "broker", "is", "lost" ]
0e519964a56bd07a879b266f21f177749c63aaed
https://github.com/eventifyio/eventify/blob/0e519964a56bd07a879b266f21f177749c63aaed/eventify/drivers/crossbar.py#L63-L71
train
eventifyio/eventify
eventify/drivers/crossbar.py
Component.onUserError
def onUserError(self, fail, message):
    """
    Handle user errors
    """
    self.log.error(fail)
    self.log.error(message)
python
def onUserError(self, fail, message):
    """
    Handle user errors
    """
    self.log.error(fail)
    self.log.error(message)
[ "def", "onUserError", "(", "self", ",", "fail", ",", "message", ")", ":", "self", ".", "log", ".", "error", "(", "fail", ")", "self", ".", "log", ".", "error", "(", "message", ")" ]
Handle user errors
[ "Handle", "user", "errors" ]
0e519964a56bd07a879b266f21f177749c63aaed
https://github.com/eventifyio/eventify/blob/0e519964a56bd07a879b266f21f177749c63aaed/eventify/drivers/crossbar.py#L86-L91
train
eventifyio/eventify
eventify/drivers/crossbar.py
Component.show_sessions
async def show_sessions(self):
    """
    Returns an object with a list of the session IDs for all sessions
    currently attached to the realm

    http://crossbar.io/docs/Session-Metaevents-and-Procedures/
    """
    res = await self.call("wamp.session.list")
    for session_id in res:
        session = await self.call("wamp.session.get", session_id)
        self.log.info(session)
python
async def show_sessions(self):
    """
    Returns an object with a list of the session IDs for all sessions
    currently attached to the realm

    http://crossbar.io/docs/Session-Metaevents-and-Procedures/
    """
    res = await self.call("wamp.session.list")
    for session_id in res:
        session = await self.call("wamp.session.get", session_id)
        self.log.info(session)
[ "async", "def", "show_sessions", "(", "self", ")", ":", "res", "=", "await", "self", ".", "call", "(", "\"wamp.session.list\"", ")", "for", "session_id", "in", "res", ":", "session", "=", "await", "self", ".", "call", "(", "\"wamp.session.get\"", ",", "session_id", ")", "self", ".", "log", ".", "info", "(", "session", ")" ]
Returns an object with a list of the session IDs for all sessions
currently attached to the realm

http://crossbar.io/docs/Session-Metaevents-and-Procedures/
[ "Returns", "an", "object", "with", "a", "lists", "of", "the", "session", "IDs", "for", "all", "sessions", "currently", "attached", "to", "the", "realm" ]
0e519964a56bd07a879b266f21f177749c63aaed
https://github.com/eventifyio/eventify/blob/0e519964a56bd07a879b266f21f177749c63aaed/eventify/drivers/crossbar.py#L138-L148
train
eventifyio/eventify
eventify/drivers/crossbar.py
Component.lookup_session
async def lookup_session(self, topic_name):
    """
    Attempts to find the session id for a given topic

    http://crossbar.io/docs/Subscription-Meta-Events-and-Procedures/
    """
    res = await self.call("wamp.subscription.lookup", topic_name)
    self.log.info(res)
python
async def lookup_session(self, topic_name):
    """
    Attempts to find the session id for a given topic

    http://crossbar.io/docs/Subscription-Meta-Events-and-Procedures/
    """
    res = await self.call("wamp.subscription.lookup", topic_name)
    self.log.info(res)
[ "async", "def", "lookup_session", "(", "self", ",", "topic_name", ")", ":", "res", "=", "await", "self", ".", "call", "(", "\"wamp.subscription.lookup\"", ",", "topic_name", ")", "self", ".", "log", ".", "info", "(", "res", ")" ]
Attempts to find the session id for a given topic

http://crossbar.io/docs/Subscription-Meta-Events-and-Procedures/
[ "Attempts", "to", "find", "the", "session", "id", "for", "a", "given", "topic" ]
0e519964a56bd07a879b266f21f177749c63aaed
https://github.com/eventifyio/eventify/blob/0e519964a56bd07a879b266f21f177749c63aaed/eventify/drivers/crossbar.py#L159-L166
train
eventifyio/eventify
eventify/drivers/crossbar.py
Service.setup_runner
def setup_runner(self):
    """
    Set up an ApplicationRunner instance
    """
    runner = ApplicationRunner(
        url=self.config['transport_host'],
        realm=u'realm1',
        extra={
            'config': self.config,
            'handlers': self.handlers,
        }
    )
    return runner
python
def setup_runner(self):
    """
    Set up an ApplicationRunner instance
    """
    runner = ApplicationRunner(
        url=self.config['transport_host'],
        realm=u'realm1',
        extra={
            'config': self.config,
            'handlers': self.handlers,
        }
    )
    return runner
[ "def", "setup_runner", "(", "self", ")", ":", "runner", "=", "ApplicationRunner", "(", "url", "=", "self", ".", "config", "[", "'transport_host'", "]", ",", "realm", "=", "u'realm1'", ",", "extra", "=", "{", "'config'", ":", "self", ".", "config", ",", "'handlers'", ":", "self", ".", "handlers", ",", "}", ")", "return", "runner" ]
Set up an ApplicationRunner instance
[ "Setup", "instance", "of", "runner", "var" ]
0e519964a56bd07a879b266f21f177749c63aaed
https://github.com/eventifyio/eventify/blob/0e519964a56bd07a879b266f21f177749c63aaed/eventify/drivers/crossbar.py#L173-L185
train
eventifyio/eventify
eventify/drivers/crossbar.py
Service.reconnect
def reconnect(self):
    """
    Handle reconnect logic if connection
    to crossbar is lost
    """
    connect_attempt = 0
    max_retries = self.config['max_reconnect_retries']
    logging.info('attempting to reconnect to crossbar')
    runner = self.setup_runner()

    while True:
        if connect_attempt == max_retries:
            logging.info('max retries reached; stopping service')
            sys.exit(1)
        self.check_event_loop()
        try:
            logging.info('waiting 5 seconds')
            time.sleep(5)
            if self.check_transport_host():
                logging.info('waiting 10 seconds to ensure that crossbar has initialized before reconnecting')
                time.sleep(10)
                runner.run(Component)
            else:
                logging.error('crossbar host port 8080 not available...')
        except RuntimeError as error:
            logging.error(error)
        except ConnectionRefusedError as error:
            logging.error(error)
        except ConnectionError as error:
            logging.error(error)
        except KeyboardInterrupt:
            logging.info('User initiated shutdown')
            loop = asyncio.get_event_loop()
            loop.stop()
            sys.exit(1)
        connect_attempt += 1
python
def reconnect(self):
    """
    Handle reconnect logic if connection
    to crossbar is lost
    """
    connect_attempt = 0
    max_retries = self.config['max_reconnect_retries']
    logging.info('attempting to reconnect to crossbar')
    runner = self.setup_runner()

    while True:
        if connect_attempt == max_retries:
            logging.info('max retries reached; stopping service')
            sys.exit(1)
        self.check_event_loop()
        try:
            logging.info('waiting 5 seconds')
            time.sleep(5)
            if self.check_transport_host():
                logging.info('waiting 10 seconds to ensure that crossbar has initialized before reconnecting')
                time.sleep(10)
                runner.run(Component)
            else:
                logging.error('crossbar host port 8080 not available...')
        except RuntimeError as error:
            logging.error(error)
        except ConnectionRefusedError as error:
            logging.error(error)
        except ConnectionError as error:
            logging.error(error)
        except KeyboardInterrupt:
            logging.info('User initiated shutdown')
            loop = asyncio.get_event_loop()
            loop.stop()
            sys.exit(1)
        connect_attempt += 1
[ "def", "reconnect", "(", "self", ")", ":", "connect_attempt", "=", "0", "max_retries", "=", "self", ".", "config", "[", "'max_reconnect_retries'", "]", "logging", ".", "info", "(", "'attempting to reconnect to crossbar'", ")", "runner", "=", "self", ".", "setup_runner", "(", ")", "while", "True", ":", "if", "connect_attempt", "==", "max_retries", ":", "logging", ".", "info", "(", "'max retries reached; stopping service'", ")", "sys", ".", "exit", "(", "1", ")", "self", ".", "check_event_loop", "(", ")", "try", ":", "logging", ".", "info", "(", "'waiting 5 seconds'", ")", "time", ".", "sleep", "(", "5", ")", "if", "self", ".", "check_transport_host", "(", ")", ":", "logging", ".", "info", "(", "'waiting 10 seconds to ensure that crossbar has initialized before reconnecting'", ")", "time", ".", "sleep", "(", "10", ")", "runner", ".", "run", "(", "Component", ")", "else", ":", "logging", ".", "error", "(", "'crossbar host port 8080 not available...'", ")", "except", "RuntimeError", "as", "error", ":", "logging", ".", "error", "(", "error", ")", "except", "ConnectionRefusedError", "as", "error", ":", "logging", ".", "error", "(", "error", ")", "except", "ConnectionError", "as", "error", ":", "logging", ".", "error", "(", "error", ")", "except", "KeyboardInterrupt", ":", "logging", ".", "info", "(", "'User initiated shutdown'", ")", "loop", "=", "asyncio", ".", "get_event_loop", "(", ")", "loop", ".", "stop", "(", ")", "sys", ".", "exit", "(", "1", ")", "connect_attempt", "+=", "1" ]
Handle reconnect logic if connection to crossbar is lost
[ "Handle", "reconnect", "logic", "if", "connection", "to", "crossbar", "is", "lost" ]
0e519964a56bd07a879b266f21f177749c63aaed
https://github.com/eventifyio/eventify/blob/0e519964a56bd07a879b266f21f177749c63aaed/eventify/drivers/crossbar.py#L199-L235
train
Nic30/hwtGraph
hwtGraph/elk/fromHwt/reduceUselessAssignments.py
reduceUselessAssignments
def reduceUselessAssignments(root: LNode):
    """
    Remove assignments if it is only a direct connection and can be replaced with direct link
    """
    for n in root.children:
        if n.children:
            reduceUselessAssignments(n)

    do_update = False
    for n in root.children:
        if isinstance(n.originObj, Assignment)\
                and not n.originObj.indexes\
                and len(n.west) == 1:
            src = n.originObj.src
            if isinstance(src, RtlSignalBase) and src.hidden:
                continue

            if not do_update:
                nodes = set(root.children)
                do_update = True
            nodes.remove(n)
            srcPorts = []
            dstPorts = []
            edgesToRemove = []

            inP = getSinglePort(n.west)
            outP = getSinglePort(n.east)
            for e in inP.incomingEdges:
                sPort = e.src
                srcPorts.append((sPort, e.originObj))
                edgesToRemove.append(e)

            for e in outP.outgoingEdges:
                dPort = e.dst
                dstPorts.append(dPort)
                edgesToRemove.append(e)

            for e in edgesToRemove:
                e.remove()

            for srcPort, originObj in srcPorts:
                for dstPort in dstPorts:
                    root.addEdge(srcPort, dstPort,
                                 originObj=originObj)

    if do_update:
        root.children = list(nodes)
python
def reduceUselessAssignments(root: LNode):
    """
    Remove assignments if it is only a direct connection and can be replaced with direct link
    """
    for n in root.children:
        if n.children:
            reduceUselessAssignments(n)

    do_update = False
    for n in root.children:
        if isinstance(n.originObj, Assignment)\
                and not n.originObj.indexes\
                and len(n.west) == 1:
            src = n.originObj.src
            if isinstance(src, RtlSignalBase) and src.hidden:
                continue

            if not do_update:
                nodes = set(root.children)
                do_update = True
            nodes.remove(n)
            srcPorts = []
            dstPorts = []
            edgesToRemove = []

            inP = getSinglePort(n.west)
            outP = getSinglePort(n.east)
            for e in inP.incomingEdges:
                sPort = e.src
                srcPorts.append((sPort, e.originObj))
                edgesToRemove.append(e)

            for e in outP.outgoingEdges:
                dPort = e.dst
                dstPorts.append(dPort)
                edgesToRemove.append(e)

            for e in edgesToRemove:
                e.remove()

            for srcPort, originObj in srcPorts:
                for dstPort in dstPorts:
                    root.addEdge(srcPort, dstPort,
                                 originObj=originObj)

    if do_update:
        root.children = list(nodes)
[ "def", "reduceUselessAssignments", "(", "root", ":", "LNode", ")", ":", "for", "n", "in", "root", ".", "children", ":", "if", "n", ".", "children", ":", "reduceUselessAssignments", "(", "n", ")", "do_update", "=", "False", "for", "n", "in", "root", ".", "children", ":", "if", "isinstance", "(", "n", ".", "originObj", ",", "Assignment", ")", "and", "not", "n", ".", "originObj", ".", "indexes", "and", "len", "(", "n", ".", "west", ")", "==", "1", ":", "src", "=", "n", ".", "originObj", ".", "src", "if", "isinstance", "(", "src", ",", "RtlSignalBase", ")", "and", "src", ".", "hidden", ":", "continue", "if", "not", "do_update", ":", "nodes", "=", "set", "(", "root", ".", "children", ")", "do_update", "=", "True", "nodes", ".", "remove", "(", "n", ")", "srcPorts", "=", "[", "]", "dstPorts", "=", "[", "]", "edgesToRemove", "=", "[", "]", "inP", "=", "getSinglePort", "(", "n", ".", "west", ")", "outP", "=", "getSinglePort", "(", "n", ".", "east", ")", "for", "e", "in", "inP", ".", "incomingEdges", ":", "sPort", "=", "e", ".", "src", "srcPorts", ".", "append", "(", "(", "sPort", ",", "e", ".", "originObj", ")", ")", "edgesToRemove", ".", "append", "(", "e", ")", "for", "e", "in", "outP", ".", "outgoingEdges", ":", "dPort", "=", "e", ".", "dst", "dstPorts", ".", "append", "(", "dPort", ")", "edgesToRemove", ".", "append", "(", "e", ")", "for", "e", "in", "edgesToRemove", ":", "e", ".", "remove", "(", ")", "for", "srcPort", ",", "originObj", "in", "srcPorts", ":", "for", "dstPort", "in", "dstPorts", ":", "root", ".", "addEdge", "(", "srcPort", ",", "dstPort", ",", "originObj", "=", "originObj", ")", "if", "do_update", ":", "root", ".", "children", "=", "list", "(", "nodes", ")" ]
Remove assignments if it is only a direct connection and can be replaced with direct link
[ "Remove", "assignments", "if", "it", "is", "only", "a", "direct", "connection", "and", "can", "be", "replaced", "with", "direct", "link" ]
6b7d4fdd759f263a0fdd2736f02f123e44e4354f
https://github.com/Nic30/hwtGraph/blob/6b7d4fdd759f263a0fdd2736f02f123e44e4354f/hwtGraph/elk/fromHwt/reduceUselessAssignments.py#L7-L55
train
gofed/gofedlib
gofedlib/go/apidiff/apidiff.py
GoApiDiff._constructTypeQualifiedName
def _constructTypeQualifiedName(self, type, full=False):
    """
    For given type construct its full qualified name.

    AnonymousField = [ "*" ] TypeName .
    TypeName = identifier | QualifiedIdent .
    QualifiedIdent = PackageName "." identifier .
    """
    t = type["type"]
    if t == TYPE_IDENT:
        return type["def"]
    elif t == TYPE_POINTER:
        return self._constructTypeQualifiedName(type["def"])
    elif t == TYPE_SELECTOR:
        if full:
            return "%s.%s" % (type["prefix"], type["item"])
        else:
            return type["item"]
    else:
        raise ValueError("Type %s can not be used for FQN" % t)
python
def _constructTypeQualifiedName(self, type, full=False):
    """
    For given type construct its full qualified name.

    AnonymousField = [ "*" ] TypeName .
    TypeName = identifier | QualifiedIdent .
    QualifiedIdent = PackageName "." identifier .
    """
    t = type["type"]
    if t == TYPE_IDENT:
        return type["def"]
    elif t == TYPE_POINTER:
        return self._constructTypeQualifiedName(type["def"])
    elif t == TYPE_SELECTOR:
        if full:
            return "%s.%s" % (type["prefix"], type["item"])
        else:
            return type["item"]
    else:
        raise ValueError("Type %s can not be used for FQN" % t)
[ "def", "_constructTypeQualifiedName", "(", "self", ",", "type", ",", "full", "=", "False", ")", ":", "t", "=", "type", "[", "\"type\"", "]", "if", "t", "==", "TYPE_IDENT", ":", "return", "type", "[", "\"def\"", "]", "elif", "t", "==", "TYPE_POINTER", ":", "return", "self", ".", "_constructTypeQualifiedName", "(", "type", "[", "\"def\"", "]", ")", "elif", "t", "==", "TYPE_SELECTOR", ":", "if", "full", ":", "return", "\"%s.%s\"", "%", "(", "type", "[", "\"prefix\"", "]", ",", "type", "[", "\"item\"", "]", ")", "else", ":", "return", "type", "[", "\"item\"", "]", "else", ":", "raise", "ValueError", "(", "\"Type %s can not be used for FQN\"", "%", "t", ")" ]
For given type construct its full qualified name.

AnonymousField = [ "*" ] TypeName .
TypeName = identifier | QualifiedIdent .
QualifiedIdent = PackageName "." identifier .
[ "For", "given", "type", "construct", "its", "full", "qualified", "name", "." ]
0674c248fe3d8706f98f912996b65af469f96b10
https://github.com/gofed/gofedlib/blob/0674c248fe3d8706f98f912996b65af469f96b10/gofedlib/go/apidiff/apidiff.py#L231-L250
train
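The three node kinds exercised on hand-built type dicts, assuming a GoApiDiff instance `diff` and the module's TYPE_* constants in scope (the dict shapes follow the method's own key accesses):

ident = {"type": TYPE_IDENT, "def": "Reader"}
selector = {"type": TYPE_SELECTOR, "prefix": "io", "item": "Writer"}
pointer = {"type": TYPE_POINTER, "def": selector}

diff._constructTypeQualifiedName(ident)                # -> "Reader"
diff._constructTypeQualifiedName(pointer)              # -> "Writer"
diff._constructTypeQualifiedName(selector, full=True)  # -> "io.Writer"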
255BITS/hyperchamber
examples/shared/resize_image_patch.py
crop_to_bounding_box
def crop_to_bounding_box(image, offset_height, offset_width, target_height,
                         target_width, dynamic_shape=False):
  """Crops an image to a specified bounding box.

  This op cuts a rectangular part out of `image`. The top-left corner of the
  returned image is at `offset_height, offset_width` in `image`, and its
  lower-right corner is at
  `offset_height + target_height, offset_width + target_width`.

  Args:
    image: 3-D tensor with shape `[height, width, channels]`
    offset_height: Vertical coordinate of the top-left corner of the result in
                   the input.
    offset_width: Horizontal coordinate of the top-left corner of the result in
                  the input.
    target_height: Height of the result.
    target_width: Width of the result.
    dynamic_shape: Whether the input image has undetermined shape. If set to
      `True`, shape information will be retrieved at run time. Default to
      `False`.

  Returns:
    3-D tensor of image with shape `[target_height, target_width, channels]`

  Raises:
    ValueError: If the shape of `image` is incompatible with the `offset_*` or
    `target_*` arguments, and `dynamic_shape` is set to `False`.
  """
  image = ops.convert_to_tensor(image, name='image')
  _Check3DImage(image, require_static=(not dynamic_shape))
  height, width, _ = _ImageDimensions(image, dynamic_shape=dynamic_shape)

  if not dynamic_shape:
    if offset_width < 0:
      raise ValueError('offset_width must be >= 0.')
    if offset_height < 0:
      raise ValueError('offset_height must be >= 0.')

    if width < (target_width + offset_width):
      raise ValueError('width must be >= target + offset.')
    if height < (target_height + offset_height):
      raise ValueError('height must be >= target + offset.')

  cropped = array_ops.slice(image,
                            array_ops.pack([offset_height, offset_width, 0]),
                            array_ops.pack([target_height, target_width, -1]))
  return cropped
python
def crop_to_bounding_box(image, offset_height, offset_width, target_height,
                         target_width, dynamic_shape=False):
  """Crops an image to a specified bounding box.

  This op cuts a rectangular part out of `image`. The top-left corner of the
  returned image is at `offset_height, offset_width` in `image`, and its
  lower-right corner is at
  `offset_height + target_height, offset_width + target_width`.

  Args:
    image: 3-D tensor with shape `[height, width, channels]`
    offset_height: Vertical coordinate of the top-left corner of the result in
                   the input.
    offset_width: Horizontal coordinate of the top-left corner of the result in
                  the input.
    target_height: Height of the result.
    target_width: Width of the result.
    dynamic_shape: Whether the input image has undetermined shape. If set to
      `True`, shape information will be retrieved at run time. Default to
      `False`.

  Returns:
    3-D tensor of image with shape `[target_height, target_width, channels]`

  Raises:
    ValueError: If the shape of `image` is incompatible with the `offset_*` or
    `target_*` arguments, and `dynamic_shape` is set to `False`.
  """
  image = ops.convert_to_tensor(image, name='image')
  _Check3DImage(image, require_static=(not dynamic_shape))
  height, width, _ = _ImageDimensions(image, dynamic_shape=dynamic_shape)

  if not dynamic_shape:
    if offset_width < 0:
      raise ValueError('offset_width must be >= 0.')
    if offset_height < 0:
      raise ValueError('offset_height must be >= 0.')

    if width < (target_width + offset_width):
      raise ValueError('width must be >= target + offset.')
    if height < (target_height + offset_height):
      raise ValueError('height must be >= target + offset.')

  cropped = array_ops.slice(image,
                            array_ops.pack([offset_height, offset_width, 0]),
                            array_ops.pack([target_height, target_width, -1]))
  return cropped
[ "def", "crop_to_bounding_box", "(", "image", ",", "offset_height", ",", "offset_width", ",", "target_height", ",", "target_width", ",", "dynamic_shape", "=", "False", ")", ":", "image", "=", "ops", ".", "convert_to_tensor", "(", "image", ",", "name", "=", "'image'", ")", "_Check3DImage", "(", "image", ",", "require_static", "=", "(", "not", "dynamic_shape", ")", ")", "height", ",", "width", ",", "_", "=", "_ImageDimensions", "(", "image", ",", "dynamic_shape", "=", "dynamic_shape", ")", "if", "not", "dynamic_shape", ":", "if", "offset_width", "<", "0", ":", "raise", "ValueError", "(", "'offset_width must be >= 0.'", ")", "if", "offset_height", "<", "0", ":", "raise", "ValueError", "(", "'offset_height must be >= 0.'", ")", "if", "width", "<", "(", "target_width", "+", "offset_width", ")", ":", "raise", "ValueError", "(", "'width must be >= target + offset.'", ")", "if", "height", "<", "(", "target_height", "+", "offset_height", ")", ":", "raise", "ValueError", "(", "'height must be >= target + offset.'", ")", "cropped", "=", "array_ops", ".", "slice", "(", "image", ",", "array_ops", ".", "pack", "(", "[", "offset_height", ",", "offset_width", ",", "0", "]", ")", ",", "array_ops", ".", "pack", "(", "[", "target_height", ",", "target_width", ",", "-", "1", "]", ")", ")", "return", "cropped" ]
Crops an image to a specified bounding box.

This op cuts a rectangular part out of `image`. The top-left corner of the
returned image is at `offset_height, offset_width` in `image`, and its
lower-right corner is at
`offset_height + target_height, offset_width + target_width`.

Args:
  image: 3-D tensor with shape `[height, width, channels]`
  offset_height: Vertical coordinate of the top-left corner of the result in
                 the input.
  offset_width: Horizontal coordinate of the top-left corner of the result in
                the input.
  target_height: Height of the result.
  target_width: Width of the result.
  dynamic_shape: Whether the input image has undetermined shape. If set to
    `True`, shape information will be retrieved at run time. Default to
    `False`.

Returns:
  3-D tensor of image with shape `[target_height, target_width, channels]`

Raises:
  ValueError: If the shape of `image` is incompatible with the `offset_*` or
  `target_*` arguments, and `dynamic_shape` is set to `False`.
[ "Crops", "an", "image", "to", "a", "specified", "bounding", "box", "." ]
4d5774bde9ea6ce1113f77a069ffc605148482b8
https://github.com/255BITS/hyperchamber/blob/4d5774bde9ea6ce1113f77a069ffc605148482b8/examples/shared/resize_image_patch.py#L14-L61
train
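For intuition, the same crop expressed in plain NumPy rather than the pre-1.0 TensorFlow ops above; this is an equivalent computation, not the function itself:

import numpy as np

def crop_np(image, offset_height, offset_width, target_height, target_width):
    # image is an HWC array; same bounds checks as above, then a slice.
    height, width, _ = image.shape
    assert offset_height >= 0 and offset_width >= 0
    assert width >= target_width + offset_width
    assert height >= target_height + offset_height
    return image[offset_height:offset_height + target_height,
                 offset_width:offset_width + target_width, :]

img = np.arange(5 * 4 * 3).reshape(5, 4, 3)
assert crop_np(img, 1, 1, 2, 2).shape == (2, 2, 3)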
255BITS/hyperchamber
examples/shared/resize_image_patch.py
pad_to_bounding_box
def pad_to_bounding_box(image, offset_height, offset_width, target_height,
                        target_width, dynamic_shape=False):
  """Pad `image` with zeros to the specified `height` and `width`.

  Adds `offset_height` rows of zeros on top, `offset_width` columns of zeros
  on the left, and then pads the image on the bottom and right with zeros
  until it has dimensions `target_height`, `target_width`.

  This op does nothing if `offset_*` is zero and the image already has size
  `target_height` by `target_width`.

  Args:
    image: 3-D tensor with shape `[height, width, channels]`
    offset_height: Number of rows of zeros to add on top.
    offset_width: Number of columns of zeros to add on the left.
    target_height: Height of output image.
    target_width: Width of output image.
    dynamic_shape: Whether the input image has undetermined shape. If set to
      `True`, shape information will be retrieved at run time. Default to
      `False`.

  Returns:
    3-D tensor of shape `[target_height, target_width, channels]`

  Raises:
    ValueError: If the shape of `image` is incompatible with the `offset_*` or
    `target_*` arguments, and `dynamic_shape` is set to `False`.
  """
  image = ops.convert_to_tensor(image, name='image')
  _Check3DImage(image, require_static=(not dynamic_shape))
  height, width, depth = _ImageDimensions(image, dynamic_shape=dynamic_shape)

  after_padding_width = target_width - offset_width - width
  after_padding_height = target_height - offset_height - height

  if not dynamic_shape:
    if target_width < width:
      raise ValueError('target_width must be >= width')
    if target_height < height:
      raise ValueError('target_height must be >= height')

    if after_padding_width < 0:
      raise ValueError('target_width not possible given '
                       'offset_width and image width')
    if after_padding_height < 0:
      raise ValueError('target_height not possible given '
                       'offset_height and image height')

  # Do not pad on the depth dimensions.
  if (dynamic_shape or offset_width or offset_height or
      after_padding_width or after_padding_height):
    paddings = array_ops.reshape(
        array_ops.pack([offset_height, after_padding_height,
                        offset_width, after_padding_width,
                        0, 0]),
        [3, 2])
    padded = array_ops.pad(image, paddings)
    if not dynamic_shape:
      padded.set_shape([target_height, target_width, depth])
  else:
    padded = image

  return padded
python
def pad_to_bounding_box(image, offset_height, offset_width, target_height,
                        target_width, dynamic_shape=False):
  """Pad `image` with zeros to the specified `height` and `width`.

  Adds `offset_height` rows of zeros on top, `offset_width` columns of zeros
  on the left, and then pads the image on the bottom and right with zeros
  until it has dimensions `target_height`, `target_width`.

  This op does nothing if `offset_*` is zero and the image already has size
  `target_height` by `target_width`.

  Args:
    image: 3-D tensor with shape `[height, width, channels]`
    offset_height: Number of rows of zeros to add on top.
    offset_width: Number of columns of zeros to add on the left.
    target_height: Height of output image.
    target_width: Width of output image.
    dynamic_shape: Whether the input image has undetermined shape. If set to
      `True`, shape information will be retrieved at run time. Default to
      `False`.

  Returns:
    3-D tensor of shape `[target_height, target_width, channels]`

  Raises:
    ValueError: If the shape of `image` is incompatible with the `offset_*` or
    `target_*` arguments, and `dynamic_shape` is set to `False`.
  """
  image = ops.convert_to_tensor(image, name='image')
  _Check3DImage(image, require_static=(not dynamic_shape))
  height, width, depth = _ImageDimensions(image, dynamic_shape=dynamic_shape)

  after_padding_width = target_width - offset_width - width
  after_padding_height = target_height - offset_height - height

  if not dynamic_shape:
    if target_width < width:
      raise ValueError('target_width must be >= width')
    if target_height < height:
      raise ValueError('target_height must be >= height')

    if after_padding_width < 0:
      raise ValueError('target_width not possible given '
                       'offset_width and image width')
    if after_padding_height < 0:
      raise ValueError('target_height not possible given '
                       'offset_height and image height')

  # Do not pad on the depth dimensions.
  if (dynamic_shape or offset_width or offset_height or
      after_padding_width or after_padding_height):
    paddings = array_ops.reshape(
        array_ops.pack([offset_height, after_padding_height,
                        offset_width, after_padding_width,
                        0, 0]),
        [3, 2])
    padded = array_ops.pad(image, paddings)
    if not dynamic_shape:
      padded.set_shape([target_height, target_width, depth])
  else:
    padded = image

  return padded
[ "def", "pad_to_bounding_box", "(", "image", ",", "offset_height", ",", "offset_width", ",", "target_height", ",", "target_width", ",", "dynamic_shape", "=", "False", ")", ":", "image", "=", "ops", ".", "convert_to_tensor", "(", "image", ",", "name", "=", "'image'", ")", "_Check3DImage", "(", "image", ",", "require_static", "=", "(", "not", "dynamic_shape", ")", ")", "height", ",", "width", ",", "depth", "=", "_ImageDimensions", "(", "image", ",", "dynamic_shape", "=", "dynamic_shape", ")", "after_padding_width", "=", "target_width", "-", "offset_width", "-", "width", "after_padding_height", "=", "target_height", "-", "offset_height", "-", "height", "if", "not", "dynamic_shape", ":", "if", "target_width", "<", "width", ":", "raise", "ValueError", "(", "'target_width must be >= width'", ")", "if", "target_height", "<", "height", ":", "raise", "ValueError", "(", "'target_height must be >= height'", ")", "if", "after_padding_width", "<", "0", ":", "raise", "ValueError", "(", "'target_width not possible given '", "'offset_width and image width'", ")", "if", "after_padding_height", "<", "0", ":", "raise", "ValueError", "(", "'target_height not possible given '", "'offset_height and image height'", ")", "# Do not pad on the depth dimensions.", "if", "(", "dynamic_shape", "or", "offset_width", "or", "offset_height", "or", "after_padding_width", "or", "after_padding_height", ")", ":", "paddings", "=", "array_ops", ".", "reshape", "(", "array_ops", ".", "pack", "(", "[", "offset_height", ",", "after_padding_height", ",", "offset_width", ",", "after_padding_width", ",", "0", ",", "0", "]", ")", ",", "[", "3", ",", "2", "]", ")", "padded", "=", "array_ops", ".", "pad", "(", "image", ",", "paddings", ")", "if", "not", "dynamic_shape", ":", "padded", ".", "set_shape", "(", "[", "target_height", ",", "target_width", ",", "depth", "]", ")", "else", ":", "padded", "=", "image", "return", "padded" ]
Pad `image` with zeros to the specified `height` and `width`.

Adds `offset_height` rows of zeros on top, `offset_width` columns of zeros
on the left, and then pads the image on the bottom and right with zeros
until it has dimensions `target_height`, `target_width`.

This op does nothing if `offset_*` is zero and the image already has size
`target_height` by `target_width`.

Args:
  image: 3-D tensor with shape `[height, width, channels]`
  offset_height: Number of rows of zeros to add on top.
  offset_width: Number of columns of zeros to add on the left.
  target_height: Height of output image.
  target_width: Width of output image.
  dynamic_shape: Whether the input image has undetermined shape. If set to
    `True`, shape information will be retrieved at run time. Default to
    `False`.

Returns:
  3-D tensor of shape `[target_height, target_width, channels]`

Raises:
  ValueError: If the shape of `image` is incompatible with the `offset_*` or
  `target_*` arguments, and `dynamic_shape` is set to `False`.
[ "Pad", "image", "with", "zeros", "to", "the", "specified", "height", "and", "width", "." ]
4d5774bde9ea6ce1113f77a069ffc605148482b8
https://github.com/255BITS/hyperchamber/blob/4d5774bde9ea6ce1113f77a069ffc605148482b8/examples/shared/resize_image_patch.py#L66-L127
train
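And the matching NumPy equivalent of the zero-padding (again an illustration, not the TF function):

import numpy as np

def pad_np(image, offset_height, offset_width, target_height, target_width):
    # image is an HWC array; zero-pad to the target size, depth untouched.
    height, width, _ = image.shape
    after_h = target_height - offset_height - height
    after_w = target_width - offset_width - width
    assert after_h >= 0 and after_w >= 0
    return np.pad(image, ((offset_height, after_h),
                          (offset_width, after_w),
                          (0, 0)), mode='constant')

img = np.ones((2, 2, 3))
assert pad_np(img, 1, 1, 4, 4).shape == (4, 4, 3)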
unt-libraries/pyuntl
pyuntl/quality.py
determine_completeness
def determine_completeness(py_untl):
    """Take a Python untl and calculate the completeness.

    Completeness is based on this: metadata_quality.rst documentation.
    Returns a float 0.0 - 1.0.
    """
    # Default values for the completeness dictionary.
    completeness_dict = {
        'title': {'present': False, 'weight': 10, },
        'description': {'present': False, 'weight': 1, },
        'language': {'present': False, 'weight': 1, },
        'collection': {'present': False, 'weight': 10, },
        'institution': {'present': False, 'weight': 10, },
        'resourceType': {'present': False, 'weight': 5, },
        'format': {'present': False, 'weight': 1, },
        'subject': {'present': False, 'weight': 1, },
        'meta': {'present': False, 'weight': 20, },
    }
    total_points = sum(item['weight'] for item in completeness_dict.values())
    py_untl_object_score = 0.0
    # Iterate through the attributes of the pyuntl record.
    # This loop will toggle the Boolean for scoring.
    for i in py_untl.children:
        # Process attribute that is scorable and has content.
        if i.tag in PYUNTL_COMPLETENESS_SCORED_ATTRIBUTES:
            if i.content:
                content = i.content.lower()
                # Try and match against new default placeholders.
                match = bool(DEFAULT_VALUE_REGEX.search(content))
                # The content is not a legacy placeholder.
                if content not in COMMON_DEFAULT_ATTRIBUTE_VALUES and not match:
                    # Only consider <meta qualifier="system"> records.
                    if i.tag == 'meta':
                        if i.qualifier == 'system':
                            completeness_dict['%s' % i.tag]['present'] = True
                    else:
                        completeness_dict['%s' % i.tag]['present'] = True
    # Get total score of the pyuntl object.
    for k, v in completeness_dict.iteritems():
        # If presence was toggled true, adjust score based on weight.
        if v['present']:
            py_untl_object_score += completeness_dict[k]['weight']
    # Calculate the float score completeness.
    completeness = py_untl_object_score / total_points
    return completeness
python
def determine_completeness(py_untl):
    """Take a Python untl and calculate the completeness.

    Completeness is based on this: metadata_quality.rst documentation.
    Returns a float 0.0 - 1.0.
    """
    # Default values for the completeness dictionary.
    completeness_dict = {
        'title': {'present': False, 'weight': 10, },
        'description': {'present': False, 'weight': 1, },
        'language': {'present': False, 'weight': 1, },
        'collection': {'present': False, 'weight': 10, },
        'institution': {'present': False, 'weight': 10, },
        'resourceType': {'present': False, 'weight': 5, },
        'format': {'present': False, 'weight': 1, },
        'subject': {'present': False, 'weight': 1, },
        'meta': {'present': False, 'weight': 20, },
    }
    total_points = sum(item['weight'] for item in completeness_dict.values())
    py_untl_object_score = 0.0
    # Iterate through the attributes of the pyuntl record.
    # This loop will toggle the Boolean for scoring.
    for i in py_untl.children:
        # Process attribute that is scorable and has content.
        if i.tag in PYUNTL_COMPLETENESS_SCORED_ATTRIBUTES:
            if i.content:
                content = i.content.lower()
                # Try and match against new default placeholders.
                match = bool(DEFAULT_VALUE_REGEX.search(content))
                # The content is not a legacy placeholder.
                if content not in COMMON_DEFAULT_ATTRIBUTE_VALUES and not match:
                    # Only consider <meta qualifier="system"> records.
                    if i.tag == 'meta':
                        if i.qualifier == 'system':
                            completeness_dict['%s' % i.tag]['present'] = True
                    else:
                        completeness_dict['%s' % i.tag]['present'] = True
    # Get total score of the pyuntl object.
    for k, v in completeness_dict.iteritems():
        # If presence was toggled true, adjust score based on weight.
        if v['present']:
            py_untl_object_score += completeness_dict[k]['weight']
    # Calculate the float score completeness.
    completeness = py_untl_object_score / total_points
    return completeness
[ "def", "determine_completeness", "(", "py_untl", ")", ":", "# Default values for the completeness dictionary.", "completeness_dict", "=", "{", "'title'", ":", "{", "'present'", ":", "False", ",", "'weight'", ":", "10", ",", "}", ",", "'description'", ":", "{", "'present'", ":", "False", ",", "'weight'", ":", "1", ",", "}", ",", "'language'", ":", "{", "'present'", ":", "False", ",", "'weight'", ":", "1", ",", "}", ",", "'collection'", ":", "{", "'present'", ":", "False", ",", "'weight'", ":", "10", ",", "}", ",", "'institution'", ":", "{", "'present'", ":", "False", ",", "'weight'", ":", "10", ",", "}", ",", "'resourceType'", ":", "{", "'present'", ":", "False", ",", "'weight'", ":", "5", ",", "}", ",", "'format'", ":", "{", "'present'", ":", "False", ",", "'weight'", ":", "1", ",", "}", ",", "'subject'", ":", "{", "'present'", ":", "False", ",", "'weight'", ":", "1", ",", "}", ",", "'meta'", ":", "{", "'present'", ":", "False", ",", "'weight'", ":", "20", ",", "}", ",", "}", "total_points", "=", "sum", "(", "item", "[", "'weight'", "]", "for", "item", "in", "completeness_dict", ".", "values", "(", ")", ")", "py_untl_object_score", "=", "0.0", "# Iterate through the attributes of the pyuntl record.", "# This loop will toggle the Boolean for scoring.", "for", "i", "in", "py_untl", ".", "children", ":", "# Process attribute that is scorable and has content.", "if", "i", ".", "tag", "in", "PYUNTL_COMPLETENESS_SCORED_ATTRIBUTES", ":", "if", "i", ".", "content", ":", "content", "=", "i", ".", "content", ".", "lower", "(", ")", "# Try and match against new default placeholders.", "match", "=", "bool", "(", "DEFAULT_VALUE_REGEX", ".", "search", "(", "content", ")", ")", "# The content is not a legacy placeholder.", "if", "content", "not", "in", "COMMON_DEFAULT_ATTRIBUTE_VALUES", "and", "not", "match", ":", "# Only consider <meta qualifier=\"system\"> records.", "if", "i", ".", "tag", "==", "'meta'", ":", "if", "i", ".", "qualifier", "==", "'system'", ":", "completeness_dict", "[", "'%s'", "%", "i", ".", "tag", "]", "[", "'present'", "]", "=", "True", "else", ":", "completeness_dict", "[", "'%s'", "%", "i", ".", "tag", "]", "[", "'present'", "]", "=", "True", "# Get total score of the pyuntl object.", "for", "k", ",", "v", "in", "completeness_dict", ".", "iteritems", "(", ")", ":", "# If presence was toggled true, adjust score based on weight.", "if", "v", "[", "'present'", "]", ":", "py_untl_object_score", "+=", "completeness_dict", "[", "k", "]", "[", "'weight'", "]", "# Calculate the float score completeness.", "completeness", "=", "py_untl_object_score", "/", "total_points", "return", "completeness" ]
Take a Python untl and calculate the completeness. Completeness is based on this: metadata_quality.rst documentation. Returns a float 0.0 - 1.0.
[ "Take", "a", "Python", "untl", "and", "calculate", "the", "completeness", "." ]
f92413302897dab948aac18ee9e482ace0187bd4
https://github.com/unt-libraries/pyuntl/blob/f92413302897dab948aac18ee9e482ace0187bd4/pyuntl/quality.py#L27-L73
train
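The weights above sum to 59, so the score is the weight sum of the present fields over 59; for example, a record with only title, collection, institution, and meta present:

weights = {'title': 10, 'description': 1, 'language': 1, 'collection': 10,
           'institution': 10, 'resourceType': 5, 'format': 1, 'subject': 1,
           'meta': 20}
total = sum(weights.values())                   # 59
present = ['title', 'collection', 'institution', 'meta']
score = sum(weights[k] for k in present) / float(total)
print(round(score, 3))                          # 0.847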
Robpol86/Flask-JIRA-Helper
flask_jira.py
JIRA.init_app
def init_app(self, app, config_prefix=None):
    """Actual method to read JIRA settings from app configuration and initialize the JIRA instance.

    Positional arguments:
    app -- Flask application instance.

    Keyword arguments:
    config_prefix -- Prefix used in config key names in the Flask app's configuration. Useful for applications which
        maintain two authenticated sessions with a JIRA server. Default is 'JIRA'. Will be converted to upper case.
        Examples:
            JIRA_SYSTEM_SERVER = 'http://jira.mycompany.com'
            JIRA_SYSTEM_USER = 'system_account'
            JIRA_SERVER = 'http://jira.mycompany.com'
            JIRA_TOKEN = '<token for oauthing users>'
    """
    # Restore self.kill_session().
    self.kill_session = self.original_kill_session

    # Normalize the prefix and add this instance to app.extensions.
    config_prefix = (config_prefix or 'JIRA').rstrip('_').upper()
    if not hasattr(app, 'extensions'):
        app.extensions = dict()
    if config_prefix.lower() in app.extensions:
        raise ValueError('Already registered config prefix {0!r}.'.format(config_prefix))
    app.extensions[config_prefix.lower()] = _JIRAState(self, app)

    # Read config.
    args = read_config(app.config, config_prefix)

    # Initialize fully.
    try:
        super(JIRA, self).__init__(**args)
    except ConnectionError:
        if not app.config.get('{0}_IGNORE_INITIAL_CONNECTION_FAILURE'.format(config_prefix)):
            raise
        LOG.exception('Ignoring ConnectionError.')
python
def init_app(self, app, config_prefix=None):
    """Actual method to read JIRA settings from app configuration and initialize the JIRA instance.

    Positional arguments:
    app -- Flask application instance.

    Keyword arguments:
    config_prefix -- Prefix used in config key names in the Flask app's configuration. Useful for applications which
        maintain two authenticated sessions with a JIRA server. Default is 'JIRA'. Will be converted to upper case.
        Examples:
            JIRA_SYSTEM_SERVER = 'http://jira.mycompany.com'
            JIRA_SYSTEM_USER = 'system_account'
            JIRA_SERVER = 'http://jira.mycompany.com'
            JIRA_TOKEN = '<token for oauthing users>'
    """
    # Restore self.kill_session().
    self.kill_session = self.original_kill_session

    # Normalize the prefix and add this instance to app.extensions.
    config_prefix = (config_prefix or 'JIRA').rstrip('_').upper()
    if not hasattr(app, 'extensions'):
        app.extensions = dict()
    if config_prefix.lower() in app.extensions:
        raise ValueError('Already registered config prefix {0!r}.'.format(config_prefix))
    app.extensions[config_prefix.lower()] = _JIRAState(self, app)

    # Read config.
    args = read_config(app.config, config_prefix)

    # Initialize fully.
    try:
        super(JIRA, self).__init__(**args)
    except ConnectionError:
        if not app.config.get('{0}_IGNORE_INITIAL_CONNECTION_FAILURE'.format(config_prefix)):
            raise
        LOG.exception('Ignoring ConnectionError.')
[ "def", "init_app", "(", "self", ",", "app", ",", "config_prefix", "=", "None", ")", ":", "# Restore self.kill_session().", "self", ".", "kill_session", "=", "self", ".", "original_kill_session", "# Normalize the prefix and add this instance to app.extensions.", "config_prefix", "=", "(", "config_prefix", "or", "'JIRA'", ")", ".", "rstrip", "(", "'_'", ")", ".", "upper", "(", ")", "if", "not", "hasattr", "(", "app", ",", "'extensions'", ")", ":", "app", ".", "extensions", "=", "dict", "(", ")", "if", "config_prefix", ".", "lower", "(", ")", "in", "app", ".", "extensions", ":", "raise", "ValueError", "(", "'Already registered config prefix {0!r}.'", ".", "format", "(", "config_prefix", ")", ")", "app", ".", "extensions", "[", "config_prefix", ".", "lower", "(", ")", "]", "=", "_JIRAState", "(", "self", ",", "app", ")", "# Read config.", "args", "=", "read_config", "(", "app", ".", "config", ",", "config_prefix", ")", "# Initialize fully.", "try", ":", "super", "(", "JIRA", ",", "self", ")", ".", "__init__", "(", "*", "*", "args", ")", "except", "ConnectionError", ":", "if", "not", "app", ".", "config", ".", "get", "(", "'{0}_IGNORE_INITIAL_CONNECTION_FAILURE'", ".", "format", "(", "config_prefix", ")", ")", ":", "raise", "LOG", ".", "exception", "(", "'Ignoring ConnectionError.'", ")" ]
Actual method to read JIRA settings from app configuration and initialize the JIRA instance.

Positional arguments:
app -- Flask application instance.

Keyword arguments:
config_prefix -- Prefix used in config key names in the Flask app's configuration. Useful for applications which
    maintain two authenticated sessions with a JIRA server. Default is 'JIRA'. Will be converted to upper case.
    Examples:
        JIRA_SYSTEM_SERVER = 'http://jira.mycompany.com'
        JIRA_SYSTEM_USER = 'system_account'
        JIRA_SERVER = 'http://jira.mycompany.com'
        JIRA_TOKEN = '<token for oauthing users>'
[ "Actual", "method", "to", "read", "JIRA", "settings", "from", "app", "configuration", "and", "initialize", "the", "JIRA", "instance", "." ]
646d32e6fccee8ab7d55e6401b9fbcb9cf52dc7a
https://github.com/Robpol86/Flask-JIRA-Helper/blob/646d32e6fccee8ab7d55e6401b9fbcb9cf52dc7a/flask_jira.py#L114-L149
train
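A minimal usage sketch for `init_app` with a prefixed configuration. The import path, config keys beyond those shown in the docstring, and all server/account values are hypothetical placeholders, not taken from the source:

```python
# Minimal sketch, assuming the extension class is importable as below;
# URLs and credentials are placeholders.
from flask import Flask
from flask_jira import JIRA  # hypothetical import path

app = Flask(__name__)
app.config['JIRA_SYSTEM_SERVER'] = 'http://jira.mycompany.com'  # placeholder
app.config['JIRA_SYSTEM_USER'] = 'system_account'               # placeholder

jira_system = JIRA()
# The prefix selects the JIRA_SYSTEM_* keys and is normalized to upper case.
jira_system.init_app(app, config_prefix='JIRA_SYSTEM')
```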
gebn/wood
wood/util.py
zip_dict
def zip_dict(a: Dict[str, A], b: Dict[str, B]) \
        -> Dict[str, Tuple[Optional[A], Optional[B]]]:
    """
    Combine the values within two dictionaries by key.

    :param a: The first dictionary.
    :param b: The second dictionary.
    :return: A dictionary containing all keys that appear in the union of a
             and b. Values are pairs where the first part is a's value for
             the key, and the second part is b's value.
    """
    return {key: (a.get(key), b.get(key))
            for key in a.keys() | b.keys()}
python
efc71879890dbd2f2d7a0b1a65ed22a0843139dd
https://github.com/gebn/wood/blob/efc71879890dbd2f2d7a0b1a65ed22a0843139dd/wood/util.py#L11-L22
train
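A quick check of the union/None behaviour, assuming `zip_dict` is importable from `wood.util`:

```python
left = {'a': 1, 'b': 2}
right = {'b': 20, 'c': 30}

# Keys present on one side only get None for the missing side.
print(zip_dict(left, right))
# -> {'a': (1, None), 'b': (2, 20), 'c': (None, 30)}  (key order may vary)
```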
Nic30/hwtGraph
hwtGraph/elk/fromHwt/flattenPorts.py
flattenPort
def flattenPort(port: LPort):
    """
    Flatten hierarchical ports
    """
    yield port
    if port.children:
        for ch in port.children:
            yield from flattenPort(ch)
        port.children.clear()
python
6b7d4fdd759f263a0fdd2736f02f123e44e4354f
https://github.com/Nic30/hwtGraph/blob/6b7d4fdd759f263a0fdd2736f02f123e44e4354f/hwtGraph/elk/fromHwt/flattenPorts.py#L7-L15
train
Nic30/hwtGraph
hwtGraph/elk/fromHwt/flattenPorts.py
_flattenPortsSide
def _flattenPortsSide(side: List[LNode]) -> List[LNode]:
    """
    Flatten hierarchical ports on node side
    """
    new_side = []
    for i in side:
        for new_p in flattenPort(i):
            new_side.append(new_p)
    return new_side
python
6b7d4fdd759f263a0fdd2736f02f123e44e4354f
https://github.com/Nic30/hwtGraph/blob/6b7d4fdd759f263a0fdd2736f02f123e44e4354f/hwtGraph/elk/fromHwt/flattenPorts.py#L18-L26
train
Nic30/hwtGraph
hwtGraph/elk/fromHwt/flattenPorts.py
flattenPorts
def flattenPorts(root: LNode):
    """
    Flatten ports to simplify layout generation

    :attention: children property is destroyed, parent property stays same
    """
    for u in root.children:
        u.west = _flattenPortsSide(u.west)
        u.east = _flattenPortsSide(u.east)
        u.north = _flattenPortsSide(u.north)
        u.south = _flattenPortsSide(u.south)
python
6b7d4fdd759f263a0fdd2736f02f123e44e4354f
https://github.com/Nic30/hwtGraph/blob/6b7d4fdd759f263a0fdd2736f02f123e44e4354f/hwtGraph/elk/fromHwt/flattenPorts.py#L29-L39
train
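A small sketch of the traversal and the destructive clearing, using a stand-in port class rather than a real `LPort` (which carries many more attributes):

```python
# Stand-in class: only the `children` attribute used by flattenPort.
class FakePort:
    def __init__(self, name, children=None):
        self.name = name
        self.children = children or []

leaf_a, leaf_b = FakePort('a'), FakePort('b')
root = FakePort('root', [leaf_a, leaf_b])

# Parent is yielded first, then children depth-first.
print([p.name for p in flattenPort(root)])  # ['root', 'a', 'b']
print(root.children)                        # [] -- children were cleared
```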
eventifyio/eventify
eventify/__init__.py
Eventify.set_missing_defaults
def set_missing_defaults(self):
    """
    Ensure that minimal configuration is set up
    and set defaults for missing values
    """
    if 'pub_options' not in self.config:
        self.config['pub_options'] = {
            'acknowledge': True,
            'retain': True
        }

    if 'sub_options' not in self.config:
        self.config['sub_options'] = {
            'get_retained': False
        }

    if 'subscribed_topics' not in self.config:
        self.config['subscribed_topics'] = None

    if 'replay_events' not in self.config:
        self.config['replay_events'] = False

    if 'max_reconnect_retries' not in self.config:
        self.config['max_reconnect_retries'] = 10
python
0e519964a56bd07a879b266f21f177749c63aaed
https://github.com/eventifyio/eventify/blob/0e519964a56bd07a879b266f21f177749c63aaed/eventify/__init__.py#L48-L71
train
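A sketch of which keys the method fills in, using a stand-in object with just a `config` attribute instead of constructing a full Eventify service:

```python
class Cfg:
    def __init__(self):
        self.config = {'name': 'demo-service'}  # minimal starting config

cfg = Cfg()
# self is only used for .config, so the unbound method can be applied here.
Eventify.set_missing_defaults(cfg)
print(sorted(cfg.config))
# ['max_reconnect_retries', 'name', 'pub_options', 'replay_events',
#  'sub_options', 'subscribed_topics']
```

The same effect could be written more compactly with `dict.setdefault`, at the cost of the explicit per-key structure used here.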
eventifyio/eventify
eventify/__init__.py
Eventify.config_sanity_check
def config_sanity_check(self):
    """
    Base configuration sanity checks
    """
    if 'name' not in self.config:
        raise EventifyConfigError(
            """Required configuration parameter missing!
            Please configure "name" as a string in your
            configuration.""")

    if 'publish_topic' not in self.config:
        raise EventifyConfigError(
            """Required configuration parameter missing!
            Please configure "publish_topic" as an object
            in your configuration.""")

    if 'topic' not in self.config['publish_topic']:
        raise EventifyConfigError(
            """Required configuration parameter missing!
            Please configure "topic" as a key in your
            "publish_topic" object.""")
python
0e519964a56bd07a879b266f21f177749c63aaed
https://github.com/eventifyio/eventify/blob/0e519964a56bd07a879b266f21f177749c63aaed/eventify/__init__.py#L73-L93
train
eventifyio/eventify
eventify/__init__.py
Eventify.load_config
def load_config(self):
    """
    Load configuration for the service

    Args:
        config_file: Configuration file path
    """
    logger.debug('loading config file: %s', self.config_file)
    if os.path.exists(self.config_file):
        with open(self.config_file) as file_handle:
            return json.load(file_handle)
    else:
        logger.error('configuration file is required for eventify')
        logger.error('unable to load configuration for service')
        raise EventifyConfigError(
            'Configuration is required! Missing: %s' % self.config_file
        )
python
0e519964a56bd07a879b266f21f177749c63aaed
https://github.com/eventifyio/eventify/blob/0e519964a56bd07a879b266f21f177749c63aaed/eventify/__init__.py#L96-L112
train
eventifyio/eventify
eventify/__init__.py
Eventify.check_event_loop
def check_event_loop():
    """
    Check if event loop is closed
    and create a new event loop
    """
    loop = asyncio.get_event_loop()
    if loop.is_closed():
        asyncio.set_event_loop(asyncio.new_event_loop())
python
0e519964a56bd07a879b266f21f177749c63aaed
https://github.com/eventifyio/eventify/blob/0e519964a56bd07a879b266f21f177749c63aaed/eventify/__init__.py#L115-L122
train
geophysics-ubonn/crtomo_tools
src/td_run_all_local.py
is_tomodir
def is_tomodir(subdirectories):
    """provided with the subdirectories of a given directory, check if this
    is a tomodir
    """
    required = (
        'exe',
        'config',
        'rho',
        'mod',
        'inv'
    )
    is_tomodir = True
    for subdir in required:
        if subdir not in subdirectories:
            is_tomodir = False
    return is_tomodir
python
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/td_run_all_local.py#L54-L69
train
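A usage sketch: pass the immediate subdirectory names of a candidate directory. The path below is a placeholder:

```python
import os

candidate = 'measurements/td_01'  # hypothetical path
subdirs = [
    entry for entry in os.listdir(candidate)
    if os.path.isdir(os.path.join(candidate, entry))
]
# True only if exe/config/rho/mod/inv are all present.
print(is_tomodir(subdirs))
```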
geophysics-ubonn/crtomo_tools
src/td_run_all_local.py
check_if_needs_modeling
def check_if_needs_modeling(tomodir):
    """check if we need to run CRMod in a given tomodir
    """
    print('check for modeling', tomodir)
    required_files = (
        'config' + os.sep + 'config.dat',
        'rho' + os.sep + 'rho.dat',
        'grid' + os.sep + 'elem.dat',
        'grid' + os.sep + 'elec.dat',
        'exe' + os.sep + 'crmod.cfg',
    )

    not_allowed = (
        'mod' + os.sep + 'volt.dat',
    )
    needs_modeling = True

    for filename in not_allowed:
        if os.path.isfile(tomodir + os.sep + filename):
            needs_modeling = False

    for filename in required_files:
        full_file = tomodir + os.sep + filename
        if not os.path.isfile(full_file):
            print('does not exist: ', full_file)
            needs_modeling = False

    return needs_modeling
python
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/td_run_all_local.py#L72-L98
train
geophysics-ubonn/crtomo_tools
src/td_run_all_local.py
check_if_needs_inversion
def check_if_needs_inversion(tomodir):
    """check if we need to run CRTomo in a given tomodir
    """
    required_files = (
        'grid' + os.sep + 'elem.dat',
        'grid' + os.sep + 'elec.dat',
        'exe' + os.sep + 'crtomo.cfg',
    )

    needs_inversion = True

    for filename in required_files:
        if not os.path.isfile(tomodir + os.sep + filename):
            needs_inversion = False

    # check for crmod OR modeling capabilities
    if not os.path.isfile(tomodir + os.sep + 'mod' + os.sep + 'volt.dat'):
        if not check_if_needs_modeling(tomodir):
            print('no volt.dat and no modeling possible')
            needs_inversion = False

    # check if finished
    inv_ctr_file = tomodir + os.sep + 'inv' + os.sep + 'inv.ctr'
    if os.path.isfile(inv_ctr_file):
        inv_lines = open(inv_ctr_file, 'r').readlines()
        print('inv_lines', inv_lines[-1])
        if inv_lines[-1].startswith('***finished***'):
            needs_inversion = False

    return needs_inversion
python
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/td_run_all_local.py#L101-L130
train
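A sketch of a driver loop in the spirit of td_run_all_local, combining the three checks over a directory tree. The root path is a placeholder:

```python
import os

for dirpath, dirnames, filenames in os.walk('projects'):  # hypothetical root
    # os.walk hands us the subdirectory names, which is exactly what
    # is_tomodir expects.
    if is_tomodir(dirnames):
        print(dirpath,
              'modeling:', check_if_needs_modeling(dirpath),
              'inversion:', check_if_needs_inversion(dirpath))
```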
geophysics-ubonn/crtomo_tools
src/cr_trig_create.py
Mesh.add_boundary
def add_boundary(self, p1, p2, btype):
    """
    Add a boundary line
    """
    index = self.add_line(p1, p2, self.char_lengths['boundary'])
    # self.Boundaries.append((p1_id, p2_id, btype))
    self.BoundaryIndices.append(index)
    self.Boundaries.append((p1, p2, btype))
python
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/cr_trig_create.py#L133-L140
train
geophysics-ubonn/crtomo_tools
src/cr_trig_create.py
Mesh.add_line
def add_line(self, p1, p2, char_length):
    """
    Add a line to the list. Check if the nodes already exist, and add them
    if not.

    Return the line index (1-indexed, starting with 1)
    """
    p1_id = self.get_point_id(p1, char_length)
    p2_id = self.get_point_id(p2, char_length)
    self.Lines.append((p1_id, p2_id))
    return len(self.Lines)
python
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/cr_trig_create.py#L142-L152
train
geophysics-ubonn/crtomo_tools
src/cr_trig_create.py
Mesh.is_in
def is_in(self, search_list, pair):
    """
    If pair is in search_list, return the index. Otherwise return -1
    """
    index = -1
    for nr, i in enumerate(search_list):
        if np.all(i == pair):
            return nr
    return index
python
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/cr_trig_create.py#L154-L162
train
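A quick usage check; since `self` is never used in the body, the unbound method can be exercised directly without constructing a `Mesh`:

```python
import numpy as np

pairs = [np.array([0, 1]), np.array([1, 2]), np.array([2, 3])]

# Found: the first matching index is returned.
print(Mesh.is_in(None, pairs, np.array([1, 2])))  # 1
# Not found: -1 signals a miss.
print(Mesh.is_in(None, pairs, np.array([5, 6])))  # -1
```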
geophysics-ubonn/crtomo_tools
src/cr_trig_create.py
Mesh.read_electrodes
def read_electrodes(self, electrodes):
    """
    Read in electrodes, check if points already exist
    """
    for nr, electrode in enumerate(electrodes):
        index = self.get_point_id(
            electrode, self.char_lengths['electrode'])
        self.Electrodes.append(index)
python
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/cr_trig_create.py#L164-L171
train
geophysics-ubonn/crtomo_tools
src/cr_trig_create.py
Mesh.write_electrodes
def write_electrodes(self, filename):
    """
    Write X Y coordinates of electrodes
    """
    fid = open(filename, 'w')
    for i in self.Electrodes:
        fid.write('{0} {1}\n'.format(self.Points[i][0], self.Points[i][1]))
    fid.close()
python
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/cr_trig_create.py#L196-L203
train
geophysics-ubonn/crtomo_tools
src/cr_trig_create.py
Mesh.write_boundaries
def write_boundaries(self, filename):
    """
    Write boundary lines X1 Y1 X2 Y2 TYPE to file
    """
    fid = open(filename, 'w')
    for i in self.Boundaries:
        print(i)
        # fid.write('{0} {1} {2}\n'.format(i[0], i[1], i[2]))
        fid.write(
            '{0} {1} {2} {3} {4}\n'.format(
                i[0][0], i[0][1], i[1][0], i[1][1], i[2]))
    fid.close()
python
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/cr_trig_create.py#L205-L216
train
geophysics-ubonn/crtomo_tools
src/cr_trig_create.py
Mesh.read_char_lengths
def read_char_lengths(self, filename, electrode_filename):
    """Read characteristic lengths from the given file.

    The file is expected to have either 1 or 4 entries/lines with
    characteristic lengths > 0 (floats). If only one value is encountered,
    it is used for all four entities. If four values are encountered, they
    are assigned, in order, to:

        1) electrode nodes
        2) boundary nodes
        3) nodes from extra lines
        4) nodes from extra nodes

    Note that in case one node belongs to multiple entities, the smallest
    characteristic length will be used.

    If four values are used and the electrode length is negative, then the
    electrode positions will be read in (todo: we open the electrode.dat
    file two times here...) and the minimal distance between all electrodes
    will be multiplied by the absolute value of the imported value, and
    used as the characteristic length:

    .. math::

        l_{electrodes} = min(pdist(electrodes)) * |l_{electrodes}^{from file}|

    The function scipy.spatial.distance.pdist is used to compute the global
    minimal distance between any two electrodes.

    It is advisable to use only values in the range [-1, 0) for the
    automatic char length option.
    """
    if os.path.isfile(filename):
        data = np.atleast_1d(np.loadtxt(filename))
        if data.size == 4:
            characteristic_length = data
            # check sign of first (electrode) length value
            if characteristic_length[0] < 0:
                try:
                    elec_positions = np.loadtxt(electrode_filename)
                except Exception:
                    raise IOError(
                        'There was an error opening the electrode file')
                import scipy.spatial.distance
                distances = scipy.spatial.distance.pdist(elec_positions)
                characteristic_length[0] = min(distances) * np.abs(
                    characteristic_length[0])
                if characteristic_length[0] == 0:
                    raise Exception(
                        'Error computing electrode ' +
                        'distances (got a minimal distance of zero)')
        else:
            characteristic_length = np.ones(4) * data[0]
    else:
        characteristic_length = np.ones(4)

    if np.any(characteristic_length <= 0):
        raise Exception('No negative characteristic lengths allowed ' +
                        '(except for the electrode length)')

    self.char_lengths = {}
    for key, item in zip(('electrode', 'boundary', 'extra_line',
                          'extra_node'),
                         characteristic_length):
        self.char_lengths[key] = item
python
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/cr_trig_create.py#L218-L287
train
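A numeric sketch of the automatic electrode characteristic length for a negative first entry, say l = -0.5; the electrode coordinates are made up:

```python
import numpy as np
from scipy.spatial.distance import pdist

electrodes = np.array([[0.0, 0.0], [1.0, 0.0], [3.0, 0.0]])  # example layout
l_from_file = -0.5

# Global minimal electrode spacing, scaled by |l|.
char_length = min(pdist(electrodes)) * abs(l_from_file)
print(char_length)  # 0.5, since the closest electrode pair is 1.0 apart
```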
geophysics-ubonn/crtomo_tools
src/cr_trig_create.py
Mesh.write_points
def write_points(self, fid):
    """
    Write the grid points to the GMSH-command file.

    Parameters
    ----------
    fid: file object for the command file (.geo)
    """
    for nr, point in enumerate(self.Points):
        fid.write(
            'Point({0}) = {{{1}, {2}, 0, {3}}};\n'.format(
                nr + 1, point[0], point[1], self.Charlengths[nr]))
python
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/cr_trig_create.py#L289-L301
train
zalando-stups/lizzy-client
lizzy_client/lizzy.py
Lizzy.get_output
def get_output(cls, response: requests.Response) -> str:
    """
    Extracts the senza cli output from the response
    """
    output = response.headers['X-Lizzy-Output']  # type: str
    output = output.replace('\\n', '\n')  # unescape new lines
    lines = ('[AGENT] {}'.format(line) for line in output.splitlines())
    return '\n'.join(lines)
python
0af9733ca5a25ebd0a9dc1453f2a7592efcee56a
https://github.com/zalando-stups/lizzy-client/blob/0af9733ca5a25ebd0a9dc1453f2a7592efcee56a/lizzy_client/lizzy.py#L25-L32
train
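A standalone sketch of the unescape-and-prefix transformation, with a made-up header value:

```python
# The header arrives with escaped newlines; after unescaping, every line is
# prefixed so agent output is distinguishable from client output.
raw = 'Generating stack...\\nStack created'  # hypothetical header value
unescaped = raw.replace('\\n', '\n')
print('\n'.join('[AGENT] {}'.format(line) for line in unescaped.splitlines()))
# [AGENT] Generating stack...
# [AGENT] Stack created
```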
zalando-stups/lizzy-client
lizzy_client/lizzy.py
Lizzy.new_stack
def new_stack(self, keep_stacks: int, new_traffic: int,
              senza_yaml: dict, stack_version: str,
              disable_rollback: bool, parameters: List[str],
              region: Optional[str],
              dry_run: bool,
              tags: List[str]) -> (Dict[str, str], str):  # TODO put arguments in a more logical order
    """
    Requests a new stack.
    """
    header = make_header(self.access_token)
    data = {'senza_yaml': yaml.dump(senza_yaml),
            'stack_version': stack_version,
            'disable_rollback': disable_rollback,
            'dry_run': dry_run,
            'keep_stacks': keep_stacks,
            'new_traffic': new_traffic,
            'parameters': parameters,
            'tags': tags}
    if region:
        data['region'] = region
    request = self.stacks_url.post(json=data, headers=header, verify=False)
    request.raise_for_status()
    return request.json(), self.get_output(request)
python
0af9733ca5a25ebd0a9dc1453f2a7592efcee56a
https://github.com/zalando-stups/lizzy-client/blob/0af9733ca5a25ebd0a9dc1453f2a7592efcee56a/lizzy_client/lizzy.py#L76-L103
train
geophysics-ubonn/crtomo_tools
lib/crtomo/analytical_solution.py
pot_ana
def pot_ana(r, rho):
    """Return the analytical potential in distance r over a homogeneous
    half-space
    """
    I = 1.0
    sigma = 1.0 / rho
    phi = np.divide(I, (2.0 * np.pi * sigma * r))
    return phi
python
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/analytical_solution.py#L12-L19
train
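A quick numeric check: with a unit current I = 1 A, the expression reduces to phi = rho / (2 * pi * r), so both forms below print identical values. The resistivity is an example value:

```python
import numpy as np

rho = 100.0  # Ohm m, example value
r = np.array([1.0, 2.0, 10.0])

print(pot_ana(r, rho))
print(rho / (2.0 * np.pi * r))  # same numbers: phi = rho / (2 pi r) for I = 1
```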
geophysics-ubonn/crtomo_tools
lib/crtomo/analytical_solution.py
compute_potentials_analytical_hs
def compute_potentials_analytical_hs(grid, configs_raw, rho):
    """Compute the potential superpositions of each current dipole in the
    configurations, using the provided resistivity

    Parameters
    ----------
    grid: crt_grid
        object with loaded FE grid. Used for the electrode positions
    configs_raw: numpy.ndarray
        Nx4 array containing N four-point spreads
    rho: float
        resistivity of half-space

    Returns
    -------
    potentials: list
        List containing N arrays, each of size M (nr of grid nodes)
    """
    potentials = []
    nodes_sorted = grid.nodes['sorted']
    nodes_raw = grid.nodes['sorted']
    for config in configs_raw:
        print('potential configs', config)
        # determine distance of all nodes to both electrodes
        e1_node = grid.get_electrode_node(config[0])
        print('e1_node', e1_node)
        electrode1 = nodes_sorted[e1_node][1:3]
        # electrode1 = nodes_sorted[config[0]][1:3]
        r1 = np.sqrt(
            (nodes_raw[:, 1] - electrode1[0]) ** 2 +
            (nodes_raw[:, 2] - electrode1[1]) ** 2
        )
        # electrode2 = nodes_sorted[config[1]][1:3]
        e2_node = grid.get_electrode_node(config[1])
        print('e2_node', e2_node)
        electrode2 = nodes_sorted[e2_node][1:3]
        r2 = np.sqrt(
            (nodes_raw[:, 1] - electrode2[0]) ** 2 +
            (nodes_raw[:, 2] - electrode2[1]) ** 2
        )
        pot1 = pot_ana(r1, rho)
        pot2 = - pot_ana(r2, rho)
        pot12 = pot1 + pot2
        potentials.append(pot12)
    return potentials
python
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/analytical_solution.py#L22-L68
train
geophysics-ubonn/crtomo_tools
lib/crtomo/analytical_solution.py
compute_voltages
def compute_voltages(grid, configs_raw, potentials_raw):
    """Given a list of potential distributions and corresponding four-point
    spreads, compute the voltages

    Parameters
    ----------
    grid: crt_grid object
        the grid is used to infer electrode positions
    configs_raw: Nx4 array
        containing the measurement configs (1-indexed)
    potentials_raw: list
        with N entries corresponding to each measurement, containing the
        node potentials of each injection dipole.
    """
    # we operate on 0-indexed arrays, config holds 1-indexed values
    # configs = configs_raw - 1
    voltages = []
    for config, potentials in zip(configs_raw, potentials_raw):
        print('config', config)
        e3_node = grid.get_electrode_node(config[2])
        e4_node = grid.get_electrode_node(config[3])
        print(e3_node, e4_node)
        print('pot1', potentials[e3_node])
        print('pot2', potentials[e4_node])
        voltage = potentials[e3_node] - potentials[e4_node]
        voltages.append(voltage)
    return voltages
python
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/analytical_solution.py#L71-L97
train
pgxcentre/geneparse
geneparse/extract/__main__.py
vcf_writer
def vcf_writer(parser, keep, extract, args):
    """Writes the data in VCF format."""
    # The output
    output = sys.stdout if args.output == "-" else open(args.output, "w")

    try:
        # Getting the samples
        samples = np.array(parser.get_samples(), dtype=str)
        k = _get_sample_select(samples=samples, keep=keep)

        # Writing the VCF header
        output.write(_VCF_HEADER.format(
            date=datetime.today().strftime("%Y%m%d"),
            version=__version__,
            samples="\t".join(samples[k]),
        ))

        # The data generator
        generator = _get_generator(parser=parser, extract=extract, keep=k,
                                   check_maf=args.maf)

        # The number of markers extracted
        nb_extracted = 0

        for data in generator:
            # Keeping only the required genotypes
            genotypes = data.genotypes

            # Computing the alternative allele frequency
            af = np.nanmean(genotypes) / 2

            print(data.variant.chrom, data.variant.pos, data.variant.name,
                  data.reference, data.coded, ".", "PASS",
                  "AF={}".format(af), "GT:DS", sep="\t", end="",
                  file=output)

            for geno in genotypes:
                if np.isnan(geno):
                    output.write("\t./.:.")
                else:
                    rounded_geno = int(round(geno, 0))
                    output.write("\t{}:{}".format(
                        _VCF_GT_MAP[rounded_geno], geno,
                    ))

            output.write("\n")

            nb_extracted += 1

        if nb_extracted == 0:
            logger.warning("No markers matched the extract list")

    finally:
        output.close()
python
f698f9708af4c7962d384a70a5a14006b1cb7108
https://github.com/pgxcentre/geneparse/blob/f698f9708af4c7962d384a70a5a14006b1cb7108/geneparse/extract/__main__.py#L133-L184
train
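The vcf_writer record above prints each sample as a GT:DS pair: missing dosages become './.:.', otherwise the dosage is rounded and looked up in a _VCF_GT_MAP table that this record does not show. A minimal standalone sketch of that encoding step; the contents of VCF_GT_MAP below are an assumption, since only its use as a rounded-dosage lookup appears above.

import math

VCF_GT_MAP = {0: "0/0", 1: "0/1", 2: "1/1"}  # assumed contents of the record's _VCF_GT_MAP


def format_sample_field(dosage):
    # Missing genotypes become './.:.', exactly as in the record above.
    if dosage is None or (isinstance(dosage, float) and math.isnan(dosage)):
        return "./.:."
    hard_call = int(round(dosage, 0))  # same rounding rule as the record
    return "{}:{}".format(VCF_GT_MAP[hard_call], dosage)


assert format_sample_field(float("nan")) == "./.:."
assert format_sample_field(0.9) == "0/1:0.9"
assert format_sample_field(1.6) == "1/1:1.6"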
pgxcentre/geneparse
geneparse/extract/__main__.py
csv_writer
def csv_writer(parser, keep, extract, args): """Writes the data in CSV format.""" # The output output = sys.stdout if args.output == "-" else open(args.output, "w") try: # Getting the samples samples = np.array(parser.get_samples(), dtype=str) k = _get_sample_select(samples=samples, keep=keep) # Writing the CSV header print("sample_id", "variant_id", "chromosome", "position", "reference", "coded", "dosage", "hard_call", sep=",", file=output) # The data generator generator = _get_generator(parser=parser, extract=extract, keep=k, check_maf=args.maf) # The number of markers extracted nb_extracted = 0 for data in generator: # Keeping only the required genotypes genotypes = data.genotypes # The hard call mapping hard_call_mapping = { 0: "{ref}/{ref}".format(ref=data.reference), 1: "{ref}/{alt}".format(ref=data.reference, alt=data.coded), 2: "{alt}/{alt}".format(alt=data.coded), } for sample, geno in zip(samples[k], genotypes): # Is the genotype missing is_missing = np.isnan(geno) # Hard coding (NaN values are empty string) hard_coded = None if is_missing: geno = "" hard_coded = "" else: hard_coded = hard_call_mapping[int(round(geno, 0))] print(sample, data.variant.name, data.variant.chrom, data.variant.pos, data.reference, data.coded, geno, hard_coded, sep=",", file=output) nb_extracted += 1 if nb_extracted == 0: logger.warning("No markers matched the extract list") finally: output.close()
python
def csv_writer(parser, keep, extract, args): """Writes the data in CSV format.""" # The output output = sys.stdout if args.output == "-" else open(args.output, "w") try: # Getting the samples samples = np.array(parser.get_samples(), dtype=str) k = _get_sample_select(samples=samples, keep=keep) # Writing the CSV header print("sample_id", "variant_id", "chromosome", "position", "reference", "coded", "dosage", "hard_call", sep=",", file=output) # The data generator generator = _get_generator(parser=parser, extract=extract, keep=k, check_maf=args.maf) # The number of markers extracted nb_extracted = 0 for data in generator: # Keeping only the required genotypes genotypes = data.genotypes # The hard call mapping hard_call_mapping = { 0: "{ref}/{ref}".format(ref=data.reference), 1: "{ref}/{alt}".format(ref=data.reference, alt=data.coded), 2: "{alt}/{alt}".format(alt=data.coded), } for sample, geno in zip(samples[k], genotypes): # Is the genotype missing is_missing = np.isnan(geno) # Hard coding (NaN values are empty string) hard_coded = None if is_missing: geno = "" hard_coded = "" else: hard_coded = hard_call_mapping[int(round(geno, 0))] print(sample, data.variant.name, data.variant.chrom, data.variant.pos, data.reference, data.coded, geno, hard_coded, sep=",", file=output) nb_extracted += 1 if nb_extracted == 0: logger.warning("No markers matched the extract list") finally: output.close()
[ "def", "csv_writer", "(", "parser", ",", "keep", ",", "extract", ",", "args", ")", ":", "# The output", "output", "=", "sys", ".", "stdout", "if", "args", ".", "output", "==", "\"-\"", "else", "open", "(", "args", ".", "output", ",", "\"w\"", ")", "try", ":", "# Getting the samples", "samples", "=", "np", ".", "array", "(", "parser", ".", "get_samples", "(", ")", ",", "dtype", "=", "str", ")", "k", "=", "_get_sample_select", "(", "samples", "=", "samples", ",", "keep", "=", "keep", ")", "# Writing the CSV header", "print", "(", "\"sample_id\"", ",", "\"variant_id\"", ",", "\"chromosome\"", ",", "\"position\"", ",", "\"reference\"", ",", "\"coded\"", ",", "\"dosage\"", ",", "\"hard_call\"", ",", "sep", "=", "\",\"", ",", "file", "=", "output", ")", "# The data generator", "generator", "=", "_get_generator", "(", "parser", "=", "parser", ",", "extract", "=", "extract", ",", "keep", "=", "k", ",", "check_maf", "=", "args", ".", "maf", ")", "# The number of markers extracted", "nb_extracted", "=", "0", "for", "data", "in", "generator", ":", "# Keeping only the required genotypes", "genotypes", "=", "data", ".", "genotypes", "# The hard call mapping", "hard_call_mapping", "=", "{", "0", ":", "\"{ref}/{ref}\"", ".", "format", "(", "ref", "=", "data", ".", "reference", ")", ",", "1", ":", "\"{ref}/{alt}\"", ".", "format", "(", "ref", "=", "data", ".", "reference", ",", "alt", "=", "data", ".", "coded", ")", ",", "2", ":", "\"{alt}/{alt}\"", ".", "format", "(", "alt", "=", "data", ".", "coded", ")", ",", "}", "for", "sample", ",", "geno", "in", "zip", "(", "samples", "[", "k", "]", ",", "genotypes", ")", ":", "# Is the genotype missing", "is_missing", "=", "np", ".", "isnan", "(", "geno", ")", "# Hard coding (NaN values are empty string)", "hard_coded", "=", "None", "if", "is_missing", ":", "geno", "=", "\"\"", "hard_coded", "=", "\"\"", "else", ":", "hard_coded", "=", "hard_call_mapping", "[", "int", "(", "round", "(", "geno", ",", "0", ")", ")", "]", "print", "(", "sample", ",", "data", ".", "variant", ".", "name", ",", "data", ".", "variant", ".", "chrom", ",", "data", ".", "variant", ".", "pos", ",", "data", ".", "reference", ",", "data", ".", "coded", ",", "geno", ",", "hard_coded", ",", "sep", "=", "\",\"", ",", "file", "=", "output", ")", "nb_extracted", "+=", "1", "if", "nb_extracted", "==", "0", ":", "logger", ".", "warning", "(", "\"No markers matched the extract list\"", ")", "finally", ":", "output", ".", "close", "(", ")" ]
Writes the data in CSV format.
[ "Writes", "the", "data", "in", "CSV", "format", "." ]
f698f9708af4c7962d384a70a5a14006b1cb7108
https://github.com/pgxcentre/geneparse/blob/f698f9708af4c7962d384a70a5a14006b1cb7108/geneparse/extract/__main__.py#L187-L241
train
pgxcentre/geneparse
geneparse/extract/__main__.py
_get_generator
def _get_generator(parser, extract, keep, check_maf):
    """Generates the data (with extract markers and keep, if required)."""
    if extract is not None:
        parser = Extractor(parser, names=extract)

    for data in parser.iter_genotypes():
        data.genotypes = data.genotypes[keep]

        # Checking the MAF, if required
        if check_maf:
            data.code_minor()

        yield data
python
def _get_generator(parser, extract, keep, check_maf):
    """Generates the data (with extract markers and keep, if required)."""
    if extract is not None:
        parser = Extractor(parser, names=extract)

    for data in parser.iter_genotypes():
        data.genotypes = data.genotypes[keep]

        # Checking the MAF, if required
        if check_maf:
            data.code_minor()

        yield data
[ "def", "_get_generator", "(", "parser", ",", "extract", ",", "keep", ",", "check_maf", ")", ":", "if", "extract", "is", "not", "None", ":", "parser", "=", "Extractor", "(", "parser", ",", "names", "=", "extract", ")", "for", "data", "in", "parser", ".", "iter_genotypes", "(", ")", ":", "data", ".", "genotypes", "=", "data", ".", "genotypes", "[", "keep", "]", "# Checking the MAF, if required", "if", "check_maf", ":", "data", ".", "code_minor", "(", ")", "yield", "data" ]
Generates the data (with extract markers and keep, if required).
[ "Generates", "the", "data", "(", "with", "extract", "markers", "and", "keep", "if", "required", "." ]
f698f9708af4c7962d384a70a5a14006b1cb7108
https://github.com/pgxcentre/geneparse/blob/f698f9708af4c7962d384a70a5a14006b1cb7108/geneparse/extract/__main__.py#L296-L308
train
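All three geneparse records above share one pattern: a generator lazily yields one variant at a time while a boolean mask restricts the genotypes to the kept samples. A self-contained sketch of that pattern; the names and the 4-sample mask are illustrative, not from the package.

import numpy as np


def subset_generator(records, keep):
    # Lazily yield each genotype vector restricted to the kept samples.
    for genotypes in records:
        yield genotypes[keep]


records = [np.array([0.0, 1.0, 2.0, np.nan]), np.array([1.0, 1.0, 0.0, 2.0])]
keep = np.array([True, False, True, True])  # drop the second sample
for g in subset_generator(records, keep):
    print(g)  # arrays of length 3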
hotzenklotz/pybeerxml
pybeerxml/hop.py
Hop.bitterness
def bitterness(self, ibu_method, early_og, batch_size): "Calculate bitterness based on chosen method" if ibu_method == "tinseth": bitterness = 1.65 * math.pow(0.000125, early_og - 1.0) * ((1 - math.pow(math.e, -0.04 * self.time)) / 4.15) * ((self.alpha / 100.0 * self.amount * 1000000) / batch_size) * self.utilization_factor() elif ibu_method == "rager": utilization = 18.11 + 13.86 * math.tanh((self.time - 31.32) / 18.27) adjustment = max(0, (early_og - 1.050) / 0.2) bitterness = self.amount * 100 * utilization * self.utilization_factor() * self.alpha / (batch_size * (1 + adjustment)) else: raise Exception("Unknown IBU method %s!" % ibu_method) return bitterness
python
def bitterness(self, ibu_method, early_og, batch_size): "Calculate bitterness based on chosen method" if ibu_method == "tinseth": bitterness = 1.65 * math.pow(0.000125, early_og - 1.0) * ((1 - math.pow(math.e, -0.04 * self.time)) / 4.15) * ((self.alpha / 100.0 * self.amount * 1000000) / batch_size) * self.utilization_factor() elif ibu_method == "rager": utilization = 18.11 + 13.86 * math.tanh((self.time - 31.32) / 18.27) adjustment = max(0, (early_og - 1.050) / 0.2) bitterness = self.amount * 100 * utilization * self.utilization_factor() * self.alpha / (batch_size * (1 + adjustment)) else: raise Exception("Unknown IBU method %s!" % ibu_method) return bitterness
[ "def", "bitterness", "(", "self", ",", "ibu_method", ",", "early_og", ",", "batch_size", ")", ":", "if", "ibu_method", "==", "\"tinseth\"", ":", "bitterness", "=", "1.65", "*", "math", ".", "pow", "(", "0.000125", ",", "early_og", "-", "1.0", ")", "*", "(", "(", "1", "-", "math", ".", "pow", "(", "math", ".", "e", ",", "-", "0.04", "*", "self", ".", "time", ")", ")", "/", "4.15", ")", "*", "(", "(", "self", ".", "alpha", "/", "100.0", "*", "self", ".", "amount", "*", "1000000", ")", "/", "batch_size", ")", "*", "self", ".", "utilization_factor", "(", ")", "elif", "ibu_method", "==", "\"rager\"", ":", "utilization", "=", "18.11", "+", "13.86", "*", "math", ".", "tanh", "(", "(", "self", ".", "time", "-", "31.32", ")", "/", "18.27", ")", "adjustment", "=", "max", "(", "0", ",", "(", "early_og", "-", "1.050", ")", "/", "0.2", ")", "bitterness", "=", "self", ".", "amount", "*", "100", "*", "utilization", "*", "self", ".", "utilization_factor", "(", ")", "*", "self", ".", "alpha", "/", "(", "batch_size", "*", "(", "1", "+", "adjustment", ")", ")", "else", ":", "raise", "Exception", "(", "\"Unknown IBU method %s!\"", "%", "ibu_method", ")", "return", "bitterness" ]
Calculate bitterness based on chosen method
[ "Calculate", "bitterness", "based", "on", "chosen", "method" ]
e9cf8d6090b1e01e5bbb101e255792b134affbe0
https://github.com/hotzenklotz/pybeerxml/blob/e9cf8d6090b1e01e5bbb101e255792b134affbe0/pybeerxml/hop.py#L18-L32
train
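The tinseth branch above is the product of three factors: a gravity-dependent "bigness" term, a boil-time term, and the alpha-acid concentration in mg/l. A standalone sketch with the same arithmetic, assuming amount in kilograms, batch_size in liters, time in minutes, and a utilization factor of 1.0 (the record's utilization_factor() is not shown here).

import math


def tinseth_ibu(alpha, amount, time, early_og, batch_size, utilization_factor=1.0):
    bigness = 1.65 * math.pow(0.000125, early_og - 1.0)
    boil_time_factor = (1 - math.exp(-0.04 * time)) / 4.15
    mg_per_l = (alpha / 100.0 * amount * 1000000) / batch_size
    return bigness * boil_time_factor * mg_per_l * utilization_factor


# 28 g (0.028 kg) of 12% alpha hops, 60 min boil, 1.050 pre-boil OG, 20 l batch:
print(round(tinseth_ibu(12.0, 0.028, 60, 1.050, 20), 1))  # roughly 39 IBU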
Alidron/spyrk
spyrk/spark_cloud.py
SparkCloud._check_error
def _check_error(response): """Raises an exception if the Spark Cloud returned an error.""" if (not response.ok) or (response.status_code != 200): raise Exception( response.json()['error'] + ': ' + response.json()['error_description'] )
python
def _check_error(response): """Raises an exception if the Spark Cloud returned an error.""" if (not response.ok) or (response.status_code != 200): raise Exception( response.json()['error'] + ': ' + response.json()['error_description'] )
[ "def", "_check_error", "(", "response", ")", ":", "if", "(", "not", "response", ".", "ok", ")", "or", "(", "response", ".", "status_code", "!=", "200", ")", ":", "raise", "Exception", "(", "response", ".", "json", "(", ")", "[", "'error'", "]", "+", "': '", "+", "response", ".", "json", "(", ")", "[", "'error_description'", "]", ")" ]
Raises an exception if the Spark Cloud returned an error.
[ "Raises", "an", "exception", "if", "the", "Spark", "Cloud", "returned", "an", "error", "." ]
fc1d7c0892a351cf742bf07e95b852fd6bf7c108
https://github.com/Alidron/spyrk/blob/fc1d7c0892a351cf742bf07e95b852fd6bf7c108/spyrk/spark_cloud.py#L76-L82
train
Alidron/spyrk
spyrk/spark_cloud.py
SparkCloud._login
def _login(self, username, password):
        """Logs in to the Spark Cloud and returns an access token."""
        data = {
            'username': username,
            'password': password,
            'grant_type': 'password'
        }
        r = self.spark_api.oauth.token.POST(auth=('spark', 'spark'), data=data, timeout=self.timeout)
        self._check_error(r)
        return r.json()['access_token']
python
def _login(self, username, password):
        """Logs in to the Spark Cloud and returns an access token."""
        data = {
            'username': username,
            'password': password,
            'grant_type': 'password'
        }
        r = self.spark_api.oauth.token.POST(auth=('spark', 'spark'), data=data, timeout=self.timeout)
        self._check_error(r)
        return r.json()['access_token']
[ "def", "_login", "(", "self", ",", "username", ",", "password", ")", ":", "data", "=", "{", "'username'", ":", "username", ",", "'password'", ":", "password", ",", "'grant_type'", ":", "'password'", "}", "r", "=", "self", ".", "spark_api", ".", "oauth", ".", "token", ".", "POST", "(", "auth", "=", "(", "'spark'", ",", "'spark'", ")", ",", "data", "=", "data", ",", "timeout", "=", "self", ".", "timeout", ")", "self", ".", "_check_error", "(", "r", ")", "return", "r", ".", "json", "(", ")", "[", "'access_token'", "]" ]
Logs in to the Spark Cloud and returns an access token.
[ "Proceed", "to", "login", "to", "the", "Spark", "Cloud", "and", "returns", "an", "access", "token", "." ]
fc1d7c0892a351cf742bf07e95b852fd6bf7c108
https://github.com/Alidron/spyrk/blob/fc1d7c0892a351cf742bf07e95b852fd6bf7c108/spyrk/spark_cloud.py#L84-L93
train
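The _login record above goes through a hammock-style spark_api client object. For readers without that client, here is the same password-grant call written with plain requests; the endpoint URL is a placeholder assumption, not taken from the record.

import requests


def login(username, password, timeout=30):
    r = requests.post(
        "https://api.example.com/oauth/token",  # placeholder endpoint, not from the record
        auth=("spark", "spark"),                # same basic-auth pair as above
        data={"username": username, "password": password, "grant_type": "password"},
        timeout=timeout,
    )
    r.raise_for_status()  # stands in for the record's _check_error()
    return r.json()["access_token"]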
Alidron/spyrk
spyrk/spark_cloud.py
SparkCloud.devices
def devices(self):
        """Create a dictionary of devices known to the user account."""
        params = {'access_token': self.access_token}
        r = self.spark_api.GET(params=params, timeout=self.timeout)
        self._check_error(r)
        json_list = r.json()

        devices_dict = {}
        if json_list:
            # it is possible the keys in json responses vary from one device to another: compute the set of all keys
            allKeys = {'functions', 'variables', 'api', 'requires_deep_update', 'status'}  # added by device_info
            for device_json in json_list:
                allKeys.update(device_json.keys())

            Device = _BaseDevice.make_device_class(self, allKeys, timeout = self.timeout)

            for d in json_list:
                if d["connected"]:
                    info = self._get_device_info(d['id'])
                    d['functions'] = info.get('functions')
                    d['variables'] = info.get('variables')
                    d['api'] = self.spark_api(d['id'])
                    d['requires_deep_update'] = d.get('requires_deep_update', False)
                    d['status'] = info.get('status')

                # ensure the set of all keys is present in the dictionary (Device constructor requires all keys present)
                [d.setdefault(key, None) for key in allKeys]

                devices_dict[d['name']] = Device(**d)

        return devices_dict
python
def devices(self):
        """Create a dictionary of devices known to the user account."""
        params = {'access_token': self.access_token}
        r = self.spark_api.GET(params=params, timeout=self.timeout)
        self._check_error(r)
        json_list = r.json()

        devices_dict = {}
        if json_list:
            # it is possible the keys in json responses vary from one device to another: compute the set of all keys
            allKeys = {'functions', 'variables', 'api', 'requires_deep_update', 'status'}  # added by device_info
            for device_json in json_list:
                allKeys.update(device_json.keys())

            Device = _BaseDevice.make_device_class(self, allKeys, timeout = self.timeout)

            for d in json_list:
                if d["connected"]:
                    info = self._get_device_info(d['id'])
                    d['functions'] = info.get('functions')
                    d['variables'] = info.get('variables')
                    d['api'] = self.spark_api(d['id'])
                    d['requires_deep_update'] = d.get('requires_deep_update', False)
                    d['status'] = info.get('status')

                # ensure the set of all keys is present in the dictionary (Device constructor requires all keys present)
                [d.setdefault(key, None) for key in allKeys]

                devices_dict[d['name']] = Device(**d)

        return devices_dict
[ "def", "devices", "(", "self", ")", ":", "params", "=", "{", "'access_token'", ":", "self", ".", "access_token", "}", "r", "=", "self", ".", "spark_api", ".", "GET", "(", "params", "=", "params", ",", "timeout", "=", "self", ".", "timeout", ")", "self", ".", "_check_error", "(", "r", ")", "json_list", "=", "r", ".", "json", "(", ")", "devices_dict", "=", "{", "}", "if", "json_list", ":", "# it is possible the keys in json responses varies from one device to another: compute the set of all keys", "allKeys", "=", "{", "'functions'", ",", "'variables'", ",", "'api'", ",", "'requires_deep_update'", ",", "'status'", "}", "# added by device_info", "for", "device_json", "in", "json_list", ":", "allKeys", ".", "update", "(", "device_json", ".", "keys", "(", ")", ")", "Device", "=", "_BaseDevice", ".", "make_device_class", "(", "self", ",", "allKeys", ",", "timeout", "=", "self", ".", "timeout", ")", "for", "d", "in", "json_list", ":", "if", "d", "[", "\"connected\"", "]", ":", "info", "=", "self", ".", "_get_device_info", "(", "d", "[", "'id'", "]", ")", "d", "[", "'functions'", "]", "=", "info", ".", "get", "(", "'functions'", ")", "d", "[", "'variables'", "]", "=", "info", ".", "get", "(", "'variables'", ")", "d", "[", "'api'", "]", "=", "self", ".", "spark_api", "(", "d", "[", "'id'", "]", ")", "d", "[", "'requires_deep_update'", "]", "=", "d", ".", "get", "(", "'requires_deep_update'", ",", "False", ")", "d", "[", "'status'", "]", "=", "info", ".", "get", "(", "'status'", ")", "# ensure the set of all keys is present in the dictionnary (Device constructor requires all keys present)", "[", "d", ".", "setdefault", "(", "key", ",", "None", ")", "for", "key", "in", "allKeys", "]", "devices_dict", "[", "d", "[", "'name'", "]", "]", "=", "Device", "(", "*", "*", "d", ")", "return", "devices_dict" ]
Create a dictionary of devices known to the user account.
[ "Create", "a", "dictionary", "of", "devices", "known", "to", "the", "user", "account", "." ]
fc1d7c0892a351cf742bf07e95b852fd6bf7c108
https://github.com/Alidron/spyrk/blob/fc1d7c0892a351cf742bf07e95b852fd6bf7c108/spyrk/spark_cloud.py#L96-L125
train
Alidron/spyrk
spyrk/spark_cloud.py
SparkCloud._get_device_info
def _get_device_info(self, device_id): """Queries the Spark Cloud for detailed information about a device.""" params = {'access_token': self.access_token} r = self.spark_api(device_id).GET(params=params, timeout=30) self._check_error(r) return r.json()
python
def _get_device_info(self, device_id): """Queries the Spark Cloud for detailed information about a device.""" params = {'access_token': self.access_token} r = self.spark_api(device_id).GET(params=params, timeout=30) self._check_error(r) return r.json()
[ "def", "_get_device_info", "(", "self", ",", "device_id", ")", ":", "params", "=", "{", "'access_token'", ":", "self", ".", "access_token", "}", "r", "=", "self", ".", "spark_api", "(", "device_id", ")", ".", "GET", "(", "params", "=", "params", ",", "timeout", "=", "30", ")", "self", ".", "_check_error", "(", "r", ")", "return", "r", ".", "json", "(", ")" ]
Queries the Spark Cloud for detailed information about a device.
[ "Queries", "the", "Spark", "Cloud", "for", "detailed", "information", "about", "a", "device", "." ]
fc1d7c0892a351cf742bf07e95b852fd6bf7c108
https://github.com/Alidron/spyrk/blob/fc1d7c0892a351cf742bf07e95b852fd6bf7c108/spyrk/spark_cloud.py#L127-L132
train
Alidron/spyrk
spyrk/spark_cloud.py
_BaseDevice.make_device_class
def make_device_class(spark_cloud, entries, timeout=30): """Returns a dynamic Device class based on what a GET device list from the Spark Cloud returns. spark_cloud parameter should be the caller instance of SparkCloud. entries parameter should be the list of fields the Spark Cloud API is returning. """ attrs = list( set( list(entries) + [ 'requires_deep_update', 'functions', 'variables', 'api', 'status' ] ) ) return type( 'Device', (_BaseDevice, namedtuple('Device', attrs)), {'__slots__': (), 'spark_cloud': spark_cloud, 'timeout' : timeout} )
python
def make_device_class(spark_cloud, entries, timeout=30): """Returns a dynamic Device class based on what a GET device list from the Spark Cloud returns. spark_cloud parameter should be the caller instance of SparkCloud. entries parameter should be the list of fields the Spark Cloud API is returning. """ attrs = list( set( list(entries) + [ 'requires_deep_update', 'functions', 'variables', 'api', 'status' ] ) ) return type( 'Device', (_BaseDevice, namedtuple('Device', attrs)), {'__slots__': (), 'spark_cloud': spark_cloud, 'timeout' : timeout} )
[ "def", "make_device_class", "(", "spark_cloud", ",", "entries", ",", "timeout", "=", "30", ")", ":", "attrs", "=", "list", "(", "set", "(", "list", "(", "entries", ")", "+", "[", "'requires_deep_update'", ",", "'functions'", ",", "'variables'", ",", "'api'", ",", "'status'", "]", ")", ")", "return", "type", "(", "'Device'", ",", "(", "_BaseDevice", ",", "namedtuple", "(", "'Device'", ",", "attrs", ")", ")", ",", "{", "'__slots__'", ":", "(", ")", ",", "'spark_cloud'", ":", "spark_cloud", ",", "'timeout'", ":", "timeout", "}", ")" ]
Returns a dynamic Device class based on what a GET device list from the Spark Cloud returns. spark_cloud parameter should be the caller instance of SparkCloud. entries parameter should be the list of fields the Spark Cloud API is returning.
[ "Returns", "a", "dynamic", "Device", "class", "based", "on", "what", "a", "GET", "device", "list", "from", "the", "Spark", "Cloud", "returns", ".", "spark_cloud", "parameter", "should", "be", "the", "caller", "instance", "of", "SparkCloud", ".", "entries", "parameter", "should", "be", "the", "list", "of", "fields", "the", "Spark", "Cloud", "API", "is", "returning", "." ]
fc1d7c0892a351cf742bf07e95b852fd6bf7c108
https://github.com/Alidron/spyrk/blob/fc1d7c0892a351cf742bf07e95b852fd6bf7c108/spyrk/spark_cloud.py#L156-L177
train
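make_device_class above builds a class at runtime with type(), mixing a behavior base into a namedtuple whose field names are only known after the API response arrives. A self-contained sketch of the same trick; the field names and the describe() method are illustrative.

from collections import namedtuple


class _Base:
    __slots__ = ()

    def describe(self):
        return "{} is {}".format(self.name, self.status)


def make_class(fields):
    # type(name, bases, namespace) - the same three-argument call the record uses.
    return type("Device", (_Base, namedtuple("Device", sorted(fields))), {"__slots__": ()})


Device = make_class({"name", "status"})
d = Device(name="core-1", status="online")
print(d.describe())  # -> core-1 is online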
zalando-stups/lizzy-client
lizzy_client/metrics.py
report_metric
def report_metric(metric_name: str, value: int, fail_silently: bool=True): """ Tries to report a metric, ignoring all errors """ if metricz is None: return configuration = Configuration() try: lizzy_domain = urlparse(configuration.lizzy_url).netloc lizzy_name, _ = lizzy_domain.split('.', 1) except Exception: lizzy_name = 'UNKNOWN' tags = { 'version': VERSION, 'lizzy': lizzy_name } # noinspection PyBroadException try: writer = metricz.MetricWriter(url=configuration.token_url, directory=configuration.credentials_dir, fail_silently=False) writer.write_metric(metric_name, value, tags, timeout=10) except Exception: if not fail_silently: raise
python
def report_metric(metric_name: str, value: int, fail_silently: bool=True): """ Tries to report a metric, ignoring all errors """ if metricz is None: return configuration = Configuration() try: lizzy_domain = urlparse(configuration.lizzy_url).netloc lizzy_name, _ = lizzy_domain.split('.', 1) except Exception: lizzy_name = 'UNKNOWN' tags = { 'version': VERSION, 'lizzy': lizzy_name } # noinspection PyBroadException try: writer = metricz.MetricWriter(url=configuration.token_url, directory=configuration.credentials_dir, fail_silently=False) writer.write_metric(metric_name, value, tags, timeout=10) except Exception: if not fail_silently: raise
[ "def", "report_metric", "(", "metric_name", ":", "str", ",", "value", ":", "int", ",", "fail_silently", ":", "bool", "=", "True", ")", ":", "if", "metricz", "is", "None", ":", "return", "configuration", "=", "Configuration", "(", ")", "try", ":", "lizzy_domain", "=", "urlparse", "(", "configuration", ".", "lizzy_url", ")", ".", "netloc", "lizzy_name", ",", "_", "=", "lizzy_domain", ".", "split", "(", "'.'", ",", "1", ")", "except", "Exception", ":", "lizzy_name", "=", "'UNKNOWN'", "tags", "=", "{", "'version'", ":", "VERSION", ",", "'lizzy'", ":", "lizzy_name", "}", "# noinspection PyBroadException", "try", ":", "writer", "=", "metricz", ".", "MetricWriter", "(", "url", "=", "configuration", ".", "token_url", ",", "directory", "=", "configuration", ".", "credentials_dir", ",", "fail_silently", "=", "False", ")", "writer", ".", "write_metric", "(", "metric_name", ",", "value", ",", "tags", ",", "timeout", "=", "10", ")", "except", "Exception", ":", "if", "not", "fail_silently", ":", "raise" ]
Tries to report a metric, ignoring all errors
[ "Tries", "to", "report", "a", "metric", "ignoring", "all", "errors" ]
0af9733ca5a25ebd0a9dc1453f2a7592efcee56a
https://github.com/zalando-stups/lizzy-client/blob/0af9733ca5a25ebd0a9dc1453f2a7592efcee56a/lizzy_client/metrics.py#L19-L47
train
KnightConan/sspdatatables
src/sspdatatables/templatetags/form_field.py
get_form_bound_field
def get_form_bound_field(form, field_name):
    """
    Gets the bound field from the form for the given field name

    :param form: Django Form: django form instance
    :param field_name: str: name of the field in form instance
    :return: Django Form bound field
    """
    field = form.fields[field_name]
    field = field.get_bound_field(form, field_name)
    return field
python
def get_form_bound_field(form, field_name):
    """
    Gets the bound field from the form for the given field name

    :param form: Django Form: django form instance
    :param field_name: str: name of the field in form instance
    :return: Django Form bound field
    """
    field = form.fields[field_name]
    field = field.get_bound_field(form, field_name)
    return field
[ "def", "get_form_bound_field", "(", "form", ",", "field_name", ")", ":", "field", "=", "form", ".", "fields", "[", "field_name", "]", "field", "=", "field", ".", "get_bound_field", "(", "form", ",", "field_name", ")", "return", "field" ]
Gets the bound field from the form for the given field name

:param form: Django Form: django form instance
:param field_name: str: name of the field in form instance
:return: Django Form bound field
[ "Intends", "to", "get", "the", "bound", "field", "from", "the", "form", "regarding", "the", "field", "name" ]
1179a11358734e5e472e5eee703e8d34fa49e9bf
https://github.com/KnightConan/sspdatatables/blob/1179a11358734e5e472e5eee703e8d34fa49e9bf/src/sspdatatables/templatetags/form_field.py#L10-L20
train
rhayes777/PyAutoFit
autofit/conf.py
AncestorConfig.read
def read(self, module_name): """ Read a particular config file Parameters ---------- module_name: String The analysis_path of the module for which a config is to be read (priors relate one to one with configs). """ self.parser.read("{}/{}.ini".format(self.path, module_name.split(".")[-1]))
python
def read(self, module_name): """ Read a particular config file Parameters ---------- module_name: String The analysis_path of the module for which a config is to be read (priors relate one to one with configs). """ self.parser.read("{}/{}.ini".format(self.path, module_name.split(".")[-1]))
[ "def", "read", "(", "self", ",", "module_name", ")", ":", "self", ".", "parser", ".", "read", "(", "\"{}/{}.ini\"", ".", "format", "(", "self", ".", "path", ",", "module_name", ".", "split", "(", "\".\"", ")", "[", "-", "1", "]", ")", ")" ]
Read a particular config file Parameters ---------- module_name: String The analysis_path of the module for which a config is to be read (priors relate one to one with configs).
[ "Read", "a", "particular", "config", "file" ]
a9e6144abb08edfc6a6906c4030d7119bf8d3e14
https://github.com/rhayes777/PyAutoFit/blob/a9e6144abb08edfc6a6906c4030d7119bf8d3e14/autofit/conf.py#L117-L126
train
rhayes777/PyAutoFit
autofit/conf.py
AncestorConfig.get_for_nearest_ancestor
def get_for_nearest_ancestor(self, cls, attribute_name): """ Find a prior with the attribute analysis_path from the config for this class or one of its ancestors Parameters ---------- cls: class The class of interest attribute_name: String The analysis_path of the attribute Returns ------- prior_array: [] An array describing this prior """ for family_cls in family(cls): if self.has(family_cls.__module__, family_cls.__name__, attribute_name): return self.get(family_cls.__module__, family_cls.__name__, attribute_name) ini_filename = cls.__module__.split(".")[-1] raise exc.PriorException( "The prior config at {}/{} does not contain {} in {} or any of its parents".format(self.path, ini_filename, attribute_name, cls.__name__ ))
python
def get_for_nearest_ancestor(self, cls, attribute_name): """ Find a prior with the attribute analysis_path from the config for this class or one of its ancestors Parameters ---------- cls: class The class of interest attribute_name: String The analysis_path of the attribute Returns ------- prior_array: [] An array describing this prior """ for family_cls in family(cls): if self.has(family_cls.__module__, family_cls.__name__, attribute_name): return self.get(family_cls.__module__, family_cls.__name__, attribute_name) ini_filename = cls.__module__.split(".")[-1] raise exc.PriorException( "The prior config at {}/{} does not contain {} in {} or any of its parents".format(self.path, ini_filename, attribute_name, cls.__name__ ))
[ "def", "get_for_nearest_ancestor", "(", "self", ",", "cls", ",", "attribute_name", ")", ":", "for", "family_cls", "in", "family", "(", "cls", ")", ":", "if", "self", ".", "has", "(", "family_cls", ".", "__module__", ",", "family_cls", ".", "__name__", ",", "attribute_name", ")", ":", "return", "self", ".", "get", "(", "family_cls", ".", "__module__", ",", "family_cls", ".", "__name__", ",", "attribute_name", ")", "ini_filename", "=", "cls", ".", "__module__", ".", "split", "(", "\".\"", ")", "[", "-", "1", "]", "raise", "exc", ".", "PriorException", "(", "\"The prior config at {}/{} does not contain {} in {} or any of its parents\"", ".", "format", "(", "self", ".", "path", ",", "ini_filename", ",", "attribute_name", ",", "cls", ".", "__name__", ")", ")" ]
Find a prior with the attribute analysis_path from the config for this class or one of its ancestors Parameters ---------- cls: class The class of interest attribute_name: String The analysis_path of the attribute Returns ------- prior_array: [] An array describing this prior
[ "Find", "a", "prior", "with", "the", "attribute", "analysis_path", "from", "the", "config", "for", "this", "class", "or", "one", "of", "its", "ancestors" ]
a9e6144abb08edfc6a6906c4030d7119bf8d3e14
https://github.com/rhayes777/PyAutoFit/blob/a9e6144abb08edfc6a6906c4030d7119bf8d3e14/autofit/conf.py#L128-L153
train
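get_for_nearest_ancestor above walks the class and its ancestors via a family() helper that this record does not show; Python's own method resolution order gives the same kind of cls-first ordering. A sketch with a plain dict standing in for the parsed config:

def get_for_nearest_ancestor(config, cls, attribute_name):
    for ancestor in cls.__mro__:  # cls first, then its parents
        key = (ancestor.__name__, attribute_name)
        if key in config:
            return config[key]
    raise KeyError("{} not configured for {} or any of its parents".format(
        attribute_name, cls.__name__))


class Profile:
    pass


class SersicProfile(Profile):
    pass


config = {("Profile", "intensity"): "uniform 0 1"}
print(get_for_nearest_ancestor(config, SersicProfile, "intensity"))  # inherited from Profile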
wdv4758h/python-everywhere
everywhere/base.py
fib
def fib(number: int) -> int: """ Simple Fibonacci function. >>> fib(10) 55 """ if number < 2: return number return fib(number - 1) + fib(number - 2)
python
def fib(number: int) -> int: """ Simple Fibonacci function. >>> fib(10) 55 """ if number < 2: return number return fib(number - 1) + fib(number - 2)
[ "def", "fib", "(", "number", ":", "int", ")", "->", "int", ":", "if", "number", "<", "2", ":", "return", "number", "return", "fib", "(", "number", "-", "1", ")", "+", "fib", "(", "number", "-", "2", ")" ]
Simple Fibonacci function. >>> fib(10) 55
[ "Simple", "Fibonacci", "function", "." ]
1d1bafd9f908b08c7bdb0470c6e54181c928f32f
https://github.com/wdv4758h/python-everywhere/blob/1d1bafd9f908b08c7bdb0470c6e54181c928f32f/everywhere/base.py#L7-L16
train
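The fib record above is the classic doubly recursive definition, which recomputes each subproblem exponentially many times. A cached variant of the same definition, as a sketch rather than part of the package, runs in linear time:

from functools import lru_cache


@lru_cache(maxsize=None)
def fib(number: int) -> int:
    if number < 2:
        return number
    return fib(number - 1) + fib(number - 2)


assert fib(10) == 55  # matches the doctest above
print(fib(200))       # returns immediately thanks to the cache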
geophysics-ubonn/crtomo_tools
lib/crtomo/nodeManager.py
NodeMan.add_data
def add_data(self, data):
        """Add data to the node value sets

        Parameters
        ----------
        data: numpy.ndarray
            one or more node value sets. It must either be 1D or 2D, with
            the first dimension the number of parameter sets (K), and the
            second the number of elements (Z): K x Z

        Examples
        --------
        >>> # suppose that grid is a fully initialized grid object with 50 nodes
        >>> nodeman = NodeMan(grid)
        >>> one_data_set = np.ones(50)
        >>> cid = nodeman.add_data(one_data_set)
        >>> print(nodeman.parsets[cid])
        >>> two_data_sets = np.ones((2, 50))
        >>> cids = nodeman.add_data(two_data_sets)
        >>> print(cids)
        [0, ]
        [1, 2]

        """
        subdata = np.atleast_2d(data)

        # we try to accommodate transposed input
        if subdata.shape[1] != self.grid.nr_of_nodes:
            if subdata.shape[0] == self.grid.nr_of_nodes:
                subdata = subdata.T
            else:
                raise Exception(
                    'Number of values does not match the number of ' +
                    'nodes in the grid: {0} grid nodes vs {1} data'.format(
                        self.grid.nr_of_nodes,
                        subdata.shape,
                    )
                )

        return_ids = []
        for dataset in subdata:
            cid = self._get_next_index()
            self.nodevals[cid] = dataset.copy()
            return_ids.append(cid)

        if len(return_ids) == 1:
            return return_ids[0]
        else:
            return return_ids
python
def add_data(self, data):
        """Add data to the node value sets

        Parameters
        ----------
        data: numpy.ndarray
            one or more node value sets. It must either be 1D or 2D, with
            the first dimension the number of parameter sets (K), and the
            second the number of elements (Z): K x Z

        Examples
        --------
        >>> # suppose that grid is a fully initialized grid object with 50 nodes
        >>> nodeman = NodeMan(grid)
        >>> one_data_set = np.ones(50)
        >>> cid = nodeman.add_data(one_data_set)
        >>> print(nodeman.parsets[cid])
        >>> two_data_sets = np.ones((2, 50))
        >>> cids = nodeman.add_data(two_data_sets)
        >>> print(cids)
        [0, ]
        [1, 2]

        """
        subdata = np.atleast_2d(data)

        # we try to accommodate transposed input
        if subdata.shape[1] != self.grid.nr_of_nodes:
            if subdata.shape[0] == self.grid.nr_of_nodes:
                subdata = subdata.T
            else:
                raise Exception(
                    'Number of values does not match the number of ' +
                    'nodes in the grid: {0} grid nodes vs {1} data'.format(
                        self.grid.nr_of_nodes,
                        subdata.shape,
                    )
                )

        return_ids = []
        for dataset in subdata:
            cid = self._get_next_index()
            self.nodevals[cid] = dataset.copy()
            return_ids.append(cid)

        if len(return_ids) == 1:
            return return_ids[0]
        else:
            return return_ids
[ "def", "add_data", "(", "self", ",", "data", ")", ":", "subdata", "=", "np", ".", "atleast_2d", "(", "data", ")", "# we try to accommodate transposed input", "if", "subdata", ".", "shape", "[", "1", "]", "!=", "self", ".", "grid", ".", "nr_of_nodes", ":", "if", "subdata", ".", "shape", "[", "0", "]", "==", "self", ".", "grid", ".", "nr_of_nodes", ":", "subdata", "=", "subdata", ".", "T", "else", ":", "raise", "Exception", "(", "'Number of values does not match the number of '", "+", "'nodes in the grid {0} grid nodes vs {1} data'", ".", "format", "(", "self", ".", "grid", ".", "nr_of_nodes", ",", "subdata", ".", "shape", ",", ")", ")", "return_ids", "=", "[", "]", "for", "dataset", "in", "subdata", ":", "cid", "=", "self", ".", "_get_next_index", "(", ")", "self", ".", "nodevals", "[", "cid", "]", "=", "dataset", ".", "copy", "(", ")", "return_ids", ".", "append", "(", "cid", ")", "if", "len", "(", "return_ids", ")", "==", "1", ":", "return", "return_ids", "[", "0", "]", "else", ":", "return", "return_ids" ]
Add data to the node value sets

Parameters
----------
data: numpy.ndarray
    one or more node value sets. It must either be 1D or 2D, with
    the first dimension the number of parameter sets (K), and the
    second the number of elements (Z): K x Z

Examples
--------
>>> # suppose that grid is a fully initialized grid object with 50 nodes
>>> nodeman = NodeMan(grid)
>>> one_data_set = np.ones(50)
>>> cid = nodeman.add_data(one_data_set)
>>> print(nodeman.parsets[cid])
>>> two_data_sets = np.ones((2, 50))
>>> cids = nodeman.add_data(two_data_sets)
>>> print(cids)
[0, ]
[1, 2]
[ "Add", "data", "to", "the", "node", "value", "sets" ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/nodeManager.py#L38-L87
train
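The shape handling in add_data above does two things: np.atleast_2d() lifts a single value set to shape (1, N), and an (N, K) input is recognized as transposed and flipped to (K, N). A standalone sketch of just that step, with N = 5 nodes assumed:

import numpy as np


def normalize_shape(data, nr_of_nodes):
    subdata = np.atleast_2d(data)
    if subdata.shape[1] != nr_of_nodes:
        if subdata.shape[0] == nr_of_nodes:
            subdata = subdata.T  # accommodate transposed input, as above
        else:
            raise ValueError("shape {} does not fit {} nodes".format(subdata.shape, nr_of_nodes))
    return subdata


print(normalize_shape(np.ones(5), 5).shape)       # (1, 5) - one parameter set
print(normalize_shape(np.ones((5, 2)), 5).shape)  # (2, 5) - transposed input fixed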
reorx/torext
torext/utils.py
SingletonMixin.instance
def instance(cls, *args, **kwgs):
        """Return the one shared instance, creating it on first call"""
        if not hasattr(cls, "_instance"):
            cls._instance = cls(*args, **kwgs)
        return cls._instance
python
def instance(cls, *args, **kwgs):
        """Return the one shared instance, creating it on first call"""
        if not hasattr(cls, "_instance"):
            cls._instance = cls(*args, **kwgs)
        return cls._instance
[ "def", "instance", "(", "cls", ",", "*", "args", ",", "*", "*", "kwgs", ")", ":", "if", "not", "hasattr", "(", "cls", ",", "\"_instance\"", ")", ":", "cls", ".", "_instance", "=", "cls", "(", "*", "args", ",", "*", "*", "kwgs", ")", "return", "cls", ".", "_instance" ]
Return the one shared instance, creating it on first call
[ "Will", "be", "the", "only", "instance" ]
84c4300ebc7fab0dbd11cf8b020bc7d4d1570171
https://github.com/reorx/torext/blob/84c4300ebc7fab0dbd11cf8b020bc7d4d1570171/torext/utils.py#L56-L60
train
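A usage sketch for the mixin above; the @classmethod decorator is assumed from the cls-first signature, since the record shows only the method body.

class SingletonMixin:
    @classmethod
    def instance(cls, *args, **kwgs):
        if not hasattr(cls, "_instance"):
            cls._instance = cls(*args, **kwgs)
        return cls._instance


class Registry(SingletonMixin):
    def __init__(self):
        self.items = []


assert Registry.instance() is Registry.instance()  # one shared object per class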
assamite/creamas
creamas/examples/grid/utils.py
configure_logger
def configure_logger(logger, filename, folder, log_level):
    '''Configure logging behavior for the simulations.
    '''
    fmt = logging.Formatter('%(asctime)s %(levelname)s: %(message)s')

    if folder is not None:
        log_file = os.path.join(folder, filename)
        hdl = logging.FileHandler(log_file)
        hdl.setFormatter(fmt)
        hdl.setLevel(log_level)
        logger.addHandler(hdl)

    shdl = logging.StreamHandler()
    shdl.setLevel(log_level)
    shdl.setFormatter(fmt)
    logger.addHandler(shdl)

    logger.setLevel(log_level)
python
def configure_logger(logger, filename, folder, log_level):
    '''Configure logging behavior for the simulations.
    '''
    fmt = logging.Formatter('%(asctime)s %(levelname)s: %(message)s')

    if folder is not None:
        log_file = os.path.join(folder, filename)
        hdl = logging.FileHandler(log_file)
        hdl.setFormatter(fmt)
        hdl.setLevel(log_level)
        logger.addHandler(hdl)

    shdl = logging.StreamHandler()
    shdl.setLevel(log_level)
    shdl.setFormatter(fmt)
    logger.addHandler(shdl)

    logger.setLevel(log_level)
[ "def", "configure_logger", "(", "logger", ",", "filename", ",", "folder", ",", "log_level", ")", ":", "fmt", "=", "logging", ".", "Formatter", "(", "'%(asctime)s %(levelname)s: %(message)s'", ")", "if", "folder", "is", "not", "None", ":", "log_file", "=", "os", ".", "path", ".", "join", "(", "folder", ",", "filename", ")", "hdl", "=", "logging", ".", "FileHandler", "(", "log_file", ")", "hdl", ".", "setFormatter", "(", "fmt", ")", "hdl", ".", "setLevel", "(", "log_level", ")", "logger", ".", "addHandler", "(", "hdl", ")", "shdl", "=", "logging", ".", "StreamHandler", "(", ")", "shdl", ".", "setLevel", "(", "log_level", ")", "shdl", ".", "setFormatter", "(", "fmt", ")", "logger", ".", "addHandler", "(", "shdl", ")", "logger", ".", "setLevel", "(", "log_level", ")" ]
Configure logging behavior for the simulations.
[ "Configure", "logging", "behvior", "for", "the", "simulations", "." ]
54dc3e31c97a3f938e58272f8ab80b6bcafeff58
https://github.com/assamite/creamas/blob/54dc3e31c97a3f938e58272f8ab80b6bcafeff58/creamas/examples/grid/utils.py#L11-L25
train
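A usage sketch for the record above, assuming configure_logger is in scope; the logger name and temp folder are illustrative. Messages then go both to <folder>/sim.log and to the console.

import logging
import tempfile

logger = logging.getLogger("grid-simulation")
configure_logger(logger, "sim.log", tempfile.gettempdir(), logging.INFO)
logger.info("simulation started")  # written to sim.log and echoed to stderr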
tech-pi/doufo
src/python/doufo/function.py
_nargs
def _nargs(f) -> Optional[int]:
    '''
    number of positional argument values, dynamically computed from the
    argument spec; None if the function accepts *args.
    '''
    if isinstance(f, Function):
        return f.nargs
    spec = inspect.getfullargspec(f)
    if spec.varargs is not None:
        return None
    return len(spec.args)
python
def _nargs(f) -> Optional[int]:
    '''
    number of positional argument values, dynamically computed from the
    argument spec; None if the function accepts *args.
    '''
    if isinstance(f, Function):
        return f.nargs
    spec = inspect.getfullargspec(f)
    if spec.varargs is not None:
        return None
    return len(spec.args)
[ "def", "_nargs", "(", "f", ")", "->", "Optional", "[", "int", "]", ":", "if", "isinstance", "(", "f", ",", "Function", ")", ":", "return", "f", ".", "nargs", "spec", "=", "inspect", ".", "getfullargspec", "(", "f", ")", "if", "spec", ".", "varargs", "is", "not", "None", ":", "return", "None", "return", "len", "(", "spec", ".", "args", ")" ]
number of positional argument values, dynamically computed from the argument spec; None if the function accepts *args.
[ "number", "of", "positional", "arguments", "values", ".", "Dynamically", "computed", "from", "the", "arguments", "attribute", "." ]
3d375fef30670597768a6eef809b75b4b1b5a3fd
https://github.com/tech-pi/doufo/blob/3d375fef30670597768a6eef809b75b4b1b5a3fd/src/python/doufo/function.py#L93-L102
train
tech-pi/doufo
src/python/doufo/function.py
_ndefs
def _ndefs(f):
    '''
    number of default values for positional or keyword parameters
    '''
    if isinstance(f, Function):
        return f.ndefs
    spec = inspect.getfullargspec(f)
    if spec.defaults is None:
        return 0
    return len(spec.defaults)
python
def _ndefs(f):
    '''
    number of default values for positional or keyword parameters
    '''
    if isinstance(f, Function):
        return f.ndefs
    spec = inspect.getfullargspec(f)
    if spec.defaults is None:
        return 0
    return len(spec.defaults)
[ "def", "_ndefs", "(", "f", ")", ":", "if", "isinstance", "(", "f", ",", "Function", ")", ":", "return", "f", ".", "ndefs", "spec", "=", "inspect", ".", "getfullargspec", "(", "f", ")", "if", "spec", ".", "defaults", "is", "None", ":", "return", "0", "return", "len", "(", "spec", ".", "defaults", ")" ]
number of default values for positional or keyword parameters
[ "number", "of", "any", "default", "values", "for", "positional", "or", "keyword", "parameters" ]
3d375fef30670597768a6eef809b75b4b1b5a3fd
https://github.com/tech-pi/doufo/blob/3d375fef30670597768a6eef809b75b4b1b5a3fd/src/python/doufo/function.py#L105-L114
train
tech-pi/doufo
src/python/doufo/function.py
singledispatch
def singledispatch(*, nargs=None, nouts=None, ndefs=None):
    """
    singledispatch decorator, combining functools.singledispatch and func
    """
    def wrapper(f):
        return wraps(f)(SingleDispatchFunction(f, nargs=nargs, nouts=nouts, ndefs=ndefs))

    return wrapper
python
def singledispatch(*, nargs=None, nouts=None, ndefs=None):
    """
    singledispatch decorator, combining functools.singledispatch and func
    """
    def wrapper(f):
        return wraps(f)(SingleDispatchFunction(f, nargs=nargs, nouts=nouts, ndefs=ndefs))

    return wrapper
[ "def", "singledispatch", "(", "*", ",", "nargs", "=", "None", ",", "nouts", "=", "None", ",", "ndefs", "=", "None", ")", ":", "def", "wrapper", "(", "f", ")", ":", "return", "wraps", "(", "f", ")", "(", "SingleDispatchFunction", "(", "f", ",", "nargs", "=", "nargs", ",", "nouts", "=", "nouts", ",", "ndefs", "=", "ndefs", ")", ")", "return", "wrapper" ]
singledispatch decorator, combining functools.singledispatch and func
[ "singledispatch", "decorate", "of", "both", "functools", ".", "singledispatch", "and", "func" ]
3d375fef30670597768a6eef809b75b4b1b5a3fd
https://github.com/tech-pi/doufo/blob/3d375fef30670597768a6eef809b75b4b1b5a3fd/src/python/doufo/function.py#L285-L293
train
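The SingleDispatchFunction class wrapped above is not shown in this record; the standard-library mechanism it presumably builds on behaves like this sketch:

from functools import singledispatch


@singledispatch
def describe(x):
    return "something: {!r}".format(x)


@describe.register(int)
def _(x):
    return "an int: {}".format(x)


@describe.register(list)
def _(x):
    return "a list of {} items".format(len(x))


print(describe(3))       # an int: 3
print(describe([1, 2]))  # a list of 2 items
print(describe("hi"))    # something: 'hi'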
tech-pi/doufo
src/python/doufo/function.py
multidispatch
def multidispatch(*, nargs=None, nouts=None):
    """
    multidispatch decorator, combining functools.singledispatch and func
    """
    def wrapper(f):
        return wraps(f)(MultiDispatchFunction(f, nargs=nargs, nouts=nouts))

    return wrapper
python
def multidispatch(*, nargs=None, nouts=None):
    """
    multidispatch decorator, combining functools.singledispatch and func
    """
    def wrapper(f):
        return wraps(f)(MultiDispatchFunction(f, nargs=nargs, nouts=nouts))

    return wrapper
[ "def", "multidispatch", "(", "*", ",", "nargs", "=", "None", ",", "nouts", "=", "None", ")", ":", "def", "wrapper", "(", "f", ")", ":", "return", "wraps", "(", "f", ")", "(", "MultiDispatchFunction", "(", "f", ",", "nargs", "=", "nargs", ",", "nouts", "=", "nouts", ")", ")", "return", "wrapper" ]
multidispatch decorator, combining functools.singledispatch and func
[ "multidispatch", "decorate", "of", "both", "functools", ".", "singledispatch", "and", "func" ]
3d375fef30670597768a6eef809b75b4b1b5a3fd
https://github.com/tech-pi/doufo/blob/3d375fef30670597768a6eef809b75b4b1b5a3fd/src/python/doufo/function.py#L318-L325
train
tech-pi/doufo
src/python/doufo/function.py
flip
def flip(f: Callable) -> Function:
    """
    Flip the order of the first two arguments to the function.
    """
    nargs_, nouts_, ndefs_ = nargs(f), nouts(f), ndefs(f)
    return WrappedFunction(lambda *args, **kwargs: f(args[1], args[0], *args[2:], **kwargs),
                           nargs=nargs_, nouts=nouts_, ndefs=ndefs_)
python
def flip(f: Callable) -> Function:
    """
    Flip the order of the first two arguments to the function.
    """
    nargs_, nouts_, ndefs_ = nargs(f), nouts(f), ndefs(f)
    return WrappedFunction(lambda *args, **kwargs: f(args[1], args[0], *args[2:], **kwargs),
                           nargs=nargs_, nouts=nouts_, ndefs=ndefs_)
[ "def", "flip", "(", "f", ":", "Callable", ")", "->", "Function", ":", "nargs_", ",", "nouts_", ",", "ndefs_", "=", "nargs", "(", "f", ")", ",", "nouts", "(", "f", ")", ",", "ndefs", "(", "f", ")", "return", "WrappedFunction", "(", "lambda", "*", "args", ",", "*", "*", "kwargs", ":", "f", "(", "args", "[", "1", "]", ",", "args", "[", "0", "]", ",", "*", "args", "[", "2", ":", "]", ",", "*", "*", "kwargs", ")", ",", "nargs", "=", "nargs_", ",", "nouts", "=", "nouts_", ",", "ndefs", "=", "ndefs_", ")" ]
Flip the order of the first two arguments to the function.
[ "flip", "order", "of", "first", "two", "arguments", "to", "function", "." ]
3d375fef30670597768a6eef809b75b4b1b5a3fd
https://github.com/tech-pi/doufo/blob/3d375fef30670597768a6eef809b75b4b1b5a3fd/src/python/doufo/function.py#L329-L335
train
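A behavior sketch of flip above, stripped of the WrappedFunction bookkeeping: only the first two positional arguments swap, everything else passes through unchanged.

def flip(f):
    return lambda *args, **kwargs: f(args[1], args[0], *args[2:], **kwargs)


def div(a, b):
    return a / b


print(div(10, 2))        # 5.0
print(flip(div)(10, 2))  # 0.2 - first two arguments swapped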
tech-pi/doufo
src/python/doufo/function.py
tagfunc
def tagfunc(nargs=None, ndefs=None, nouts=None):
    """
    decorator for tagged functions
    """
    def wrapper(f):
        return wraps(f)(FunctionWithTag(f, nargs=nargs, nouts=nouts, ndefs=ndefs))

    return wrapper
python
def tagfunc(nargs=None, ndefs=None, nouts=None):
    """
    decorator for tagged functions
    """
    def wrapper(f):
        return wraps(f)(FunctionWithTag(f, nargs=nargs, nouts=nouts, ndefs=ndefs))

    return wrapper
[ "def", "tagfunc", "(", "nargs", "=", "None", ",", "ndefs", "=", "None", ",", "nouts", "=", "None", ")", ":", "def", "wrapper", "(", "f", ")", ":", "return", "wraps", "(", "f", ")", "(", "FunctionWithTag", "(", "f", ",", "nargs", "=", "nargs", ",", "nouts", "=", "nouts", ",", "ndefs", "=", "ndefs", ")", ")", "return", "wrapper" ]
decorator for tagged functions
[ "decorate", "of", "tagged", "function" ]
3d375fef30670597768a6eef809b75b4b1b5a3fd
https://github.com/tech-pi/doufo/blob/3d375fef30670597768a6eef809b75b4b1b5a3fd/src/python/doufo/function.py#L388-L395
train
tech-pi/doufo
src/python/doufo/function.py
WrappedFunction.fmap
def fmap(self, f: 'WrappedFunction') -> 'WrappedFunction':
        '''
        Function map for WrappedFunction. The argument is forcibly converted
        to a WrappedFunction before composition.
        '''
        if not isinstance(f, WrappedFunction):
            f = WrappedFunction(f)
        return WrappedFunction(lambda *args, **kwargs: self(f(*args, **kwargs)),
                               nargs=f.nargs, nouts=self.nouts)
python
def fmap(self, f: 'WrappedFunction') -> 'WrappedFunction':
        '''
        Function map for WrappedFunction. The argument is forcibly converted
        to a WrappedFunction before composition.
        '''
        if not isinstance(f, WrappedFunction):
            f = WrappedFunction(f)
        return WrappedFunction(lambda *args, **kwargs: self(f(*args, **kwargs)),
                               nargs=f.nargs, nouts=self.nouts)
[ "def", "fmap", "(", "self", ",", "f", ":", "'WrappedFunction'", ")", "->", "'WrappedFunction'", ":", "if", "not", "isinstance", "(", "f", ",", "WrappedFunction", ")", ":", "f", "=", "WrappedFunction", "(", "f", ")", "return", "WrappedFunction", "(", "lambda", "*", "args", ",", "*", "*", "kwargs", ":", "self", "(", "f", "(", "*", "args", ",", "*", "*", "kwargs", ")", ")", ",", "nargs", "=", "f", ".", "nargs", ",", "nouts", "=", "self", ".", "nouts", ")" ]
Function map for WrappedFunction. The argument is forcibly converted to a WrappedFunction before composition.
[ "function", "map", "for", "Wrapped", "Function", ".", "A", "forced", "transfermation", "to", "WrappedFunction", "would", "be", "applied", ".", "async", "def" ]
3d375fef30670597768a6eef809b75b4b1b5a3fd
https://github.com/tech-pi/doufo/blob/3d375fef30670597768a6eef809b75b4b1b5a3fd/src/python/doufo/function.py#L184-L192
train
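The fmap above is function composition, self(f(...)), plus nargs/nouts bookkeeping. The composition itself, as a wrapper-free sketch:

def fmap(outer, inner):
    return lambda *args, **kwargs: outer(inner(*args, **kwargs))


double_then_str = fmap(str, lambda x: 2 * x)
print(double_then_str(21))  # '42'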
Kortemme-Lab/klab
klab/bio/pdbml.py
PDBML_slow.parse_atoms
def parse_atoms(self):
        '''All ATOM lines are parsed even though only one per residue needs to be parsed.
        The reason for parsing all the lines is just to sanity-check that the ATOMs within
        one residue are consistent with each other.'''

        atom_site_header_tag = self.main_tag.getElementsByTagName("PDBx:atom_siteCategory")
        assert(len(atom_site_header_tag) == 1)
        atom_site_header_tag = atom_site_header_tag[0]
        atom_site_tags = atom_site_header_tag.getElementsByTagName("PDBx:atom_site")

        residue_map = {}
        residues_read = {}
        int_type = types.IntType
        for t in atom_site_tags:
            r, seqres, ResidueAA, Residue3AA = PDBML_slow.parse_atom_site(t, self.modified_residues)
            if r:
                # skip certain ACE residues
                if not(self.pdb_id in cases_with_ACE_residues_we_can_ignore and Residue3AA == 'ACE'):
                    full_residue_id = str(r)
                    if residues_read.get(full_residue_id):
                        assert(residues_read[full_residue_id] == (r.ResidueAA, seqres))
                    else:
                        residues_read[full_residue_id] = (r.ResidueAA, seqres)
                        residue_map[r.Chain] = residue_map.get(r.Chain, {})
                        assert(type(seqres) == int_type)
                        residue_map[r.Chain][str(r)] = seqres

        ## Create SequenceMap objects to map the ATOM Sequences to the SEQRES Sequences
        atom_to_seqres_sequence_maps = {}
        for chain_id, atom_seqres_mapping in residue_map.iteritems():
            atom_to_seqres_sequence_maps[chain_id] = SequenceMap.from_dict(atom_seqres_mapping)
        self.atom_to_seqres_sequence_maps = atom_to_seqres_sequence_maps
python
def parse_atoms(self):
        '''All ATOM lines are parsed even though only one per residue needs to be parsed.
        The reason for parsing all the lines is just to sanity-check that the ATOMs within
        one residue are consistent with each other.'''

        atom_site_header_tag = self.main_tag.getElementsByTagName("PDBx:atom_siteCategory")
        assert(len(atom_site_header_tag) == 1)
        atom_site_header_tag = atom_site_header_tag[0]
        atom_site_tags = atom_site_header_tag.getElementsByTagName("PDBx:atom_site")

        residue_map = {}
        residues_read = {}
        int_type = types.IntType
        for t in atom_site_tags:
            r, seqres, ResidueAA, Residue3AA = PDBML_slow.parse_atom_site(t, self.modified_residues)
            if r:
                # skip certain ACE residues
                if not(self.pdb_id in cases_with_ACE_residues_we_can_ignore and Residue3AA == 'ACE'):
                    full_residue_id = str(r)
                    if residues_read.get(full_residue_id):
                        assert(residues_read[full_residue_id] == (r.ResidueAA, seqres))
                    else:
                        residues_read[full_residue_id] = (r.ResidueAA, seqres)
                        residue_map[r.Chain] = residue_map.get(r.Chain, {})
                        assert(type(seqres) == int_type)
                        residue_map[r.Chain][str(r)] = seqres

        ## Create SequenceMap objects to map the ATOM Sequences to the SEQRES Sequences
        atom_to_seqres_sequence_maps = {}
        for chain_id, atom_seqres_mapping in residue_map.iteritems():
            atom_to_seqres_sequence_maps[chain_id] = SequenceMap.from_dict(atom_seqres_mapping)
        self.atom_to_seqres_sequence_maps = atom_to_seqres_sequence_maps
[ "def", "parse_atoms", "(", "self", ")", ":", "atom_site_header_tag", "=", "self", ".", "main_tag", ".", "getElementsByTagName", "(", "\"PDBx:atom_siteCategory\"", ")", "assert", "(", "len", "(", "atom_site_header_tag", ")", "==", "1", ")", "atom_site_header_tag", "=", "atom_site_header_tag", "[", "0", "]", "atom_site_tags", "=", "atom_site_header_tag", ".", "getElementsByTagName", "(", "\"PDBx:atom_site\"", ")", "residue_map", "=", "{", "}", "residues_read", "=", "{", "}", "int_type", "=", "types", ".", "IntType", "for", "t", "in", "atom_site_tags", ":", "r", ",", "seqres", ",", "ResidueAA", ",", "Residue3AA", "=", "PDBML_slow", ".", "parse_atom_site", "(", "t", ",", "self", ".", "modified_residues", ")", "if", "r", ":", "# skip certain ACE residues", "if", "not", "(", "self", ".", "pdb_id", "in", "cases_with_ACE_residues_we_can_ignore", "and", "Residue3AA", "==", "'ACE'", ")", ":", "full_residue_id", "=", "str", "(", "r", ")", "if", "residues_read", ".", "get", "(", "full_residue_id", ")", ":", "assert", "(", "residues_read", "[", "full_residue_id", "]", "==", "(", "r", ".", "ResidueAA", ",", "seqres", ")", ")", "else", ":", "residues_read", "[", "full_residue_id", "]", "=", "(", "r", ".", "ResidueAA", ",", "seqres", ")", "residue_map", "[", "r", ".", "Chain", "]", "=", "residue_map", ".", "get", "(", "r", ".", "Chain", ",", "{", "}", ")", "assert", "(", "type", "(", "seqres", ")", "==", "int_type", ")", "residue_map", "[", "r", ".", "Chain", "]", "[", "str", "(", "r", ")", "]", "=", "seqres", "## Create SequenceMap objects to map the ATOM Sequences to the SEQRES Sequences", "atom_to_seqres_sequence_maps", "=", "{", "}", "for", "chain_id", ",", "atom_seqres_mapping", "in", "residue_map", ".", "iteritems", "(", ")", ":", "atom_to_seqres_sequence_maps", "[", "chain_id", "]", "=", "SequenceMap", ".", "from_dict", "(", "atom_seqres_mapping", ")", "self", ".", "atom_to_seqres_sequence_maps", "=", "atom_to_seqres_sequence_maps" ]
All ATOM lines are parsed even though only one per residue needs to be parsed. The reason for parsing all the lines is just to sanity-check that the ATOMs within one residue are consistent with each other.
[ "All", "ATOM", "lines", "are", "parsed", "even", "though", "only", "one", "per", "residue", "needs", "to", "be", "parsed", ".", "The", "reason", "for", "parsing", "all", "the", "lines", "is", "just", "to", "sanity", "-", "checks", "that", "the", "ATOMs", "within", "one", "residue", "are", "consistent", "with", "each", "other", "." ]
6d410ad08f1bd9f7cbbb28d7d946e94fbaaa2b6b
https://github.com/Kortemme-Lab/klab/blob/6d410ad08f1bd9f7cbbb28d7d946e94fbaaa2b6b/klab/bio/pdbml.py#L125-L157
train
Kortemme-Lab/klab
klab/bio/pdbml.py
PDBML.parse_atom_site
def parse_atom_site(self, name, attributes): '''Parse the atom tag attributes. Most atom tags do not have attributes.''' if name == "PDBx:pdbx_PDB_ins_code": assert(not(self.current_atom_site.ATOMResidueiCodeIsNull)) if attributes.get('xsi:nil') == 'true': self.current_atom_site.ATOMResidueiCodeIsNull = True if name == "PDBx:auth_asym_id": assert(not(self.current_atom_site.PDBChainIDIsNull)) if attributes.get('xsi:nil') == 'true': self.current_atom_site.PDBChainIDIsNull = True
python
def parse_atom_site(self, name, attributes): '''Parse the atom tag attributes. Most atom tags do not have attributes.''' if name == "PDBx:pdbx_PDB_ins_code": assert(not(self.current_atom_site.ATOMResidueiCodeIsNull)) if attributes.get('xsi:nil') == 'true': self.current_atom_site.ATOMResidueiCodeIsNull = True if name == "PDBx:auth_asym_id": assert(not(self.current_atom_site.PDBChainIDIsNull)) if attributes.get('xsi:nil') == 'true': self.current_atom_site.PDBChainIDIsNull = True
[ "def", "parse_atom_site", "(", "self", ",", "name", ",", "attributes", ")", ":", "if", "name", "==", "\"PDBx:pdbx_PDB_ins_code\"", ":", "assert", "(", "not", "(", "self", ".", "current_atom_site", ".", "ATOMResidueiCodeIsNull", ")", ")", "if", "attributes", ".", "get", "(", "'xsi:nil'", ")", "==", "'true'", ":", "self", ".", "current_atom_site", ".", "ATOMResidueiCodeIsNull", "=", "True", "if", "name", "==", "\"PDBx:auth_asym_id\"", ":", "assert", "(", "not", "(", "self", ".", "current_atom_site", ".", "PDBChainIDIsNull", ")", ")", "if", "attributes", ".", "get", "(", "'xsi:nil'", ")", "==", "'true'", ":", "self", ".", "current_atom_site", ".", "PDBChainIDIsNull", "=", "True" ]
Parse the atom tag attributes. Most atom tags do not have attributes.
[ "Parse", "the", "atom", "tag", "attributes", ".", "Most", "atom", "tags", "do", "not", "have", "attributes", "." ]
6d410ad08f1bd9f7cbbb28d7d946e94fbaaa2b6b
https://github.com/Kortemme-Lab/klab/blob/6d410ad08f1bd9f7cbbb28d7d946e94fbaaa2b6b/klab/bio/pdbml.py#L471-L480
train
Kortemme-Lab/klab
klab/bio/pdbml.py
PDBML.parse_atom_tag_data
def parse_atom_tag_data(self, name, tag_content): '''Parse the atom tag data.''' current_atom_site = self.current_atom_site if current_atom_site.IsHETATM: # Early out - do not parse HETATM records return elif name == 'PDBx:atom_site': # We have to handle the atom_site close tag here since we jump based on self._BLOCK first in end_element #'''Add the residue to the residue map.''' self._BLOCK = None current_atom_site = self.current_atom_site current_atom_site.validate() if current_atom_site.IsATOM: # Only parse ATOM records r, seqres, ResidueAA, Residue3AA = current_atom_site.convert_to_residue(self.modified_residues) if r: if not(self.pdb_id in cases_with_ACE_residues_we_can_ignore and Residue3AA == 'ACE'): # skip certain ACE residues full_residue_id = str(r) if self._residues_read.get(full_residue_id): assert(self._residues_read[full_residue_id] == (r.ResidueAA, seqres)) else: self._residues_read[full_residue_id] = (r.ResidueAA, seqres) self._residue_map[r.Chain] = self._residue_map.get(r.Chain, {}) assert(type(seqres) == int_type) self._residue_map[r.Chain][str(r)] = seqres # Record type elif name == 'PDBx:group_PDB': # ATOM or HETATM if tag_content == 'ATOM': current_atom_site.IsATOM = True elif tag_content == 'HETATM': current_atom_site.IsHETATM = True else: raise Exception("PDBx:group_PDB was expected to be 'ATOM' or 'HETATM'. '%s' read instead." % tag_content) # Residue identifier - chain ID, residue ID, insertion code elif name == 'PDBx:auth_asym_id': assert(not(current_atom_site.PDBChainID)) current_atom_site.PDBChainID = tag_content if not tag_content: assert(current_atom_site.PDBChainIDIsNull) if self.pdb_id.upper() == '2MBP': current_atom_site.PDBChainID = 'A' # e.g. 2MBP else: current_atom_site.PDBChainID = ' ' elif name == 'PDBx:auth_seq_id': assert(not(current_atom_site.ATOMResidueID)) current_atom_site.ATOMResidueID = int(tag_content) elif name == "PDBx:pdbx_PDB_ins_code": if current_atom_site.ATOMResidueiCodeIsNull: assert(len(tag_content) == 0) else: assert(current_atom_site.ATOMResidueiCode == ' ') current_atom_site.ATOMResidueiCode = tag_content elif name == "PDBx:auth_comp_id": assert(not(current_atom_site.ATOMResidueAA)) current_atom_site.ATOMResidueAA = tag_content elif name == "PDBx:label_seq_id": assert(not(current_atom_site.SEQRESIndex)) current_atom_site.SEQRESIndex = int(tag_content) elif name == "PDBx:label_comp_id": assert(not(current_atom_site.ATOMSeqresResidueAA)) current_atom_site.ATOMSeqresResidueAA = tag_content
python
def parse_atom_tag_data(self, name, tag_content): '''Parse the atom tag data.''' current_atom_site = self.current_atom_site if current_atom_site.IsHETATM: # Early out - do not parse HETATM records return elif name == 'PDBx:atom_site': # We have to handle the atom_site close tag here since we jump based on self._BLOCK first in end_element #'''Add the residue to the residue map.''' self._BLOCK = None current_atom_site = self.current_atom_site current_atom_site.validate() if current_atom_site.IsATOM: # Only parse ATOM records r, seqres, ResidueAA, Residue3AA = current_atom_site.convert_to_residue(self.modified_residues) if r: if not(self.pdb_id in cases_with_ACE_residues_we_can_ignore and Residue3AA == 'ACE'): # skip certain ACE residues full_residue_id = str(r) if self._residues_read.get(full_residue_id): assert(self._residues_read[full_residue_id] == (r.ResidueAA, seqres)) else: self._residues_read[full_residue_id] = (r.ResidueAA, seqres) self._residue_map[r.Chain] = self._residue_map.get(r.Chain, {}) assert(type(seqres) == int_type) self._residue_map[r.Chain][str(r)] = seqres # Record type elif name == 'PDBx:group_PDB': # ATOM or HETATM if tag_content == 'ATOM': current_atom_site.IsATOM = True elif tag_content == 'HETATM': current_atom_site.IsHETATM = True else: raise Exception("PDBx:group_PDB was expected to be 'ATOM' or 'HETATM'. '%s' read instead." % tag_content) # Residue identifier - chain ID, residue ID, insertion code elif name == 'PDBx:auth_asym_id': assert(not(current_atom_site.PDBChainID)) current_atom_site.PDBChainID = tag_content if not tag_content: assert(current_atom_site.PDBChainIDIsNull) if self.pdb_id.upper() == '2MBP': current_atom_site.PDBChainID = 'A' # e.g. 2MBP else: current_atom_site.PDBChainID = ' ' elif name == 'PDBx:auth_seq_id': assert(not(current_atom_site.ATOMResidueID)) current_atom_site.ATOMResidueID = int(tag_content) elif name == "PDBx:pdbx_PDB_ins_code": if current_atom_site.ATOMResidueiCodeIsNull: assert(len(tag_content) == 0) else: assert(current_atom_site.ATOMResidueiCode == ' ') current_atom_site.ATOMResidueiCode = tag_content elif name == "PDBx:auth_comp_id": assert(not(current_atom_site.ATOMResidueAA)) current_atom_site.ATOMResidueAA = tag_content elif name == "PDBx:label_seq_id": assert(not(current_atom_site.SEQRESIndex)) current_atom_site.SEQRESIndex = int(tag_content) elif name == "PDBx:label_comp_id": assert(not(current_atom_site.ATOMSeqresResidueAA)) current_atom_site.ATOMSeqresResidueAA = tag_content
[ "def", "parse_atom_tag_data", "(", "self", ",", "name", ",", "tag_content", ")", ":", "current_atom_site", "=", "self", ".", "current_atom_site", "if", "current_atom_site", ".", "IsHETATM", ":", "# Early out - do not parse HETATM records", "return", "elif", "name", "==", "'PDBx:atom_site'", ":", "# We have to handle the atom_site close tag here since we jump based on self._BLOCK first in end_element", "#'''Add the residue to the residue map.'''", "self", ".", "_BLOCK", "=", "None", "current_atom_site", "=", "self", ".", "current_atom_site", "current_atom_site", ".", "validate", "(", ")", "if", "current_atom_site", ".", "IsATOM", ":", "# Only parse ATOM records", "r", ",", "seqres", ",", "ResidueAA", ",", "Residue3AA", "=", "current_atom_site", ".", "convert_to_residue", "(", "self", ".", "modified_residues", ")", "if", "r", ":", "if", "not", "(", "self", ".", "pdb_id", "in", "cases_with_ACE_residues_we_can_ignore", "and", "Residue3AA", "==", "'ACE'", ")", ":", "# skip certain ACE residues", "full_residue_id", "=", "str", "(", "r", ")", "if", "self", ".", "_residues_read", ".", "get", "(", "full_residue_id", ")", ":", "assert", "(", "self", ".", "_residues_read", "[", "full_residue_id", "]", "==", "(", "r", ".", "ResidueAA", ",", "seqres", ")", ")", "else", ":", "self", ".", "_residues_read", "[", "full_residue_id", "]", "=", "(", "r", ".", "ResidueAA", ",", "seqres", ")", "self", ".", "_residue_map", "[", "r", ".", "Chain", "]", "=", "self", ".", "_residue_map", ".", "get", "(", "r", ".", "Chain", ",", "{", "}", ")", "assert", "(", "type", "(", "seqres", ")", "==", "int_type", ")", "self", ".", "_residue_map", "[", "r", ".", "Chain", "]", "[", "str", "(", "r", ")", "]", "=", "seqres", "# Record type", "elif", "name", "==", "'PDBx:group_PDB'", ":", "# ATOM or HETATM", "if", "tag_content", "==", "'ATOM'", ":", "current_atom_site", ".", "IsATOM", "=", "True", "elif", "tag_content", "==", "'HETATM'", ":", "current_atom_site", ".", "IsHETATM", "=", "True", "else", ":", "raise", "Exception", "(", "\"PDBx:group_PDB was expected to be 'ATOM' or 'HETATM'. '%s' read instead.\"", "%", "tag_content", ")", "# Residue identifier - chain ID, residue ID, insertion code", "elif", "name", "==", "'PDBx:auth_asym_id'", ":", "assert", "(", "not", "(", "current_atom_site", ".", "PDBChainID", ")", ")", "current_atom_site", ".", "PDBChainID", "=", "tag_content", "if", "not", "tag_content", ":", "assert", "(", "current_atom_site", ".", "PDBChainIDIsNull", ")", "if", "self", ".", "pdb_id", ".", "upper", "(", ")", "==", "'2MBP'", ":", "current_atom_site", ".", "PDBChainID", "=", "'A'", "# e.g. 
2MBP", "else", ":", "current_atom_site", ".", "PDBChainID", "=", "' '", "elif", "name", "==", "'PDBx:auth_seq_id'", ":", "assert", "(", "not", "(", "current_atom_site", ".", "ATOMResidueID", ")", ")", "current_atom_site", ".", "ATOMResidueID", "=", "int", "(", "tag_content", ")", "elif", "name", "==", "\"PDBx:pdbx_PDB_ins_code\"", ":", "if", "current_atom_site", ".", "ATOMResidueiCodeIsNull", ":", "assert", "(", "len", "(", "tag_content", ")", "==", "0", ")", "else", ":", "assert", "(", "current_atom_site", ".", "ATOMResidueiCode", "==", "' '", ")", "current_atom_site", ".", "ATOMResidueiCode", "=", "tag_content", "elif", "name", "==", "\"PDBx:auth_comp_id\"", ":", "assert", "(", "not", "(", "current_atom_site", ".", "ATOMResidueAA", ")", ")", "current_atom_site", ".", "ATOMResidueAA", "=", "tag_content", "elif", "name", "==", "\"PDBx:label_seq_id\"", ":", "assert", "(", "not", "(", "current_atom_site", ".", "SEQRESIndex", ")", ")", "current_atom_site", ".", "SEQRESIndex", "=", "int", "(", "tag_content", ")", "elif", "name", "==", "\"PDBx:label_comp_id\"", ":", "assert", "(", "not", "(", "current_atom_site", ".", "ATOMSeqresResidueAA", ")", ")", "current_atom_site", ".", "ATOMSeqresResidueAA", "=", "tag_content" ]
Parse the atom tag data.
[ "Parse", "the", "atom", "tag", "data", "." ]
6d410ad08f1bd9f7cbbb28d7d946e94fbaaa2b6b
https://github.com/Kortemme-Lab/klab/blob/6d410ad08f1bd9f7cbbb28d7d946e94fbaaa2b6b/klab/bio/pdbml.py#L482-L550
train
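For readers unfamiliar with the streaming-parser style used here, the sketch below illustrates the general SAX pattern that parse_atom_tag_data participates in: character data is buffered between tags and dispatched on each closing tag name. This is a minimal, self-contained illustration of the pattern, not klab's actual handler class; the class name, the two element names handled, and the inline XML document are all chosen for the example.

import xml.sax

class MinimalAtomSiteHandler(xml.sax.ContentHandler):
    def __init__(self):
        super().__init__()
        self.tag_content = ''
        self.current = {}
        self.records = []

    def characters(self, content):
        self.tag_content += content  # SAX may deliver text in chunks; accumulate

    def startElement(self, name, attrs):
        self.tag_content = ''  # reset the buffer for the new element
        if name == 'PDBx:atom_site':
            self.current = {}

    def endElement(self, name):
        # Dispatch on the closing tag name, as parse_atom_tag_data does
        if name == 'PDBx:group_PDB':
            self.current['record_type'] = self.tag_content.strip()
        elif name == 'PDBx:auth_seq_id':
            self.current['residue_id'] = int(self.tag_content.strip())
        elif name == 'PDBx:atom_site':
            self.records.append(self.current)

doc = b"""<root><PDBx:atom_site>
<PDBx:group_PDB>ATOM</PDBx:group_PDB>
<PDBx:auth_seq_id>42</PDBx:auth_seq_id>
</PDBx:atom_site></root>"""
handler = MinimalAtomSiteHandler()
xml.sax.parseString(doc, handler)
print(handler.records)  # [{'record_type': 'ATOM', 'residue_id': 42}]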
Kortemme-Lab/klab
klab/bio/pdbml.py
PDBML.create_atom_data
def create_atom_data(self):
    '''The atom site work is split into two parts. This function type-converts the tags.'''
    current_atom_site = self.current_atom_site
    # Only parse ATOM records
    if current_atom_site.IsHETATM:
        # Early out - do not parse HETATM records
        return None, None, None, None
    elif current_atom_site.IsATOM:
        return current_atom_site.convert_to_residue(self.modified_residues)
    else:
        raise Exception('current_atom_site')
python
def create_atom_data(self):
    '''The atom site work is split into two parts. This function type-converts the tags.'''
    current_atom_site = self.current_atom_site
    # Only parse ATOM records
    if current_atom_site.IsHETATM:
        # Early out - do not parse HETATM records
        return None, None, None, None
    elif current_atom_site.IsATOM:
        return current_atom_site.convert_to_residue(self.modified_residues)
    else:
        raise Exception('current_atom_site')
[ "def", "create_atom_data", "(", "self", ")", ":", "current_atom_site", "=", "self", ".", "current_atom_site", "# Only parse ATOM records", "if", "current_atom_site", ".", "IsHETATM", ":", "# Early out - do not parse HETATM records", "return", "None", ",", "None", ",", "None", ",", "None", "elif", "current_atom_site", ".", "IsATOM", ":", "return", "current_atom_site", ".", "convert_to_residue", "(", "self", ".", "modified_residues", ")", "else", ":", "raise", "Exception", "(", "'current_atom_site'", ")" ]
The atom site work is split into two parts. This function type-converts the tags.
[ "The", "atom", "site", "work", "is", "split", "into", "two", "parts", ".", "This", "function", "type", "-", "converts", "the", "tags", "." ]
6d410ad08f1bd9f7cbbb28d7d946e94fbaaa2b6b
https://github.com/Kortemme-Lab/klab/blob/6d410ad08f1bd9f7cbbb28d7d946e94fbaaa2b6b/klab/bio/pdbml.py#L552-L564
train
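A hedged caller-side sketch for create_atom_data: because HETATM records return a four-element tuple of Nones rather than a bare None, a consumer can unpack unconditionally and branch on the first element. The handler variable below is hypothetical and stands in for a PDBML instance mid-parse.

# Hypothetical caller; `handler` stands in for a PDBML instance mid-parse.
r, seqres, residue_aa, residue_3aa = handler.create_atom_data()
if r is not None:
    print('Residue %s maps to SEQRES index %d' % (r, seqres))
# else: HETATM record, nothing to map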
aacanakin/glim
glim/utils.py
import_source
def import_source(module, path, pass_errors=False):
    """
    Function imports a module given full path

    Args
    ----
    module (string): the module name
    path (string): the full path of module
    pass_errors(boolean): the switch for function to skip errors or not.

    Returns
    -------
    module (module): the module object.

    Raises
    ------
    e (Exception): any kind of exceptions during importing.
    """
    try:
        m = imp.load_source(module, path)
        return m
    except Exception as e:
        return None
python
def import_source(module, path, pass_errors=False):
    """
    Function imports a module given full path

    Args
    ----
    module (string): the module name
    path (string): the full path of module
    pass_errors(boolean): the switch for function to skip errors or not.

    Returns
    -------
    module (module): the module object.

    Raises
    ------
    e (Exception): any kind of exceptions during importing.
    """
    try:
        m = imp.load_source(module, path)
        return m
    except Exception as e:
        return None
[ "def", "import_source", "(", "module", ",", "path", ",", "pass_errors", "=", "False", ")", ":", "try", ":", "m", "=", "imp", ".", "load_source", "(", "module", ",", "path", ")", "return", "m", "except", "Exception", "as", "e", ":", "return", "None" ]
Function imports a module given full path

Args
----
module (string): the module name
path (string): the full path of module
pass_errors(boolean): the switch for function to skip errors or not.

Returns
-------
module (module): the module object.

Raises
------
e (Exception): any kind of exceptions during importing.
[ "Function", "imports", "a", "module", "given", "full", "path" ]
71a20ac149a1292c0d6c1dc7414985ea51854f7a
https://github.com/aacanakin/glim/blob/71a20ac149a1292c0d6c1dc7414985ea51854f7a/glim/utils.py#L17-L40
train
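Two things worth noting about import_source: it accepts a pass_errors flag but never reads it (every exception is swallowed and None is returned), and the imp module it relies on is deprecated and was removed in Python 3.12. Below is a behaviour-compatible sketch using importlib instead; the function name and the commented usage path are illustrative, not part of glim.

import importlib.util

def import_source_sketch(module, path):
    # Load a module from an explicit file path, returning None on any failure,
    # mirroring import_source's swallow-everything behaviour.
    try:
        spec = importlib.util.spec_from_file_location(module, path)
        m = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(m)
        return m
    except Exception:
        return None

# Hypothetical usage:
# settings = import_source_sketch('settings', '/path/to/app/settings.py')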
aacanakin/glim
glim/utils.py
import_module
def import_module(module, pass_errors=False):
    """
    Function imports a module given module name

    Args
    ----
    module (string): the module name
    pass_errors(boolean): the switch for function to skip errors or not.

    Returns
    -------
    module (module): the module object.

    Raises
    ------
    exception (Exception): any kind of exceptions during importing.
    import_error(ImportError): import errors during importing.

    Note: pass_errors switch will not pass any errors other than ImportError
    """
    frm = module.split('.')
    try:
        m = __import__(module, fromlist=[frm[1]])
        return m
    except ImportError as e:
        if pass_errors:
            return None
        else:
            print(traceback.format_exc())
            return None
    except Exception as e:
        print(traceback.format_exc())
        return None
python
def import_module(module, pass_errors=False):
    """
    Function imports a module given module name

    Args
    ----
    module (string): the module name
    pass_errors(boolean): the switch for function to skip errors or not.

    Returns
    -------
    module (module): the module object.

    Raises
    ------
    exception (Exception): any kind of exceptions during importing.
    import_error(ImportError): import errors during importing.

    Note: pass_errors switch will not pass any errors other than ImportError
    """
    frm = module.split('.')
    try:
        m = __import__(module, fromlist=[frm[1]])
        return m
    except ImportError as e:
        if pass_errors:
            return None
        else:
            print(traceback.format_exc())
            return None
    except Exception as e:
        print(traceback.format_exc())
        return None
[ "def", "import_module", "(", "module", ",", "pass_errors", "=", "False", ")", ":", "frm", "=", "module", ".", "split", "(", "'.'", ")", "try", ":", "m", "=", "__import__", "(", "module", ",", "fromlist", "=", "[", "frm", "[", "1", "]", "]", ")", "return", "m", "except", "ImportError", "as", "e", ":", "if", "pass_errors", ":", "return", "None", "else", ":", "print", "(", "traceback", ".", "format_exc", "(", ")", ")", "return", "None", "except", "Exception", "as", "e", ":", "print", "(", "traceback", ".", "format_exc", "(", ")", ")", "return", "None" ]
Function imports a module given module name

Args
----
module (string): the module name
pass_errors(boolean): the switch for function to skip errors or not.

Returns
-------
module (module): the module object.

Raises
------
exception (Exception): any kind of exceptions during importing.
import_error(ImportError): import errors during importing.

Note: pass_errors switch will not pass any errors other than ImportError
[ "Function", "imports", "a", "module", "given", "module", "name" ]
71a20ac149a1292c0d6c1dc7414985ea51854f7a
https://github.com/aacanakin/glim/blob/71a20ac149a1292c0d6c1dc7414985ea51854f7a/glim/utils.py#L45-L79
train
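A usage sketch for import_module. One gotcha worth knowing: the frm[1] lookup assumes a dotted path, so a bare top-level name such as 'os' raises IndexError inside the try block, which the generic except handler prints and converts to None. The example below therefore sticks to dotted names; the missing-module name is hypothetical.

# Dotted path: __import__ with a non-empty fromlist returns the submodule itself.
m = import_module('os.path')
if m is not None:
    print(m.join('a', 'b'))  # 'a/b' on POSIX systems

# Missing module with pass_errors=True: the ImportError is silently swallowed.
missing = import_module('no.such.module', pass_errors=True)
assert missing is None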
aacanakin/glim
glim/utils.py
copytree
def copytree(src, dst, symlinks=False, ignore=None):
    """
    Function recursively copies from directory to directory.

    Args
    ----
    src (string): the full path of source directory
    dst (string): the full path of destination directory
    symlinks (boolean): the switch for tracking symlinks
    ignore (list): the ignore list
    """
    if not os.path.exists(dst):
        os.mkdir(dst)
    try:
        for item in os.listdir(src):
            s = os.path.join(src, item)
            d = os.path.join(dst, item)
            if os.path.isdir(s):
                shutil.copytree(s, d, symlinks, ignore)
            else:
                shutil.copy2(s, d)
    except Exception as e:
        raise FolderExistsError("Folder already exists in %s" % dst)
python
def copytree(src, dst, symlinks=False, ignore=None):
    """
    Function recursively copies from directory to directory.

    Args
    ----
    src (string): the full path of source directory
    dst (string): the full path of destination directory
    symlinks (boolean): the switch for tracking symlinks
    ignore (list): the ignore list
    """
    if not os.path.exists(dst):
        os.mkdir(dst)
    try:
        for item in os.listdir(src):
            s = os.path.join(src, item)
            d = os.path.join(dst, item)
            if os.path.isdir(s):
                shutil.copytree(s, d, symlinks, ignore)
            else:
                shutil.copy2(s, d)
    except Exception as e:
        raise FolderExistsError("Folder already exists in %s" % dst)
[ "def", "copytree", "(", "src", ",", "dst", ",", "symlinks", "=", "False", ",", "ignore", "=", "None", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "dst", ")", ":", "os", ".", "mkdir", "(", "dst", ")", "try", ":", "for", "item", "in", "os", ".", "listdir", "(", "src", ")", ":", "s", "=", "os", ".", "path", ".", "join", "(", "src", ",", "item", ")", "d", "=", "os", ".", "path", ".", "join", "(", "dst", ",", "item", ")", "if", "os", ".", "path", ".", "isdir", "(", "s", ")", ":", "shutil", ".", "copytree", "(", "s", ",", "d", ",", "symlinks", ",", "ignore", ")", "else", ":", "shutil", ".", "copy2", "(", "s", ",", "d", ")", "except", "Exception", "as", "e", ":", "raise", "FolderExistsError", "(", "\"Folder already exists in %s\"", "%", "dst", ")" ]
Function recursively copies from directory to directory.

Args
----
src (string): the full path of source directory
dst (string): the full path of destination directory
symlinks (boolean): the switch for tracking symlinks
ignore (list): the ignore list
[ "Function", "recursively", "copies", "from", "directory", "to", "directory", "." ]
71a20ac149a1292c0d6c1dc7414985ea51854f7a
https://github.com/aacanakin/glim/blob/71a20ac149a1292c0d6c1dc7414985ea51854f7a/glim/utils.py#L84-L106
train
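A usage sketch for copytree with hypothetical paths. Two behaviours to note: unlike shutil.copytree, it tolerates a pre-existing top-level destination directory, and because every failure in the copy loop is reported as FolderExistsError, the exception message can be misleading for errors that have nothing to do with existing folders (for example, permission problems).

import os

# Build a tiny hypothetical source tree, then copy it.
os.makedirs('/tmp/project_template/app', exist_ok=True)
open('/tmp/project_template/app/__init__.py', 'w').close()

copytree('/tmp/project_template', '/tmp/new_project')
assert os.path.exists('/tmp/new_project/app/__init__.py')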
aacanakin/glim
glim/utils.py
empty
def empty(key, dict):
    """
    Function determines if the dict key exists or it is empty

    Args
    ----
    key (string): the dict key
    dict (dict): the dict to be searched
    """
    if key in dict.keys():
        if dict[key]:
            return False
    return True
python
def empty(key, dict):
    """
    Function determines if the dict key exists or it is empty

    Args
    ----
    key (string): the dict key
    dict (dict): the dict to be searched
    """
    if key in dict.keys():
        if dict[key]:
            return False
    return True
[ "def", "empty", "(", "key", ",", "dict", ")", ":", "if", "key", "in", "dict", ".", "keys", "(", ")", ":", "if", "dict", "[", "key", "]", ":", "return", "False", "return", "True" ]
Function determines if the dict key exists or it is empty

Args
----
key (string): the dict key
dict (dict): the dict to be searched
[ "Function", "determines", "if", "the", "dict", "key", "exists", "or", "it", "is", "empty" ]
71a20ac149a1292c0d6c1dc7414985ea51854f7a
https://github.com/aacanakin/glim/blob/71a20ac149a1292c0d6c1dc7414985ea51854f7a/glim/utils.py#L109-L121
train
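A usage sketch for empty. Note that it deliberately conflates "key absent" with "value falsy", so an explicit False, 0, or empty list all count as empty; use a plain `in` test instead if that distinction matters. The config dict below is illustrative.

config = {'debug': True, 'extensions': [], 'workers': 0}
assert empty('missing', config) is True      # key absent
assert empty('extensions', config) is True   # key present but falsy
assert empty('workers', config) is True      # 0 counts as empty too
assert empty('debug', config) is False       # key present and truthy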