Columns (10k rows):

| column | dtype | value lengths |
|---|---|---|
| Unnamed: 0 | int64 | 0 to 10k |
| repository_name | string | 7 to 54 |
| func_path_in_repository | string | 5 to 223 |
| func_name | string | 1 to 134 |
| whole_func_string | string | 100 to 30.3k |
| language | string (1 class) | single value |
| func_code_string | string | 100 to 30.3k |
| func_code_tokens | string | 138 to 33.2k |
| func_documentation_string | string | 1 to 15k |
| func_documentation_tokens | string | 5 to 5.14k |
| split_name | string (1 class) | single value |
| func_code_url | string | 91 to 315 |

Each row below lists these twelve columns in order, separated by `|`.
1,800 | Cito/DBUtils | DBUtils/SteadyDB.py | SteadyDBConnection.begin | def begin(self, *args, **kwargs):
"""Indicate the beginning of a transaction.
During a transaction, connections won't be transparently
replaced, and all errors will be raised to the application.
If the underlying driver supports this method, it will be called
with the given parameters (e.g. for distributed transactions).
"""
self._transaction = True
try:
begin = self._con.begin
except AttributeError:
pass
else:
begin(*args, **kwargs) | python | def begin(self, *args, **kwargs):
"""Indicate the beginning of a transaction.
During a transaction, connections won't be transparently
replaced, and all errors will be raised to the application.
If the underlying driver supports this method, it will be called
with the given parameters (e.g. for distributed transactions).
"""
self._transaction = True
try:
begin = self._con.begin
except AttributeError:
pass
else:
begin(*args, **kwargs) | ['def', 'begin', '(', 'self', ',', '*', 'args', ',', '*', '*', 'kwargs', ')', ':', 'self', '.', '_transaction', '=', 'True', 'try', ':', 'begin', '=', 'self', '.', '_con', '.', 'begin', 'except', 'AttributeError', ':', 'pass', 'else', ':', 'begin', '(', '*', 'args', ',', '*', '*', 'kwargs', ')'] | Indicate the beginning of a transaction.
During a transaction, connections won't be transparently
replaced, and all errors will be raised to the application.
If the underlying driver supports this method, it will be called
with the given parameters (e.g. for distributed transactions). | ['Indicate', 'the', 'beginning', 'of', 'a', 'transaction', '.'] | train | https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/SteadyDB.py#L409-L425 |
1,801 | stanfordnlp/stanza | stanza/nlp/corenlp.py | CoreNLPClient.annotate_json | def annotate_json(self, text, annotators=None):
"""Return a JSON dict from the CoreNLP server, containing annotations of the text.
:param (str) text: Text to annotate.
:param (list[str]) annotators: a list of annotator names
:return (dict): a dict of annotations
"""
# WARN(chaganty): I'd like to deprecate this function -- we
# should just use annotate().json
#properties = {
# 'annotators': ','.join(annotators or self.default_annotators),
# 'outputFormat': 'json',
#}
#return self._request(text, properties).json(strict=False)
doc = self.annotate(text, annotators)
return doc.json | python | def annotate_json(self, text, annotators=None):
"""Return a JSON dict from the CoreNLP server, containing annotations of the text.
:param (str) text: Text to annotate.
:param (list[str]) annotators: a list of annotator names
:return (dict): a dict of annotations
"""
# WARN(chaganty): I'd like to deprecate this function -- we
# should just use annotate().json
#properties = {
# 'annotators': ','.join(annotators or self.default_annotators),
# 'outputFormat': 'json',
#}
#return self._request(text, properties).json(strict=False)
doc = self.annotate(text, annotators)
return doc.json | ['def', 'annotate_json', '(', 'self', ',', 'text', ',', 'annotators', '=', 'None', ')', ':', "# WARN(chaganty): I'd like to deprecate this function -- we", '# should just use annotate().json', '#properties = {', "# 'annotators': ','.join(annotators or self.default_annotators),", "# 'outputFormat': 'json',", '#}', '#return self._request(text, properties).json(strict=False)', 'doc', '=', 'self', '.', 'annotate', '(', 'text', ',', 'annotators', ')', 'return', 'doc', '.', 'json'] | Return a JSON dict from the CoreNLP server, containing annotations of the text.
:param (str) text: Text to annotate.
:param (list[str]) annotators: a list of annotator names
:return (dict): a dict of annotations | ['Return', 'a', 'JSON', 'dict', 'from', 'the', 'CoreNLP', 'server', 'containing', 'annotations', 'of', 'the', 'text', '.'] | train | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/nlp/corenlp.py#L79-L96 |
1,802 | rgs1/zk_shell | zk_shell/shell.py | Shell.do_time | def do_time(self, params):
"""
\x1b[1mNAME\x1b[0m
time - Measures elapsed seconds after running commands
\x1b[1mSYNOPSIS\x1b[0m
time <cmd1> <cmd2> ... <cmdN>
\x1b[1mEXAMPLES\x1b[0m
> time 'loop 10 0 "create /foo_ bar ephemeral=false sequence=true"'
Took 0.05585 seconds
"""
start = time.time()
for cmd in params.cmds:
try:
self.onecmd(cmd)
except Exception as ex:
self.show_output("Command failed: %s.", ex)
elapsed = "{0:.5f}".format(time.time() - start)
self.show_output("Took %s seconds" % elapsed) | python | def do_time(self, params):
"""
\x1b[1mNAME\x1b[0m
time - Measures elapsed seconds after running commands
\x1b[1mSYNOPSIS\x1b[0m
time <cmd1> <cmd2> ... <cmdN>
\x1b[1mEXAMPLES\x1b[0m
> time 'loop 10 0 "create /foo_ bar ephemeral=false sequence=true"'
Took 0.05585 seconds
"""
start = time.time()
for cmd in params.cmds:
try:
self.onecmd(cmd)
except Exception as ex:
self.show_output("Command failed: %s.", ex)
elapsed = "{0:.5f}".format(time.time() - start)
self.show_output("Took %s seconds" % elapsed) | ['def', 'do_time', '(', 'self', ',', 'params', ')', ':', 'start', '=', 'time', '.', 'time', '(', ')', 'for', 'cmd', 'in', 'params', '.', 'cmds', ':', 'try', ':', 'self', '.', 'onecmd', '(', 'cmd', ')', 'except', 'Exception', 'as', 'ex', ':', 'self', '.', 'show_output', '(', '"Command failed: %s."', ',', 'ex', ')', 'elapsed', '=', '"{0:.5f}"', '.', 'format', '(', 'time', '.', 'time', '(', ')', '-', 'start', ')', 'self', '.', 'show_output', '(', '"Took %s seconds"', '%', 'elapsed', ')'] | \x1b[1mNAME\x1b[0m
time - Measures elapsed seconds after running commands
\x1b[1mSYNOPSIS\x1b[0m
time <cmd1> <cmd2> ... <cmdN>
\x1b[1mEXAMPLES\x1b[0m
> time 'loop 10 0 "create /foo_ bar ephemeral=false sequence=true"'
Took 0.05585 seconds | ['\\', 'x1b', '[', '1mNAME', '\\', 'x1b', '[', '0m', 'time', '-', 'Measures', 'elapsed', 'seconds', 'after', 'running', 'commands'] | train | https://github.com/rgs1/zk_shell/blob/bbf34fdfcf1f81100e2a5816fad8af6afc782a54/zk_shell/shell.py#L2883-L2903 |
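The timing pattern itself is independent of ZooKeeper; a standalone sketch of the same loop, with `print` standing in for `self.onecmd` and `self.show_output`:

```python
# Standalone sketch of do_time's pattern; the runner is a stand-in.
import time

def timed(cmds, run):
    start = time.time()
    for cmd in cmds:
        try:
            run(cmd)
        except Exception as ex:
            print('Command failed: %s.' % ex)
    print('Took {0:.5f} seconds'.format(time.time() - start))

timed(['hello', 'world'], print)  # prints both commands, then the elapsed time
```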
1,803 | tensorflow/datasets | tensorflow_datasets/translate/wmt.py | _parse_tmx | def _parse_tmx(path):
"""Generates examples from TMX file."""
def _get_tuv_lang(tuv):
for k, v in tuv.items():
if k.endswith("}lang"):
return v
raise AssertionError("Language not found in `tuv` attributes.")
def _get_tuv_seg(tuv):
segs = tuv.findall("seg")
assert len(segs) == 1, "Invalid number of segments: %d" % len(segs)
return segs[0].text
with tf.io.gfile.GFile(path) as f:
for _, elem in ElementTree.iterparse(f):
if elem.tag == "tu":
yield {
_get_tuv_lang(tuv):
_get_tuv_seg(tuv) for tuv in elem.iterfind("tuv")
}
elem.clear() | python | def _parse_tmx(path):
"""Generates examples from TMX file."""
def _get_tuv_lang(tuv):
for k, v in tuv.items():
if k.endswith("}lang"):
return v
raise AssertionError("Language not found in `tuv` attributes.")
def _get_tuv_seg(tuv):
segs = tuv.findall("seg")
assert len(segs) == 1, "Invalid number of segments: %d" % len(segs)
return segs[0].text
with tf.io.gfile.GFile(path) as f:
for _, elem in ElementTree.iterparse(f):
if elem.tag == "tu":
yield {
_get_tuv_lang(tuv):
_get_tuv_seg(tuv) for tuv in elem.iterfind("tuv")
}
elem.clear() | ['def', '_parse_tmx', '(', 'path', ')', ':', 'def', '_get_tuv_lang', '(', 'tuv', ')', ':', 'for', 'k', ',', 'v', 'in', 'tuv', '.', 'items', '(', ')', ':', 'if', 'k', '.', 'endswith', '(', '"}lang"', ')', ':', 'return', 'v', 'raise', 'AssertionError', '(', '"Language not found in `tuv` attributes."', ')', 'def', '_get_tuv_seg', '(', 'tuv', ')', ':', 'segs', '=', 'tuv', '.', 'findall', '(', '"seg"', ')', 'assert', 'len', '(', 'segs', ')', '==', '1', ',', '"Invalid number of segments: %d"', '%', 'len', '(', 'segs', ')', 'return', 'segs', '[', '0', ']', '.', 'text', 'with', 'tf', '.', 'io', '.', 'gfile', '.', 'GFile', '(', 'path', ')', 'as', 'f', ':', 'for', '_', ',', 'elem', 'in', 'ElementTree', '.', 'iterparse', '(', 'f', ')', ':', 'if', 'elem', '.', 'tag', '==', '"tu"', ':', 'yield', '{', '_get_tuv_lang', '(', 'tuv', ')', ':', '_get_tuv_seg', '(', 'tuv', ')', 'for', 'tuv', 'in', 'elem', '.', 'iterfind', '(', '"tuv"', ')', '}', 'elem', '.', 'clear', '(', ')'] | Generates examples from TMX file. | ['Generates', 'examples', 'from', 'TMX', 'file', '.'] | train | https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/translate/wmt.py#L838-L858 |
1,804 | dnanexus/dx-toolkit | src/python/dxpy/bindings/search.py | find_projects | def find_projects(name=None, name_mode='exact', properties=None, tags=None,
level=None, describe=False, explicit_perms=None, region=None,
public=None, created_after=None, created_before=None, billed_to=None,
limit=None, return_handler=False, first_page_size=100, containsPHI=None, **kwargs):
"""
:param name: Name of the project (also see *name_mode*)
:type name: string
:param name_mode: Method by which to interpret the *name* field ("exact": exact match, "glob": use "*" and "?" as wildcards, "regexp": interpret as a regular expression)
:type name_mode: string
:param properties: Properties (key-value pairs) that each result must have (use value True to require the property key and allow any value)
:type properties: dict
:param tags: Tags that each result must have
:type tags: list of strings
:param level: One of "VIEW", "UPLOAD", "CONTRIBUTE", or "ADMINISTER". If specified, only returns projects where the current user has at least the specified permission level.
:type level: string
:param describe: Controls whether to also return the output of
calling describe() on each project. Supply False to omit
describe output, True to obtain the default describe output, or
a dict to be supplied as the describe call input (which may be
used to customize the set of fields that is returned)
:type describe: bool or dict
:param explicit_perms: Filter on presence of an explicit permission. If True, matching projects must have an explicit permission (any permission granted directly to the user or an organization to which the user belongs). If False, matching projects must not have any explicit permissions for the user. (default is None, for no filter)
:type explicit_perms: boolean or None
:param region: If specified, only returns projects where the project is in the given region.
:type region: string
:param public: Filter on the project being public. If True, matching projects must be public. If False, matching projects must not be public. (default is None, for no filter)
:type public: boolean or None
:param created_after: Timestamp after which each result was created
(see note accompanying :meth:`find_data_objects()` for interpretation)
:type created_after: int or string
:param created_before: Timestamp before which each result was created
(see note accompanying :meth:`find_data_objects()` for interpretation)
:type created_before: int or string
:param billed_to: Entity ID (user or organization) that pays for the project's storage costs
:type billed_to: string
:param limit: The maximum number of results to be returned (if not specified, the number of results is unlimited)
:type limit: int
:param first_page_size: The number of results that the initial API call will return. Subsequent calls will raise this by multiplying by 2 up to a maximum of 1000.
:type first_page_size: int
:param return_handler: If True, yields results as dxpy object handlers (otherwise, yields each result as a dict with keys "id" and "project")
:type return_handler: boolean
:param containsPHI: If set to true, only returns projects that contain PHI.
If set to false, only returns projects that do not contain PHI.
:type containsPHI: boolean
:rtype: generator
Returns a generator that yields all projects that match the query.
It transparently handles paging through the result set if necessary.
For all parameters that are omitted, the search is not restricted by
the corresponding field.
You can use the *level* parameter to find projects that the user has
at least a specific level of access to (e.g. "CONTRIBUTE").
"""
query = {}
if name is not None:
if name_mode == 'exact':
query['name'] = name
elif name_mode == 'glob':
query['name'] = {'glob': name}
elif name_mode == 'regexp':
query['name'] = {'regexp': name}
else:
raise DXError('find_projects: Unexpected value found for argument name_mode')
if properties is not None:
query["properties"] = properties
if tags is not None:
query["tags"] = {"$and": tags}
if level is not None:
query["level"] = level
if describe is not None and describe is not False:
query["describe"] = describe
if explicit_perms is not None:
query['explicitPermission'] = explicit_perms
if public is not None:
query['public'] = public
if created_after is not None or created_before is not None:
query["created"] = {}
if created_after is not None:
query["created"]["after"] = dxpy.utils.normalize_time_input(created_after)
if created_before is not None:
query["created"]["before"] = dxpy.utils.normalize_time_input(created_before)
if region is not None:
query['region'] = region
if billed_to is not None:
query['billTo'] = billed_to
if limit is not None:
query["limit"] = limit
if containsPHI is not None:
query["containsPHI"] = containsPHI
return _find(dxpy.api.system_find_projects, query, limit, return_handler, first_page_size, **kwargs) | python | def find_projects(name=None, name_mode='exact', properties=None, tags=None,
level=None, describe=False, explicit_perms=None, region=None,
public=None, created_after=None, created_before=None, billed_to=None,
limit=None, return_handler=False, first_page_size=100, containsPHI=None, **kwargs):
"""
:param name: Name of the project (also see *name_mode*)
:type name: string
:param name_mode: Method by which to interpret the *name* field ("exact": exact match, "glob": use "*" and "?" as wildcards, "regexp": interpret as a regular expression)
:type name_mode: string
:param properties: Properties (key-value pairs) that each result must have (use value True to require the property key and allow any value)
:type properties: dict
:param tags: Tags that each result must have
:type tags: list of strings
:param level: One of "VIEW", "UPLOAD", "CONTRIBUTE", or "ADMINISTER". If specified, only returns projects where the current user has at least the specified permission level.
:type level: string
:param describe: Controls whether to also return the output of
calling describe() on each project. Supply False to omit
describe output, True to obtain the default describe output, or
a dict to be supplied as the describe call input (which may be
used to customize the set of fields that is returned)
:type describe: bool or dict
:param explicit_perms: Filter on presence of an explicit permission. If True, matching projects must have an explicit permission (any permission granted directly to the user or an organization to which the user belongs). If False, matching projects must not have any explicit permissions for the user. (default is None, for no filter)
:type explicit_perms: boolean or None
:param region: If specified, only returns projects where the project is in the given region.
:type region: string
:param public: Filter on the project being public. If True, matching projects must be public. If False, matching projects must not be public. (default is None, for no filter)
:type public: boolean or None
:param created_after: Timestamp after which each result was created
(see note accompanying :meth:`find_data_objects()` for interpretation)
:type created_after: int or string
:param created_before: Timestamp before which each result was created
(see note accompanying :meth:`find_data_objects()` for interpretation)
:type created_before: int or string
:param billed_to: Entity ID (user or organization) that pays for the project's storage costs
:type billed_to: string
:param limit: The maximum number of results to be returned (if not specified, the number of results is unlimited)
:type limit: int
:param first_page_size: The number of results that the initial API call will return. Subsequent calls will raise this by multiplying by 2 up to a maximum of 1000.
:type first_page_size: int
:param return_handler: If True, yields results as dxpy object handlers (otherwise, yields each result as a dict with keys "id" and "project")
:type return_handler: boolean
:param containsPHI: If set to true, only returns projects that contain PHI.
If set to false, only returns projects that do not contain PHI.
:type containsPHI: boolean
:rtype: generator
Returns a generator that yields all projects that match the query.
It transparently handles paging through the result set if necessary.
For all parameters that are omitted, the search is not restricted by
the corresponding field.
You can use the *level* parameter to find projects that the user has
at least a specific level of access to (e.g. "CONTRIBUTE").
"""
query = {}
if name is not None:
if name_mode == 'exact':
query['name'] = name
elif name_mode == 'glob':
query['name'] = {'glob': name}
elif name_mode == 'regexp':
query['name'] = {'regexp': name}
else:
raise DXError('find_projects: Unexpected value found for argument name_mode')
if properties is not None:
query["properties"] = properties
if tags is not None:
query["tags"] = {"$and": tags}
if level is not None:
query["level"] = level
if describe is not None and describe is not False:
query["describe"] = describe
if explicit_perms is not None:
query['explicitPermission'] = explicit_perms
if public is not None:
query['public'] = public
if created_after is not None or created_before is not None:
query["created"] = {}
if created_after is not None:
query["created"]["after"] = dxpy.utils.normalize_time_input(created_after)
if created_before is not None:
query["created"]["before"] = dxpy.utils.normalize_time_input(created_before)
if region is not None:
query['region'] = region
if billed_to is not None:
query['billTo'] = billed_to
if limit is not None:
query["limit"] = limit
if containsPHI is not None:
query["containsPHI"] = containsPHI
return _find(dxpy.api.system_find_projects, query, limit, return_handler, first_page_size, **kwargs) | ['def', 'find_projects', '(', 'name', '=', 'None', ',', 'name_mode', '=', "'exact'", ',', 'properties', '=', 'None', ',', 'tags', '=', 'None', ',', 'level', '=', 'None', ',', 'describe', '=', 'False', ',', 'explicit_perms', '=', 'None', ',', 'region', '=', 'None', ',', 'public', '=', 'None', ',', 'created_after', '=', 'None', ',', 'created_before', '=', 'None', ',', 'billed_to', '=', 'None', ',', 'limit', '=', 'None', ',', 'return_handler', '=', 'False', ',', 'first_page_size', '=', '100', ',', 'containsPHI', '=', 'None', ',', '*', '*', 'kwargs', ')', ':', 'query', '=', '{', '}', 'if', 'name', 'is', 'not', 'None', ':', 'if', 'name_mode', '==', "'exact'", ':', 'query', '[', "'name'", ']', '=', 'name', 'elif', 'name_mode', '==', "'glob'", ':', 'query', '[', "'name'", ']', '=', '{', "'glob'", ':', 'name', '}', 'elif', 'name_mode', '==', "'regexp'", ':', 'query', '[', "'name'", ']', '=', '{', "'regexp'", ':', 'name', '}', 'else', ':', 'raise', 'DXError', '(', "'find_projects: Unexpected value found for argument name_mode'", ')', 'if', 'properties', 'is', 'not', 'None', ':', 'query', '[', '"properties"', ']', '=', 'properties', 'if', 'tags', 'is', 'not', 'None', ':', 'query', '[', '"tags"', ']', '=', '{', '"$and"', ':', 'tags', '}', 'if', 'level', 'is', 'not', 'None', ':', 'query', '[', '"level"', ']', '=', 'level', 'if', 'describe', 'is', 'not', 'None', 'and', 'describe', 'is', 'not', 'False', ':', 'query', '[', '"describe"', ']', '=', 'describe', 'if', 'explicit_perms', 'is', 'not', 'None', ':', 'query', '[', "'explicitPermission'", ']', '=', 'explicit_perms', 'if', 'public', 'is', 'not', 'None', ':', 'query', '[', "'public'", ']', '=', 'public', 'if', 'created_after', 'is', 'not', 'None', 'or', 'created_before', 'is', 'not', 'None', ':', 'query', '[', '"created"', ']', '=', '{', '}', 'if', 'created_after', 'is', 'not', 'None', ':', 'query', '[', '"created"', ']', '[', '"after"', ']', '=', 'dxpy', '.', 'utils', '.', 'normalize_time_input', '(', 'created_after', ')', 'if', 'created_before', 'is', 'not', 'None', ':', 'query', '[', '"created"', ']', '[', '"before"', ']', '=', 'dxpy', '.', 'utils', '.', 'normalize_time_input', '(', 'created_before', ')', 'if', 'region', 'is', 'not', 'None', ':', 'query', '[', "'region'", ']', '=', 'region', 'if', 'billed_to', 'is', 'not', 'None', ':', 'query', '[', "'billTo'", ']', '=', 'billed_to', 'if', 'limit', 'is', 'not', 'None', ':', 'query', '[', '"limit"', ']', '=', 'limit', 'if', 'containsPHI', 'is', 'not', 'None', ':', 'query', '[', '"containsPHI"', ']', '=', 'containsPHI', 'return', '_find', '(', 'dxpy', '.', 'api', '.', 'system_find_projects', ',', 'query', ',', 'limit', ',', 'return_handler', ',', 'first_page_size', ',', '*', '*', 'kwargs', ')'] | :param name: Name of the project (also see *name_mode*)
:type name: string
:param name_mode: Method by which to interpret the *name* field ("exact": exact match, "glob": use "*" and "?" as wildcards, "regexp": interpret as a regular expression)
:type name_mode: string
:param properties: Properties (key-value pairs) that each result must have (use value True to require the property key and allow any value)
:type properties: dict
:param tags: Tags that each result must have
:type tags: list of strings
:param level: One of "VIEW", "UPLOAD", "CONTRIBUTE", or "ADMINISTER". If specified, only returns projects where the current user has at least the specified permission level.
:type level: string
:param describe: Controls whether to also return the output of
calling describe() on each project. Supply False to omit
describe output, True to obtain the default describe output, or
a dict to be supplied as the describe call input (which may be
used to customize the set of fields that is returned)
:type describe: bool or dict
:param explicit_perms: Filter on presence of an explicit permission. If True, matching projects must have an explicit permission (any permission granted directly to the user or an organization to which the user belongs). If False, matching projects must not have any explicit permissions for the user. (default is None, for no filter)
:type explicit_perms: boolean or None
:param region: If specified, only returns projects where the project is in the given region.
:type region: string
:param public: Filter on the project being public. If True, matching projects must be public. If False, matching projects must not be public. (default is None, for no filter)
:type public: boolean or None
:param created_after: Timestamp after which each result was created
(see note accompanying :meth:`find_data_objects()` for interpretation)
:type created_after: int or string
:param created_before: Timestamp before which each result was created
(see note accompanying :meth:`find_data_objects()` for interpretation)
:type created_before: int or string
:param billed_to: Entity ID (user or organization) that pays for the project's storage costs
:type billed_to: string
:param limit: The maximum number of results to be returned (if not specified, the number of results is unlimited)
:type limit: int
:param first_page_size: The number of results that the initial API call will return. Subsequent calls will raise this by multiplying by 2 up to a maximum of 1000.
:type first_page_size: int
:param return_handler: If True, yields results as dxpy object handlers (otherwise, yields each result as a dict with keys "id" and "project")
:type return_handler: boolean
:param containsPHI: If set to true, only returns projects that contain PHI.
If set to false, only returns projects that do not contain PHI.
:type containsPHI: boolean
:rtype: generator
Returns a generator that yields all projects that match the query.
It transparently handles paging through the result set if necessary.
For all parameters that are omitted, the search is not restricted by
the corresponding field.
You can use the *level* parameter to find projects that the user has
at least a specific level of access to (e.g. "CONTRIBUTE"). | [':', 'param', 'name', ':', 'Name', 'of', 'the', 'project', '(', 'also', 'see', '*', 'name_mode', '*', ')', ':', 'type', 'name', ':', 'string', ':', 'param', 'name_mode', ':', 'Method', 'by', 'which', 'to', 'interpret', 'the', '*', 'name', '*', 'field', '(', 'exact', ':', 'exact', 'match', 'glob', ':', 'use', '*', 'and', '?', 'as', 'wildcards', 'regexp', ':', 'interpret', 'as', 'a', 'regular', 'expression', ')', ':', 'type', 'name_mode', ':', 'string', ':', 'param', 'properties', ':', 'Properties', '(', 'key', '-', 'value', 'pairs', ')', 'that', 'each', 'result', 'must', 'have', '(', 'use', 'value', 'True', 'to', 'require', 'the', 'property', 'key', 'and', 'allow', 'any', 'value', ')', ':', 'type', 'properties', ':', 'dict', ':', 'param', 'tags', ':', 'Tags', 'that', 'each', 'result', 'must', 'have', ':', 'type', 'tags', ':', 'list', 'of', 'strings', ':', 'param', 'level', ':', 'One', 'of', 'VIEW', 'UPLOAD', 'CONTRIBUTE', 'or', 'ADMINSTER', '.', 'If', 'specified', 'only', 'returns', 'projects', 'where', 'the', 'current', 'user', 'has', 'at', 'least', 'the', 'specified', 'permission', 'level', '.', ':', 'type', 'level', ':', 'string', ':', 'param', 'describe', ':', 'Controls', 'whether', 'to', 'also', 'return', 'the', 'output', 'of', 'calling', 'describe', '()', 'on', 'each', 'project', '.', 'Supply', 'False', 'to', 'omit', 'describe', 'output', 'True', 'to', 'obtain', 'the', 'default', 'describe', 'output', 'or', 'a', 'dict', 'to', 'be', 'supplied', 'as', 'the', 'describe', 'call', 'input', '(', 'which', 'may', 'be', 'used', 'to', 'customize', 'the', 'set', 'of', 'fields', 'that', 'is', 'returned', ')', ':', 'type', 'describe', ':', 'bool', 'or', 'dict', ':', 'param', 'explicit_perms', ':', 'Filter', 'on', 'presence', 'of', 'an', 'explicit', 'permision', '.', 'If', 'True', 'matching', 'projects', 'must', 'have', 'an', 'explicit', 'permission', '(', 'any', 'permission', 'granted', 'directly', 'to', 'the', 'user', 'or', 'an', 'organization', 'to', 'which', 'the', 'user', 'belongs', ')', '.', 'If', 'False', 'matching', 'projects', 'must', 'not', 'have', 'any', 'explicit', 'permissions', 'for', 'the', 'user', '.', '(', 'default', 'is', 'None', 'for', 'no', 'filter', ')', ':', 'type', 'explicit_perms', ':', 'boolean', 'or', 'None', ':', 'param', 'region', ':', 'If', 'specified', 'only', 'returns', 'projects', 'where', 'the', 'project', 'is', 'in', 'the', 'given', 'region', '.', ':', 'type', 'region', ':', 'string', ':', 'param', 'public', ':', 'Filter', 'on', 'the', 'project', 'being', 'public', '.', 'If', 'True', 'matching', 'projects', 'must', 'be', 'public', '.', 'If', 'False', 'matching', 'projects', 'must', 'not', 'be', 'public', '.', '(', 'default', 'is', 'None', 'for', 'no', 'filter', ')', ':', 'type', 'public', ':', 'boolean', 'or', 'None', ':', 'param', 'created_after', ':', 'Timestamp', 'after', 'which', 'each', 'result', 'was', 'created', '(', 'see', 'note', 'accompanying', ':', 'meth', ':', 'find_data_objects', '()', 'for', 'interpretation', ')', ':', 'type', 'created_after', ':', 'int', 'or', 'string', ':', 'param', 'created_before', ':', 'Timestamp', 'before', 'which', 'each', 'result', 'was', 'created', '(', 'see', 'note', 'accompanying', ':', 'meth', ':', 'find_data_objects', '()', 'for', 'interpretation', ')', ':', 'type', 'created_before', ':', 'int', 'or', 'string', ':', 'param', 'billed_to', ':', 'Entity', 'ID', '(', 'user', 'or', 'organization', ')', 'that', 'pays', 'for', 'the', 'project', 's', 'storage', 
'costs', ':', 'type', 'billed_to', ':', 'string', ':', 'param', 'limit', ':', 'The', 'maximum', 'number', 'of', 'results', 'to', 'be', 'returned', '(', 'if', 'not', 'specified', 'the', 'number', 'of', 'results', 'is', 'unlimited', ')', ':', 'type', 'limit', ':', 'int', ':', 'param', 'first_page_size', ':', 'The', 'number', 'of', 'results', 'that', 'the', 'initial', 'API', 'call', 'will', 'return', '.', 'Subsequent', 'calls', 'will', 'raise', 'this', 'by', 'multiplying', 'by', '2', 'up', 'to', 'a', 'maximum', 'of', '1000', '.', ':', 'type', 'first_page_size', ':', 'int', ':', 'param', 'return_handler', ':', 'If', 'True', 'yields', 'results', 'as', 'dxpy', 'object', 'handlers', '(', 'otherwise', 'yields', 'each', 'result', 'as', 'a', 'dict', 'with', 'keys', 'id', 'and', 'project', ')', ':', 'type', 'return_handler', ':', 'boolean', ':', 'param', 'containsPHI', ':', 'If', 'set', 'to', 'true', 'only', 'returns', 'projects', 'that', 'contain', 'PHI', '.', 'If', 'set', 'to', 'false', 'only', 'returns', 'projects', 'that', 'do', 'not', 'contain', 'PHI', '.', ':', 'type', 'containsPHI', ':', 'boolean', ':', 'rtype', ':', 'generator'] | train | https://github.com/dnanexus/dx-toolkit/blob/74befb53ad90fcf902d8983ae6d74580f402d619/src/python/dxpy/bindings/search.py#L434-L526 |
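A hedged call sketch (requires a configured DNAnexus session; `dxpy` re-exports this function at package level). The project-name pattern is illustrative:

```python
# Hypothetical query; requires DNAnexus credentials.
import dxpy

for result in dxpy.find_projects(name='analysis-*', name_mode='glob',
                                 level='CONTRIBUTE', describe=True):
    # each result is a dict; 'describe' is present because describe=True
    print(result['id'], result['describe']['name'])
```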
1,805 | ivilata/pymultihash | multihash/multihash.py | Multihash.encode | def encode(self, encoding=None):
r"""Encode into a multihash-encoded digest.
If `encoding` is `None`, a binary digest is produced:
>>> mh = Multihash(0x01, b'TEST')
>>> mh.encode()
b'\x01\x04TEST'
If the name of an `encoding` is specified, it is used to encode the
binary digest before returning it (see `CodecReg` for supported
codecs).
>>> mh.encode('base64')
b'AQRURVNU'
If the `encoding` is not available, a `KeyError` is raised.
"""
try:
fc = self.func.value
except AttributeError: # application-specific function code
fc = self.func
mhash = bytes([fc, len(self.digest)]) + self.digest
if encoding:
mhash = CodecReg.get_encoder(encoding)(mhash)
return mhash | python | def encode(self, encoding=None):
r"""Encode into a multihash-encoded digest.
If `encoding` is `None`, a binary digest is produced:
>>> mh = Multihash(0x01, b'TEST')
>>> mh.encode()
b'\x01\x04TEST'
If the name of an `encoding` is specified, it is used to encode the
binary digest before returning it (see `CodecReg` for supported
codecs).
>>> mh.encode('base64')
b'AQRURVNU'
If the `encoding` is not available, a `KeyError` is raised.
"""
try:
fc = self.func.value
except AttributeError: # application-specific function code
fc = self.func
mhash = bytes([fc, len(self.digest)]) + self.digest
if encoding:
mhash = CodecReg.get_encoder(encoding)(mhash)
return mhash | ['def', 'encode', '(', 'self', ',', 'encoding', '=', 'None', ')', ':', 'try', ':', 'fc', '=', 'self', '.', 'func', '.', 'value', 'except', 'AttributeError', ':', '# application-specific function code', 'fc', '=', 'self', '.', 'func', 'mhash', '=', 'bytes', '(', '[', 'fc', ',', 'len', '(', 'self', '.', 'digest', ')', ']', ')', '+', 'self', '.', 'digest', 'if', 'encoding', ':', 'mhash', '=', 'CodecReg', '.', 'get_encoder', '(', 'encoding', ')', '(', 'mhash', ')', 'return', 'mhash'] | r"""Encode into a multihash-encoded digest.
If `encoding` is `None`, a binary digest is produced:
>>> mh = Multihash(0x01, b'TEST')
>>> mh.encode()
b'\x01\x04TEST'
If the name of an `encoding` is specified, it is used to encode the
binary digest before returning it (see `CodecReg` for supported
codecs).
>>> mh.encode('base64')
b'AQRURVNU'
If the `encoding` is not available, a `KeyError` is raised. | ['r', 'Encode', 'into', 'a', 'multihash', '-', 'encoded', 'digest', '.'] | train | https://github.com/ivilata/pymultihash/blob/093365f20f6d8627c1fae13e0f4e0b35e9b39ad2/multihash/multihash.py#L120-L145 |
1,806 | consbio/gis-metadata-parser | gis_metadata/metadata_parser.py | MetadataParser._parse_dates | def _parse_dates(self, prop=DATES):
""" Creates and returns a Date Types data structure parsed from the metadata """
return parse_dates(self._xml_tree, self._data_structures[prop]) | python | def _parse_dates(self, prop=DATES):
""" Creates and returns a Date Types data structure parsed from the metadata """
return parse_dates(self._xml_tree, self._data_structures[prop]) | ['def', '_parse_dates', '(', 'self', ',', 'prop', '=', 'DATES', ')', ':', 'return', 'parse_dates', '(', 'self', '.', '_xml_tree', ',', 'self', '.', '_data_structures', '[', 'prop', ']', ')'] | Creates and returns a Date Types data structure parsed from the metadata | ['Creates', 'and', 'returns', 'a', 'Date', 'Types', 'data', 'structure', 'parsed', 'from', 'the', 'metadata'] | train | https://github.com/consbio/gis-metadata-parser/blob/59eefb2e51cd4d8cc3e94623a2167499ca9ef70f/gis_metadata/metadata_parser.py#L309-L312 |
1,807 | saltstack/salt | salt/thorium/check.py | lt | def lt(name, value):
'''
Only succeed if the value in the given register location is less than
the given value
USAGE:
.. code-block:: yaml
foo:
check.lt:
- value: 42
run_remote_ex:
local.cmd:
- tgt: '*'
- func: test.ping
- require:
- check: foo
'''
ret = {'name': name,
'result': False,
'comment': '',
'changes': {}}
if name not in __reg__:
ret['result'] = False
ret['comment'] = 'Value {0} not in register'.format(name)
return ret
if __reg__[name]['val'] < value:
ret['result'] = True
return ret | python | def lt(name, value):
'''
Only succeed if the value in the given register location is less than
the given value
USAGE:
.. code-block:: yaml
foo:
check.lt:
- value: 42
run_remote_ex:
local.cmd:
- tgt: '*'
- func: test.ping
- require:
- check: foo
'''
ret = {'name': name,
'result': False,
'comment': '',
'changes': {}}
if name not in __reg__:
ret['result'] = False
ret['comment'] = 'Value {0} not in register'.format(name)
return ret
if __reg__[name]['val'] < value:
ret['result'] = True
return ret | ['def', 'lt', '(', 'name', ',', 'value', ')', ':', 'ret', '=', '{', "'name'", ':', 'name', ',', "'result'", ':', 'False', ',', "'comment'", ':', "''", ',', "'changes'", ':', '{', '}', '}', 'if', 'name', 'not', 'in', '__reg__', ':', 'ret', '[', "'result'", ']', '=', 'False', 'ret', '[', "'comment'", ']', '=', "'Value {0} not in register'", '.', 'format', '(', 'name', ')', 'return', 'ret', 'if', '__reg__', '[', 'name', ']', '[', "'val'", ']', '<', 'value', ':', 'ret', '[', "'result'", ']', '=', 'True', 'return', 'ret'] | Only succeed if the value in the given register location is less than
the given value
USAGE:
.. code-block:: yaml
foo:
check.lt:
- value: 42
run_remote_ex:
local.cmd:
- tgt: '*'
- func: test.ping
- require:
- check: foo | ['Only', 'succeed', 'if', 'the', 'value', 'in', 'the', 'given', 'register', 'location', 'is', 'less', 'than', 'the', 'given', 'value'] | train | https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/thorium/check.py#L83-L113 |
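Thorium injects `__reg__` into the module at runtime; a standalone mimic of the comparison logic (register contents are made up):

```python
# Standalone mimic; in Salt Thorium, __reg__ is injected into the module.
__reg__ = {'foo': {'val': 17}}  # made-up register contents

def lt(name, value):
    ret = {'name': name, 'result': False, 'comment': '', 'changes': {}}
    if name not in __reg__:
        ret['comment'] = 'Value {0} not in register'.format(name)
        return ret
    ret['result'] = __reg__[name]['val'] < value
    return ret

print(lt('foo', 42))  # result: True, since 17 < 42
```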
1,808 | sdispater/eloquent | eloquent/migrations/database_migration_repository.py | DatabaseMigrationRepository.get_last | def get_last(self):
"""
Get the last migration batch.
:rtype: list
"""
query = self.table().where('batch', self.get_last_batch_number())
return query.order_by('migration', 'desc').get() | python | def get_last(self):
"""
Get the last migration batch.
:rtype: list
"""
query = self.table().where('batch', self.get_last_batch_number())
return query.order_by('migration', 'desc').get() | ['def', 'get_last', '(', 'self', ')', ':', 'query', '=', 'self', '.', 'table', '(', ')', '.', 'where', '(', "'batch'", ',', 'self', '.', 'get_last_batch_number', '(', ')', ')', 'return', 'query', '.', 'order_by', '(', "'migration'", ',', "'desc'", ')', '.', 'get', '(', ')'] | Get the last migration batch.
:rtype: list | ['Get', 'the', 'last', 'migration', 'batch', '.'] | train | https://github.com/sdispater/eloquent/blob/0638b688d5fd0c1a46b7471dd465eeb4c2f84666/eloquent/migrations/database_migration_repository.py#L25-L33 |
1,809 | openego/ding0 | ding0/core/network/__init__.py | GridDing0.find_and_union_paths | def find_and_union_paths(self, node_source, nodes_target):
""" Determines shortest paths from `node_source` to all nodes in `node_target` in _graph using find_path().
The branches of all paths are stored in a set - the result is a list of unique branches.
Args
----
node_source: GridDing0
source node, member of _graph
nodes_target: list of GridDing0
target nodes, members of _graph
Returns
-------
:any:`list` of :obj:`BranchDing0`
branches: list of branches (list of nodes in _graph) #TODO:check
"""
branches = set()
for node_target in nodes_target:
path = self.find_path(node_source, node_target)
node_pairs = list(zip(path[0:len(path) - 1], path[1:len(path)]))
for n1, n2 in node_pairs:
branches.add(self._graph.adj[n1][n2]['branch'])
return list(branches) | python | def find_and_union_paths(self, node_source, nodes_target):
""" Determines shortest paths from `node_source` to all nodes in `node_target` in _graph using find_path().
The branches of all paths are stored in a set - the result is a list of unique branches.
Args
----
node_source: GridDing0
source node, member of _graph
nodes_target: list of GridDing0
target nodes, members of _graph
Returns
-------
:any:`list` of :obj:`BranchDing0`
branches: list of branches (list of nodes in _graph) #TODO:check
"""
branches = set()
for node_target in nodes_target:
path = self.find_path(node_source, node_target)
node_pairs = list(zip(path[0:len(path) - 1], path[1:len(path)]))
for n1, n2 in node_pairs:
branches.add(self._graph.adj[n1][n2]['branch'])
return list(branches) | ['def', 'find_and_union_paths', '(', 'self', ',', 'node_source', ',', 'nodes_target', ')', ':', 'branches', '=', 'set', '(', ')', 'for', 'node_target', 'in', 'nodes_target', ':', 'path', '=', 'self', '.', 'find_path', '(', 'node_source', ',', 'node_target', ')', 'node_pairs', '=', 'list', '(', 'zip', '(', 'path', '[', '0', ':', 'len', '(', 'path', ')', '-', '1', ']', ',', 'path', '[', '1', ':', 'len', '(', 'path', ')', ']', ')', ')', 'for', 'n1', ',', 'n2', 'in', 'node_pairs', ':', 'branches', '.', 'add', '(', 'self', '.', '_graph', '.', 'adj', '[', 'n1', ']', '[', 'n2', ']', '[', "'branch'", ']', ')', 'return', 'list', '(', 'branches', ')'] | Determines shortest paths from `node_source` to all nodes in `node_target` in _graph using find_path().
The branches of all paths are stored in a set - the result is a list of unique branches.
Args
----
node_source: GridDing0
source node, member of _graph
node_target: GridDing0
target node, member of _graph
Returns
-------
:any:`list` of :obj:`BranchDing0`
branches: list of branches (list of nodes in _graph) #TODO:check | ['Determines', 'shortest', 'paths', 'from', 'node_source', 'to', 'all', 'nodes', 'in', 'node_target', 'in', '_graph', 'using', 'find_path', '()', '.'] | train | https://github.com/openego/ding0/blob/e2d6528f96255e4bb22ba15514a4f1883564ed5d/ding0/core/network/__init__.py#L361-L385 |
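The same union-of-path-edges idea sketched standalone with `networkx` (which ding0 builds on), de-duplicating edges shared by several paths through a set:

```python
# Standalone sketch: union the edge sets of several shortest paths.
import networkx as nx

g = nx.Graph()
g.add_edges_from([(1, 2), (2, 3), (2, 4), (4, 5)])

def union_paths(graph, source, targets):
    edges = set()
    for target in targets:
        path = nx.shortest_path(graph, source, target)
        edges.update(zip(path[:-1], path[1:]))  # consecutive node pairs
    return sorted(edges)

print(union_paths(g, 1, [3, 5]))  # [(1, 2), (2, 3), (2, 4), (4, 5)]
```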
1,810 | pywbem/pywbem | wbemcli.py | gc | def gc(cn, ns=None, lo=None, iq=None, ico=None, pl=None):
"""
This function is a wrapper for
:meth:`~pywbem.WBEMConnection.GetClass`.
Retrieve a class.
Parameters:
cn (:term:`string` or :class:`~pywbem.CIMClassName`):
Name of the class to be retrieved (case independent).
If specified as a `CIMClassName` object, its `host` attribute will be
ignored.
ns (:term:`string`):
Name of the CIM namespace to be used (case independent).
If `None`, defaults to the namespace of the `cn` parameter if
specified as a `CIMClassName`, or to the default namespace of the
connection.
lo (:class:`py:bool`):
LocalOnly flag: Exclude inherited properties.
`None` will cause the server default of `True` to be used.
iq (:class:`py:bool`):
IncludeQualifiers flag: Include qualifiers.
`None` will cause the server default of `True` to be used.
ico (:class:`py:bool`):
IncludeClassOrigin flag: Include class origin information for
properties and methods in the retrieved class.
`None` will cause the server default of `False` to be used.
pl (:term:`string` or :term:`py:iterable` of :term:`string`):
PropertyList: Names of properties to be included (if not otherwise
excluded). An empty iterable indicates to include no properties.
If `None`, all properties will be included.
Returns:
:class:`~pywbem.CIMClass`:
The retrieved class.
"""
return CONN.GetClass(cn, ns,
LocalOnly=lo,
IncludeQualifiers=iq,
IncludeClassOrigin=ico,
PropertyList=pl) | python | def gc(cn, ns=None, lo=None, iq=None, ico=None, pl=None):
"""
This function is a wrapper for
:meth:`~pywbem.WBEMConnection.GetClass`.
Retrieve a class.
Parameters:
cn (:term:`string` or :class:`~pywbem.CIMClassName`):
Name of the class to be retrieved (case independent).
If specified as a `CIMClassName` object, its `host` attribute will be
ignored.
ns (:term:`string`):
Name of the CIM namespace to be used (case independent).
If `None`, defaults to the namespace of the `cn` parameter if
specified as a `CIMClassName`, or to the default namespace of the
connection.
lo (:class:`py:bool`):
LocalOnly flag: Exclude inherited properties.
`None` will cause the server default of `True` to be used.
iq (:class:`py:bool`):
IncludeQualifiers flag: Include qualifiers.
`None` will cause the server default of `True` to be used.
ico (:class:`py:bool`):
IncludeClassOrigin flag: Include class origin information for
properties and methods in the retrieved class.
`None` will cause the server default of `False` to be used.
pl (:term:`string` or :term:`py:iterable` of :term:`string`):
PropertyList: Names of properties to be included (if not otherwise
excluded). An empty iterable indicates to include no properties.
If `None`, all properties will be included.
Returns:
:class:`~pywbem.CIMClass`:
The retrieved class.
"""
return CONN.GetClass(cn, ns,
LocalOnly=lo,
IncludeQualifiers=iq,
IncludeClassOrigin=ico,
PropertyList=pl) | ['def', 'gc', '(', 'cn', ',', 'ns', '=', 'None', ',', 'lo', '=', 'None', ',', 'iq', '=', 'None', ',', 'ico', '=', 'None', ',', 'pl', '=', 'None', ')', ':', 'return', 'CONN', '.', 'GetClass', '(', 'cn', ',', 'ns', ',', 'LocalOnly', '=', 'lo', ',', 'IncludeQualifiers', '=', 'iq', ',', 'IncludeClassOrigin', '=', 'ico', ',', 'PropertyList', '=', 'pl', ')'] | This function is a wrapper for
:meth:`~pywbem.WBEMConnection.GetClass`.
Retrieve a class.
Parameters:
cn (:term:`string` or :class:`~pywbem.CIMClassName`):
Name of the class to be retrieved (case independent).
If specified as a `CIMClassName` object, its `host` attribute will be
ignored.
ns (:term:`string`):
Name of the CIM namespace to be used (case independent).
If `None`, defaults to the namespace of the `cn` parameter if
specified as a `CIMClassName`, or to the default namespace of the
connection.
lo (:class:`py:bool`):
LocalOnly flag: Exclude inherited properties.
`None` will cause the server default of `True` to be used.
iq (:class:`py:bool`):
IncludeQualifiers flag: Include qualifiers.
`None` will cause the server default of `True` to be used.
ico (:class:`py:bool`):
IncludeClassOrigin flag: Include class origin information for
properties and methods in the retrieved class.
`None` will cause the server default of `False` to be used.
pl (:term:`string` or :term:`py:iterable` of :term:`string`):
PropertyList: Names of properties to be included (if not otherwise
excluded). An empty iterable indicates to include no properties.
If `None`, all properties will be included.
Returns:
:class:`~pywbem.CIMClass`:
The retrieved class. | ['This', 'function', 'is', 'a', 'wrapper', 'for', ':', 'meth', ':', '~pywbem', '.', 'WBEMConnection', '.', 'GetClass', '.'] | train | https://github.com/pywbem/pywbem/blob/e54ecb82c2211e289a268567443d60fdd489f1e4/wbemcli.py#L2758-L2811 |
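A hedged session sketch: `wbemcli` sets up the global `CONN` before these wrappers are used, so `gc` can be called directly inside an interactive session; the class and namespace names are illustrative:

```python
# Hypothetical wbemcli session; class/namespace names are illustrative.
cls = gc('CIM_ComputerSystem', ns='root/cimv2', lo=False, pl=['Name', 'Status'])
print(cls.classname)                  # 'CIM_ComputerSystem'
print(sorted(cls.properties.keys()))  # only the requested properties
```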
1,811 | scarface-4711/denonavr | denonavr/denonavr.py | DenonAVR.create_zones | def create_zones(self, add_zones):
"""Create instances of additional zones for the receiver."""
for zone, zname in add_zones.items():
# Name either set explicitly or name of Main Zone with suffix
zonename = "{} {}".format(self._name, zone) if (
zname is None) else zname
zone_inst = DenonAVRZones(self, zone, zonename)
self._zones[zone] = zone_inst | python | def create_zones(self, add_zones):
"""Create instances of additional zones for the receiver."""
for zone, zname in add_zones.items():
# Name either set explicitly or name of Main Zone with suffix
zonename = "{} {}".format(self._name, zone) if (
zname is None) else zname
zone_inst = DenonAVRZones(self, zone, zonename)
self._zones[zone] = zone_inst | ['def', 'create_zones', '(', 'self', ',', 'add_zones', ')', ':', 'for', 'zone', ',', 'zname', 'in', 'add_zones', '.', 'items', '(', ')', ':', '# Name either set explicitly or name of Main Zone with suffix', 'zonename', '=', '"{} {}"', '.', 'format', '(', 'self', '.', '_name', ',', 'zone', ')', 'if', '(', 'zname', 'is', 'None', ')', 'else', 'zname', 'zone_inst', '=', 'DenonAVRZones', '(', 'self', ',', 'zone', ',', 'zonename', ')', 'self', '.', '_zones', '[', 'zone', ']', '=', 'zone_inst'] | Create instances of additional zones for the receiver. | ['Create', 'instances', 'of', 'additional', 'zones', 'for', 'the', 'receiver', '.'] | train | https://github.com/scarface-4711/denonavr/blob/59a136e27b43cb1d1e140cf67705087b3aa377cd/denonavr/denonavr.py#L390-L397 |
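A hedged sketch of the `add_zones` mapping this method consumes: `None` values get the auto-generated "name plus zone" label, strings are used verbatim. The host address and constructor signature are assumptions:

```python
# Hypothetical setup; host and constructor kwargs are assumptions.
import denonavr

receiver = denonavr.DenonAVR('192.168.1.50',
                             add_zones={'Zone2': None, 'Zone3': 'Patio'})
# 'Zone2' is labeled '<receiver name> Zone2'; 'Zone3' keeps the name 'Patio'.
```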
1,812 | brocade/pynos | pynos/versions/ver_6/ver_6_0_1/yang/brocade_firmware_ext.py | brocade_firmware_ext.show_firmware_version_output_show_firmware_version_control_processor_memory | def show_firmware_version_output_show_firmware_version_control_processor_memory(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_firmware_version = ET.Element("show_firmware_version")
config = show_firmware_version
output = ET.SubElement(show_firmware_version, "output")
show_firmware_version = ET.SubElement(output, "show-firmware-version")
control_processor_memory = ET.SubElement(show_firmware_version, "control-processor-memory")
control_processor_memory.text = kwargs.pop('control_processor_memory')
callback = kwargs.pop('callback', self._callback)
return callback(config) | python | def show_firmware_version_output_show_firmware_version_control_processor_memory(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_firmware_version = ET.Element("show_firmware_version")
config = show_firmware_version
output = ET.SubElement(show_firmware_version, "output")
show_firmware_version = ET.SubElement(output, "show-firmware-version")
control_processor_memory = ET.SubElement(show_firmware_version, "control-processor-memory")
control_processor_memory.text = kwargs.pop('control_processor_memory')
callback = kwargs.pop('callback', self._callback)
return callback(config) | ['def', 'show_firmware_version_output_show_firmware_version_control_processor_memory', '(', 'self', ',', '*', '*', 'kwargs', ')', ':', 'config', '=', 'ET', '.', 'Element', '(', '"config"', ')', 'show_firmware_version', '=', 'ET', '.', 'Element', '(', '"show_firmware_version"', ')', 'config', '=', 'show_firmware_version', 'output', '=', 'ET', '.', 'SubElement', '(', 'show_firmware_version', ',', '"output"', ')', 'show_firmware_version', '=', 'ET', '.', 'SubElement', '(', 'output', ',', '"show-firmware-version"', ')', 'control_processor_memory', '=', 'ET', '.', 'SubElement', '(', 'show_firmware_version', ',', '"control-processor-memory"', ')', 'control_processor_memory', '.', 'text', '=', 'kwargs', '.', 'pop', '(', "'control_processor_memory'", ')', 'callback', '=', 'kwargs', '.', 'pop', '(', "'callback'", ',', 'self', '.', '_callback', ')', 'return', 'callback', '(', 'config', ')'] | Auto Generated Code | ['Auto', 'Generated', 'Code'] | train | https://github.com/brocade/pynos/blob/bd8a34e98f322de3fc06750827d8bbc3a0c00380/pynos/versions/ver_6/ver_6_0_1/yang/brocade_firmware_ext.py#L137-L149 |
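The method is auto-generated boilerplate that builds a NETCONF payload and hands it to a transport callback; a standalone sketch of the XML it assembles (the memory value is made up, and the callback is omitted):

```python
# Standalone sketch of the payload built above (memory value is made up).
import xml.etree.ElementTree as ET

show = ET.Element('show_firmware_version')
output = ET.SubElement(show, 'output')
version = ET.SubElement(output, 'show-firmware-version')
ET.SubElement(version, 'control-processor-memory').text = '4096'
print(ET.tostring(show).decode())
# -> <show_firmware_version><output><show-firmware-version>
#    <control-processor-memory>4096</control-processor-memory>...
```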
1,813 | takuti/flurs | flurs/base.py | RecommenderMixin.register_user | def register_user(self, user):
"""For new users, append their information into the dictionaries.
Args:
user (User): User.
"""
self.users[user.index] = {'known_items': set()}
self.n_user += 1 | python | def register_user(self, user):
"""For new users, append their information into the dictionaries.
Args:
user (User): User.
"""
self.users[user.index] = {'known_items': set()}
self.n_user += 1 | ['def', 'register_user', '(', 'self', ',', 'user', ')', ':', 'self', '.', 'users', '[', 'user', '.', 'index', ']', '=', '{', "'known_items'", ':', 'set', '(', ')', '}', 'self', '.', 'n_user', '+=', '1'] | For new users, append their information into the dictionaries.
Args:
user (User): User. | ['For', 'new', 'users', 'append', 'their', 'information', 'into', 'the', 'dictionaries', '.'] | train | https://github.com/takuti/flurs/blob/a998fc180b45db7eaf38dbbbf8125a93100b8a8c/flurs/base.py#L45-L53 |
1,814 | benjamin-hodgson/asynqp | doc/examples/helloworld.py | hello_world | def hello_world():
"""
Sends a 'hello world' message and then reads it from the queue.
"""
# connect to the RabbitMQ broker
connection = yield from asynqp.connect('localhost', 5672, username='guest', password='guest')
# Open a communications channel
channel = yield from connection.open_channel()
# Create a queue and an exchange on the broker
exchange = yield from channel.declare_exchange('test.exchange', 'direct')
queue = yield from channel.declare_queue('test.queue')
# Bind the queue to the exchange, so the queue will get messages published to the exchange
yield from queue.bind(exchange, 'routing.key')
# If you pass in a dict it will be automatically converted to JSON
msg = asynqp.Message({'hello': 'world'})
exchange.publish(msg, 'routing.key')
# Synchronously get a message from the queue
received_message = yield from queue.get()
print(received_message.json()) # get JSON from incoming messages easily
# Acknowledge a delivered message
received_message.ack()
yield from channel.close()
yield from connection.close() | python | def hello_world():
"""
Sends a 'hello world' message and then reads it from the queue.
"""
# connect to the RabbitMQ broker
connection = yield from asynqp.connect('localhost', 5672, username='guest', password='guest')
# Open a communications channel
channel = yield from connection.open_channel()
# Create a queue and an exchange on the broker
exchange = yield from channel.declare_exchange('test.exchange', 'direct')
queue = yield from channel.declare_queue('test.queue')
# Bind the queue to the exchange, so the queue will get messages published to the exchange
yield from queue.bind(exchange, 'routing.key')
# If you pass in a dict it will be automatically converted to JSON
msg = asynqp.Message({'hello': 'world'})
exchange.publish(msg, 'routing.key')
# Synchronously get a message from the queue
received_message = yield from queue.get()
print(received_message.json()) # get JSON from incoming messages easily
# Acknowledge a delivered message
received_message.ack()
yield from channel.close()
yield from connection.close() | ['def', 'hello_world', '(', ')', ':', '# connect to the RabbitMQ broker', 'connection', '=', 'yield', 'from', 'asynqp', '.', 'connect', '(', "'localhost'", ',', '5672', ',', 'username', '=', "'guest'", ',', 'password', '=', "'guest'", ')', '# Open a communications channel', 'channel', '=', 'yield', 'from', 'connection', '.', 'open_channel', '(', ')', '# Create a queue and an exchange on the broker', 'exchange', '=', 'yield', 'from', 'channel', '.', 'declare_exchange', '(', "'test.exchange'", ',', "'direct'", ')', 'queue', '=', 'yield', 'from', 'channel', '.', 'declare_queue', '(', "'test.queue'", ')', '# Bind the queue to the exchange, so the queue will get messages published to the exchange', 'yield', 'from', 'queue', '.', 'bind', '(', 'exchange', ',', "'routing.key'", ')', '# If you pass in a dict it will be automatically converted to JSON', 'msg', '=', 'asynqp', '.', 'Message', '(', '{', "'hello'", ':', "'world'", '}', ')', 'exchange', '.', 'publish', '(', 'msg', ',', "'routing.key'", ')', '# Synchronously get a message from the queue', 'received_message', '=', 'yield', 'from', 'queue', '.', 'get', '(', ')', 'print', '(', 'received_message', '.', 'json', '(', ')', ')', '# get JSON from incoming messages easily', '# Acknowledge a delivered message', 'received_message', '.', 'ack', '(', ')', 'yield', 'from', 'channel', '.', 'close', '(', ')', 'yield', 'from', 'connection', '.', 'close', '(', ')'] | Sends a 'hello world' message and then reads it from the queue. | ['Sends', 'a', 'hello', 'world', 'message', 'and', 'then', 'reads', 'it', 'from', 'the', 'queue', '.'] | train | https://github.com/benjamin-hodgson/asynqp/blob/ea8630d1803d10d4fd64b1a0e50f3097710b34d1/doc/examples/helloworld.py#L6-L35 |
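The function uses pre-`async/await` generator syntax, so it needs an event loop to drive it; a hedged driver sketch (the original example module presumably decorates `hello_world` with `@asyncio.coroutine`, which this extract does not capture):

```python
# Driver sketch for the generator-based coroutine above (requires RabbitMQ).
import asyncio

loop = asyncio.get_event_loop()
loop.run_until_complete(hello_world())
loop.close()
```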
1,815 | Hironsan/anago | anago/utils.py | load_glove | def load_glove(file):
"""Loads GloVe vectors in numpy array.
Args:
file (str): a path to a glove file.
Return:
dict: a dict of numpy arrays.
"""
model = {}
with open(file, encoding="utf8", errors='ignore') as f:
for line in f:
line = line.split(' ')
word = line[0]
vector = np.array([float(val) for val in line[1:]])
model[word] = vector
return model | python | def load_glove(file):
"""Loads GloVe vectors in numpy array.
Args:
file (str): a path to a glove file.
Return:
dict: a dict of numpy arrays.
"""
model = {}
with open(file, encoding="utf8", errors='ignore') as f:
for line in f:
line = line.split(' ')
word = line[0]
vector = np.array([float(val) for val in line[1:]])
model[word] = vector
return model | ['def', 'load_glove', '(', 'file', ')', ':', 'model', '=', '{', '}', 'with', 'open', '(', 'file', ',', 'encoding', '=', '"utf8"', ',', 'errors', '=', "'ignore'", ')', 'as', 'f', ':', 'for', 'line', 'in', 'f', ':', 'line', '=', 'line', '.', 'split', '(', "' '", ')', 'word', '=', 'line', '[', '0', ']', 'vector', '=', 'np', '.', 'array', '(', '[', 'float', '(', 'val', ')', 'for', 'val', 'in', 'line', '[', '1', ':', ']', ']', ')', 'model', '[', 'word', ']', '=', 'vector', 'return', 'model'] | Loads GloVe vectors in numpy array.
Args:
file (str): a path to a glove file.
Return:
dict: a dict of numpy arrays. | ['Loads', 'GloVe', 'vectors', 'in', 'numpy', 'array', '.'] | train | https://github.com/Hironsan/anago/blob/66a97f91c41f9613b736892e9762dccb9c28f623/anago/utils.py#L267-L284 |
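A usage sketch; the GloVe file path is an assumption (any whitespace-separated "word vector..." text file works):

```python
# Hypothetical usage; glove.6B.100d.txt is a stand-in path.
from anago.utils import load_glove

embeddings = load_glove('glove.6B.100d.txt')
print(embeddings['king'].shape)  # (100,) for 100-dimensional vectors
```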
1,816 | a1ezzz/wasp-general | wasp_general/uri.py | WURI.reset_component | def reset_component(self, component):
""" Unset component in this URI
:param component: component name (or component type) to reset
:return: None
"""
if isinstance(component, str) is True:
component = WURI.Component(component)
self.__components[component] = None | python | def reset_component(self, component):
""" Unset component in this URI
:param component: component name (or component type) to reset
:return: None
"""
if isinstance(component, str) is True:
component = WURI.Component(component)
self.__components[component] = None | ['def', 'reset_component', '(', 'self', ',', 'component', ')', ':', 'if', 'isinstance', '(', 'component', ',', 'str', ')', 'is', 'True', ':', 'component', '=', 'WURI', '.', 'Component', '(', 'component', ')', 'self', '.', '__components', '[', 'component', ']', '=', 'None'] | Unset component in this URI
:param component: component name (or component type) to reset
:return: None | ['Unset', 'component', 'in', 'this', 'URI'] | train | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/uri.py#L137-L146 |
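A hedged sketch: both a plain string and a `WURI.Component` member are accepted, per the `isinstance` branch above. The `parse` constructor and component names are assumptions based on the class's role as a URI wrapper:

```python
# Hypothetical sketch; WURI.parse and component names are assumptions.
from wasp_general.uri import WURI

uri = WURI.parse('scheme://user@host:8080/path?q=1')
uri.reset_component('port')                # by component name
uri.reset_component(WURI.Component.query)  # or by component member
print(str(uri))  # expected: scheme://user@host/path
```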
1,817 | pydsigner/pygu | pygu/pygw.py | Container.remove | def remove(self, *widgets):
'''
Remove @widgets from the blitting hand of the Container(). Each arg
must be a Widget(), a fellow Container(), or an iterable. Else, things
get ugly...
'''
for w in widgets:
if w in self.widgets:
self.widgets.remove(w)
w.remove_internal(self)
elif w in self.containers:
self.containers.remove(w)
w.remove_internal(self)
else:
# If it isn't an iterable, we'll get an error here.
# Desired effect.
self.remove(*w) | python | def remove(self, *widgets):
'''
Remove @widgets from the blitting hand of the Container(). Each arg
must be a Widget(), a fellow Container(), or an iterable. Else, things
get ugly...
'''
for w in widgets:
if w in self.widgets:
self.widgets.remove(w)
w.remove_internal(self)
elif w in self.containers:
self.containers.remove(w)
w.remove_internal(self)
else:
# If it isn't an iterable, we'll get an error here.
# Desired effect.
self.remove(*w) | ['def', 'remove', '(', 'self', ',', '*', 'widgets', ')', ':', 'for', 'w', 'in', 'widgets', ':', 'if', 'w', 'in', 'self', '.', 'widgets', ':', 'self', '.', 'widgets', '.', 'remove', '(', 'w', ')', 'w', '.', 'remove_internal', '(', 'self', ')', 'elif', 'w', 'in', 'self', '.', 'containers', ':', 'self', '.', 'containers', '.', 'remove', '(', 'w', ')', 'w', '.', 'remove_internal', '(', 'self', ')', 'else', ':', "# If it isn't an iterable, we'll get an error here.", '# Desired effect.', 'self', '.', 'remove', '(', '*', 'w', ')'] | Remove @widgets from the blitting hand of the Container(). Each arg
must be a Widget(), a fellow Container(), or an iterable. Else, things
get ugly... | ['Remove'] | train | https://github.com/pydsigner/pygu/blob/09fe71534900933908ab83db12f5659b7827e31c/pygu/pygw.py#L197-L213 |
1,818 | mabuchilab/QNET | docs/_extensions/inheritance_diagram.py | InheritanceGraph._class_info | def _class_info(self, classes, show_builtins, private_bases, parts, aliases, top_classes):
# type: (List[Any], bool, bool, int, Optional[Dict[unicode, unicode]], List[Any]) -> List[Tuple[unicode, unicode, List[unicode], unicode]] # NOQA
"""Return name and bases for all classes that are ancestors of
*classes*.
*parts* gives the number of dotted name parts that is removed from the
displayed node names.
*top_classes* gives the name(s) of the topmost ancestor class to traverse
to. Multiple names can be specified separated by comma.
"""
all_classes = {}
py_builtins = vars(builtins).values()
def recurse(cls):
# type: (Any) -> None
if not show_builtins and cls in py_builtins:
return
if not private_bases and cls.__name__.startswith('_'):
return
nodename = self.class_name(cls, parts, aliases)
fullname = self.class_name(cls, 0, aliases)
# Use first line of docstring as tooltip, if available
tooltip = None
try:
if cls.__doc__:
enc = ModuleAnalyzer.for_module(cls.__module__).encoding
doc = cls.__doc__.strip().split("\n")[0]
if not isinstance(doc, text_type):
doc = force_decode(doc, enc)
if doc:
tooltip = '"%s"' % doc.replace('"', '\\"')
except Exception: # might raise AttributeError for strange classes
pass
baselist = [] # type: List[unicode]
all_classes[cls] = (nodename, fullname, baselist, tooltip)
if fullname in top_classes:
return
for base in cls.__bases__:
if not show_builtins and base in py_builtins:
continue
if not private_bases and base.__name__.startswith('_'):
continue
baselist.append(self.class_name(base, parts, aliases))
if base not in all_classes:
recurse(base)
for cls in classes:
recurse(cls)
return list(all_classes.values()) | python | def _class_info(self, classes, show_builtins, private_bases, parts, aliases, top_classes):
# type: (List[Any], bool, bool, int, Optional[Dict[unicode, unicode]], List[Any]) -> List[Tuple[unicode, unicode, List[unicode], unicode]] # NOQA
"""Return name and bases for all classes that are ancestors of
*classes*.
*parts* gives the number of dotted name parts that is removed from the
displayed node names.
*top_classes* gives the name(s) of the topmost ancestor class to traverse
to. Multiple names can be specified separated by comma.
"""
all_classes = {}
py_builtins = vars(builtins).values()
def recurse(cls):
# type: (Any) -> None
if not show_builtins and cls in py_builtins:
return
if not private_bases and cls.__name__.startswith('_'):
return
nodename = self.class_name(cls, parts, aliases)
fullname = self.class_name(cls, 0, aliases)
# Use first line of docstring as tooltip, if available
tooltip = None
try:
if cls.__doc__:
enc = ModuleAnalyzer.for_module(cls.__module__).encoding
doc = cls.__doc__.strip().split("\n")[0]
if not isinstance(doc, text_type):
doc = force_decode(doc, enc)
if doc:
tooltip = '"%s"' % doc.replace('"', '\\"')
except Exception: # might raise AttributeError for strange classes
pass
baselist = [] # type: List[unicode]
all_classes[cls] = (nodename, fullname, baselist, tooltip)
if fullname in top_classes:
return
for base in cls.__bases__:
if not show_builtins and base in py_builtins:
continue
if not private_bases and base.__name__.startswith('_'):
continue
baselist.append(self.class_name(base, parts, aliases))
if base not in all_classes:
recurse(base)
for cls in classes:
recurse(cls)
return list(all_classes.values()) | ['def', '_class_info', '(', 'self', ',', 'classes', ',', 'show_builtins', ',', 'private_bases', ',', 'parts', ',', 'aliases', ',', 'top_classes', ')', ':', '# type: (List[Any], bool, bool, int, Optional[Dict[unicode, unicode]], List[Any]) -> List[Tuple[unicode, unicode, List[unicode], unicode]] # NOQA', 'all_classes', '=', '{', '}', 'py_builtins', '=', 'vars', '(', 'builtins', ')', '.', 'values', '(', ')', 'def', 'recurse', '(', 'cls', ')', ':', '# type: (Any) -> None', 'if', 'not', 'show_builtins', 'and', 'cls', 'in', 'py_builtins', ':', 'return', 'if', 'not', 'private_bases', 'and', 'cls', '.', '__name__', '.', 'startswith', '(', "'_'", ')', ':', 'return', 'nodename', '=', 'self', '.', 'class_name', '(', 'cls', ',', 'parts', ',', 'aliases', ')', 'fullname', '=', 'self', '.', 'class_name', '(', 'cls', ',', '0', ',', 'aliases', ')', '# Use first line of docstring as tooltip, if available', 'tooltip', '=', 'None', 'try', ':', 'if', 'cls', '.', '__doc__', ':', 'enc', '=', 'ModuleAnalyzer', '.', 'for_module', '(', 'cls', '.', '__module__', ')', '.', 'encoding', 'doc', '=', 'cls', '.', '__doc__', '.', 'strip', '(', ')', '.', 'split', '(', '"\\n"', ')', '[', '0', ']', 'if', 'not', 'isinstance', '(', 'doc', ',', 'text_type', ')', ':', 'doc', '=', 'force_decode', '(', 'doc', ',', 'enc', ')', 'if', 'doc', ':', 'tooltip', '=', '\'"%s"\'', '%', 'doc', '.', 'replace', '(', '\'"\'', ',', '\'\\\\"\'', ')', 'except', 'Exception', ':', '# might raise AttributeError for strange classes', 'pass', 'baselist', '=', '[', ']', '# type: List[unicode]', 'all_classes', '[', 'cls', ']', '=', '(', 'nodename', ',', 'fullname', ',', 'baselist', ',', 'tooltip', ')', 'if', 'fullname', 'in', 'top_classes', ':', 'return', 'for', 'base', 'in', 'cls', '.', '__bases__', ':', 'if', 'not', 'show_builtins', 'and', 'base', 'in', 'py_builtins', ':', 'continue', 'if', 'not', 'private_bases', 'and', 'base', '.', '__name__', '.', 'startswith', '(', "'_'", ')', ':', 'continue', 'baselist', '.', 'append', '(', 'self', '.', 'class_name', '(', 'base', ',', 'parts', ',', 'aliases', ')', ')', 'if', 'base', 'not', 'in', 'all_classes', ':', 'recurse', '(', 'base', ')', 'for', 'cls', 'in', 'classes', ':', 'recurse', '(', 'cls', ')', 'return', 'list', '(', 'all_classes', '.', 'values', '(', ')', ')'] | Return name and bases for all classes that are ancestors of
*classes*.
*parts* gives the number of dotted name parts that is removed from the
displayed node names.
*top_classes* gives the name(s) of the topmost ancestor class to traverse
to. Multiple names can be specified separated by comma. | ['Return', 'name', 'and', 'bases', 'for', 'all', 'classes', 'that', 'are', 'ancestors', 'of', '*', 'classes', '*', '.'] | train | https://github.com/mabuchilab/QNET/blob/cc20d26dad78691d34c67173e5cd67dcac94208a/docs/_extensions/inheritance_diagram.py#L181-L236 |
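The traversal above recurses over cls.__bases__; a stripped-down, Sphinx-independent sketch of that pattern:

def ancestors(cls, seen=None):
    # Collect cls and every base class reachable via __bases__.
    seen = {} if seen is None else seen
    seen[cls] = cls.__name__
    for base in cls.__bases__:
        if base not in seen:
            ancestors(base, seen)
    return seen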
1,819 | exhuma/python-cluster | cluster/method/hierarchical.py | HierarchicalClustering.getlevel | def getlevel(self, threshold):
"""
Returns all clusters with a maximum distance of *threshold* in between
each other
:param threshold: the maximum distance between clusters.
See :py:meth:`~cluster.cluster.Cluster.getlevel`
"""
# if it's not worth clustering, just return the data
if len(self._input) <= 1:
return self._input
# initialize the cluster if not yet done
if not self.__cluster_created:
self.cluster()
return self._data[0].getlevel(threshold) | python | def getlevel(self, threshold):
"""
Returns all clusters with a maximum distance of *threshold* in between
each other
:param threshold: the maximum distance between clusters.
See :py:meth:`~cluster.cluster.Cluster.getlevel`
"""
# if it's not worth clustering, just return the data
if len(self._input) <= 1:
return self._input
# initialize the cluster if not yet done
if not self.__cluster_created:
self.cluster()
return self._data[0].getlevel(threshold) | ['def', 'getlevel', '(', 'self', ',', 'threshold', ')', ':', "# if it's not worth clustering, just return the data", 'if', 'len', '(', 'self', '.', '_input', ')', '<=', '1', ':', 'return', 'self', '.', '_input', '# initialize the cluster if not yet done', 'if', 'not', 'self', '.', '__cluster_created', ':', 'self', '.', 'cluster', '(', ')', 'return', 'self', '.', '_data', '[', '0', ']', '.', 'getlevel', '(', 'threshold', ')'] | Returns all clusters with a maximum distance of *threshold* in between
each other
:param threshold: the maximum distance between clusters.
See :py:meth:`~cluster.cluster.Cluster.getlevel` | ['Returns', 'all', 'clusters', 'with', 'a', 'maximum', 'distance', 'of', '*', 'threshold', '*', 'in', 'between', 'each', 'other'] | train | https://github.com/exhuma/python-cluster/blob/4c0ac14d9beafcd51f0d849151514083c296402f/cluster/method/hierarchical.py#L191-L209 |
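A hedged usage sketch for getlevel above; the data and distance function are placeholders in the style of the package's documented examples:

cl = HierarchicalClustering([1, 2, 250, 253, 900], lambda x, y: abs(x - y))
groups = cl.getlevel(40)  # clusters no more than 40 apart, roughly [[1, 2], [250, 253], [900]]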
1,820 | gitpython-developers/GitPython | git/refs/log.py | RefLog.entry_at | def entry_at(cls, filepath, index):
""":return: RefLogEntry at the given index
:param filepath: full path to the index file from which to read the entry
:param index: python list compatible index, i.e. it may be negative to
specify an entry counted from the end of the list
:raise IndexError: If the entry didn't exist
.. note:: This method is faster as it only parses the entry at index, skipping
all other lines. Nonetheless, the whole file has to be read if
the index is negative
"""
fp = open(filepath, 'rb')
if index < 0:
return RefLogEntry.from_line(fp.readlines()[index].strip())
else:
# read until index is reached
for i in xrange(index + 1):
line = fp.readline()
if not line:
break
# END abort on eof
# END handle runup
if i != index or not line:
raise IndexError
# END handle exception
return RefLogEntry.from_line(line.strip()) | python | def entry_at(cls, filepath, index):
""":return: RefLogEntry at the given index
:param filepath: full path to the index file from which to read the entry
:param index: python list compatible index, i.e. it may be negative to
specify an entry counted from the end of the list
:raise IndexError: If the entry didn't exist
.. note:: This method is faster as it only parses the entry at index, skipping
all other lines. Nonetheless, the whole file has to be read if
the index is negative
"""
fp = open(filepath, 'rb')
if index < 0:
return RefLogEntry.from_line(fp.readlines()[index].strip())
else:
# read until index is reached
for i in xrange(index + 1):
line = fp.readline()
if not line:
break
# END abort on eof
# END handle runup
if i != index or not line:
raise IndexError
# END handle exception
return RefLogEntry.from_line(line.strip()) | ['def', 'entry_at', '(', 'cls', ',', 'filepath', ',', 'index', ')', ':', 'fp', '=', 'open', '(', 'filepath', ',', "'rb'", ')', 'if', 'index', '<', '0', ':', 'return', 'RefLogEntry', '.', 'from_line', '(', 'fp', '.', 'readlines', '(', ')', '[', 'index', ']', '.', 'strip', '(', ')', ')', 'else', ':', '# read until index is reached', 'for', 'i', 'in', 'xrange', '(', 'index', '+', '1', ')', ':', 'line', '=', 'fp', '.', 'readline', '(', ')', 'if', 'not', 'line', ':', 'break', '# END abort on eof', '# END handle runup', 'if', 'i', '!=', 'index', 'or', 'not', 'line', ':', 'raise', 'IndexError', '# END handle exception', 'return', 'RefLogEntry', '.', 'from_line', '(', 'line', '.', 'strip', '(', ')', ')'] | :return: RefLogEntry at the given index
:param filepath: full path to the index file from which to read the entry
:param index: python list compatible index, i.e. it may be negative to
specify an entry counted from the end of the list
:raise IndexError: If the entry didn't exist
.. note:: This method is faster as it only parses the entry at index, skipping
all other lines. Nonetheless, the whole file has to be read if
the index is negative | [':', 'return', ':', 'RefLogEntry', 'at', 'the', 'given', 'index', ':', 'param', 'filepath', ':', 'full', 'path', 'to', 'the', 'index', 'file', 'from', 'which', 'to', 'read', 'the', 'entry', ':', 'param', 'index', ':', 'python', 'list', 'compatible', 'index', 'i', '.', 'e', '.', 'it', 'may', 'be', 'negative', 'to', 'specify', 'an', 'entry', 'counted', 'from', 'the', 'end', 'of', 'the', 'list'] | train | https://github.com/gitpython-developers/GitPython/blob/1f66e25c25cde2423917ee18c4704fff83b837d1/git/refs/log.py#L208-L236 |
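A hedged usage sketch for entry_at above; the repository path is a placeholder:

entry = RefLog.entry_at('/path/to/repo/.git/logs/HEAD', -1)  # newest reflog entry
print(entry.newhexsha, entry.message)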
1,821 | f3at/feat | src/feat/models/getter.py | source_getattr | def source_getattr():
"""
Creates a getter that will drop the current value
and retrieve the source's attribute with the context key as name.
"""
def source_getattr(_value, context, **_params):
value = getattr(context["model"].source, context["key"])
return _attr(value)
return source_getattr | python | def source_getattr():
"""
Creates a getter that will drop the current value
and retrieve the source's attribute with the context key as name.
"""
def source_getattr(_value, context, **_params):
value = getattr(context["model"].source, context["key"])
return _attr(value)
return source_getattr | ['def', 'source_getattr', '(', ')', ':', 'def', 'source_getattr', '(', '_value', ',', 'context', ',', '*', '*', '_params', ')', ':', 'value', '=', 'getattr', '(', 'context', '[', '"model"', ']', '.', 'source', ',', 'context', '[', '"key"', ']', ')', 'return', '_attr', '(', 'value', ')', 'return', 'source_getattr'] | Creates a getter that will drop the current value
and retrieve the source's attribute with the context key as name. | ['Creates', 'a', 'getter', 'that', 'will', 'drop', 'the', 'current', 'value', 'and', 'retrieve', 'the', 'source', 's', 'attribute', 'with', 'the', 'context', 'key', 'as', 'name', '.'] | train | https://github.com/f3at/feat/blob/15da93fc9d6ec8154f52a9172824e25821195ef8/src/feat/models/getter.py#L68-L78 |
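A sketch of how the getter factory above is typically invoked; the model object and the context shape are assumed from the docstring:

getter = source_getattr()
# The framework calls the getter with the current value and a context dict;
# the result is model.source.status wrapped by the module's _attr() helper.
result = getter(None, {"model": model, "key": "status"})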
1,822 | mmp2/megaman | megaman/relaxation/riemannian_relaxation.py | RiemannianRelaxation.compute_dual_rmetric | def compute_dual_rmetric(self,Ynew=None):
"""Helper function to calculate the """
usedY = self.Y if Ynew is None else Ynew
rieman_metric = RiemannMetric(usedY, self.laplacian_matrix)
return rieman_metric.get_dual_rmetric() | python | def compute_dual_rmetric(self,Ynew=None):
"""Helper function to calculate the """
usedY = self.Y if Ynew is None else Ynew
rieman_metric = RiemannMetric(usedY, self.laplacian_matrix)
return rieman_metric.get_dual_rmetric() | ['def', 'compute_dual_rmetric', '(', 'self', ',', 'Ynew', '=', 'None', ')', ':', 'usedY', '=', 'self', '.', 'Y', 'if', 'Ynew', 'is', 'None', 'else', 'Ynew', 'rieman_metric', '=', 'RiemannMetric', '(', 'usedY', ',', 'self', '.', 'laplacian_matrix', ')', 'return', 'rieman_metric', '.', 'get_dual_rmetric', '(', ')'] | Helper function to calculate the dual Riemannian metric. | ['Helper', 'function', 'to', 'calculate', 'the', 'dual', 'Riemannian', 'metric', '.'] | train | https://github.com/mmp2/megaman/blob/faccaf267aad0a8b18ec8a705735fd9dd838ca1e/megaman/relaxation/riemannian_relaxation.py#L103-L107 |
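A hedged call sketch, assuming an already-constructed relaxation object:

H = relaxation.compute_dual_rmetric()                  # dual metric at the current embedding Y
H_new = relaxation.compute_dual_rmetric(Ynew=Y_trial)  # or evaluated at a candidate embedding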
1,823 | msoulier/tftpy | tftpy/TftpContexts.py | TftpContextClientDownload.start | def start(self):
"""Initiate the download."""
log.info("Sending tftp download request to %s" % self.host)
log.info(" filename -> %s" % self.file_to_transfer)
log.info(" options -> %s" % self.options)
self.metrics.start_time = time.time()
log.debug("Set metrics.start_time to %s" % self.metrics.start_time)
# FIXME: put this in a sendRRQ method?
pkt = TftpPacketRRQ()
pkt.filename = self.file_to_transfer
pkt.mode = "octet" # FIXME - shouldn't hardcode this
pkt.options = self.options
self.sock.sendto(pkt.encode().buffer, (self.host, self.port))
self.next_block = 1
self.last_pkt = pkt
self.state = TftpStateSentRRQ(self)
while self.state:
try:
log.debug("State is %s" % self.state)
self.cycle()
except TftpTimeout as err:
log.error(str(err))
self.retry_count += 1
if self.retry_count >= TIMEOUT_RETRIES:
log.debug("hit max retries, giving up")
raise
else:
log.warning("resending last packet")
self.state.resendLast()
except TftpFileNotFoundError as err:
# If we received file not found, then we should not save the open
# output file or we'll be left with a size zero file. Delete it,
# if it exists.
log.error("Received File not found error")
if self.fileobj is not None and not self.filelike_fileobj:
if os.path.exists(self.fileobj.name):
log.debug("unlinking output file of %s", self.fileobj.name)
os.unlink(self.fileobj.name)
raise | python | def start(self):
"""Initiate the download."""
log.info("Sending tftp download request to %s" % self.host)
log.info(" filename -> %s" % self.file_to_transfer)
log.info(" options -> %s" % self.options)
self.metrics.start_time = time.time()
log.debug("Set metrics.start_time to %s" % self.metrics.start_time)
# FIXME: put this in a sendRRQ method?
pkt = TftpPacketRRQ()
pkt.filename = self.file_to_transfer
pkt.mode = "octet" # FIXME - shouldn't hardcode this
pkt.options = self.options
self.sock.sendto(pkt.encode().buffer, (self.host, self.port))
self.next_block = 1
self.last_pkt = pkt
self.state = TftpStateSentRRQ(self)
while self.state:
try:
log.debug("State is %s" % self.state)
self.cycle()
except TftpTimeout as err:
log.error(str(err))
self.retry_count += 1
if self.retry_count >= TIMEOUT_RETRIES:
log.debug("hit max retries, giving up")
raise
else:
log.warning("resending last packet")
self.state.resendLast()
except TftpFileNotFoundError as err:
# If we received file not found, then we should not save the open
# output file or we'll be left with a size zero file. Delete it,
# if it exists.
log.error("Received File not found error")
if self.fileobj is not None and not self.filelike_fileobj:
if os.path.exists(self.fileobj.name):
log.debug("unlinking output file of %s", self.fileobj.name)
os.unlink(self.fileobj.name)
raise | ['def', 'start', '(', 'self', ')', ':', 'log', '.', 'info', '(', '"Sending tftp download request to %s"', '%', 'self', '.', 'host', ')', 'log', '.', 'info', '(', '" filename -> %s"', '%', 'self', '.', 'file_to_transfer', ')', 'log', '.', 'info', '(', '" options -> %s"', '%', 'self', '.', 'options', ')', 'self', '.', 'metrics', '.', 'start_time', '=', 'time', '.', 'time', '(', ')', 'log', '.', 'debug', '(', '"Set metrics.start_time to %s"', '%', 'self', '.', 'metrics', '.', 'start_time', ')', '# FIXME: put this in a sendRRQ method?', 'pkt', '=', 'TftpPacketRRQ', '(', ')', 'pkt', '.', 'filename', '=', 'self', '.', 'file_to_transfer', 'pkt', '.', 'mode', '=', '"octet"', "# FIXME - shouldn't hardcode this", 'pkt', '.', 'options', '=', 'self', '.', 'options', 'self', '.', 'sock', '.', 'sendto', '(', 'pkt', '.', 'encode', '(', ')', '.', 'buffer', ',', '(', 'self', '.', 'host', ',', 'self', '.', 'port', ')', ')', 'self', '.', 'next_block', '=', '1', 'self', '.', 'last_pkt', '=', 'pkt', 'self', '.', 'state', '=', 'TftpStateSentRRQ', '(', 'self', ')', 'while', 'self', '.', 'state', ':', 'try', ':', 'log', '.', 'debug', '(', '"State is %s"', '%', 'self', '.', 'state', ')', 'self', '.', 'cycle', '(', ')', 'except', 'TftpTimeout', 'as', 'err', ':', 'log', '.', 'error', '(', 'str', '(', 'err', ')', ')', 'self', '.', 'retry_count', '+=', '1', 'if', 'self', '.', 'retry_count', '>=', 'TIMEOUT_RETRIES', ':', 'log', '.', 'debug', '(', '"hit max retries, giving up"', ')', 'raise', 'else', ':', 'log', '.', 'warning', '(', '"resending last packet"', ')', 'self', '.', 'state', '.', 'resendLast', '(', ')', 'except', 'TftpFileNotFoundError', 'as', 'err', ':', '# If we received file not found, then we should not save the open', "# output file or we'll be left with a size zero file. Delete it,", '# if it exists.', 'log', '.', 'error', '(', '"Received File not found error"', ')', 'if', 'self', '.', 'fileobj', 'is', 'not', 'None', 'and', 'not', 'self', '.', 'filelike_fileobj', ':', 'if', 'os', '.', 'path', '.', 'exists', '(', 'self', '.', 'fileobj', '.', 'name', ')', ':', 'log', '.', 'debug', '(', '"unlinking output file of %s"', ',', 'self', '.', 'fileobj', '.', 'name', ')', 'os', '.', 'unlink', '(', 'self', '.', 'fileobj', '.', 'name', ')', 'raise'] | Initiate the download. | ['Initiate', 'the', 'download', '.'] | train | https://github.com/msoulier/tftpy/blob/af2f2fe89a3bf45748b78703820efb0986a8207a/tftpy/TftpContexts.py#L379-L422 |
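For context, the public tftpy entry point that drives this download context looks roughly like this; host, port, and filenames are placeholders:

import tftpy
client = tftpy.TftpClient('192.168.0.10', 69)
client.download('remote.bin', 'local.bin')  # internally runs the start()/cycle() state loop above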
1,824 | blockcypher/blockcypher-python | blockcypher/api.py | simple_spend_p2sh | def simple_spend_p2sh(all_from_pubkeys, from_privkeys_to_use, to_address, to_satoshis,
change_address=None, min_confirmations=0, api_key=None, coin_symbol='btc'):
'''
Simple method to spend from a p2sh address.
all_from_pubkeys is a list of *all* pubkeys for the address in question.
from_privkeys_to_use is a list of all privkeys that will be used to sign the tx (and no more).
If the address is a 2-of-3 multisig and you supply 1 (or 3) from_privkeys_to_use this will break.
Signature takes place locally (client-side) after unsigned transaction is verified.
Returns the tx_hash of the newly broadcast tx.
A change_address *must* be specified, except for a sweep (set to_satoshis = -1)
Note that this currently only supports compressed private keys.
'''
assert is_valid_coin_symbol(coin_symbol), coin_symbol
assert isinstance(to_satoshis, int), to_satoshis
assert api_key, 'api_key required'
if change_address:
err_msg = '%s not a valid address for %s' % (change_address, coin_symbol)
assert is_valid_address_for_coinsymbol(change_address, coin_symbol), err_msg
else:
assert to_satoshis == -1, 'you must supply a change address or sweep'
err_msg = '%s not a valid address for %s' % (to_address, coin_symbol)
assert is_valid_address_for_coinsymbol(to_address, coin_symbol), err_msg
# TODO: calculate from address from pubkeys
# err_msg = '%s is not a p2sh address' % to_address
# assert from_address[0] in COIN_SYMBOL_MAPPINGS[coin_symbol]['multisig_prefix_list'], err_msg
assert isinstance(all_from_pubkeys, (list, tuple))
assert len(all_from_pubkeys) > 1
assert isinstance(from_privkeys_to_use, (list, tuple)), from_privkeys_to_use
for from_privkey in from_privkeys_to_use:
from_pubkey = compress(privkey_to_pubkey(from_privkey))
err_msg = '%s not in %s' % (from_pubkey, all_from_pubkeys)
assert from_pubkey in all_from_pubkeys
script_type = 'multisig-%s-of-%s' % (
len(from_privkeys_to_use),
len(all_from_pubkeys),
)
inputs = [
{
'pubkeys': all_from_pubkeys,
'script_type': script_type,
},
]
logger.info('inputs: %s' % inputs)
outputs = [{'address': to_address, 'value': to_satoshis}, ]
logger.info('outputs: %s' % outputs)
# will fail loudly if tx doesn't verify client-side
unsigned_tx = create_unsigned_tx(
inputs=inputs,
outputs=outputs,
# may build with no change address, but if so will verify change in next step
# done for extra security in case of client-side bug in change address generation
change_address=change_address,
coin_symbol=coin_symbol,
min_confirmations=min_confirmations,
verify_tosigntx=False, # will verify in next step
include_tosigntx=True,
api_key=api_key,
)
logger.info('unsigned_tx: %s' % unsigned_tx)
if 'errors' in unsigned_tx:
print('TX Error(s): Tx NOT Signed or Broadcast')
for error in unsigned_tx['errors']:
print(error['error'])
# Abandon
raise Exception('Build Unsigned TX Error')
tx_is_correct, err_msg = verify_unsigned_tx(
unsigned_tx=unsigned_tx,
inputs=None,
outputs=outputs,
sweep_funds=bool(to_satoshis == -1),
change_address=change_address,
coin_symbol=coin_symbol,
)
if not tx_is_correct:
print(unsigned_tx) # for debug
raise Exception('TX Verification Error: %s' % err_msg)
txs_to_sign, privkey_list, pubkey_list = [], [], []
for cnt, proposed_input in enumerate(unsigned_tx['tx']['inputs']):
# confirm that the input matches the all_from_pubkeys
err_msg = 'Invalid input: %s != %s' % (
proposed_input['addresses'],
all_from_pubkeys,
)
assert set(proposed_input['addresses']) == set(all_from_pubkeys), err_msg
# build items to pass to make_tx_signatures
for from_privkey in from_privkeys_to_use:
txs_to_sign.append(unsigned_tx['tosign'][cnt])
privkey_list.append(from_privkey)
pubkey_list.append(compress(privkey_to_pubkey(from_privkey)))
logger.info('txs_to_sign: %s' % txs_to_sign)
# logger.info('privkey_list: %s' % privkey_list)
logger.info('pubkey_list: %s' % pubkey_list)
# sign locally
tx_signatures = make_tx_signatures(
txs_to_sign=txs_to_sign,
privkey_list=privkey_list,
pubkey_list=pubkey_list,
)
logger.info('tx_signatures: %s' % tx_signatures)
# broadcast TX
broadcasted_tx = broadcast_signed_transaction(
unsigned_tx=unsigned_tx,
signatures=tx_signatures,
pubkeys=pubkey_list,
coin_symbol=coin_symbol,
api_key=api_key,
)
logger.info('broadcasted_tx: %s' % broadcasted_tx)
if 'errors' in broadcasted_tx:
print('TX Error(s): Tx May NOT Have Been Broadcast')
for error in broadcasted_tx['errors']:
print(error['error'])
print(broadcasted_tx)
return
return broadcasted_tx['tx']['hash'] | python | def simple_spend_p2sh(all_from_pubkeys, from_privkeys_to_use, to_address, to_satoshis,
change_address=None, min_confirmations=0, api_key=None, coin_symbol='btc'):
'''
Simple method to spend from a p2sh address.
all_from_pubkeys is a list of *all* pubkeys for the address in question.
from_privkeys_to_use is a list of all privkeys that will be used to sign the tx (and no more).
If the address is a 2-of-3 multisig and you supply 1 (or 3) from_privkeys_to_use this will break.
Signature takes place locally (client-side) after unsigned transaction is verified.
Returns the tx_hash of the newly broadcast tx.
A change_address *must* be specified, except for a sweep (set to_satoshis = -1)
Note that this currently only supports compressed private keys.
'''
assert is_valid_coin_symbol(coin_symbol), coin_symbol
assert isinstance(to_satoshis, int), to_satoshis
assert api_key, 'api_key required'
if change_address:
err_msg = '%s not a valid address for %s' % (change_address, coin_symbol)
assert is_valid_address_for_coinsymbol(change_address, coin_symbol), err_msg
else:
assert to_satoshis == -1, 'you must supply a change address or sweep'
err_msg = '%s not a valid address for %s' % (to_address, coin_symbol)
assert is_valid_address_for_coinsymbol(to_address, coin_symbol), err_msg
# TODO: calculate from address from pubkeys
# err_msg = '%s is not a p2sh address' % to_address
# assert from_address[0] in COIN_SYMBOL_MAPPINGS[coin_symbol]['multisig_prefix_list'], err_msg
assert isinstance(all_from_pubkeys, (list, tuple))
assert len(all_from_pubkeys) > 1
assert isinstance(from_privkeys_to_use, (list, tuple)), from_privkeys_to_use
for from_privkey in from_privkeys_to_use:
from_pubkey = compress(privkey_to_pubkey(from_privkey))
err_msg = '%s not in %s' % (from_pubkey, all_from_pubkeys)
assert from_pubkey in all_from_pubkeys
script_type = 'multisig-%s-of-%s' % (
len(from_privkeys_to_use),
len(all_from_pubkeys),
)
inputs = [
{
'pubkeys': all_from_pubkeys,
'script_type': script_type,
},
]
logger.info('inputs: %s' % inputs)
outputs = [{'address': to_address, 'value': to_satoshis}, ]
logger.info('outputs: %s' % outputs)
# will fail loudly if tx doesn't verify client-side
unsigned_tx = create_unsigned_tx(
inputs=inputs,
outputs=outputs,
# may build with no change address, but if so will verify change in next step
# done for extra security in case of client-side bug in change address generation
change_address=change_address,
coin_symbol=coin_symbol,
min_confirmations=min_confirmations,
verify_tosigntx=False, # will verify in next step
include_tosigntx=True,
api_key=api_key,
)
logger.info('unsigned_tx: %s' % unsigned_tx)
if 'errors' in unsigned_tx:
print('TX Error(s): Tx NOT Signed or Broadcast')
for error in unsigned_tx['errors']:
print(error['error'])
# Abandon
raise Exception('Build Unsigned TX Error')
tx_is_correct, err_msg = verify_unsigned_tx(
unsigned_tx=unsigned_tx,
inputs=None,
outputs=outputs,
sweep_funds=bool(to_satoshis == -1),
change_address=change_address,
coin_symbol=coin_symbol,
)
if not tx_is_correct:
print(unsigned_tx) # for debug
raise Exception('TX Verification Error: %s' % err_msg)
txs_to_sign, privkey_list, pubkey_list = [], [], []
for cnt, proposed_input in enumerate(unsigned_tx['tx']['inputs']):
# confirm that the input matches the all_from_pubkeys
err_msg = 'Invalid input: %s != %s' % (
proposed_input['addresses'],
all_from_pubkeys,
)
assert set(proposed_input['addresses']) == set(all_from_pubkeys), err_msg
# build items to pass to make_tx_signatures
for from_privkey in from_privkeys_to_use:
txs_to_sign.append(unsigned_tx['tosign'][cnt])
privkey_list.append(from_privkey)
pubkey_list.append(compress(privkey_to_pubkey(from_privkey)))
logger.info('txs_to_sign: %s' % txs_to_sign)
# logger.info('privkey_list: %s' % privkey_list)
logger.info('pubkey_list: %s' % pubkey_list)
# sign locally
tx_signatures = make_tx_signatures(
txs_to_sign=txs_to_sign,
privkey_list=privkey_list,
pubkey_list=pubkey_list,
)
logger.info('tx_signatures: %s' % tx_signatures)
# broadcast TX
broadcasted_tx = broadcast_signed_transaction(
unsigned_tx=unsigned_tx,
signatures=tx_signatures,
pubkeys=pubkey_list,
coin_symbol=coin_symbol,
api_key=api_key,
)
logger.info('broadcasted_tx: %s' % broadcasted_tx)
if 'errors' in broadcasted_tx:
print('TX Error(s): Tx May NOT Have Been Broadcast')
for error in broadcasted_tx['errors']:
print(error['error'])
print(broadcasted_tx)
return
return broadcasted_tx['tx']['hash'] | ['def', 'simple_spend_p2sh', '(', 'all_from_pubkeys', ',', 'from_privkeys_to_use', ',', 'to_address', ',', 'to_satoshis', ',', 'change_address', '=', 'None', ',', 'min_confirmations', '=', '0', ',', 'api_key', '=', 'None', ',', 'coin_symbol', '=', "'btc'", ')', ':', 'assert', 'is_valid_coin_symbol', '(', 'coin_symbol', ')', ',', 'coin_symbol', 'assert', 'isinstance', '(', 'to_satoshis', ',', 'int', ')', ',', 'to_satoshis', 'assert', 'api_key', ',', "'api_key required'", 'if', 'change_address', ':', 'err_msg', '=', "'%s not a valid address for %s'", '%', '(', 'change_address', ',', 'coin_symbol', ')', 'assert', 'is_valid_address_for_coinsymbol', '(', 'change_address', ',', 'coin_symbol', ')', ',', 'err_msg', 'else', ':', 'assert', 'to_satoshis', '==', '-', '1', ',', "'you must supply a change address or sweep'", 'err_msg', '=', "'%s not a valid address for %s'", '%', '(', 'to_address', ',', 'coin_symbol', ')', 'assert', 'is_valid_address_for_coinsymbol', '(', 'to_address', ',', 'coin_symbol', ')', ',', 'err_msg', '# TODO: calculate from address from pubkeys', "# err_msg = '%s is not a p2sh address' % to_address", "# assert from_address[0] in COIN_SYMBOL_MAPPINGS[coin_symbol]['multisig_prefix_list'], err_msg", 'assert', 'isinstance', '(', 'all_from_pubkeys', ',', '(', 'list', ',', 'tuple', ')', ')', 'assert', 'len', '(', 'all_from_pubkeys', ')', '>', '1', 'assert', 'isinstance', '(', 'from_privkeys_to_use', ',', '(', 'list', ',', 'tuple', ')', ')', ',', 'from_privkeys_to_use', 'for', 'from_privkey', 'in', 'from_privkeys_to_use', ':', 'from_pubkey', '=', 'compress', '(', 'privkey_to_pubkey', '(', 'from_privkey', ')', ')', 'err_msg', '=', "'%s not in %s'", '%', '(', 'from_pubkey', ',', 'all_from_pubkeys', ')', 'assert', 'from_pubkey', 'in', 'all_from_pubkeys', 'script_type', '=', "'multisig-%s-of-%s'", '%', '(', 'len', '(', 'from_privkeys_to_use', ')', ',', 'len', '(', 'all_from_pubkeys', ')', ',', ')', 'inputs', '=', '[', '{', "'pubkeys'", ':', 'all_from_pubkeys', ',', "'script_type'", ':', 'script_type', ',', '}', ',', ']', 'logger', '.', 'info', '(', "'inputs: %s'", '%', 'inputs', ')', 'outputs', '=', '[', '{', "'address'", ':', 'to_address', ',', "'value'", ':', 'to_satoshis', '}', ',', ']', 'logger', '.', 'info', '(', "'outputs: %s'", '%', 'outputs', ')', "# will fail loudly if tx doesn't verify client-side", 'unsigned_tx', '=', 'create_unsigned_tx', '(', 'inputs', '=', 'inputs', ',', 'outputs', '=', 'outputs', ',', '# may build with no change address, but if so will verify change in next step', '# done for extra security in case of client-side bug in change address generation', 'change_address', '=', 'change_address', ',', 'coin_symbol', '=', 'coin_symbol', ',', 'min_confirmations', '=', 'min_confirmations', ',', 'verify_tosigntx', '=', 'False', ',', '# will verify in next step', 'include_tosigntx', '=', 'True', ',', 'api_key', '=', 'api_key', ',', ')', 'logger', '.', 'info', '(', "'unsigned_tx: %s'", '%', 'unsigned_tx', ')', 'if', "'errors'", 'in', 'unsigned_tx', ':', 'print', '(', "'TX Error(s): Tx NOT Signed or Broadcast'", ')', 'for', 'error', 'in', 'unsigned_tx', '[', "'errors'", ']', ':', 'print', '(', 'error', '[', "'error'", ']', ')', '# Abandon', 'raise', 'Exception', '(', "'Build Unsigned TX Error'", ')', 'tx_is_correct', ',', 'err_msg', '=', 'verify_unsigned_tx', '(', 'unsigned_tx', '=', 'unsigned_tx', ',', 'inputs', '=', 'None', ',', 'outputs', '=', 'outputs', ',', 'sweep_funds', '=', 'bool', '(', 'to_satoshis', '==', '-', '1', ')', ',', 
'change_address', '=', 'change_address', ',', 'coin_symbol', '=', 'coin_symbol', ',', ')', 'if', 'not', 'tx_is_correct', ':', 'print', '(', 'unsigned_tx', ')', '# for debug', 'raise', 'Exception', '(', "'TX Verification Error: %s'", '%', 'err_msg', ')', 'txs_to_sign', ',', 'privkey_list', ',', 'pubkey_list', '=', '[', ']', ',', '[', ']', ',', '[', ']', 'for', 'cnt', ',', 'proposed_input', 'in', 'enumerate', '(', 'unsigned_tx', '[', "'tx'", ']', '[', "'inputs'", ']', ')', ':', '# confirm that the input matches the all_from_pubkeys', 'err_msg', '=', "'Invalid input: %s != %s'", '%', '(', 'proposed_input', '[', "'addresses'", ']', ',', 'all_from_pubkeys', ',', ')', 'assert', 'set', '(', 'proposed_input', '[', "'addresses'", ']', ')', '==', 'set', '(', 'all_from_pubkeys', ')', ',', 'err_msg', '# build items to pass to make_tx_signatures', 'for', 'from_privkey', 'in', 'from_privkeys_to_use', ':', 'txs_to_sign', '.', 'append', '(', 'unsigned_tx', '[', "'tosign'", ']', '[', 'cnt', ']', ')', 'privkey_list', '.', 'append', '(', 'from_privkey', ')', 'pubkey_list', '.', 'append', '(', 'compress', '(', 'privkey_to_pubkey', '(', 'from_privkey', ')', ')', ')', 'logger', '.', 'info', '(', "'txs_to_sign: %s'", '%', 'txs_to_sign', ')', "# logger.info('privkey_list: %s' % privkey_list)", 'logger', '.', 'info', '(', "'pubkey_list: %s'", '%', 'pubkey_list', ')', '# sign locally', 'tx_signatures', '=', 'make_tx_signatures', '(', 'txs_to_sign', '=', 'txs_to_sign', ',', 'privkey_list', '=', 'privkey_list', ',', 'pubkey_list', '=', 'pubkey_list', ',', ')', 'logger', '.', 'info', '(', "'tx_signatures: %s'", '%', 'tx_signatures', ')', '# broadcast TX', 'broadcasted_tx', '=', 'broadcast_signed_transaction', '(', 'unsigned_tx', '=', 'unsigned_tx', ',', 'signatures', '=', 'tx_signatures', ',', 'pubkeys', '=', 'pubkey_list', ',', 'coin_symbol', '=', 'coin_symbol', ',', 'api_key', '=', 'api_key', ',', ')', 'logger', '.', 'info', '(', "'broadcasted_tx: %s'", '%', 'broadcasted_tx', ')', 'if', "'errors'", 'in', 'broadcasted_tx', ':', 'print', '(', "'TX Error(s): Tx May NOT Have Been Broadcast'", ')', 'for', 'error', 'in', 'broadcasted_tx', '[', "'errors'", ']', ':', 'print', '(', 'error', '[', "'error'", ']', ')', 'print', '(', 'broadcasted_tx', ')', 'return', 'return', 'broadcasted_tx', '[', "'tx'", ']', '[', "'hash'", ']'] | Simple method to spend from a p2sh address.
all_from_pubkeys is a list of *all* pubkeys for the address in question.
from_privkeys_to_use is a list of all privkeys that will be used to sign the tx (and no more).
If the address is a 2-of-3 multisig and you supply 1 (or 3) from_privkeys_to_use this will break.
Signature takes place locally (client-side) after unsigned transaction is verified.
Returns the tx_hash of the newly broadcast tx.
A change_address *must* be specified, except for a sweep (set to_satoshis = -1)
Note that this currently only supports compressed private keys. | ['Simple', 'method', 'to', 'spend', 'from', 'a', 'p2sh', 'address', '.'] | train | https://github.com/blockcypher/blockcypher-python/blob/7601ea21916957ff279384fd699527ff9c28a56e/blockcypher/api.py#L1701-L1839 |
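A hedged call sketch for a 2-of-3 spend with simple_spend_p2sh above; every key, address, and amount is a placeholder:

tx_hash = simple_spend_p2sh(
    all_from_pubkeys=[pubkey1, pubkey2, pubkey3],  # all three pubkeys behind the address
    from_privkeys_to_use=[privkey1, privkey2],     # exactly the two signing keys
    to_address=dest_addr, to_satoshis=50000,
    change_address=change_addr,
    coin_symbol='btc', api_key='YOUR_TOKEN',
)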
1,825 | nornir-automation/nornir | nornir/plugins/tasks/networking/netmiko_file_transfer.py | netmiko_file_transfer | def netmiko_file_transfer(
task: Task, source_file: str, dest_file: str, **kwargs: Any
) -> Result:
"""
Execute Netmiko file_transfer method
Arguments:
source_file: Source file.
dest_file: Destination file.
kwargs: Additional arguments to pass to file_transfer
Returns:
Result object with the following attributes set:
* result (``bool``): file exists and MD5 is valid
* changed (``bool``): the destination file was changed
"""
net_connect = task.host.get_connection("netmiko", task.nornir.config)
kwargs.setdefault("direction", "put")
scp_result = file_transfer(
net_connect, source_file=source_file, dest_file=dest_file, **kwargs
)
if kwargs.get("disable_md5") is True:
file_valid = scp_result["file_exists"]
else:
file_valid = scp_result["file_exists"] and scp_result["file_verified"]
return Result(
host=task.host, result=file_valid, changed=scp_result["file_transferred"]
) | python | def netmiko_file_transfer(
task: Task, source_file: str, dest_file: str, **kwargs: Any
) -> Result:
"""
Execute Netmiko file_transfer method
Arguments:
source_file: Source file.
dest_file: Destination file.
kwargs: Additional arguments to pass to file_transfer
Returns:
Result object with the following attributes set:
* result (``bool``): file exists and MD5 is valid
* changed (``bool``): the destination file was changed
"""
net_connect = task.host.get_connection("netmiko", task.nornir.config)
kwargs.setdefault("direction", "put")
scp_result = file_transfer(
net_connect, source_file=source_file, dest_file=dest_file, **kwargs
)
if kwargs.get("disable_md5") is True:
file_valid = scp_result["file_exists"]
else:
file_valid = scp_result["file_exists"] and scp_result["file_verified"]
return Result(
host=task.host, result=file_valid, changed=scp_result["file_transferred"]
) | ['def', 'netmiko_file_transfer', '(', 'task', ':', 'Task', ',', 'source_file', ':', 'str', ',', 'dest_file', ':', 'str', ',', '*', '*', 'kwargs', ':', 'Any', ')', '->', 'Result', ':', 'net_connect', '=', 'task', '.', 'host', '.', 'get_connection', '(', '"netmiko"', ',', 'task', '.', 'nornir', '.', 'config', ')', 'kwargs', '.', 'setdefault', '(', '"direction"', ',', '"put"', ')', 'scp_result', '=', 'file_transfer', '(', 'net_connect', ',', 'source_file', '=', 'source_file', ',', 'dest_file', '=', 'dest_file', ',', '*', '*', 'kwargs', ')', 'if', 'kwargs', '.', 'get', '(', '"disable_md5"', ')', 'is', 'True', ':', 'file_valid', '=', 'scp_result', '[', '"file_exists"', ']', 'else', ':', 'file_valid', '=', 'scp_result', '[', '"file_exists"', ']', 'and', 'scp_result', '[', '"file_verified"', ']', 'return', 'Result', '(', 'host', '=', 'task', '.', 'host', ',', 'result', '=', 'file_valid', ',', 'changed', '=', 'scp_result', '[', '"file_transferred"', ']', ')'] | Execute Netmiko file_transfer method
Arguments:
source_file: Source file.
dest_file: Destination file.
kwargs: Additional arguments to pass to file_transfer
Returns:
Result object with the following attributes set:
* result (``bool``): file exists and MD5 is valid
* changed (``bool``): the destination file was changed | ['Execute', 'Netmiko', 'file_transfer', 'method'] | train | https://github.com/nornir-automation/nornir/blob/3425c47fd870db896cb80f619bae23bd98d50c74/nornir/plugins/tasks/networking/netmiko_file_transfer.py#L8-L36 |
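A usage sketch running netmiko_file_transfer as a Nornir task; the inventory config and file names are assumptions:

from nornir import InitNornir
from nornir.plugins.tasks.networking import netmiko_file_transfer

nr = InitNornir(config_file='config.yaml')
result = nr.run(task=netmiko_file_transfer,
                source_file='image.bin', dest_file='image.bin')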
1,826 | BrianHicks/emit | emit/router/core.py | Router.resolve_node_modules | def resolve_node_modules(self):
'import the modules specified in init'
if not self.resolved_node_modules:
try:
self.resolved_node_modules = [
importlib.import_module(mod, self.node_package)
for mod in self.node_modules
]
except ImportError:
self.resolved_node_modules = []
raise
return self.resolved_node_modules | python | def resolve_node_modules(self):
'import the modules specified in init'
if not self.resolved_node_modules:
try:
self.resolved_node_modules = [
importlib.import_module(mod, self.node_package)
for mod in self.node_modules
]
except ImportError:
self.resolved_node_modules = []
raise
return self.resolved_node_modules | ['def', 'resolve_node_modules', '(', 'self', ')', ':', 'if', 'not', 'self', '.', 'resolved_node_modules', ':', 'try', ':', 'self', '.', 'resolved_node_modules', '=', '[', 'importlib', '.', 'import_module', '(', 'mod', ',', 'self', '.', 'node_package', ')', 'for', 'mod', 'in', 'self', '.', 'node_modules', ']', 'except', 'ImportError', ':', 'self', '.', 'resolved_node_modules', '=', '[', ']', 'raise', 'return', 'self', '.', 'resolved_node_modules'] | import the modules specified in init | ['import', 'the', 'modules', 'specified', 'in', 'init'] | train | https://github.com/BrianHicks/emit/blob/19a86c2392b136c9e857000798ccaa525aa0ed84/emit/router/core.py#L161-L173 |
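A sketch of the lazy-import behaviour above; the Router constructor arguments are assumptions:

router = Router(node_modules=['.tasks', '.audit'], node_package='myapp')
mods = router.resolve_node_modules()          # imports the modules once
assert mods is router.resolve_node_modules()  # later calls return the cached list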
1,827 | pazz/alot | alot/ui.py | UI.choice | def choice(self, message, choices=None, select=None, cancel=None,
msg_position='above', choices_to_return=None):
"""
prompt user to make a choice.
:param message: string to display before list of choices
:type message: unicode
:param choices: dict of possible choices
:type choices: dict: keymap->choice (both str)
:param choices_to_return: dict of possible choices to return for the
choices of the `choices` parameter
:type choices_to_return: dict: keymap->choice (key is str and value is any obj)
:param select: choice to return if enter/return is hit. Ignored if set
to `None`.
:type select: str
:param cancel: choice to return if escape is hit. Ignored if set to
`None`.
:type cancel: str
:param msg_position: determines if `message` is above or left of the
prompt. Must be `above` or `left`.
:type msg_position: str
:rtype: asyncio.Future
"""
choices = choices or {'y': 'yes', 'n': 'no'}
assert select is None or select in choices.values()
assert cancel is None or cancel in choices.values()
assert msg_position in ['left', 'above']
fut = asyncio.get_event_loop().create_future() # Create a returned future
oldroot = self.mainloop.widget
def select_or_cancel(text):
"""Restore the main screen and invoce the callback (delayed return)
with the given text."""
self.mainloop.widget = oldroot
self._passall = False
fut.set_result(text)
# set up widgets
msgpart = urwid.Text(message)
choicespart = ChoiceWidget(choices,
choices_to_return=choices_to_return,
callback=select_or_cancel, select=select,
cancel=cancel)
# build widget
if msg_position == 'left':
both = urwid.Columns(
[
('fixed', len(message), msgpart),
('weight', 1, choicespart),
], dividechars=1)
else: # above
both = urwid.Pile([msgpart, choicespart])
att = settings.get_theming_attribute('global', 'prompt')
both = urwid.AttrMap(both, att, att)
# put promptwidget as overlay on main widget
overlay = urwid.Overlay(both, oldroot,
('fixed left', 0),
('fixed right', 0),
('fixed bottom', 1),
None)
self.mainloop.widget = overlay
self._passall = True
return fut | python | def choice(self, message, choices=None, select=None, cancel=None,
msg_position='above', choices_to_return=None):
"""
prompt user to make a choice.
:param message: string to display before list of choices
:type message: unicode
:param choices: dict of possible choices
:type choices: dict: keymap->choice (both str)
:param choices_to_return: dict of possible choices to return for the
choices of the `choices` parameter
:type choices_to_return: dict: keymap->choice (key is str and value is any obj)
:param select: choice to return if enter/return is hit. Ignored if set
to `None`.
:type select: str
:param cancel: choice to return if escape is hit. Ignored if set to
`None`.
:type cancel: str
:param msg_position: determines if `message` is above or left of the
prompt. Must be `above` or `left`.
:type msg_position: str
:rtype: asyncio.Future
"""
choices = choices or {'y': 'yes', 'n': 'no'}
assert select is None or select in choices.values()
assert cancel is None or cancel in choices.values()
assert msg_position in ['left', 'above']
fut = asyncio.get_event_loop().create_future() # Create a returned future
oldroot = self.mainloop.widget
def select_or_cancel(text):
"""Restore the main screen and invoce the callback (delayed return)
with the given text."""
self.mainloop.widget = oldroot
self._passall = False
fut.set_result(text)
# set up widgets
msgpart = urwid.Text(message)
choicespart = ChoiceWidget(choices,
choices_to_return=choices_to_return,
callback=select_or_cancel, select=select,
cancel=cancel)
# build widget
if msg_position == 'left':
both = urwid.Columns(
[
('fixed', len(message), msgpart),
('weight', 1, choicespart),
], dividechars=1)
else: # above
both = urwid.Pile([msgpart, choicespart])
att = settings.get_theming_attribute('global', 'prompt')
both = urwid.AttrMap(both, att, att)
# put promptwidget as overlay on main widget
overlay = urwid.Overlay(both, oldroot,
('fixed left', 0),
('fixed right', 0),
('fixed bottom', 1),
None)
self.mainloop.widget = overlay
self._passall = True
return fut | ['def', 'choice', '(', 'self', ',', 'message', ',', 'choices', '=', 'None', ',', 'select', '=', 'None', ',', 'cancel', '=', 'None', ',', 'msg_position', '=', "'above'", ',', 'choices_to_return', '=', 'None', ')', ':', 'choices', '=', 'choices', 'or', '{', "'y'", ':', "'yes'", ',', "'n'", ':', "'no'", '}', 'assert', 'select', 'is', 'None', 'or', 'select', 'in', 'choices', '.', 'values', '(', ')', 'assert', 'cancel', 'is', 'None', 'or', 'cancel', 'in', 'choices', '.', 'values', '(', ')', 'assert', 'msg_position', 'in', '[', "'left'", ',', "'above'", ']', 'fut', '=', 'asyncio', '.', 'get_event_loop', '(', ')', '.', 'create_future', '(', ')', '# Create a returned future', 'oldroot', '=', 'self', '.', 'mainloop', '.', 'widget', 'def', 'select_or_cancel', '(', 'text', ')', ':', '"""Restore the main screen and invoce the callback (delayed return)\n with the given text."""', 'self', '.', 'mainloop', '.', 'widget', '=', 'oldroot', 'self', '.', '_passall', '=', 'False', 'fut', '.', 'set_result', '(', 'text', ')', '# set up widgets', 'msgpart', '=', 'urwid', '.', 'Text', '(', 'message', ')', 'choicespart', '=', 'ChoiceWidget', '(', 'choices', ',', 'choices_to_return', '=', 'choices_to_return', ',', 'callback', '=', 'select_or_cancel', ',', 'select', '=', 'select', ',', 'cancel', '=', 'cancel', ')', '# build widget', 'if', 'msg_position', '==', "'left'", ':', 'both', '=', 'urwid', '.', 'Columns', '(', '[', '(', "'fixed'", ',', 'len', '(', 'message', ')', ',', 'msgpart', ')', ',', '(', "'weight'", ',', '1', ',', 'choicespart', ')', ',', ']', ',', 'dividechars', '=', '1', ')', 'else', ':', '# above', 'both', '=', 'urwid', '.', 'Pile', '(', '[', 'msgpart', ',', 'choicespart', ']', ')', 'att', '=', 'settings', '.', 'get_theming_attribute', '(', "'global'", ',', "'prompt'", ')', 'both', '=', 'urwid', '.', 'AttrMap', '(', 'both', ',', 'att', ',', 'att', ')', '# put promptwidget as overlay on main widget', 'overlay', '=', 'urwid', '.', 'Overlay', '(', 'both', ',', 'oldroot', ',', '(', "'fixed left'", ',', '0', ')', ',', '(', "'fixed right'", ',', '0', ')', ',', '(', "'fixed bottom'", ',', '1', ')', ',', 'None', ')', 'self', '.', 'mainloop', '.', 'widget', '=', 'overlay', 'self', '.', '_passall', '=', 'True', 'return', 'fut'] | prompt user to make a choice.
:param message: string to display before list of choices
:type message: unicode
:param choices: dict of possible choices
:type choices: dict: keymap->choice (both str)
:param choices_to_return: dict of possible choices to return for the
choices of the `choices` parameter
:type choices_to_return: dict: keymap->choice (key is str and value is any obj)
:param select: choice to return if enter/return is hit. Ignored if set
to `None`.
:type select: str
:param cancel: choice to return if escape is hit. Ignored if set to
`None`.
:type cancel: str
:param msg_position: determines if `message` is above or left of the
prompt. Must be `above` or `left`.
:type msg_position: str
:rtype: asyncio.Future | ['prompt', 'user', 'to', 'make', 'a', 'choice', '.'] | train | https://github.com/pazz/alot/blob/d0297605c0ec1c6b65f541d0fd5b69ac5a0f4ded/alot/ui.py#L514-L579 |
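A hedged usage sketch, called from within a coroutine per the asyncio.Future return type above; ui is an existing UI instance:

answer = await ui.choice('Really quit?', select='yes', cancel='no')
if answer == 'yes':
    shutdown()  # hypothetical follow-up action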
1,828 | widdowquinn/pyani | pyani/pyani_graphics.py | clean_axis | def clean_axis(axis):
"""Remove ticks, tick labels, and frame from axis"""
axis.get_xaxis().set_ticks([])
axis.get_yaxis().set_ticks([])
for spine in list(axis.spines.values()):
spine.set_visible(False) | python | def clean_axis(axis):
"""Remove ticks, tick labels, and frame from axis"""
axis.get_xaxis().set_ticks([])
axis.get_yaxis().set_ticks([])
for spine in list(axis.spines.values()):
spine.set_visible(False) | ['def', 'clean_axis', '(', 'axis', ')', ':', 'axis', '.', 'get_xaxis', '(', ')', '.', 'set_ticks', '(', '[', ']', ')', 'axis', '.', 'get_yaxis', '(', ')', '.', 'set_ticks', '(', '[', ']', ')', 'for', 'spine', 'in', 'list', '(', 'axis', '.', 'spines', '.', 'values', '(', ')', ')', ':', 'spine', '.', 'set_visible', '(', 'False', ')'] | Remove ticks, tick labels, and frame from axis | ['Remove', 'ticks', 'tick', 'labels', 'and', 'frame', 'from', 'axis'] | train | https://github.com/widdowquinn/pyani/blob/2b24ec971401e04024bba896e4011984fe3f53f0/pyani/pyani_graphics.py#L63-L68 |
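Minimal matplotlib usage for clean_axis above:

import matplotlib.pyplot as plt
fig, ax = plt.subplots()
clean_axis(ax)  # strips ticks, tick labels, and the surrounding frame from ax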
1,829 | sernst/cauldron | cauldron/session/projects/steps.py | ProjectStep.get_dom | def get_dom(self) -> str:
""" Retrieves the current value of the DOM for the step """
if self.is_running:
return self.dumps()
if self.dom is not None:
return self.dom
dom = self.dumps()
self.dom = dom
return dom | python | def get_dom(self) -> str:
""" Retrieves the current value of the DOM for the step """
if self.is_running:
return self.dumps()
if self.dom is not None:
return self.dom
dom = self.dumps()
self.dom = dom
return dom | ['def', 'get_dom', '(', 'self', ')', '->', 'str', ':', 'if', 'self', '.', 'is_running', ':', 'return', 'self', '.', 'dumps', '(', ')', 'if', 'self', '.', 'dom', 'is', 'not', 'None', ':', 'return', 'self', '.', 'dom', 'dom', '=', 'self', '.', 'dumps', '(', ')', 'self', '.', 'dom', '=', 'dom', 'return', 'dom'] | Retrieves the current value of the DOM for the step | ['Retrieves', 'the', 'current', 'value', 'of', 'the', 'DOM', 'for', 'the', 'step'] | train | https://github.com/sernst/cauldron/blob/4086aec9c038c402ea212c79fe8bd0d27104f9cf/cauldron/session/projects/steps.py#L179-L190 |
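A sketch of the caching contract above; step is an assumed ProjectStep instance that is not running:

html = step.get_dom()          # renders via dumps() and caches the result
assert html == step.get_dom()  # subsequent calls return the cached DOM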
1,830 | juju/charm-helpers | charmhelpers/cli/__init__.py | OutputFormatter.tab | def tab(self, output):
"""Output data in excel-compatible tab-delimited format"""
import csv
csvwriter = csv.writer(self.outfile, dialect=csv.excel_tab)
csvwriter.writerows(output) | python | def tab(self, output):
"""Output data in excel-compatible tab-delimited format"""
import csv
csvwriter = csv.writer(self.outfile, dialect=csv.excel_tab)
csvwriter.writerows(output) | ['def', 'tab', '(', 'self', ',', 'output', ')', ':', 'import', 'csv', 'csvwriter', '=', 'csv', '.', 'writer', '(', 'self', '.', 'outfile', ',', 'dialect', '=', 'csv', '.', 'excel_tab', ')', 'csvwriter', '.', 'writerows', '(', 'output', ')'] | Output data in excel-compatible tab-delimited format | ['Output', 'data', 'in', 'excel', '-', 'compatible', 'tab', '-', 'delimited', 'format'] | train | https://github.com/juju/charm-helpers/blob/aa785c40c3b7a8c69dbfbc7921d6b9f30142e171/charmhelpers/cli/__init__.py#L81-L85 |
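A standalone sketch of the excel_tab dialect used above:

import csv, sys
writer = csv.writer(sys.stdout, dialect=csv.excel_tab)
writer.writerows([('name', 'value'), ('alpha', 1)])  # tab-delimited rows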
1,831 | PMBio/limix-backup | limix/deprecated/archive/qtl_old.py | interact_GxG | def interact_GxG(pheno,snps1,snps2=None,K=None,covs=None):
"""
Epistasis test between two sets of SNPs
Args:
pheno: [N x 1] SP.array of 1 phenotype for N individuals
snps1: [N x S1] SP.array of S1 SNPs for N individuals
snps2: [N x S2] SP.array of S2 SNPs for N individuals
K: [N x N] SP.array of LMM-covariance/kinship coefficients (optional)
If not provided, then linear regression analysis is performed
covs: [N x D] SP.array of D covariates for N individuals
Returns:
pv: [S2 x S1] SP.array of P values for epistasis tests between all SNPs in
snps1 and snps2
"""
N=snps1.shape[0]
if K is None:
K=SP.eye(N)
if snps2 is None:
snps2 = snps1
return interact_GxE(snps=snps1,pheno=pheno,env=snps2,covs=covs,K=K) | python | def interact_GxG(pheno,snps1,snps2=None,K=None,covs=None):
"""
Epistasis test between two sets of SNPs
Args:
pheno: [N x 1] SP.array of 1 phenotype for N individuals
snps1: [N x S1] SP.array of S1 SNPs for N individuals
snps2: [N x S2] SP.array of S2 SNPs for N individuals
K: [N x N] SP.array of LMM-covariance/kinship coefficients (optional)
If not provided, then linear regression analysis is performed
covs: [N x D] SP.array of D covariates for N individuals
Returns:
pv: [S2 x S1] SP.array of P values for epistasis tests between all SNPs in
snps1 and snps2
"""
N=snps1.shape[0]
if K is None:
K=SP.eye(N)
if snps2 is None:
snps2 = snps1
return interact_GxE(snps=snps1,pheno=pheno,env=snps2,covs=covs,K=K) | ['def', 'interact_GxG', '(', 'pheno', ',', 'snps1', ',', 'snps2', '=', 'None', ',', 'K', '=', 'None', ',', 'covs', '=', 'None', ')', ':', 'N', '=', 'snps1', '.', 'shape', '[', '0', ']', 'if', 'K', 'is', 'None', ':', 'K', '=', 'SP', '.', 'eye', '(', 'N', ')', 'if', 'snps2', 'is', 'None', ':', 'snps2', '=', 'snps1', 'return', 'interact_GxE', '(', 'snps', '=', 'snps1', ',', 'pheno', '=', 'pheno', ',', 'env', '=', 'snps2', ',', 'covs', '=', 'covs', ',', 'K', '=', 'K', ')'] | Epistasis test between two sets of SNPs
Args:
pheno: [N x 1] SP.array of 1 phenotype for N individuals
snps1: [N x S1] SP.array of S1 SNPs for N individuals
snps2: [N x S2] SP.array of S2 SNPs for N individuals
K: [N x N] SP.array of LMM-covariance/kinship coefficients (optional)
If not provided, then linear regression analysis is performed
covs: [N x D] SP.array of D covariates for N individuals
Returns:
pv: [S2 x S1] SP.array of P values for epistasis tests between all SNPs in
snps1 and snps2 | ['Epistasis', 'test', 'between', 'two', 'sets', 'of', 'SNPs'] | train | https://github.com/PMBio/limix-backup/blob/1e201fdb5c694d0d5506f207f3de65d8ef66146c/limix/deprecated/archive/qtl_old.py#L488-L509 |
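A hedged call sketch for interact_GxG above; the arrays are random placeholders shaped per the docstring:

import numpy as np
pheno = np.random.randn(100, 1)  # N=100 individuals, 1 phenotype
snps = np.random.randn(100, 10)  # S1=10 SNPs
pv = interact_GxG(pheno, snps, K=np.eye(100))  # [10 x 10] epistasis p-values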
1,832 | noirbizarre/django-eztables | eztables/views.py | DatatablesView.get_page | def get_page(self, form):
'''Get the requested page'''
page_size = form.cleaned_data['iDisplayLength']
start_index = form.cleaned_data['iDisplayStart']
paginator = Paginator(self.object_list, page_size)
num_page = (start_index / page_size) + 1
return paginator.page(num_page) | python | def get_page(self, form):
'''Get the requested page'''
page_size = form.cleaned_data['iDisplayLength']
start_index = form.cleaned_data['iDisplayStart']
paginator = Paginator(self.object_list, page_size)
num_page = (start_index / page_size) + 1
return paginator.page(num_page) | ['def', 'get_page', '(', 'self', ',', 'form', ')', ':', 'page_size', '=', 'form', '.', 'cleaned_data', '[', "'iDisplayLength'", ']', 'start_index', '=', 'form', '.', 'cleaned_data', '[', "'iDisplayStart'", ']', 'paginator', '=', 'Paginator', '(', 'self', '.', 'object_list', ',', 'page_size', ')', 'num_page', '=', '(', 'start_index', '/', 'page_size', ')', '+', '1', 'return', 'paginator', '.', 'page', '(', 'num_page', ')'] | Get the requested page | ['Get', 'the', 'requested', 'page'] | train | https://github.com/noirbizarre/django-eztables/blob/347e74dcc08121d20f4cf942181d873dbe33b995/eztables/views.py#L179-L185 |
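The pagination arithmetic above, shown standalone with Django's Paginator; the data is a placeholder:

from django.core.paginator import Paginator
paginator = Paginator(range(100), 10)  # 100 rows, 10 per page
page = paginator.page((30 // 10) + 1)  # iDisplayStart=30 maps to page 4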
1,833 | FactoryBoy/factory_boy | factory/base.py | BaseFactory.attributes | def attributes(cls, create=False, extra=None):
"""Build a dict of attribute values, respecting declaration order.
The process is:
- Handle 'orderless' attributes, overriding defaults with provided
kwargs when applicable
- Handle ordered attributes, overriding them with provided kwargs when
applicable; the current list of computed attributes is available
to the currently processed object.
"""
warnings.warn(
"Usage of Factory.attributes() is deprecated.",
DeprecationWarning,
stacklevel=2,
)
declarations = cls._meta.pre_declarations.as_dict()
declarations.update(extra or {})
from . import helpers
return helpers.make_factory(dict, **declarations) | python | def attributes(cls, create=False, extra=None):
"""Build a dict of attribute values, respecting declaration order.
The process is:
- Handle 'orderless' attributes, overriding defaults with provided
kwargs when applicable
- Handle ordered attributes, overriding them with provided kwargs when
applicable; the current list of computed attributes is available
to the currently processed object.
"""
warnings.warn(
"Usage of Factory.attributes() is deprecated.",
DeprecationWarning,
stacklevel=2,
)
declarations = cls._meta.pre_declarations.as_dict()
declarations.update(extra or {})
from . import helpers
return helpers.make_factory(dict, **declarations) | ['def', 'attributes', '(', 'cls', ',', 'create', '=', 'False', ',', 'extra', '=', 'None', ')', ':', 'warnings', '.', 'warn', '(', '"Usage of Factory.attributes() is deprecated."', ',', 'DeprecationWarning', ',', 'stacklevel', '=', '2', ',', ')', 'declarations', '=', 'cls', '.', '_meta', '.', 'pre_declarations', '.', 'as_dict', '(', ')', 'declarations', '.', 'update', '(', 'extra', 'or', '{', '}', ')', 'from', '.', 'import', 'helpers', 'return', 'helpers', '.', 'make_factory', '(', 'dict', ',', '*', '*', 'declarations', ')'] | Build a dict of attribute values, respecting declaration order.
The process is:
- Handle 'orderless' attributes, overriding defaults with provided
kwargs when applicable
- Handle ordered attributes, overriding them with provided kwargs when
applicable; the current list of computed attributes is available
to the currently processed object. | ['Build', 'a', 'dict', 'of', 'attribute', 'values', 'respecting', 'declaration', 'order', '.'] | train | https://github.com/FactoryBoy/factory_boy/blob/edaa7c7f5a14065b229927903bd7989cc93cd069/factory/base.py#L444-L462 |
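A sketch of the deprecated call path above; UserFactory is a hypothetical factory subclass:

attrs_factory = UserFactory.attributes(extra={'name': 'demo'})
# Emits a DeprecationWarning and returns a dict-building factory, per the code above.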
1,834 | AtteqCom/zsl | src/zsl/resource/json_server_resource.py | JsonServerResource.read | def read(self, params, args, data):
"""Modifies the parameters and adds metadata for read results."""
result_count = None
result_links = None
if params is None:
params = []
if args:
args = args.copy()
else:
args = {}
ctx = self._create_context(params, args, data)
row_id = ctx.get_row_id()
if not row_id:
self._transform_list_args(args)
if 'page' in args or 'limit' in args:
ctx = self._create_context(params, args, data)
result_count = self._get_collection_count(ctx)
if 'page' in args:
result_links = _get_link_pages(
page=args['page'],
per_page=int(args['limit']),
count=result_count,
page_url=request.url
)
if 'limit' not in args:
args['limit'] = 'unlimited'
self._create_related(args)
try:
return ResourceResult(
body=super(JsonServerResource, self).read(params, args, data),
count=result_count,
links=result_links
)
except NoResultFound:
return NOT_FOUND | python | def read(self, params, args, data):
"""Modifies the parameters and adds metadata for read results."""
result_count = None
result_links = None
if params is None:
params = []
if args:
args = args.copy()
else:
args = {}
ctx = self._create_context(params, args, data)
row_id = ctx.get_row_id()
if not row_id:
self._transform_list_args(args)
if 'page' in args or 'limit' in args:
ctx = self._create_context(params, args, data)
result_count = self._get_collection_count(ctx)
if 'page' in args:
result_links = _get_link_pages(
page=args['page'],
per_page=int(args['limit']),
count=result_count,
page_url=request.url
)
if 'limit' not in args:
args['limit'] = 'unlimited'
self._create_related(args)
try:
return ResourceResult(
body=super(JsonServerResource, self).read(params, args, data),
count=result_count,
links=result_links
)
except NoResultFound:
return NOT_FOUND | ['def', 'read', '(', 'self', ',', 'params', ',', 'args', ',', 'data', ')', ':', 'result_count', '=', 'None', 'result_links', '=', 'None', 'if', 'params', 'is', 'None', ':', 'params', '=', '[', ']', 'if', 'args', ':', 'args', '=', 'args', '.', 'copy', '(', ')', 'else', ':', 'args', '=', '{', '}', 'ctx', '=', 'self', '.', '_create_context', '(', 'params', ',', 'args', ',', 'data', ')', 'row_id', '=', 'ctx', '.', 'get_row_id', '(', ')', 'if', 'not', 'row_id', ':', 'self', '.', '_transform_list_args', '(', 'args', ')', 'if', "'page'", 'in', 'args', 'or', "'limit'", 'in', 'args', ':', 'ctx', '=', 'self', '.', '_create_context', '(', 'params', ',', 'args', ',', 'data', ')', 'result_count', '=', 'self', '.', '_get_collection_count', '(', 'ctx', ')', 'if', "'page'", 'in', 'args', ':', 'result_links', '=', '_get_link_pages', '(', 'page', '=', 'args', '[', "'page'", ']', ',', 'per_page', '=', 'int', '(', 'args', '[', "'limit'", ']', ')', ',', 'count', '=', 'result_count', ',', 'page_url', '=', 'request', '.', 'url', ')', 'if', "'limit'", 'not', 'in', 'args', ':', 'args', '[', "'limit'", ']', '=', "'unlimited'", 'self', '.', '_create_related', '(', 'args', ')', 'try', ':', 'return', 'ResourceResult', '(', 'body', '=', 'super', '(', 'JsonServerResource', ',', 'self', ')', '.', 'read', '(', 'params', ',', 'args', ',', 'data', ')', ',', 'count', '=', 'result_count', ',', 'links', '=', 'result_links', ')', 'except', 'NoResultFound', ':', 'return', 'NOT_FOUND'] | Modifies the parameters and adds metadata for read results. | ['Modifies', 'the', 'parameters', 'and', 'adds', 'metadata', 'for', 'read', 'results', '.'] | train | https://github.com/AtteqCom/zsl/blob/ab51a96da1780ff642912396d4b85bdcb72560c1/src/zsl/resource/json_server_resource.py#L188-L231 |
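A dependency-free sketch of the argument handling above: the incoming dict is copied before mutation and an explicit 'unlimited' default is applied when no limit was requested (the helper name is illustrative):

```python
def normalise(args):
    args = args.copy() if args else {}
    if 'limit' not in args:
        args['limit'] = 'unlimited'
    return args

original = {'page': 1}
assert normalise(original) == {'page': 1, 'limit': 'unlimited'}
assert original == {'page': 1}  # the caller's dict is left untouched
```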
1,835 | JNRowe/upoints | upoints/osm.py | _get_flags | def _get_flags(osm_obj):
"""Create element independent flags output.
Args:
osm_obj (Node): Object with OSM-style metadata
Returns:
list: Human readable flags output
"""
flags = []
if osm_obj.visible:
flags.append('visible')
if osm_obj.user:
flags.append('user: %s' % osm_obj.user)
if osm_obj.timestamp:
flags.append('timestamp: %s' % osm_obj.timestamp.isoformat())
if osm_obj.tags:
flags.append(', '.join('%s: %s' % (k, v)
for k, v in sorted(osm_obj.tags.items())))
return flags | python | def _get_flags(osm_obj):
"""Create element independent flags output.
Args:
osm_obj (Node): Object with OSM-style metadata
Returns:
list: Human readable flags output
"""
flags = []
if osm_obj.visible:
flags.append('visible')
if osm_obj.user:
flags.append('user: %s' % osm_obj.user)
if osm_obj.timestamp:
flags.append('timestamp: %s' % osm_obj.timestamp.isoformat())
if osm_obj.tags:
flags.append(', '.join('%s: %s' % (k, v)
for k, v in sorted(osm_obj.tags.items())))
return flags | ['def', '_get_flags', '(', 'osm_obj', ')', ':', 'flags', '=', '[', ']', 'if', 'osm_obj', '.', 'visible', ':', 'flags', '.', 'append', '(', "'visible'", ')', 'if', 'osm_obj', '.', 'user', ':', 'flags', '.', 'append', '(', "'user: %s'", '%', 'osm_obj', '.', 'user', ')', 'if', 'osm_obj', '.', 'timestamp', ':', 'flags', '.', 'append', '(', "'timestamp: %s'", '%', 'osm_obj', '.', 'timestamp', '.', 'isoformat', '(', ')', ')', 'if', 'osm_obj', '.', 'tags', ':', 'flags', '.', 'append', '(', "', '", '.', 'join', '(', "'%s: %s'", '%', '(', 'k', ',', 'v', ')', 'for', 'k', ',', 'v', 'in', 'sorted', '(', 'osm_obj', '.', 'tags', '.', 'items', '(', ')', ')', ')', ')', 'return', 'flags'] | Create element independent flags output.
Args:
osm_obj (Node): Object with OSM-style metadata
Returns:
list: Human readable flags output | ['Create', 'element', 'independent', 'flags', 'output', '.'] | train | https://github.com/JNRowe/upoints/blob/1e4b7a53ed2a06cd854523d54c36aabdccea3830/upoints/osm.py#L62-L81 |
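A worked example with a stand-in object, since any object carrying the four OSM-style attributes will do (the values are made up):

```python
import datetime
from types import SimpleNamespace

node = SimpleNamespace(visible=True, user='jnrowe',
                       timestamp=datetime.datetime(2008, 1, 25),
                       tags={'name': 'Cambridge'})
# _get_flags(node) should yield:
# ['visible', 'user: jnrowe', 'timestamp: 2008-01-25T00:00:00', 'name: Cambridge']
```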
1,836 | cytoscape/py2cytoscape | py2cytoscape/cyrest/networks.py | networks.deleteNode | def deleteNode(self, networkId, nodeId, verbose=None):
"""
Deletes the node specified by the `nodeId` and `networkId` parameters.
:param networkId: SUID of the network containing the node.
:param nodeId: SUID of the node
:param verbose: print more
:returns: default: successful operation
"""
response=api(url=self.___url+'networks/'+str(networkId)+'/nodes/'+str(nodeId)+'', method="DELETE", verbose=verbose)
return response | python | def deleteNode(self, networkId, nodeId, verbose=None):
"""
Deletes the node specified by the `nodeId` and `networkId` parameters.
:param networkId: SUID of the network containing the node.
:param nodeId: SUID of the node
:param verbose: print more
:returns: default: successful operation
"""
response=api(url=self.___url+'networks/'+str(networkId)+'/nodes/'+str(nodeId)+'', method="DELETE", verbose=verbose)
return response | ['def', 'deleteNode', '(', 'self', ',', 'networkId', ',', 'nodeId', ',', 'verbose', '=', 'None', ')', ':', 'response', '=', 'api', '(', 'url', '=', 'self', '.', '___url', '+', "'networks/'", '+', 'str', '(', 'networkId', ')', '+', "'/nodes/'", '+', 'str', '(', 'nodeId', ')', '+', "''", ',', 'method', '=', '"DELETE"', ',', 'verbose', '=', 'verbose', ')', 'return', 'response'] | Deletes the node specified by the `nodeId` and `networkId` parameters.
:param networkId: SUID of the network containing the node.
:param nodeId: SUID of the node
:param verbose: print more
:returns: default: successful operation | ['Deletes', 'the', 'node', 'specified', 'by', 'the', 'nodeId', 'and', 'networkId', 'parameters', '.'] | train | https://github.com/cytoscape/py2cytoscape/blob/dd34de8d028f512314d0057168df7fef7c5d5195/py2cytoscape/cyrest/networks.py#L744-L756 |
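The wrapper above reduces to a single REST call; a raw equivalent with `requests` might look like this (the cyREST base URL and the SUIDs are assumptions):

```python
import requests

base = 'http://localhost:1234/v1/'  # assumed default cyREST endpoint
resp = requests.delete(base + 'networks/52/nodes/104')
resp.raise_for_status()
```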
1,837 | cqparts/cqparts | src/cqparts_fasteners/solidtypes/fastener_heads/base.py | FastenerHead.make_cutter | def make_cutter(self):
"""
Create solid to subtract from material to make way for the fastener's
head (just the head)
"""
return cadquery.Workplane('XY') \
.circle(self.access_diameter / 2) \
.extrude(self.access_height) | python | def make_cutter(self):
"""
Create solid to subtract from material to make way for the fastener's
head (just the head)
"""
return cadquery.Workplane('XY') \
.circle(self.access_diameter / 2) \
.extrude(self.access_height) | ['def', 'make_cutter', '(', 'self', ')', ':', 'return', 'cadquery', '.', 'Workplane', '(', "'XY'", ')', '.', 'circle', '(', 'self', '.', 'access_diameter', '/', '2', ')', '.', 'extrude', '(', 'self', '.', 'access_height', ')'] | Create solid to subtract from material to make way for the fastener's
head (just the head) | ['Create', 'solid', 'to', 'subtract', 'from', 'material', 'to', 'make', 'way', 'for', 'the', 'fastener', 's', 'head', '(', 'just', 'the', 'head', ')'] | train | https://github.com/cqparts/cqparts/blob/018e87e14c2c4d1d40b4bfe6a7e22bcf9baf0a53/src/cqparts_fasteners/solidtypes/fastener_heads/base.py#L26-L33 |
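The same geometry as a standalone cadquery snippet, with made-up dimensions standing in for `access_diameter` and `access_height`:

```python
import cadquery

access_diameter, access_height = 6.0, 10.0  # assumed values
cutter = (cadquery.Workplane('XY')
          .circle(access_diameter / 2)
          .extrude(access_height))
```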
1,838 | azraq27/neural | neural/freesurfer.py | mgz_to_nifti | def mgz_to_nifti(filename,prefix=None,gzip=True):
'''Convert ``filename`` to a NIFTI file using ``mri_convert``'''
setup_freesurfer()
if prefix==None:
prefix = nl.prefix(filename) + '.nii'
if gzip and not prefix.endswith('.gz'):
prefix += '.gz'
nl.run([os.path.join(freesurfer_home,'bin','mri_convert'),filename,prefix],products=prefix) | python | def mgz_to_nifti(filename,prefix=None,gzip=True):
'''Convert ``filename`` to a NIFTI file using ``mri_convert``'''
setup_freesurfer()
if prefix==None:
prefix = nl.prefix(filename) + '.nii'
if gzip and not prefix.endswith('.gz'):
prefix += '.gz'
nl.run([os.path.join(freesurfer_home,'bin','mri_convert'),filename,prefix],products=prefix) | ['def', 'mgz_to_nifti', '(', 'filename', ',', 'prefix', '=', 'None', ',', 'gzip', '=', 'True', ')', ':', 'setup_freesurfer', '(', ')', 'if', 'prefix', '==', 'None', ':', 'prefix', '=', 'nl', '.', 'prefix', '(', 'filename', ')', '+', "'.nii'", 'if', 'gzip', 'and', 'not', 'prefix', '.', 'endswith', '(', "'.gz'", ')', ':', 'prefix', '+=', "'.gz'", 'nl', '.', 'run', '(', '[', 'os', '.', 'path', '.', 'join', '(', 'freesurfer_home', ',', "'bin'", ',', "'mri_convert'", ')', ',', 'filename', ',', 'prefix', ']', ',', 'products', '=', 'prefix', ')'] | Convert ``filename`` to a NIFTI file using ``mri_convert`` | ['Convert', 'filename', 'to', 'a', 'NIFTI', 'file', 'using', 'mri_convert'] | train | https://github.com/azraq27/neural/blob/fe91bfeecbf73ad99708cf5dca66cb61fcd529f5/neural/freesurfer.py#L34-L41 |
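A FreeSurfer-free sketch of the output-name logic above, where `rsplit` stands in for `nl.prefix()`:

```python
def output_name(filename, prefix=None, gzip=True):
    if prefix is None:
        prefix = filename.rsplit('.', 1)[0] + '.nii'  # stand-in for nl.prefix()
    if gzip and not prefix.endswith('.gz'):
        prefix += '.gz'
    return prefix

assert output_name('brain.mgz') == 'brain.nii.gz'
assert output_name('brain.mgz', gzip=False) == 'brain.nii'
```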
1,839 | SatelliteQE/nailgun | nailgun/entity_mixins.py | EntityReadMixin.read | def read(self, entity=None, attrs=None, ignore=None, params=None):
"""Get information about the current entity.
1. Create a new entity of type ``type(self)``.
2. Call :meth:`read_json` and capture the response.
3. Populate the entity with the response.
4. Return the entity.
Step one is skipped if the ``entity`` argument is specified. Step two
is skipped if the ``attrs`` argument is specified. Step three is
modified by the ``ignore`` argument.
All of an entity's one-to-one and one-to-many relationships are
populated with objects of the correct type. For example, if
``SomeEntity.other_entity`` is a one-to-one relationship, this should
return ``True``::
isinstance(
SomeEntity(id=N).read().other_entity,
nailgun.entity_mixins.Entity
)
Additionally, both of these commands should succeed::
SomeEntity(id=N).read().other_entity.id
SomeEntity(id=N).read().other_entity.read().other_attr
In the example above, ``other_entity.id`` is the **only** attribute
with a meaningful value. Calling ``other_entity.read`` populates the
remaining entity attributes.
:param nailgun.entity_mixins.Entity entity: The object to be populated
and returned. An object of type ``type(self)`` by default.
:param attrs: A dict. Data used to populate the object's attributes.
The response from
:meth:`nailgun.entity_mixins.EntityReadMixin.read_json` by default.
:param ignore: A set of attributes which should not be read from the
server. This is mainly useful for attributes like a password which
are not returned.
:return: An instance of type ``type(self)``.
:rtype: nailgun.entity_mixins.Entity
"""
if entity is None:
entity = type(self)(self._server_config)
if attrs is None:
attrs = self.read_json(params=params)
if ignore is None:
ignore = set()
for field_name, field in entity.get_fields().items():
if field_name in ignore:
continue
if isinstance(field, OneToOneField):
entity_id = _get_entity_id(field_name, attrs)
if entity_id is None:
referenced_entity = None
else:
referenced_entity = field.entity(
self._server_config,
id=entity_id,
)
setattr(entity, field_name, referenced_entity)
elif isinstance(field, OneToManyField):
referenced_entities = [
field.entity(self._server_config, id=entity_id)
for entity_id
in _get_entity_ids(field_name, attrs)
]
setattr(entity, field_name, referenced_entities)
else:
setattr(entity, field_name, attrs[field_name])
return entity | python | def read(self, entity=None, attrs=None, ignore=None, params=None):
"""Get information about the current entity.
1. Create a new entity of type ``type(self)``.
2. Call :meth:`read_json` and capture the response.
3. Populate the entity with the response.
4. Return the entity.
Step one is skipped if the ``entity`` argument is specified. Step two
is skipped if the ``attrs`` argument is specified. Step three is
modified by the ``ignore`` argument.
All of an entity's one-to-one and one-to-many relationships are
populated with objects of the correct type. For example, if
``SomeEntity.other_entity`` is a one-to-one relationship, this should
return ``True``::
isinstance(
SomeEntity(id=N).read().other_entity,
nailgun.entity_mixins.Entity
)
Additionally, both of these commands should succeed::
SomeEntity(id=N).read().other_entity.id
SomeEntity(id=N).read().other_entity.read().other_attr
In the example above, ``other_entity.id`` is the **only** attribute
with a meaningful value. Calling ``other_entity.read`` populates the
remaining entity attributes.
:param nailgun.entity_mixins.Entity entity: The object to be populated
and returned. An object of type ``type(self)`` by default.
:param attrs: A dict. Data used to populate the object's attributes.
The response from
:meth:`nailgun.entity_mixins.EntityReadMixin.read_json` by default.
:param ignore: A set of attributes which should not be read from the
server. This is mainly useful for attributes like a password which
are not returned.
:return: An instance of type ``type(self)``.
:rtype: nailgun.entity_mixins.Entity
"""
if entity is None:
entity = type(self)(self._server_config)
if attrs is None:
attrs = self.read_json(params=params)
if ignore is None:
ignore = set()
for field_name, field in entity.get_fields().items():
if field_name in ignore:
continue
if isinstance(field, OneToOneField):
entity_id = _get_entity_id(field_name, attrs)
if entity_id is None:
referenced_entity = None
else:
referenced_entity = field.entity(
self._server_config,
id=entity_id,
)
setattr(entity, field_name, referenced_entity)
elif isinstance(field, OneToManyField):
referenced_entities = [
field.entity(self._server_config, id=entity_id)
for entity_id
in _get_entity_ids(field_name, attrs)
]
setattr(entity, field_name, referenced_entities)
else:
setattr(entity, field_name, attrs[field_name])
return entity | ['def', 'read', '(', 'self', ',', 'entity', '=', 'None', ',', 'attrs', '=', 'None', ',', 'ignore', '=', 'None', ',', 'params', '=', 'None', ')', ':', 'if', 'entity', 'is', 'None', ':', 'entity', '=', 'type', '(', 'self', ')', '(', 'self', '.', '_server_config', ')', 'if', 'attrs', 'is', 'None', ':', 'attrs', '=', 'self', '.', 'read_json', '(', 'params', '=', 'params', ')', 'if', 'ignore', 'is', 'None', ':', 'ignore', '=', 'set', '(', ')', 'for', 'field_name', ',', 'field', 'in', 'entity', '.', 'get_fields', '(', ')', '.', 'items', '(', ')', ':', 'if', 'field_name', 'in', 'ignore', ':', 'continue', 'if', 'isinstance', '(', 'field', ',', 'OneToOneField', ')', ':', 'entity_id', '=', '_get_entity_id', '(', 'field_name', ',', 'attrs', ')', 'if', 'entity_id', 'is', 'None', ':', 'referenced_entity', '=', 'None', 'else', ':', 'referenced_entity', '=', 'field', '.', 'entity', '(', 'self', '.', '_server_config', ',', 'id', '=', 'entity_id', ',', ')', 'setattr', '(', 'entity', ',', 'field_name', ',', 'referenced_entity', ')', 'elif', 'isinstance', '(', 'field', ',', 'OneToManyField', ')', ':', 'referenced_entities', '=', '[', 'field', '.', 'entity', '(', 'self', '.', '_server_config', ',', 'id', '=', 'entity_id', ')', 'for', 'entity_id', 'in', '_get_entity_ids', '(', 'field_name', ',', 'attrs', ')', ']', 'setattr', '(', 'entity', ',', 'field_name', ',', 'referenced_entities', ')', 'else', ':', 'setattr', '(', 'entity', ',', 'field_name', ',', 'attrs', '[', 'field_name', ']', ')', 'return', 'entity'] | Get information about the current entity.
1. Create a new entity of type ``type(self)``.
2. Call :meth:`read_json` and capture the response.
3. Populate the entity with the response.
4. Return the entity.
Step one is skipped if the ``entity`` argument is specified. Step two
is skipped if the ``attrs`` argument is specified. Step three is
modified by the ``ignore`` argument.
All of an entity's one-to-one and one-to-many relationships are
populated with objects of the correct type. For example, if
``SomeEntity.other_entity`` is a one-to-one relationship, this should
return ``True``::
isinstance(
SomeEntity(id=N).read().other_entity,
nailgun.entity_mixins.Entity
)
Additionally, both of these commands should succeed::
SomeEntity(id=N).read().other_entity.id
SomeEntity(id=N).read().other_entity.read().other_attr
In the example above, ``other_entity.id`` is the **only** attribute
with a meaningful value. Calling ``other_entity.read`` populates the
remaining entity attributes.
:param nailgun.entity_mixins.Entity entity: The object to be populated
and returned. An object of type ``type(self)`` by default.
:param attrs: A dict. Data used to populate the object's attributes.
The response from
:meth:`nailgun.entity_mixins.EntityReadMixin.read_json` by default.
:param ignore: A set of attributes which should not be read from the
server. This is mainly useful for attributes like a password which
are not returned.
:return: An instance of type ``type(self)``.
:rtype: nailgun.entity_mixins.Entity | ['Get', 'information', 'about', 'the', 'current', 'entity', '.'] | train | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entity_mixins.py#L751-L823 |
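An illustrative sequence following the docstring's own example; `SomeEntity` and `N` are placeholders, so this is a sketch rather than runnable code:

```python
# entity = SomeEntity(id=N).read()
# entity.other_entity.id                  # only `id` is meaningful here
# entity.other_entity.read().other_attr   # a full read populates the rest
```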
1,840 | iskandr/fancyimpute | fancyimpute/solver.py | Solver.transform | def transform(self, X, y=None):
"""
Transform input `X`.
Note: all imputations should have a `fit_transform` method,
but only some (like IterativeImputer) also support inductive mode
using `fit` or `fit_transform` on `X_train` and then `transform`
on new `X_test`.
"""
raise ValueError(
"%s.transform not implemented! This imputation algorithm likely "
"doesn't support inductive mode. Only %s.fit_transform is "
"supported at this time." % (
self.__class__.__name__, self.__class__.__name__)) | python | def transform(self, X, y=None):
"""
Transform input `X`.
Note: all imputations should have a `fit_transform` method,
but only some (like IterativeImputer) also support inductive mode
using `fit` or `fit_transform` on `X_train` and then `transform`
on new `X_test`.
"""
raise ValueError(
"%s.transform not implemented! This imputation algorithm likely "
"doesn't support inductive mode. Only %s.fit_transform is "
"supported at this time." % (
self.__class__.__name__, self.__class__.__name__)) | ['def', 'transform', '(', 'self', ',', 'X', ',', 'y', '=', 'None', ')', ':', 'raise', 'ValueError', '(', '"%s.transform not implemented! This imputation algorithm likely "', '"doesn\'t support inductive mode. Only %s.fit_transform is "', '"supported at this time."', '%', '(', 'self', '.', '__class__', '.', '__name__', ',', 'self', '.', '__class__', '.', '__name__', ')', ')'] | Transform input `X`.
Note: all imputations should have a `fit_transform` method,
but only some (like IterativeImputer) also support inductive mode
using `fit` or `fit_transform` on `X_train` and then `transform`
on new `X_test`. | ['Transform', 'input', 'X', '.'] | train | https://github.com/iskandr/fancyimpute/blob/9f0837d387c7303d5c8c925a9989ca77a1a96e3e/fancyimpute/solver.py#L215-L228 |
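A sketch of the calling pattern the docstring describes, using `IterativeImputer` from this package (which does support inductive mode); the arrays are made up:

```python
import numpy as np
from fancyimpute import IterativeImputer

X_train = np.array([[1.0, 2.0], [np.nan, 4.0], [5.0, 6.0]])
X_test = np.array([[np.nan, 3.0]])

imputer = IterativeImputer()
imputer.fit_transform(X_train)  # inductive fit on the training data
imputer.transform(X_test)       # supported here; a plain Solver raises ValueError
```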
1,841 | IBMStreams/pypi.streamsx | streamsx/topology/topology.py | Stream.publish | def publish(self, topic, schema=None, name=None):
"""
Publish this stream on a topic for other Streams applications to subscribe to.
A Streams application may publish a stream to allow other
Streams applications to subscribe to it. A subscriber
matches a publisher if the topic and schema match.
By default a stream is published using its schema.
A stream of :py:const:`Python objects <streamsx.topology.schema.CommonSchema.Python>` can be subscribed to by other Streams Python applications.
If a stream is published setting `schema` to
:py:const:`~streamsx.topology.schema.CommonSchema.Json`
then it is published as a stream of JSON objects.
Other Streams applications may subscribe to it regardless
of their implementation language.
If a stream is published setting `schema` to
:py:const:`~streamsx.topology.schema.CommonSchema.String`
then it is published as strings
Other Streams applications may subscribe to it regardless
of their implementation language.
Supported values of `schema` are only
:py:const:`~streamsx.topology.schema.CommonSchema.Json`
and
:py:const:`~streamsx.topology.schema.CommonSchema.String`.
Args:
topic(str): Topic to publish this stream to.
schema: Schema to publish. Defaults to the schema of this stream.
name(str): Name of the publish operator, defaults to a generated name.
Returns:
streamsx.topology.topology.Sink: Stream termination.
.. versionadded:: 1.6.1 `name` parameter.
.. versionchanged:: 1.7
Now returns a :py:class:`Sink` instance.
"""
sl = _SourceLocation(_source_info(), 'publish')
schema = streamsx.topology.schema._normalize(schema)
if schema is not None and self.oport.schema.schema() != schema.schema():
nc = None
if schema == streamsx.topology.schema.CommonSchema.Json:
schema_change = self.as_json()
elif schema == streamsx.topology.schema.CommonSchema.String:
schema_change = self.as_string()
else:
raise ValueError(schema)
if self._placeable:
self._colocate(schema_change, 'publish')
sp = schema_change.publish(topic, schema=schema, name=name)
sp._op().sl = sl
return sp
_name = self.topology.graph._requested_name(name, action="publish")
# publish is never stateful
op = self.topology.graph.addOperator("com.ibm.streamsx.topology.topic::Publish", params={'topic': topic}, sl=sl, name=_name, stateful=False)
op.addInputPort(outputPort=self.oport)
op._layout_group('Publish', name if name else _name)
sink = Sink(op)
if self._placeable:
self._colocate(sink, 'publish')
return sink | python | def publish(self, topic, schema=None, name=None):
"""
Publish this stream on a topic for other Streams applications to subscribe to.
A Streams application may publish a stream to allow other
Streams applications to subscribe to it. A subscriber
matches a publisher if the topic and schema match.
By default a stream is published using its schema.
A stream of :py:const:`Python objects <streamsx.topology.schema.CommonSchema.Python>` can be subscribed to by other Streams Python applications.
If a stream is published setting `schema` to
:py:const:`~streamsx.topology.schema.CommonSchema.Json`
then it is published as a stream of JSON objects.
Other Streams applications may subscribe to it regardless
of their implementation language.
If a stream is published setting `schema` to
:py:const:`~streamsx.topology.schema.CommonSchema.String`
then it is published as strings
Other Streams applications may subscribe to it regardless
of their implementation language.
Supported values of `schema` are only
:py:const:`~streamsx.topology.schema.CommonSchema.Json`
and
:py:const:`~streamsx.topology.schema.CommonSchema.String`.
Args:
topic(str): Topic to publish this stream to.
schema: Schema to publish. Defaults to the schema of this stream.
name(str): Name of the publish operator, defaults to a generated name.
Returns:
streamsx.topology.topology.Sink: Stream termination.
.. versionadded:: 1.6.1 `name` parameter.
.. versionchanged:: 1.7
Now returns a :py:class:`Sink` instance.
"""
sl = _SourceLocation(_source_info(), 'publish')
schema = streamsx.topology.schema._normalize(schema)
if schema is not None and self.oport.schema.schema() != schema.schema():
nc = None
if schema == streamsx.topology.schema.CommonSchema.Json:
schema_change = self.as_json()
elif schema == streamsx.topology.schema.CommonSchema.String:
schema_change = self.as_string()
else:
raise ValueError(schema)
if self._placeable:
self._colocate(schema_change, 'publish')
sp = schema_change.publish(topic, schema=schema, name=name)
sp._op().sl = sl
return sp
_name = self.topology.graph._requested_name(name, action="publish")
# publish is never stateful
op = self.topology.graph.addOperator("com.ibm.streamsx.topology.topic::Publish", params={'topic': topic}, sl=sl, name=_name, stateful=False)
op.addInputPort(outputPort=self.oport)
op._layout_group('Publish', name if name else _name)
sink = Sink(op)
if self._placeable:
self._colocate(sink, 'publish')
return sink | ['def', 'publish', '(', 'self', ',', 'topic', ',', 'schema', '=', 'None', ',', 'name', '=', 'None', ')', ':', 'sl', '=', '_SourceLocation', '(', '_source_info', '(', ')', ',', "'publish'", ')', 'schema', '=', 'streamsx', '.', 'topology', '.', 'schema', '.', '_normalize', '(', 'schema', ')', 'if', 'schema', 'is', 'not', 'None', 'and', 'self', '.', 'oport', '.', 'schema', '.', 'schema', '(', ')', '!=', 'schema', '.', 'schema', '(', ')', ':', 'nc', '=', 'None', 'if', 'schema', '==', 'streamsx', '.', 'topology', '.', 'schema', '.', 'CommonSchema', '.', 'Json', ':', 'schema_change', '=', 'self', '.', 'as_json', '(', ')', 'elif', 'schema', '==', 'streamsx', '.', 'topology', '.', 'schema', '.', 'CommonSchema', '.', 'String', ':', 'schema_change', '=', 'self', '.', 'as_string', '(', ')', 'else', ':', 'raise', 'ValueError', '(', 'schema', ')', 'if', 'self', '.', '_placeable', ':', 'self', '.', '_colocate', '(', 'schema_change', ',', "'publish'", ')', 'sp', '=', 'schema_change', '.', 'publish', '(', 'topic', ',', 'schema', '=', 'schema', ',', 'name', '=', 'name', ')', 'sp', '.', '_op', '(', ')', '.', 'sl', '=', 'sl', 'return', 'sp', '_name', '=', 'self', '.', 'topology', '.', 'graph', '.', '_requested_name', '(', 'name', ',', 'action', '=', '"publish"', ')', '# publish is never stateful', 'op', '=', 'self', '.', 'topology', '.', 'graph', '.', 'addOperator', '(', '"com.ibm.streamsx.topology.topic::Publish"', ',', 'params', '=', '{', "'topic'", ':', 'topic', '}', ',', 'sl', '=', 'sl', ',', 'name', '=', '_name', ',', 'stateful', '=', 'False', ')', 'op', '.', 'addInputPort', '(', 'outputPort', '=', 'self', '.', 'oport', ')', 'op', '.', '_layout_group', '(', "'Publish'", ',', 'name', 'if', 'name', 'else', '_name', ')', 'sink', '=', 'Sink', '(', 'op', ')', 'if', 'self', '.', '_placeable', ':', 'self', '.', '_colocate', '(', 'sink', ',', "'publish'", ')', 'return', 'sink'] | Publish this stream on a topic for other Streams applications to subscribe to.
A Streams application may publish a stream to allow other
Streams applications to subscribe to it. A subscriber
matches a publisher if the topic and schema match.
By default a stream is published using its schema.
A stream of :py:const:`Python objects <streamsx.topology.schema.CommonSchema.Python>` can be subscribed to by other Streams Python applications.
If a stream is published setting `schema` to
:py:const:`~streamsx.topology.schema.CommonSchema.Json`
then it is published as a stream of JSON objects.
Other Streams applications may subscribe to it regardless
of their implementation language.
If a stream is published setting `schema` to
:py:const:`~streamsx.topology.schema.CommonSchema.String`
then it is published as strings
Other Streams applications may subscribe to it regardless
of their implementation language.
Supported values of `schema` are only
:py:const:`~streamsx.topology.schema.CommonSchema.Json`
and
:py:const:`~streamsx.topology.schema.CommonSchema.String`.
Args:
topic(str): Topic to publish this stream to.
schema: Schema to publish. Defaults to the schema of this stream.
name(str): Name of the publish operator, defaults to a generated name.
Returns:
streamsx.topology.topology.Sink: Stream termination.
.. versionadded:: 1.6.1 `name` parameter.
.. versionchanged:: 1.7
Now returns a :py:class:`Sink` instance. | ['Publish', 'this', 'stream', 'on', 'a', 'topic', 'for', 'other', 'Streams', 'applications', 'to', 'subscribe', 'to', '.', 'A', 'Streams', 'application', 'may', 'publish', 'a', 'stream', 'to', 'allow', 'other', 'Streams', 'applications', 'to', 'subscribe', 'to', 'it', '.', 'A', 'subscriber', 'matches', 'a', 'publisher', 'if', 'the', 'topic', 'and', 'schema', 'match', '.'] | train | https://github.com/IBMStreams/pypi.streamsx/blob/abd67b4757120f6f805787fba390f53e9df9cdd8/streamsx/topology/topology.py#L1601-L1667 |
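A hedged usage sketch; the topic name and the source data are made up:

```python
from streamsx.topology.topology import Topology
from streamsx.topology.schema import CommonSchema

topo = Topology('publisher')
stream = topo.source(['hello', 'world'])
stream.publish('sample/topic', schema=CommonSchema.String)
```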
1,842 | cmbruns/pyopenvr | src/openvr/__init__.py | IVRRenderModels.getRenderModelName | def getRenderModelName(self, unRenderModelIndex, pchRenderModelName, unRenderModelNameLen):
"""
Use this to get the names of available render models. Index does not correlate to a tracked device index, but
is only used for iterating over all available render models. If the index is out of range, this function will return 0.
Otherwise, it will return the size of the buffer required for the name.
"""
fn = self.function_table.getRenderModelName
result = fn(unRenderModelIndex, pchRenderModelName, unRenderModelNameLen)
return result | python | def getRenderModelName(self, unRenderModelIndex, pchRenderModelName, unRenderModelNameLen):
"""
Use this to get the names of available render models. Index does not correlate to a tracked device index, but
is only used for iterating over all available render models. If the index is out of range, this function will return 0.
Otherwise, it will return the size of the buffer required for the name.
"""
fn = self.function_table.getRenderModelName
result = fn(unRenderModelIndex, pchRenderModelName, unRenderModelNameLen)
return result | ['def', 'getRenderModelName', '(', 'self', ',', 'unRenderModelIndex', ',', 'pchRenderModelName', ',', 'unRenderModelNameLen', ')', ':', 'fn', '=', 'self', '.', 'function_table', '.', 'getRenderModelName', 'result', '=', 'fn', '(', 'unRenderModelIndex', ',', 'pchRenderModelName', ',', 'unRenderModelNameLen', ')', 'return', 'result'] | Use this to get the names of available render models. Index does not correlate to a tracked device index, but
is only used for iterating over all available render models. If the index is out of range, this function will return 0.
Otherwise, it will return the size of the buffer required for the name. | ['Use', 'this', 'to', 'get', 'the', 'names', 'of', 'available', 'render', 'models', '.', 'Index', 'does', 'not', 'correlate', 'to', 'a', 'tracked', 'device', 'index', 'but', 'is', 'only', 'used', 'for', 'iterating', 'over', 'all', 'available', 'render', 'models', '.', 'If', 'the', 'index', 'is', 'out', 'of', 'range', 'this', 'function', 'will', 'return', '0', '.', 'Otherwise', 'it', 'will', 'return', 'the', 'size', 'of', 'the', 'buffer', 'required', 'for', 'the', 'name', '.'] | train | https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L5373-L5382 |
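The usual two-call pattern for size-query APIs like this one, assuming the binding forwards a null pointer when `None` is passed and that `rm` is an already-initialized render-models interface (both are assumptions):

```python
import ctypes

needed = rm.getRenderModelName(0, None, 0)  # assumed null-pointer size query
buf = ctypes.create_string_buffer(needed)
rm.getRenderModelName(0, buf, needed)
name = buf.value.decode('utf-8')
```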
1,843 | aiogram/aiogram | aiogram/bot/bot.py | Bot.send_voice | async def send_voice(self, chat_id: typing.Union[base.Integer, base.String],
voice: typing.Union[base.InputFile, base.String],
caption: typing.Union[base.String, None] = None,
parse_mode: typing.Union[base.String, None] = None,
duration: typing.Union[base.Integer, None] = None,
disable_notification: typing.Union[base.Boolean, None] = None,
reply_to_message_id: typing.Union[base.Integer, None] = None,
reply_markup: typing.Union[types.InlineKeyboardMarkup,
types.ReplyKeyboardMarkup,
types.ReplyKeyboardRemove,
types.ForceReply, None] = None) -> types.Message:
"""
Use this method to send audio files, if you want Telegram clients to display the file
as a playable voice message.
For this to work, your audio must be in an .ogg file encoded with OPUS
(other formats may be sent as Audio or Document).
Source: https://core.telegram.org/bots/api#sendvoice
:param chat_id: Unique identifier for the target chat or username of the target channel
:type chat_id: :obj:`typing.Union[base.Integer, base.String]`
:param voice: Audio file to send
:type voice: :obj:`typing.Union[base.InputFile, base.String]`
:param caption: Voice message caption, 0-1024 characters
:type caption: :obj:`typing.Union[base.String, None]`
:param parse_mode: Send Markdown or HTML, if you want Telegram apps to show bold, italic,
fixed-width text or inline URLs in your bot's message.
:type parse_mode: :obj:`typing.Union[base.String, None]`
:param duration: Duration of the voice message in seconds
:type duration: :obj:`typing.Union[base.Integer, None]`
:param disable_notification: Sends the message silently. Users will receive a notification with no sound
:type disable_notification: :obj:`typing.Union[base.Boolean, None]`
:param reply_to_message_id: If the message is a reply, ID of the original message
:type reply_to_message_id: :obj:`typing.Union[base.Integer, None]`
:param reply_markup: Additional interface options
:type reply_markup: :obj:`typing.Union[types.InlineKeyboardMarkup,
types.ReplyKeyboardMarkup, types.ReplyKeyboardRemove, types.ForceReply, None]`
:return: On success, the sent Message is returned
:rtype: :obj:`types.Message`
"""
reply_markup = prepare_arg(reply_markup)
payload = generate_payload(**locals(), exclude=['voice'])
if self.parse_mode:
payload.setdefault('parse_mode', self.parse_mode)
files = {}
prepare_file(payload, files, 'voice', voice)
result = await self.request(api.Methods.SEND_VOICE, payload, files)
return types.Message(**result) | python | async def send_voice(self, chat_id: typing.Union[base.Integer, base.String],
voice: typing.Union[base.InputFile, base.String],
caption: typing.Union[base.String, None] = None,
parse_mode: typing.Union[base.String, None] = None,
duration: typing.Union[base.Integer, None] = None,
disable_notification: typing.Union[base.Boolean, None] = None,
reply_to_message_id: typing.Union[base.Integer, None] = None,
reply_markup: typing.Union[types.InlineKeyboardMarkup,
types.ReplyKeyboardMarkup,
types.ReplyKeyboardRemove,
types.ForceReply, None] = None) -> types.Message:
"""
Use this method to send audio files, if you want Telegram clients to display the file
as a playable voice message.
For this to work, your audio must be in an .ogg file encoded with OPUS
(other formats may be sent as Audio or Document).
Source: https://core.telegram.org/bots/api#sendvoice
:param chat_id: Unique identifier for the target chat or username of the target channel
:type chat_id: :obj:`typing.Union[base.Integer, base.String]`
:param voice: Audio file to send
:type voice: :obj:`typing.Union[base.InputFile, base.String]`
:param caption: Voice message caption, 0-1024 characters
:type caption: :obj:`typing.Union[base.String, None]`
:param parse_mode: Send Markdown or HTML, if you want Telegram apps to show bold, italic,
fixed-width text or inline URLs in your bot's message.
:type parse_mode: :obj:`typing.Union[base.String, None]`
:param duration: Duration of the voice message in seconds
:type duration: :obj:`typing.Union[base.Integer, None]`
:param disable_notification: Sends the message silently. Users will receive a notification with no sound
:type disable_notification: :obj:`typing.Union[base.Boolean, None]`
:param reply_to_message_id: If the message is a reply, ID of the original message
:type reply_to_message_id: :obj:`typing.Union[base.Integer, None]`
:param reply_markup: Additional interface options
:type reply_markup: :obj:`typing.Union[types.InlineKeyboardMarkup,
types.ReplyKeyboardMarkup, types.ReplyKeyboardRemove, types.ForceReply, None]`
:return: On success, the sent Message is returned
:rtype: :obj:`types.Message`
"""
reply_markup = prepare_arg(reply_markup)
payload = generate_payload(**locals(), exclude=['voice'])
if self.parse_mode:
payload.setdefault('parse_mode', self.parse_mode)
files = {}
prepare_file(payload, files, 'voice', voice)
result = await self.request(api.Methods.SEND_VOICE, payload, files)
return types.Message(**result) | ['async', 'def', 'send_voice', '(', 'self', ',', 'chat_id', ':', 'typing', '.', 'Union', '[', 'base', '.', 'Integer', ',', 'base', '.', 'String', ']', ',', 'voice', ':', 'typing', '.', 'Union', '[', 'base', '.', 'InputFile', ',', 'base', '.', 'String', ']', ',', 'caption', ':', 'typing', '.', 'Union', '[', 'base', '.', 'String', ',', 'None', ']', '=', 'None', ',', 'parse_mode', ':', 'typing', '.', 'Union', '[', 'base', '.', 'String', ',', 'None', ']', '=', 'None', ',', 'duration', ':', 'typing', '.', 'Union', '[', 'base', '.', 'Integer', ',', 'None', ']', '=', 'None', ',', 'disable_notification', ':', 'typing', '.', 'Union', '[', 'base', '.', 'Boolean', ',', 'None', ']', '=', 'None', ',', 'reply_to_message_id', ':', 'typing', '.', 'Union', '[', 'base', '.', 'Integer', ',', 'None', ']', '=', 'None', ',', 'reply_markup', ':', 'typing', '.', 'Union', '[', 'types', '.', 'InlineKeyboardMarkup', ',', 'types', '.', 'ReplyKeyboardMarkup', ',', 'types', '.', 'ReplyKeyboardRemove', ',', 'types', '.', 'ForceReply', ',', 'None', ']', '=', 'None', ')', '->', 'types', '.', 'Message', ':', 'reply_markup', '=', 'prepare_arg', '(', 'reply_markup', ')', 'payload', '=', 'generate_payload', '(', '*', '*', 'locals', '(', ')', ',', 'exclude', '=', '[', "'voice'", ']', ')', 'if', 'self', '.', 'parse_mode', ':', 'payload', '.', 'setdefault', '(', "'parse_mode'", ',', 'self', '.', 'parse_mode', ')', 'files', '=', '{', '}', 'prepare_file', '(', 'payload', ',', 'files', ',', "'voice'", ',', 'voice', ')', 'result', '=', 'await', 'self', '.', 'request', '(', 'api', '.', 'Methods', '.', 'SEND_VOICE', ',', 'payload', ',', 'files', ')', 'return', 'types', '.', 'Message', '(', '*', '*', 'result', ')'] | Use this method to send audio files, if you want Telegram clients to display the file
as a playable voice message.
For this to work, your audio must be in an .ogg file encoded with OPUS
(other formats may be sent as Audio or Document).
Source: https://core.telegram.org/bots/api#sendvoice
:param chat_id: Unique identifier for the target chat or username of the target channel
:type chat_id: :obj:`typing.Union[base.Integer, base.String]`
:param voice: Audio file to send
:type voice: :obj:`typing.Union[base.InputFile, base.String]`
:param caption: Voice message caption, 0-1024 characters
:type caption: :obj:`typing.Union[base.String, None]`
:param parse_mode: Send Markdown or HTML, if you want Telegram apps to show bold, italic,
fixed-width text or inline URLs in your bot's message.
:type parse_mode: :obj:`typing.Union[base.String, None]`
:param duration: Duration of the voice message in seconds
:type duration: :obj:`typing.Union[base.Integer, None]`
:param disable_notification: Sends the message silently. Users will receive a notification with no sound
:type disable_notification: :obj:`typing.Union[base.Boolean, None]`
:param reply_to_message_id: If the message is a reply, ID of the original message
:type reply_to_message_id: :obj:`typing.Union[base.Integer, None]`
:param reply_markup: Additional interface options
:type reply_markup: :obj:`typing.Union[types.InlineKeyboardMarkup,
types.ReplyKeyboardMarkup, types.ReplyKeyboardRemove, types.ForceReply, None]`
:return: On success, the sent Message is returned
:rtype: :obj:`types.Message` | ['Use', 'this', 'method', 'to', 'send', 'audio', 'files', 'if', 'you', 'want', 'Telegram', 'clients', 'to', 'display', 'the', 'file', 'as', 'a', 'playable', 'voice', 'message', '.'] | train | https://github.com/aiogram/aiogram/blob/2af930149ce2482547721e2c8755c10307295e48/aiogram/bot/bot.py#L525-L575 |
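A hedged usage sketch; the token, chat id, and file path are placeholders:

```python
import asyncio
from aiogram import Bot
from aiogram.types import InputFile

async def main():
    bot = Bot(token='123456:TOKEN')  # placeholder token
    await bot.send_voice(chat_id=42, voice=InputFile('note.ogg'),
                         caption='voice note', duration=7)

asyncio.get_event_loop().run_until_complete(main())
```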
1,844 | NuGrid/NuGridPy | nugridpy/data_plot.py | DataPlot.abu_profile | def abu_profile(self,ixaxis='mass',isos=None,ifig=None,fname=None,logy=False,
colourblind=False):
'''
Plot common abundances as a function of either mass coordinate or radius.
Parameters
----------
ixaxis : string, optional
'mass', 'logradius' or 'radius'
The default value is 'mass'
isos : list, optional
list of isos to plot, i.e. ['h1','he4','c12'] for MESA or
['H-1','He-4','C-12'] for SE output. If None, the code decides
itself what to plot.
The default is None.
ifig : integer or string, optional
The figure label
The default value is None
fname : integer, optional
What cycle to plot from (if SE output)
The default value is None
logy : boolean, optional
Should y-axis be logarithmic?
The default value is False
colourblind : boolean, optional
do you want to use the colourblind colour palette from the NuGrid
nuutils module?
'''
pT=self._classTest()
# Class-specific things:
if pT is 'mesa_profile':
x = self.get(ixaxis)
if ixaxis is 'radius':
x = x*ast.rsun_cm
if isos is None:
isos=['h1','he4','c12','c13','n14','o16','ne20','ne22','mg24','mg25',
'al26','si28','si30','s32','s34','cl35','ar36','ar38','cr52',
'cr56','fe56','ni56']
risos=[i for i in isos if i in self.cols]
abunds = [self.get(riso) for riso in risos]
names=risos
elif pT is 'se':
if fname is None:
raise IOError("Please provide the cycle number fname")
x = self.se.get(fname,ixaxis)
if isos is None:
isos=['H-1','He-4','C-12','C-13','N-14','O-16','Ne-20','Ne-22','Mg-24','Mg-25',
'Sl-26','Si-28','Si-30','S-32','S-34','Cl-35','Ar-36','Ar-38','Cr-52',
'Cr-56','Fe-56','Ni-56']
risos=[i for i in isos if i in self.se.isotopes]
abunds = self.se.get(fname,'iso_massf',risos)
names=risos
else:
raise IOError("Sorry. the density_profile method is not available \
for this class")
# Plot-specific things:
if ixaxis is 'logradius':
x = np.log10(x)
xlab='$\log_{10}(r\,/\,{\\rm cm})$'
elif ixaxis is 'radius':
x = old_div(x, 1.e8)
xlab = 'r / Mm'
else:
xlab='${\\rm Mass}\,/\,M_\odot$'
if ifig is not None:
pl.figure(ifig)
from . import utils as u
cb = u.colourblind
lscb = u.linestylecb # colourblind linestyle function
for i in range(len(risos)):
if logy:
y = np.log10(abunds if len(risos) < 2 else abunds[i])
else:
y = abunds if len(risos) < 2 else abunds[i]
if colourblind:
pl.plot(x,y,ls=lscb(i)[0],marker=lscb(i)[1],
color=lscb(i)[2],markevery=u.linestyle(i)[1]*20,
label=names[i],mec='None')
else:
pl.plot(x,y,u.linestyle(i)[0],markevery=u.linestyle(i)[1]*20,
label=names[i],mec='None')
pl.legend(loc='best').draw_frame(False)
pl.xlabel(xlab)
pl.ylabel('$\log(X)$') | python | def abu_profile(self,ixaxis='mass',isos=None,ifig=None,fname=None,logy=False,
colourblind=False):
'''
Plot common abundances as a function of either mass coordinate or radius.
Parameters
----------
ixaxis : string, optional
'mass', 'logradius' or 'radius'
The default value is 'mass'
isos : list, optional
list of isos to plot, i.e. ['h1','he4','c12'] for MESA or
['H-1','He-4','C-12'] for SE output. If None, the code decides
itself what to plot.
The default is None.
ifig : integer or string, optional
The figure label
The default value is None
fname : integer, optional
What cycle to plot from (if SE output)
The default value is None
logy : boolean, optional
Should y-axis be logarithmic?
The default value is False
colourblind : boolean, optional
do you want to use the colourblind colour palette from the NuGrid
nuutils module?
'''
pT=self._classTest()
# Class-specific things:
if pT is 'mesa_profile':
x = self.get(ixaxis)
if ixaxis is 'radius':
x = x*ast.rsun_cm
if isos is None:
isos=['h1','he4','c12','c13','n14','o16','ne20','ne22','mg24','mg25',
'al26','si28','si30','s32','s34','cl35','ar36','ar38','cr52',
'cr56','fe56','ni56']
risos=[i for i in isos if i in self.cols]
abunds = [self.get(riso) for riso in risos]
names=risos
elif pT is 'se':
if fname is None:
raise IOError("Please provide the cycle number fname")
x = self.se.get(fname,ixaxis)
if isos is None:
isos=['H-1','He-4','C-12','C-13','N-14','O-16','Ne-20','Ne-22','Mg-24','Mg-25',
'Sl-26','Si-28','Si-30','S-32','S-34','Cl-35','Ar-36','Ar-38','Cr-52',
'Cr-56','Fe-56','Ni-56']
risos=[i for i in isos if i in self.se.isotopes]
abunds = self.se.get(fname,'iso_massf',risos)
names=risos
else:
raise IOError("Sorry. the density_profile method is not available \
for this class")
# Plot-specific things:
if ixaxis is 'logradius':
x = np.log10(x)
xlab='$\log_{10}(r\,/\,{\\rm cm})$'
elif ixaxis is 'radius':
x = old_div(x, 1.e8)
xlab = 'r / Mm'
else:
xlab='${\\rm Mass}\,/\,M_\odot$'
if ifig is not None:
pl.figure(ifig)
from . import utils as u
cb = u.colourblind
lscb = u.linestylecb # colourblind linestyle function
for i in range(len(risos)):
if logy:
y = np.log10(abunds if len(risos) < 2 else abunds[i])
else:
y = abunds if len(risos) < 2 else abunds[i]
if colourblind:
pl.plot(x,y,ls=lscb(i)[0],marker=lscb(i)[1],
color=lscb(i)[2],markevery=u.linestyle(i)[1]*20,
label=names[i],mec='None')
else:
pl.plot(x,y,u.linestyle(i)[0],markevery=u.linestyle(i)[1]*20,
label=names[i],mec='None')
pl.legend(loc='best').draw_frame(False)
pl.xlabel(xlab)
pl.ylabel('$\log(X)$') | ['def', 'abu_profile', '(', 'self', ',', 'ixaxis', '=', "'mass'", ',', 'isos', '=', 'None', ',', 'ifig', '=', 'None', ',', 'fname', '=', 'None', ',', 'logy', '=', 'False', ',', 'colourblind', '=', 'False', ')', ':', 'pT', '=', 'self', '.', '_classTest', '(', ')', '# Class-specific things:', 'if', 'pT', 'is', "'mesa_profile'", ':', 'x', '=', 'self', '.', 'get', '(', 'ixaxis', ')', 'if', 'ixaxis', 'is', "'radius'", ':', 'x', '=', 'x', '*', 'ast', '.', 'rsun_cm', 'if', 'isos', 'is', 'None', ':', 'isos', '=', '[', "'h1'", ',', "'he4'", ',', "'c12'", ',', "'c13'", ',', "'n14'", ',', "'o16'", ',', "'ne20'", ',', "'ne22'", ',', "'mg24'", ',', "'mg25'", ',', "'al26'", ',', "'si28'", ',', "'si30'", ',', "'s32'", ',', "'s34'", ',', "'cl35'", ',', "'ar36'", ',', "'ar38'", ',', "'cr52'", ',', "'cr56'", ',', "'fe56'", ',', "'ni56'", ']', 'risos', '=', '[', 'i', 'for', 'i', 'in', 'isos', 'if', 'i', 'in', 'self', '.', 'cols', ']', 'abunds', '=', '[', 'self', '.', 'get', '(', 'riso', ')', 'for', 'riso', 'in', 'risos', ']', 'names', '=', 'risos', 'elif', 'pT', 'is', "'se'", ':', 'if', 'fname', 'is', 'None', ':', 'raise', 'IOError', '(', '"Please provide the cycle number fname"', ')', 'x', '=', 'self', '.', 'se', '.', 'get', '(', 'fname', ',', 'ixaxis', ')', 'if', 'isos', 'is', 'None', ':', 'isos', '=', '[', "'H-1'", ',', "'He-4'", ',', "'C-12'", ',', "'C-13'", ',', "'N-14'", ',', "'O-16'", ',', "'Ne-20'", ',', "'Ne-22'", ',', "'Mg-24'", ',', "'Mg-25'", ',', "'Sl-26'", ',', "'Si-28'", ',', "'Si-30'", ',', "'S-32'", ',', "'S-34'", ',', "'Cl-35'", ',', "'Ar-36'", ',', "'Ar-38'", ',', "'Cr-52'", ',', "'Cr-56'", ',', "'Fe-56'", ',', "'Ni-56'", ']', 'risos', '=', '[', 'i', 'for', 'i', 'in', 'isos', 'if', 'i', 'in', 'self', '.', 'se', '.', 'isotopes', ']', 'abunds', '=', 'self', '.', 'se', '.', 'get', '(', 'fname', ',', "'iso_massf'", ',', 'risos', ')', 'names', '=', 'risos', 'else', ':', 'raise', 'IOError', '(', '"Sorry. 
the abu_profile method is not available \\\n for this class"', ')', '# Plot-specific things:', 'if', 'ixaxis', 'is', "'logradius'", ':', 'x', '=', 'np', '.', 'log10', '(', 'x', ')', 'xlab', '=', "'$\\log_{10}(r\\,/\\,{\\\\rm cm})$'", 'elif', 'ixaxis', 'is', "'radius'", ':', 'x', '=', 'old_div', '(', 'x', ',', '1.e8', ')', 'xlab', '=', "'r / Mm'", 'else', ':', 'xlab', '=', "'${\\\\rm Mass}\\,/\\,M_\\odot$'", 'if', 'ifig', 'is', 'not', 'None', ':', 'pl', '.', 'figure', '(', 'ifig', ')', 'from', '.', 'import', 'utils', 'as', 'u', 'cb', '=', 'u', '.', 'colourblind', 'lscb', '=', 'u', '.', 'linestylecb', '# colourblind linestyle function', 'for', 'i', 'in', 'range', '(', 'len', '(', 'risos', ')', ')', ':', 'if', 'logy', ':', 'y', '=', 'np', '.', 'log10', '(', 'abunds', 'if', 'len', '(', 'risos', ')', '<', '2', 'else', 'abunds', '[', 'i', ']', ')', 'else', ':', 'y', '=', 'abunds', 'if', 'len', '(', 'risos', ')', '<', '2', 'else', 'abunds', '[', 'i', ']', 'if', 'colourblind', ':', 'pl', '.', 'plot', '(', 'x', ',', 'y', ',', 'ls', '=', 'lscb', '(', 'i', ')', '[', '0', ']', ',', 'marker', '=', 'lscb', '(', 'i', ')', '[', '1', ']', ',', 'color', '=', 'lscb', '(', 'i', ')', '[', '2', ']', ',', 'markevery', '=', 'u', '.', 'linestyle', '(', 'i', ')', '[', '1', ']', '*', '20', ',', 'label', '=', 'names', '[', 'i', ']', ',', 'mec', '=', "'None'", ')', 'else', ':', 'pl', '.', 'plot', '(', 'x', ',', 'y', ',', 'u', '.', 'linestyle', '(', 'i', ')', '[', '0', ']', ',', 'markevery', '=', 'u', '.', 'linestyle', '(', 'i', ')', '[', '1', ']', '*', '20', ',', 'label', '=', 'names', '[', 'i', ']', ',', 'mec', '=', "'None'", ')', 'pl', '.', 'legend', '(', 'loc', '=', "'best'", ')', '.', 'draw_frame', '(', 'False', ')', 'pl', '.', 'xlabel', '(', 'xlab', ')', 'pl', '.', 'ylabel', '(', "'$\\log(X)$'", ')'] | Plot common abundances as a function of either mass coordinate or radius.
Parameters
----------
ixaxis : string, optional
'mass', 'logradius' or 'radius'
The default value is 'mass'
isos : list, optional
list of isos to plot, i.e. ['h1','he4','c12'] for MESA or
['H-1','He-4','C-12'] for SE output. If None, the code decides
itself what to plot.
The default is None.
ifig : integer or string, optional
The figure label
The default value is None
fname : integer, optional
What cycle to plot from (if SE output)
The default value is None
logy : boolean, optional
Should y-axis be logarithmic?
The default value is False
colourblind : boolean, optional
do you want to use the colourblind colour palette from the NuGrid
nuutils module? | ['Plot', 'common', 'abundances', 'as', 'a', 'function', 'of', 'either', 'mass', 'coordinate', 'or', 'radius', '.'] | train | https://github.com/NuGrid/NuGridPy/blob/eee8047446e398be77362d82c1d8b3310054fab0/nugridpy/data_plot.py#L4658-L4745 |
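A hedged usage sketch for the MESA branch; the LOGS path, profile number, and isotope list are assumptions:

```python
from nugridpy import mesa as ms

p = ms.mesa_profile('LOGS', 1000)  # assumed profile location and number
p.abu_profile(ixaxis='mass', isos=['h1', 'he4', 'c12'], ifig=1, logy=True)
```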
1,845 | gwastro/pycbc | pycbc/results/scatter_histograms.py | remove_common_offset | def remove_common_offset(arr):
"""Given an array of data, removes a common offset > 1000, returning the
removed value.
"""
offset = 0
isneg = (arr <= 0).all()
# make sure all values have the same sign
if isneg or (arr >= 0).all():
# only remove offset if the minimum and maximum values are the same
# order of magintude and > O(1000)
minpwr = numpy.log10(abs(arr).min())
maxpwr = numpy.log10(abs(arr).max())
if numpy.floor(minpwr) == numpy.floor(maxpwr) and minpwr > 3:
offset = numpy.floor(10**minpwr)
if isneg:
offset *= -1
arr = arr - offset
return arr, int(offset) | python | def remove_common_offset(arr):
"""Given an array of data, removes a common offset > 1000, returning the
removed value.
"""
offset = 0
isneg = (arr <= 0).all()
# make sure all values have the same sign
if isneg or (arr >= 0).all():
# only remove offset if the minimum and maximum values are the same
# order of magnitude and > O(1000)
minpwr = numpy.log10(abs(arr).min())
maxpwr = numpy.log10(abs(arr).max())
if numpy.floor(minpwr) == numpy.floor(maxpwr) and minpwr > 3:
offset = numpy.floor(10**minpwr)
if isneg:
offset *= -1
arr = arr - offset
return arr, int(offset) | ['def', 'remove_common_offset', '(', 'arr', ')', ':', 'offset', '=', '0', 'isneg', '=', '(', 'arr', '<=', '0', ')', '.', 'all', '(', ')', '# make sure all values have the same sign', 'if', 'isneg', 'or', '(', 'arr', '>=', '0', ')', '.', 'all', '(', ')', ':', '# only remove offset if the minimum and maximum values are the same', '# order of magnitude and > O(1000)', 'minpwr', '=', 'numpy', '.', 'log10', '(', 'abs', '(', 'arr', ')', '.', 'min', '(', ')', ')', 'maxpwr', '=', 'numpy', '.', 'log10', '(', 'abs', '(', 'arr', ')', '.', 'max', '(', ')', ')', 'if', 'numpy', '.', 'floor', '(', 'minpwr', ')', '==', 'numpy', '.', 'floor', '(', 'maxpwr', ')', 'and', 'minpwr', '>', '3', ':', 'offset', '=', 'numpy', '.', 'floor', '(', '10', '**', 'minpwr', ')', 'if', 'isneg', ':', 'offset', '*=', '-', '1', 'arr', '=', 'arr', '-', 'offset', 'return', 'arr', ',', 'int', '(', 'offset', ')'] | Given an array of data, removes a common offset > 1000, returning the
removed value. | ['Given', 'an', 'array', 'of', 'data', 'removes', 'a', 'common', 'offset', '>', '1000', 'returning', 'the', 'removed', 'value', '.'] | train | https://github.com/gwastro/pycbc/blob/7a64cdd104d263f1b6ea0b01e6841837d05a4cb3/pycbc/results/scatter_histograms.py#L753-L770 |
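A worked example of the logic above; note the offset comes out as `floor(arr.min())` rather than a round power of ten:

```python
import numpy
from pycbc.results.scatter_histograms import remove_common_offset

arr = numpy.array([150001.5, 150003.2, 150009.9])
shifted, offset = remove_common_offset(arr)
# both log10 values floor to 5 and minpwr > 3, so offset == 150001
# and shifted holds roughly [0.5, 2.2, 8.9]
```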
1,846 | JdeRobot/base | src/drivers/MAVLinkServer/MAVProxy/pymavlink/dialects/v20/ardupilotmega.py | MAVLink.ahrs_encode | def ahrs_encode(self, omegaIx, omegaIy, omegaIz, accel_weight, renorm_val, error_rp, error_yaw):
'''
Status of DCM attitude estimator
omegaIx : X gyro drift estimate rad/s (float)
omegaIy : Y gyro drift estimate rad/s (float)
omegaIz : Z gyro drift estimate rad/s (float)
accel_weight : average accel_weight (float)
renorm_val : average renormalisation value (float)
error_rp : average error_roll_pitch value (float)
error_yaw : average error_yaw value (float)
'''
return MAVLink_ahrs_message(omegaIx, omegaIy, omegaIz, accel_weight, renorm_val, error_rp, error_yaw) | python | def ahrs_encode(self, omegaIx, omegaIy, omegaIz, accel_weight, renorm_val, error_rp, error_yaw):
'''
Status of DCM attitude estimator
omegaIx : X gyro drift estimate rad/s (float)
omegaIy : Y gyro drift estimate rad/s (float)
omegaIz : Z gyro drift estimate rad/s (float)
accel_weight : average accel_weight (float)
renorm_val : average renormalisation value (float)
error_rp : average error_roll_pitch value (float)
error_yaw : average error_yaw value (float)
'''
return MAVLink_ahrs_message(omegaIx, omegaIy, omegaIz, accel_weight, renorm_val, error_rp, error_yaw) | ['def', 'ahrs_encode', '(', 'self', ',', 'omegaIx', ',', 'omegaIy', ',', 'omegaIz', ',', 'accel_weight', ',', 'renorm_val', ',', 'error_rp', ',', 'error_yaw', ')', ':', 'return', 'MAVLink_ahrs_message', '(', 'omegaIx', ',', 'omegaIy', ',', 'omegaIz', ',', 'accel_weight', ',', 'renorm_val', ',', 'error_rp', ',', 'error_yaw', ')'] | Status of DCM attitude estimator
omegaIx : X gyro drift estimate rad/s (float)
omegaIy : Y gyro drift estimate rad/s (float)
omegaIz : Z gyro drift estimate rad/s (float)
accel_weight : average accel_weight (float)
renorm_val : average renormalisation value (float)
error_rp : average error_roll_pitch value (float)
error_yaw : average error_yaw value (float) | ['Status', 'of', 'DCM', 'attitude', 'estimator'] | train | https://github.com/JdeRobot/base/blob/303b18992785b2fe802212f2d758a60873007f1f/src/drivers/MAVLinkServer/MAVProxy/pymavlink/dialects/v20/ardupilotmega.py#L10042-L10055 |
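A hedged sketch; the drift and error values are made up, and a BytesIO stands in for a real MAVLink transport:

```python
import io
from pymavlink.dialects.v20 import ardupilotmega as mavlink2

mav = mavlink2.MAVLink(io.BytesIO())  # sink for the serialized bytes
msg = mav.ahrs_encode(0.001, -0.002, 0.0005, 1.0, 1.0, 0.01, 0.02)
mav.send(msg)                         # packs and writes the AHRS message
```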
1,847 | saltstack/salt | salt/beacons/sh.py | _get_shells | def _get_shells():
'''
Return the valid shells on this system
'''
start = time.time()
if 'sh.last_shells' in __context__:
if start - __context__['sh.last_shells'] > 5:
__context__['sh.last_shells'] = start
else:
__context__['sh.shells'] = __salt__['cmd.shells']()
else:
__context__['sh.last_shells'] = start
__context__['sh.shells'] = __salt__['cmd.shells']()
return __context__['sh.shells'] | python | def _get_shells():
'''
Return the valid shells on this system
'''
start = time.time()
if 'sh.last_shells' in __context__:
if start - __context__['sh.last_shells'] > 5:
__context__['sh.last_shells'] = start
else:
__context__['sh.shells'] = __salt__['cmd.shells']()
else:
__context__['sh.last_shells'] = start
__context__['sh.shells'] = __salt__['cmd.shells']()
return __context__['sh.shells'] | ['def', '_get_shells', '(', ')', ':', 'start', '=', 'time', '.', 'time', '(', ')', 'if', "'sh.last_shells'", 'in', '__context__', ':', 'if', 'start', '-', '__context__', '[', "'sh.last_shells'", ']', '>', '5', ':', '__context__', '[', "'sh.last_shells'", ']', '=', 'start', 'else', ':', '__context__', '[', "'sh.shells'", ']', '=', '__salt__', '[', "'cmd.shells'", ']', '(', ')', 'else', ':', '__context__', '[', "'sh.last_shells'", ']', '=', 'start', '__context__', '[', "'sh.shells'", ']', '=', '__salt__', '[', "'cmd.shells'", ']', '(', ')', 'return', '__context__', '[', "'sh.shells'", ']'] | Return the valid shells on this system | ['Return', 'the', 'valid', 'shells', 'on', 'this', 'system'] | train | https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/beacons/sh.py#L29-L42 |
1,848 | RaRe-Technologies/smart_open | smart_open/smart_open_lib.py | _parse_uri | def _parse_uri(uri_as_string):
"""
Parse the given URI from a string.
Supported URI schemes are:
* file
* hdfs
* http
* https
* s3
* s3a
* s3n
* s3u
* webhdfs
s3, s3a and s3n are treated the same way. s3u is s3 but without SSL.
Valid URI examples::
* s3://my_bucket/my_key
* s3://my_key:my_secret@my_bucket/my_key
* s3://my_key:my_secret@my_server:my_port@my_bucket/my_key
* hdfs:///path/file
* hdfs://path/file
* webhdfs://host:port/path/file
* ./local/path/file
* ~/local/path/file
* local/path/file
* ./local/path/file.gz
* file:///home/user/file
* file:///home/user/file.bz2
* [ssh|scp|sftp]://username@host//path/file
* [ssh|scp|sftp]://username@host/path/file
"""
if os.name == 'nt':
# urlsplit doesn't work on Windows -- it parses the drive as the scheme...
if '://' not in uri_as_string:
# no protocol given => assume a local file
uri_as_string = 'file://' + uri_as_string
parsed_uri = _my_urlsplit(uri_as_string)
if parsed_uri.scheme == "hdfs":
return _parse_uri_hdfs(parsed_uri)
elif parsed_uri.scheme == "webhdfs":
return _parse_uri_webhdfs(parsed_uri)
elif parsed_uri.scheme in smart_open_s3.SUPPORTED_SCHEMES:
return _parse_uri_s3x(parsed_uri)
elif parsed_uri.scheme == 'file':
return _parse_uri_file(parsed_uri.netloc + parsed_uri.path)
elif parsed_uri.scheme in ('', None):
return _parse_uri_file(uri_as_string)
elif parsed_uri.scheme.startswith('http'):
return Uri(scheme=parsed_uri.scheme, uri_path=uri_as_string)
elif parsed_uri.scheme in smart_open_ssh.SCHEMES:
return _parse_uri_ssh(parsed_uri)
else:
raise NotImplementedError(
"unknown URI scheme %r in %r" % (parsed_uri.scheme, uri_as_string)
) | python | def _parse_uri(uri_as_string):
"""
Parse the given URI from a string.
Supported URI schemes are:
* file
* hdfs
* http
* https
* s3
* s3a
* s3n
* s3u
* webhdfs
s3, s3a and s3n are treated the same way. s3u is s3 but without SSL.
Valid URI examples::
* s3://my_bucket/my_key
* s3://my_key:my_secret@my_bucket/my_key
* s3://my_key:my_secret@my_server:my_port@my_bucket/my_key
* hdfs:///path/file
* hdfs://path/file
* webhdfs://host:port/path/file
* ./local/path/file
* ~/local/path/file
* local/path/file
* ./local/path/file.gz
* file:///home/user/file
* file:///home/user/file.bz2
* [ssh|scp|sftp]://username@host//path/file
* [ssh|scp|sftp]://username@host/path/file
"""
if os.name == 'nt':
# urlsplit doesn't work on Windows -- it parses the drive as the scheme...
if '://' not in uri_as_string:
# no protocol given => assume a local file
uri_as_string = 'file://' + uri_as_string
parsed_uri = _my_urlsplit(uri_as_string)
if parsed_uri.scheme == "hdfs":
return _parse_uri_hdfs(parsed_uri)
elif parsed_uri.scheme == "webhdfs":
return _parse_uri_webhdfs(parsed_uri)
elif parsed_uri.scheme in smart_open_s3.SUPPORTED_SCHEMES:
return _parse_uri_s3x(parsed_uri)
elif parsed_uri.scheme == 'file':
return _parse_uri_file(parsed_uri.netloc + parsed_uri.path)
elif parsed_uri.scheme in ('', None):
return _parse_uri_file(uri_as_string)
elif parsed_uri.scheme.startswith('http'):
return Uri(scheme=parsed_uri.scheme, uri_path=uri_as_string)
elif parsed_uri.scheme in smart_open_ssh.SCHEMES:
return _parse_uri_ssh(parsed_uri)
else:
raise NotImplementedError(
"unknown URI scheme %r in %r" % (parsed_uri.scheme, uri_as_string)
) | ['def', '_parse_uri', '(', 'uri_as_string', ')', ':', 'if', 'os', '.', 'name', '==', "'nt'", ':', "# urlsplit doesn't work on Windows -- it parses the drive as the scheme...", 'if', "'://'", 'not', 'in', 'uri_as_string', ':', '# no protocol given => assume a local file', 'uri_as_string', '=', "'file://'", '+', 'uri_as_string', 'parsed_uri', '=', '_my_urlsplit', '(', 'uri_as_string', ')', 'if', 'parsed_uri', '.', 'scheme', '==', '"hdfs"', ':', 'return', '_parse_uri_hdfs', '(', 'parsed_uri', ')', 'elif', 'parsed_uri', '.', 'scheme', '==', '"webhdfs"', ':', 'return', '_parse_uri_webhdfs', '(', 'parsed_uri', ')', 'elif', 'parsed_uri', '.', 'scheme', 'in', 'smart_open_s3', '.', 'SUPPORTED_SCHEMES', ':', 'return', '_parse_uri_s3x', '(', 'parsed_uri', ')', 'elif', 'parsed_uri', '.', 'scheme', '==', "'file'", ':', 'return', '_parse_uri_file', '(', 'parsed_uri', '.', 'netloc', '+', 'parsed_uri', '.', 'path', ')', 'elif', 'parsed_uri', '.', 'scheme', 'in', '(', "''", ',', 'None', ')', ':', 'return', '_parse_uri_file', '(', 'uri_as_string', ')', 'elif', 'parsed_uri', '.', 'scheme', '.', 'startswith', '(', "'http'", ')', ':', 'return', 'Uri', '(', 'scheme', '=', 'parsed_uri', '.', 'scheme', ',', 'uri_path', '=', 'uri_as_string', ')', 'elif', 'parsed_uri', '.', 'scheme', 'in', 'smart_open_ssh', '.', 'SCHEMES', ':', 'return', '_parse_uri_ssh', '(', 'parsed_uri', ')', 'else', ':', 'raise', 'NotImplementedError', '(', '"unknown URI scheme %r in %r"', '%', '(', 'parsed_uri', '.', 'scheme', ',', 'uri_as_string', ')', ')'] | Parse the given URI from a string.
Supported URI schemes are:
* file
* hdfs
* http
* https
* s3
* s3a
* s3n
* s3u
* webhdfs
s3, s3a and s3n are treated the same way. s3u is s3 but without SSL.
Valid URI examples::
* s3://my_bucket/my_key
* s3://my_key:my_secret@my_bucket/my_key
* s3://my_key:my_secret@my_server:my_port@my_bucket/my_key
* hdfs:///path/file
* hdfs://path/file
* webhdfs://host:port/path/file
* ./local/path/file
* ~/local/path/file
* local/path/file
* ./local/path/file.gz
* file:///home/user/file
* file:///home/user/file.bz2
* [ssh|scp|sftp]://username@host//path/file
* [ssh|scp|sftp]://username@host/path/file | ['Parse', 'the', 'given', 'URI', 'from', 'a', 'string', '.'] | train | https://github.com/RaRe-Technologies/smart_open/blob/2dc8d60f223fc7b00a2000c56362a7bd6cd0850e/smart_open/smart_open_lib.py#L658-L719 |
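An illustrative sketch of the parser above. `_parse_uri` is private (the public entry point is `smart_open.open()`), and the assumption here is that every branch returns an object exposing a `.scheme` attribute.

```python
from smart_open.smart_open_lib import _parse_uri

for uri in ('s3://my_bucket/my_key', 'hdfs:///path/file', './local/path/file.gz'):
    parsed = _parse_uri(uri)
    print(uri, '->', parsed.scheme)  # s3 / hdfs / file
```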
1,849 | ewels/MultiQC | multiqc/modules/biobloomtools/biobloomtools.py | MultiqcModule.parse_bbt | def parse_bbt(self, fh):
""" Parse the BioBloom Tools output into a 3D dict """
parsed_data = OrderedDict()
headers = None
for l in fh:
s = l.split("\t")
if headers is None:
headers = s
else:
parsed_data[s[0]] = dict()
for i, h in enumerate(headers[1:]):
parsed_data[s[0]][h] = float(s[i+1])
return parsed_data | python | def parse_bbt(self, fh):
""" Parse the BioBloom Tools output into a 3D dict """
parsed_data = OrderedDict()
headers = None
for l in fh:
s = l.split("\t")
if headers is None:
headers = s
else:
parsed_data[s[0]] = dict()
for i, h in enumerate(headers[1:]):
parsed_data[s[0]][h] = float(s[i+1])
return parsed_data | ['def', 'parse_bbt', '(', 'self', ',', 'fh', ')', ':', 'parsed_data', '=', 'OrderedDict', '(', ')', 'headers', '=', 'None', 'for', 'l', 'in', 'fh', ':', 's', '=', 'l', '.', 'split', '(', '"\\t"', ')', 'if', 'headers', 'is', 'None', ':', 'headers', '=', 's', 'else', ':', 'parsed_data', '[', 's', '[', '0', ']', ']', '=', 'dict', '(', ')', 'for', 'i', ',', 'h', 'in', 'enumerate', '(', 'headers', '[', '1', ':', ']', ')', ':', 'parsed_data', '[', 's', '[', '0', ']', ']', '[', 'h', ']', '=', 'float', '(', 's', '[', 'i', '+', '1', ']', ')', 'return', 'parsed_data'] | Parse the BioBloom Tools output into a 3D dict | ['Parse', 'the', 'BioBloom', 'Tools', 'output', 'into', 'a', '3D', 'dict'] | train | https://github.com/ewels/MultiQC/blob/2037d6322b2554146a74efbf869156ad20d4c4ec/multiqc/modules/biobloomtools/biobloomtools.py#L58-L71 |
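A self-contained sketch of `parse_bbt` on a fabricated two-column summary (the column names are invented). The method splits on tabs without stripping, so the last header keeps its trailing newline unless the input is pre-stripped; `self` is never used, so `None` can stand in for it.

```python
import io

tsv = "filter_id\thits\tshared\nhuman\t123.0\t4.0\n"
parsed = MultiqcModule.parse_bbt(None, io.StringIO(tsv))
print(parsed)  # OrderedDict([('human', {'hits': 123.0, 'shared\n': 4.0})])
```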
1,850 | angr/claripy | claripy/frontend_mixins/smtlib_script_dumper_mixin.py | SMTLibScriptDumperMixin.get_smtlib_script_satisfiability | def get_smtlib_script_satisfiability(self, extra_constraints=(), extra_variables=()):
"""
Return an smt-lib script that checks the satisfiability of the current constraints
:return string: smt-lib script
"""
try:
e_csts = self._solver_backend.convert_list(extra_constraints + tuple(self.constraints))
e_variables = self._solver_backend.convert_list(extra_variables)
variables, csts = self._solver_backend._get_all_vars_and_constraints(e_c=e_csts, e_v=e_variables)
return self._solver_backend._get_satisfiability_smt_script(csts, variables)
except BackendError as e:
raise ClaripyFrontendError("Backend error during smtlib script generation") from e | python | def get_smtlib_script_satisfiability(self, extra_constraints=(), extra_variables=()):
"""
Return an smt-lib script that checks the satisfiability of the current constraints
:return string: smt-lib script
"""
try:
e_csts = self._solver_backend.convert_list(extra_constraints + tuple(self.constraints))
e_variables = self._solver_backend.convert_list(extra_variables)
variables, csts = self._solver_backend._get_all_vars_and_constraints(e_c=e_csts, e_v=e_variables)
return self._solver_backend._get_satisfiability_smt_script(csts, variables)
except BackendError as e:
raise ClaripyFrontendError("Backend error during smtlib script generation") from e | ['def', 'get_smtlib_script_satisfiability', '(', 'self', ',', 'extra_constraints', '=', '(', ')', ',', 'extra_variables', '=', '(', ')', ')', ':', 'try', ':', 'e_csts', '=', 'self', '.', '_solver_backend', '.', 'convert_list', '(', 'extra_constraints', '+', 'tuple', '(', 'self', '.', 'constraints', ')', ')', 'e_variables', '=', 'self', '.', '_solver_backend', '.', 'convert_list', '(', 'extra_variables', ')', 'variables', ',', 'csts', '=', 'self', '.', '_solver_backend', '.', '_get_all_vars_and_constraints', '(', 'e_c', '=', 'e_csts', ',', 'e_v', '=', 'e_variables', ')', 'return', 'self', '.', '_solver_backend', '.', '_get_satisfiability_smt_script', '(', 'csts', ',', 'variables', ')', 'except', 'BackendError', 'as', 'e', ':', 'raise', 'ClaripyFrontendError', '(', '"Backend error during smtlib script generation"', ')', 'from', 'e'] | Return an smt-lib script that check the satisfiability of the current constraints
:return string: smt-lib script | ['Return', 'an', 'smt', '-', 'lib', 'script', 'that', 'checks', 'the', 'satisfiability', 'of', 'the', 'current', 'constraints'] | train | https://github.com/angr/claripy/blob/4ed61924880af1ea8fb778047d896ec0156412a6/claripy/frontend_mixins/smtlib_script_dumper_mixin.py#L10-L23 |
1,851 | gabstopper/smc-python | smc/core/node.py | Node.rename | def rename(self, name):
"""
Rename this node
:param str name: new name for node
"""
self.update(name='{} node {}'.format(name, self.nodeid)) | python | def rename(self, name):
"""
Rename this node
:param str name: new name for node
"""
self.update(name='{} node {}'.format(name, self.nodeid)) | ['def', 'rename', '(', 'self', ',', 'name', ')', ':', 'self', '.', 'update', '(', 'name', '=', "'{} node {}'", '.', 'format', '(', 'name', ',', 'self', '.', 'nodeid', ')', ')'] | Rename this node
:param str name: new name for node | ['Rename', 'this', 'node', ':', 'param', 'str', 'name', ':', 'new', 'name', 'for', 'node'] | train | https://github.com/gabstopper/smc-python/blob/e027b8a5dcfaf884eada32d113d41c1e56b32457/smc/core/node.py#L57-L63 |
1,852 | python-xlib/python-xlib | Xlib/display.py | Display.open_font | def open_font(self, name):
"""Open the font identifed by the pattern name and return its
font object. If name does not match any font, None is returned."""
fid = self.display.allocate_resource_id()
ec = error.CatchError(error.BadName)
request.OpenFont(display = self.display,
onerror = ec,
fid = fid,
name = name)
self.sync()
if ec.get_error():
self.display.free_resource_id(fid)
return None
else:
cls = self.display.get_resource_class('font', fontable.Font)
return cls(self.display, fid, owner = 1) | python | def open_font(self, name):
"""Open the font identifed by the pattern name and return its
font object. If name does not match any font, None is returned."""
fid = self.display.allocate_resource_id()
ec = error.CatchError(error.BadName)
request.OpenFont(display = self.display,
onerror = ec,
fid = fid,
name = name)
self.sync()
if ec.get_error():
self.display.free_resource_id(fid)
return None
else:
cls = self.display.get_resource_class('font', fontable.Font)
return cls(self.display, fid, owner = 1) | ['def', 'open_font', '(', 'self', ',', 'name', ')', ':', 'fid', '=', 'self', '.', 'display', '.', 'allocate_resource_id', '(', ')', 'ec', '=', 'error', '.', 'CatchError', '(', 'error', '.', 'BadName', ')', 'request', '.', 'OpenFont', '(', 'display', '=', 'self', '.', 'display', ',', 'onerror', '=', 'ec', ',', 'fid', '=', 'fid', ',', 'name', '=', 'name', ')', 'self', '.', 'sync', '(', ')', 'if', 'ec', '.', 'get_error', '(', ')', ':', 'self', '.', 'display', '.', 'free_resource_id', '(', 'fid', ')', 'return', 'None', 'else', ':', 'cls', '=', 'self', '.', 'display', '.', 'get_resource_class', '(', "'font'", ',', 'fontable', '.', 'Font', ')', 'return', 'cls', '(', 'self', '.', 'display', ',', 'fid', ',', 'owner', '=', '1', ')'] | Open the font identified by the pattern name and return its
font object. If name does not match any font, None is returned. | ['Open', 'the', 'font', 'identified', 'by', 'the', 'pattern', 'name', 'and', 'return', 'its', 'font', 'object', '.', 'If', 'name', 'does', 'not', 'match', 'any', 'font', 'None', 'is', 'returned', '.'] | train | https://github.com/python-xlib/python-xlib/blob/8901e831737e79fe5645f48089d70e1d1046d2f2/Xlib/display.py#L618-L635 |
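A hedged usage sketch requiring a reachable X server; `'fixed'` is a core font alias present on most installations, but that availability is an assumption.

```python
from Xlib import display

d = display.Display()          # connects to $DISPLAY
font = d.open_font('fixed')
if font is None:
    print('no font matched the pattern')
else:
    print('opened font resource', font.id)
```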
1,853 | AndrewAnnex/SpiceyPy | spiceypy/spiceypy.py | et2lst | def et2lst(et, body, lon, typein, timlen=_default_len_out, ampmlen=_default_len_out):
"""
Given an ephemeris epoch, compute the local solar time for
an object on the surface of a body at a specified longitude.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/et2lst_c.html
:param et: Epoch in seconds past J2000 epoch.
:type et: float
:param body: ID-code of the body of interest.
:type body: int
:param lon: Longitude of surface point (RADIANS).
:type lon: float
:param typein: Type of longitude "PLANETOCENTRIC", etc.
:type typein: str
:param timlen: Available room in output time string.
:type timlen: int
:param ampmlen: Available room in output ampm string.
:type ampmlen: int
:return:
Local hour on a "24 hour" clock,
Minutes past the hour,
Seconds past the minute,
String giving local time on 24 hour clock,
String giving time on A.M. / P.M. scale.
:rtype: tuple
"""
et = ctypes.c_double(et)
body = ctypes.c_int(body)
lon = ctypes.c_double(lon)
typein = stypes.stringToCharP(typein)
timlen = ctypes.c_int(timlen)
ampmlen = ctypes.c_int(ampmlen)
hr = ctypes.c_int()
mn = ctypes.c_int()
sc = ctypes.c_int()
time = stypes.stringToCharP(timlen)
ampm = stypes.stringToCharP(ampmlen)
libspice.et2lst_c(et, body, lon, typein, timlen, ampmlen,
ctypes.byref(hr), ctypes.byref(mn), ctypes.byref(sc),
time, ampm)
return hr.value, mn.value, sc.value, stypes.toPythonString(
time), stypes.toPythonString(ampm) | python | def et2lst(et, body, lon, typein, timlen=_default_len_out, ampmlen=_default_len_out):
"""
Given an ephemeris epoch, compute the local solar time for
an object on the surface of a body at a specified longitude.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/et2lst_c.html
:param et: Epoch in seconds past J2000 epoch.
:type et: float
:param body: ID-code of the body of interest.
:type body: int
:param lon: Longitude of surface point (RADIANS).
:type lon: float
:param typein: Type of longitude "PLANETOCENTRIC", etc.
:type typein: str
:param timlen: Available room in output time string.
:type timlen: int
:param ampmlen: Available room in output ampm string.
:type ampmlen: int
:return:
Local hour on a "24 hour" clock,
Minutes past the hour,
Seconds past the minute,
String giving local time on 24 hour clock,
String giving time on A.M. / P.M. scale.
:rtype: tuple
"""
et = ctypes.c_double(et)
body = ctypes.c_int(body)
lon = ctypes.c_double(lon)
typein = stypes.stringToCharP(typein)
timlen = ctypes.c_int(timlen)
ampmlen = ctypes.c_int(ampmlen)
hr = ctypes.c_int()
mn = ctypes.c_int()
sc = ctypes.c_int()
time = stypes.stringToCharP(timlen)
ampm = stypes.stringToCharP(ampmlen)
libspice.et2lst_c(et, body, lon, typein, timlen, ampmlen,
ctypes.byref(hr), ctypes.byref(mn), ctypes.byref(sc),
time, ampm)
return hr.value, mn.value, sc.value, stypes.toPythonString(
time), stypes.toPythonString(ampm) | ['def', 'et2lst', '(', 'et', ',', 'body', ',', 'lon', ',', 'typein', ',', 'timlen', '=', '_default_len_out', ',', 'ampmlen', '=', '_default_len_out', ')', ':', 'et', '=', 'ctypes', '.', 'c_double', '(', 'et', ')', 'body', '=', 'ctypes', '.', 'c_int', '(', 'body', ')', 'lon', '=', 'ctypes', '.', 'c_double', '(', 'lon', ')', 'typein', '=', 'stypes', '.', 'stringToCharP', '(', 'typein', ')', 'timlen', '=', 'ctypes', '.', 'c_int', '(', 'timlen', ')', 'ampmlen', '=', 'ctypes', '.', 'c_int', '(', 'ampmlen', ')', 'hr', '=', 'ctypes', '.', 'c_int', '(', ')', 'mn', '=', 'ctypes', '.', 'c_int', '(', ')', 'sc', '=', 'ctypes', '.', 'c_int', '(', ')', 'time', '=', 'stypes', '.', 'stringToCharP', '(', 'timlen', ')', 'ampm', '=', 'stypes', '.', 'stringToCharP', '(', 'ampmlen', ')', 'libspice', '.', 'et2lst_c', '(', 'et', ',', 'body', ',', 'lon', ',', 'typein', ',', 'timlen', ',', 'ampmlen', ',', 'ctypes', '.', 'byref', '(', 'hr', ')', ',', 'ctypes', '.', 'byref', '(', 'mn', ')', ',', 'ctypes', '.', 'byref', '(', 'sc', ')', ',', 'time', ',', 'ampm', ')', 'return', 'hr', '.', 'value', ',', 'mn', '.', 'value', ',', 'sc', '.', 'value', ',', 'stypes', '.', 'toPythonString', '(', 'time', ')', ',', 'stypes', '.', 'toPythonString', '(', 'ampm', ')'] | Given an ephemeris epoch, compute the local solar time for
an object on the surface of a body at a specified longitude.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/et2lst_c.html
:param et: Epoch in seconds past J2000 epoch.
:type et: float
:param body: ID-code of the body of interest.
:type body: int
:param lon: Longitude of surface point (RADIANS).
:type lon: float
:param typein: Type of longitude "PLANETOCENTRIC", etc.
:type typein: str
:param timlen: Available room in output time string.
:type timlen: int
:param ampmlen: Available room in output ampm string.
:type ampmlen: int
:return:
Local hour on a "24 hour" clock,
Minutes past the hour,
Seconds past the minute,
String giving local time on 24 hour clock,
String giving time on A.M. / P.M. scale.
:rtype: tuple | ['Given', 'an', 'ephemeris', 'epoch', 'compute', 'the', 'local', 'solar', 'time', 'for', 'an', 'object', 'on', 'the', 'surface', 'of', 'a', 'body', 'at', 'a', 'specified', 'longitude', '.'] | train | https://github.com/AndrewAnnex/SpiceyPy/blob/fc20a9b9de68b58eed5b332f0c051fb343a6e335/spiceypy/spiceypy.py#L4984-L5026 |
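A hedged sketch of `et2lst`: the kernel filenames are placeholders for a leapseconds kernel and a planetary-constants kernel you actually have on disk.

```python
import spiceypy as spice

spice.furnsh('naif0012.tls')   # leapseconds kernel (assumed local path)
spice.furnsh('pck00010.tpc')   # planetary constants kernel (assumed local path)

et = spice.str2et('2006 JAN 31 01:00')
hr, mn, sc, time_str, ampm = spice.et2lst(
    et, 499, 90.0 * spice.rpd(), 'PLANETOCENTRIC')
print(time_str, ampm)          # local solar time at 90 deg E longitude on Mars (ID 499)
```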
1,854 | pkgw/pwkit | pwkit/latex.py | latexify | def latexify(obj, **kwargs):
"""Render an object in LaTeX appropriately.
"""
if hasattr(obj, '__pk_latex__'):
return obj.__pk_latex__(**kwargs)
if isinstance(obj, text_type):
from .unicode_to_latex import unicode_to_latex
return unicode_to_latex(obj)
if isinstance(obj, bool):
# isinstance(True, int) = True, so gotta handle this first.
raise ValueError('no well-defined LaTeXification of bool %r' % obj)
if isinstance(obj, float):
nplaces = kwargs.get('nplaces')
if nplaces is None:
return '$%f$' % obj
return '$%.*f$' % (nplaces, obj)
if isinstance(obj, int):
return '$%d$' % obj
if isinstance(obj, binary_type):
if all(c in _printable_ascii for c in obj):
return obj.decode('ascii')
raise ValueError('no safe LaTeXification of binary string %r' % obj)
raise ValueError('can\'t LaTeXify %r' % obj) | python | def latexify(obj, **kwargs):
"""Render an object in LaTeX appropriately.
"""
if hasattr(obj, '__pk_latex__'):
return obj.__pk_latex__(**kwargs)
if isinstance(obj, text_type):
from .unicode_to_latex import unicode_to_latex
return unicode_to_latex(obj)
if isinstance(obj, bool):
# isinstance(True, int) = True, so gotta handle this first.
raise ValueError('no well-defined LaTeXification of bool %r' % obj)
if isinstance(obj, float):
nplaces = kwargs.get('nplaces')
if nplaces is None:
return '$%f$' % obj
return '$%.*f$' % (nplaces, obj)
if isinstance(obj, int):
return '$%d$' % obj
if isinstance(obj, binary_type):
if all(c in _printable_ascii for c in obj):
return obj.decode('ascii')
raise ValueError('no safe LaTeXification of binary string %r' % obj)
raise ValueError('can\'t LaTeXify %r' % obj) | ['def', 'latexify', '(', 'obj', ',', '*', '*', 'kwargs', ')', ':', 'if', 'hasattr', '(', 'obj', ',', "'__pk_latex__'", ')', ':', 'return', 'obj', '.', '__pk_latex__', '(', '*', '*', 'kwargs', ')', 'if', 'isinstance', '(', 'obj', ',', 'text_type', ')', ':', 'from', '.', 'unicode_to_latex', 'import', 'unicode_to_latex', 'return', 'unicode_to_latex', '(', 'obj', ')', 'if', 'isinstance', '(', 'obj', ',', 'bool', ')', ':', '# isinstance(True, int) = True, so gotta handle this first.', 'raise', 'ValueError', '(', "'no well-defined LaTeXification of bool %r'", '%', 'obj', ')', 'if', 'isinstance', '(', 'obj', ',', 'float', ')', ':', 'nplaces', '=', 'kwargs', '.', 'get', '(', "'nplaces'", ')', 'if', 'nplaces', 'is', 'None', ':', 'return', "'$%f$'", '%', 'obj', 'return', "'$%.*f$'", '%', '(', 'nplaces', ',', 'obj', ')', 'if', 'isinstance', '(', 'obj', ',', 'int', ')', ':', 'return', "'$%d$'", '%', 'obj', 'if', 'isinstance', '(', 'obj', ',', 'binary_type', ')', ':', 'if', 'all', '(', 'c', 'in', '_printable_ascii', 'for', 'c', 'in', 'obj', ')', ':', 'return', 'obj', '.', 'decode', '(', "'ascii'", ')', 'raise', 'ValueError', '(', "'no safe LaTeXification of binary string %r'", '%', 'obj', ')', 'raise', 'ValueError', '(', "'can\\'t LaTeXify %r'", '%', 'obj', ')'] | Render an object in LaTeX appropriately. | ['Render', 'an', 'object', 'in', 'LaTeX', 'appropriately', '.'] | train | https://github.com/pkgw/pwkit/blob/d40957a1c3d2ea34e7ceac2267ee9635135f2793/pwkit/latex.py#L145-L174 |
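Worked checks of the dispatch order above; note the `bool` test must run before the `int` test, since `isinstance(True, int)` is true in Python.

```python
from pwkit.latex import latexify

print(latexify(42))                  # '$42$'
print(latexify(3.14159, nplaces=2))  # '$3.14$'
try:
    latexify(True)
except ValueError as exc:
    print(exc)                       # bools are rejected outright
```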
1,855 | saltstack/salt | salt/modules/publish.py | runner | def runner(fun, arg=None, timeout=5):
'''
Execute a runner on the master and return the data from the runner
function
CLI Example:
.. code-block:: bash
salt publish.runner manage.down
'''
arg = _parse_args(arg)
if 'master_uri' not in __opts__:
return 'No access to master. If using salt-call with --local, please remove.'
log.info('Publishing runner \'%s\' to %s', fun, __opts__['master_uri'])
auth = salt.crypt.SAuth(__opts__)
tok = auth.gen_token(b'salt')
load = {'cmd': 'minion_runner',
'fun': fun,
'arg': arg,
'tok': tok,
'tmo': timeout,
'id': __opts__['id'],
'no_parse': __opts__.get('no_parse', [])}
channel = salt.transport.client.ReqChannel.factory(__opts__)
try:
return channel.send(load)
except SaltReqTimeoutError:
return '\'{0}\' runner publish timed out'.format(fun)
finally:
channel.close() | python | def runner(fun, arg=None, timeout=5):
'''
Execute a runner on the master and return the data from the runner
function
CLI Example:
.. code-block:: bash
salt publish.runner manage.down
'''
arg = _parse_args(arg)
if 'master_uri' not in __opts__:
return 'No access to master. If using salt-call with --local, please remove.'
log.info('Publishing runner \'%s\' to %s', fun, __opts__['master_uri'])
auth = salt.crypt.SAuth(__opts__)
tok = auth.gen_token(b'salt')
load = {'cmd': 'minion_runner',
'fun': fun,
'arg': arg,
'tok': tok,
'tmo': timeout,
'id': __opts__['id'],
'no_parse': __opts__.get('no_parse', [])}
channel = salt.transport.client.ReqChannel.factory(__opts__)
try:
return channel.send(load)
except SaltReqTimeoutError:
return '\'{0}\' runner publish timed out'.format(fun)
finally:
channel.close() | ['def', 'runner', '(', 'fun', ',', 'arg', '=', 'None', ',', 'timeout', '=', '5', ')', ':', 'arg', '=', '_parse_args', '(', 'arg', ')', 'if', "'master_uri'", 'not', 'in', '__opts__', ':', 'return', "'No access to master. If using salt-call with --local, please remove.'", 'log', '.', 'info', '(', "'Publishing runner \\'%s\\' to %s'", ',', 'fun', ',', '__opts__', '[', "'master_uri'", ']', ')', 'auth', '=', 'salt', '.', 'crypt', '.', 'SAuth', '(', '__opts__', ')', 'tok', '=', 'auth', '.', 'gen_token', '(', "b'salt'", ')', 'load', '=', '{', "'cmd'", ':', "'minion_runner'", ',', "'fun'", ':', 'fun', ',', "'arg'", ':', 'arg', ',', "'tok'", ':', 'tok', ',', "'tmo'", ':', 'timeout', ',', "'id'", ':', '__opts__', '[', "'id'", ']', ',', "'no_parse'", ':', '__opts__', '.', 'get', '(', "'no_parse'", ',', '[', ']', ')', '}', 'channel', '=', 'salt', '.', 'transport', '.', 'client', '.', 'ReqChannel', '.', 'factory', '(', '__opts__', ')', 'try', ':', 'return', 'channel', '.', 'send', '(', 'load', ')', 'except', 'SaltReqTimeoutError', ':', 'return', "'\\'{0}\\' runner publish timed out'", '.', 'format', '(', 'fun', ')', 'finally', ':', 'channel', '.', 'close', '(', ')'] | Execute a runner on the master and return the data from the runner
function
CLI Example:
.. code-block:: bash
salt publish.runner manage.down | ['Execute', 'a', 'runner', 'on', 'the', 'master', 'and', 'return', 'the', 'data', 'from', 'the', 'runner', 'function'] | train | https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/publish.py#L303-L335 |
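Beyond the CLI example in the docstring, the function is reachable from other minion-side code through the `__salt__` dunder. A hypothetical sketch of a custom execution module (file name and function name invented):

```python
# e.g. salt://_modules/mytools.py -- __salt__ is injected by the Salt loader.
def down_minions():
    '''Ask the master which minions are down, via the manage runner.'''
    return __salt__['publish.runner']('manage.down', timeout=10)
```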
1,856 | EVEprosper/ProsperCommon | prosper/common/prosper_config.py | read_config | def read_config(
config_filepath,
logger=logging.getLogger('ProsperCommon'),
):
"""fetch and parse config file
Args:
config_filepath (str): path to config file. abspath > relpath
logger (:obj:`logging.Logger`): logger to catch error msgs
"""
config_parser = configparser.ConfigParser(
interpolation=ExtendedInterpolation(),
allow_no_value=True,
delimiters=('='),
inline_comment_prefixes=('#')
)
logger.debug('config_filepath=%s', config_filepath)
with open(config_filepath, 'r') as filehandle:
config_parser.read_file(filehandle)
return config_parser | python | def read_config(
config_filepath,
logger=logging.getLogger('ProsperCommon'),
):
"""fetch and parse config file
Args:
config_filepath (str): path to config file. abspath > relpath
logger (:obj:`logging.Logger`): logger to catch error msgs
"""
config_parser = configparser.ConfigParser(
interpolation=ExtendedInterpolation(),
allow_no_value=True,
delimiters=('='),
inline_comment_prefixes=('#')
)
logger.debug('config_filepath=%s', config_filepath)
with open(config_filepath, 'r') as filehandle:
config_parser.read_file(filehandle)
return config_parser | ['def', 'read_config', '(', 'config_filepath', ',', 'logger', '=', 'logging', '.', 'getLogger', '(', "'ProsperCommon'", ')', ',', ')', ':', 'config_parser', '=', 'configparser', '.', 'ConfigParser', '(', 'interpolation', '=', 'ExtendedInterpolation', '(', ')', ',', 'allow_no_value', '=', 'True', ',', 'delimiters', '=', '(', "'='", ')', ',', 'inline_comment_prefixes', '=', '(', "'#'", ')', ')', 'logger', '.', 'debug', '(', "'config_filepath=%s'", ',', 'config_filepath', ')', 'with', 'open', '(', 'config_filepath', ',', "'r'", ')', 'as', 'filehandle', ':', 'config_parser', '.', 'read_file', '(', 'filehandle', ')', 'return', 'config_parser'] | fetch and parse config file
Args:
config_filepath (str): path to config file. abspath > relpath
logger (:obj:`logging.Logger`): logger to catch error msgs | ['fetch', 'and', 'parse', 'config', 'file'] | train | https://github.com/EVEprosper/ProsperCommon/blob/bcada3b25420099e1f204db8d55eb268e7b4dc27/prosper/common/prosper_config.py#L265-L287 |
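A self-contained sketch of `read_config`: write a throwaway file, then read it back. The section and option names are invented; the `${base}` reference exercises the `ExtendedInterpolation` configured above.

```python
import tempfile

with tempfile.NamedTemporaryFile('w', suffix='.cfg', delete=False) as tmp:
    tmp.write('[PATHS]\nbase = /srv\ndata = ${base}/data  # inline comment\n')
    path = tmp.name

config = read_config(path)
print(config['PATHS']['data'])  # '/srv/data' -- interpolated, comment stripped
```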
1,857 | b3j0f/conf | b3j0f/conf/model/base.py | CompositeModelElement.update | def update(self, other, copy=True, *args, **kwargs):
"""Update this composite model element with other element content.
:param other: element to update with this. Must be the same type of this
or this __contenttype__.
:param bool copy: copy other before updating.
:return: self"""
super(CompositeModelElement, self).update(
other, copy=copy, *args, **kwargs
)
if other: # dirty hack for python2.6
contents = []
if isinstance(other, self.__class__):
contents = list(other.values())
elif isinstance(other, self.__contenttype__):
contents = [other]
else:
raise TypeError(
'Wrong element to update with {0}: {1}'.format(self, other)
)
for content in contents:
selfcontent = self.get(content.name)
if selfcontent is None:
if copy:
content = content.copy(local=False)
self[content.name] = content
else:
selfcontent.update(content, copy=copy, *args, **kwargs)
return self | python | def update(self, other, copy=True, *args, **kwargs):
"""Update this composite model element with other element content.
:param other: element to update with this. Must be the same type of this
or this __contenttype__.
:param bool copy: copy other before updating.
:return: self"""
super(CompositeModelElement, self).update(
other, copy=copy, *args, **kwargs
)
if other: # dirty hack for python2.6
contents = []
if isinstance(other, self.__class__):
contents = list(other.values())
elif isinstance(other, self.__contenttype__):
contents = [other]
else:
raise TypeError(
'Wrong element to update with {0}: {1}'.format(self, other)
)
for content in contents:
selfcontent = self.get(content.name)
if selfcontent is None:
if copy:
content = content.copy(local=False)
self[content.name] = content
else:
selfcontent.update(content, copy=copy, *args, **kwargs)
return self | ['def', 'update', '(', 'self', ',', 'other', ',', 'copy', '=', 'True', ',', '*', 'args', ',', '*', '*', 'kwargs', ')', ':', 'super', '(', 'CompositeModelElement', ',', 'self', ')', '.', 'update', '(', 'other', ',', 'copy', '=', 'copy', ',', '*', 'args', ',', '*', '*', 'kwargs', ')', 'if', 'other', ':', '# dirty hack for python2.6', 'contents', '=', '[', ']', 'if', 'isinstance', '(', 'other', ',', 'self', '.', '__class__', ')', ':', 'contents', '=', 'list', '(', 'other', '.', 'values', '(', ')', ')', 'elif', 'isinstance', '(', 'other', ',', 'self', '.', '__contenttype__', ')', ':', 'contents', '=', '[', 'other', ']', 'else', ':', 'raise', 'TypeError', '(', "'Wrong element to update with {0}: {1}'", '.', 'format', '(', 'self', ',', 'other', ')', ')', 'for', 'content', 'in', 'contents', ':', 'selfcontent', '=', 'self', '.', 'get', '(', 'content', '.', 'name', ')', 'if', 'selfcontent', 'is', 'None', ':', 'if', 'copy', ':', 'content', '=', 'content', '.', 'copy', '(', 'local', '=', 'False', ')', 'self', '[', 'content', '.', 'name', ']', '=', 'content', 'else', ':', 'selfcontent', '.', 'update', '(', 'content', ',', 'copy', '=', 'copy', ',', '*', 'args', ',', '*', '*', 'kwargs', ')', 'return', 'self'] | Update this composite model element with other element content.
:param other: element to update with this. Must be the same type as this
or this __contenttype__.
:param bool copy: copy other before updating.
:return: self | ['Update', 'this', 'composite', 'model', 'element', 'with', 'other', 'element', 'content', '.'] | train | https://github.com/b3j0f/conf/blob/18dd6d5d6560f9b202793739e2330a2181163511/b3j0f/conf/model/base.py#L253-L292 |
1,858 | eqcorrscan/EQcorrscan | eqcorrscan/core/lag_calc.py | _channel_loop | def _channel_loop(detection, template, min_cc, detection_id, interpolate, i,
pre_lag_ccsum=None, detect_chans=0,
horizontal_chans=['E', 'N', '1', '2'], vertical_chans=['Z'],
debug=0):
"""
Inner loop for correlating and assigning picks.
Utility function to take a stream of data for the detected event and write
maximum correlation to absolute time as picks in an obspy.core.event.Event
object.
Only outputs picks with cross-correlation values above min_cc.
:type detection: obspy.core.stream.Stream
:param detection:
Stream of data for the slave event detected using template.
:type template: obspy.core.stream.Stream
:param template: Stream of data as the template for the detection.
:type min_cc: float
:param min_cc: Minimum cross-correlation value to allow a pick to be made.
:type detection_id: str
:param detection_id: Detection ID to associate the event with.
:type interpolate: bool
:param interpolate:
Interpolate the correlation function to achieve sub-sample precision.
:type i: int
:param i:
Used to track which process has occurred when running in parallel.
:type pre_lag_ccsum: float
:param pre_lag_ccsum:
Cross-correlation sum before lag-calc, will check that the
cross-correlation sum is increased by lag-calc (using all channels,
ignoring min_cc)
:type detect_chans: int
:param detect_chans:
Number of channels originally used in detections, must match the number
used here to allow for cccsum checking.
:type horizontal_chans: list
:param horizontal_chans:
List of channel endings for horizontal-channels, on which S-picks will
be made.
:type vertical_chans: list
:param vertical_chans:
List of channel endings for vertical-channels, on which P-picks will
be made.
:type debug: int
:param debug: Debug output level 0-5.
:returns:
Event object containing network, station, channel and pick information.
:rtype: :class:`obspy.core.event.Event`
"""
from eqcorrscan.core.match_filter import normxcorr2
import math
event = Event()
s_stachans = {}
cccsum = 0
checksum = 0
used_chans = 0
for tr in template:
temp_net = tr.stats.network
temp_sta = tr.stats.station
temp_chan = tr.stats.channel
debug_print('Working on: %s.%s.%s' % (temp_net, temp_sta, temp_chan),
3, debug)
image = detection.select(station=temp_sta, channel=temp_chan)
if len(image) == 0 or sum(image[0].data) == 0:
print('No match in image.')
continue
if interpolate:
try:
ccc = normxcorr2(tr.data, image[0].data)
except Exception:
print('Could not calculate cc')
print('Image is %i long' % len(image[0].data))
print('Template is %i long' % len(tr.data))
continue
try:
shift, cc_max = _xcorr_interp(ccc=ccc, dt=image[0].stats.delta)
except IndexError:
print('Could not interpolate ccc, not smooth')
ccc = normxcorr2(tr.data, image[0].data)
cc_max = np.amax(ccc)
shift = np.argmax(ccc) * image[0].stats.delta
# Convert the maximum cross-correlation time to an actual time
if math.isnan(cc_max):
print('Problematic trace, no cross correlation possible')
continue
else:
picktime = image[0].stats.starttime + shift
else:
# Convert the maximum cross-correlation time to an actual time
try:
ccc = normxcorr2(tr.data, image[0].data)
except Exception:
print('Could not calculate cc')
print('Image is %i long' % len(image[0].data))
print('Template is %i long' % len(tr.data))
continue
cc_max = np.amax(ccc)
if math.isnan(cc_max):
print('Problematic trace, no cross correlation possible')
continue
else:
picktime = image[0].stats.starttime + (
np.argmax(ccc) * image[0].stats.delta)
debug_print('Maximum cross-corr=%s' % cc_max, 3, debug)
checksum += cc_max
used_chans += 1
if cc_max < min_cc:
debug_print('Correlation below threshold, not used', 3, debug)
continue
cccsum += cc_max
# Perhaps weight each pick by the cc val or cc val^2?
# weight = np.amax(ccc) ** 2
if temp_chan[-1] in vertical_chans:
phase = 'P'
# Only take the S-pick with the best correlation
elif temp_chan[-1] in horizontal_chans:
phase = 'S'
debug_print('Making S-pick on: %s.%s.%s' %
(temp_net, temp_sta, temp_chan), 4, debug)
if temp_sta not in s_stachans.keys():
s_stachans[temp_sta] = ((temp_chan, np.amax(ccc),
picktime))
elif temp_sta in s_stachans.keys():
if np.amax(ccc) > s_stachans[temp_sta][1]:
picktime = picktime
else:
continue
else:
phase = None
_waveform_id = WaveformStreamID(
network_code=temp_net, station_code=temp_sta,
channel_code=temp_chan)
event.picks.append(Pick(
waveform_id=_waveform_id, time=picktime,
method_id=ResourceIdentifier('EQcorrscan'), phase_hint=phase,
creation_info='eqcorrscan.core.lag_calc',
evaluation_mode='automatic',
comments=[Comment(text='cc_max=%s' % cc_max)]))
event.resource_id = detection_id
ccc_str = ("detect_val=%s" % cccsum)
event.comments.append(Comment(text=ccc_str))
if used_chans == detect_chans:
if pre_lag_ccsum is not None and\
checksum - pre_lag_ccsum < -(0.3 * pre_lag_ccsum):
msg = ('lag-calc has decreased cccsum from %f to %f - '
% (pre_lag_ccsum, checksum))
raise LagCalcError(msg)
else:
warnings.warn('Cannot check if cccsum is better, used %i channels '
'for detection, but %i are used here'
% (detect_chans, used_chans))
return i, event | python | def _channel_loop(detection, template, min_cc, detection_id, interpolate, i,
pre_lag_ccsum=None, detect_chans=0,
horizontal_chans=['E', 'N', '1', '2'], vertical_chans=['Z'],
debug=0):
"""
Inner loop for correlating and assigning picks.
Utility function to take a stream of data for the detected event and write
maximum correlation to absolute time as picks in an obspy.core.event.Event
object.
Only outputs picks with cross-correlation values above min_cc.
:type detection: obspy.core.stream.Stream
:param detection:
Stream of data for the slave event detected using template.
:type template: obspy.core.stream.Stream
:param template: Stream of data as the template for the detection.
:type min_cc: float
:param min_cc: Minimum cross-correlation value to allow a pick to be made.
:type detection_id: str
:param detection_id: Detection ID to associate the event with.
:type interpolate: bool
:param interpolate:
Interpolate the correlation function to achieve sub-sample precision.
:type i: int
:param i:
Used to track which process has occurred when running in parallel.
:type pre_lag_ccsum: float
:param pre_lag_ccsum:
Cross-correlation sum before lag-calc, will check that the
cross-correlation sum is increased by lag-calc (using all channels,
ignoring min_cc)
:type detect_chans: int
:param detect_chans:
Number of channels originally used in detections, must match the number
used here to allow for cccsum checking.
:type horizontal_chans: list
:param horizontal_chans:
List of channel endings for horizontal-channels, on which S-picks will
be made.
:type vertical_chans: list
:param vertical_chans:
List of channel endings for vertical-channels, on which P-picks will
be made.
:type debug: int
:param debug: Debug output level 0-5.
:returns:
Event object containing network, station, channel and pick information.
:rtype: :class:`obspy.core.event.Event`
"""
from eqcorrscan.core.match_filter import normxcorr2
import math
event = Event()
s_stachans = {}
cccsum = 0
checksum = 0
used_chans = 0
for tr in template:
temp_net = tr.stats.network
temp_sta = tr.stats.station
temp_chan = tr.stats.channel
debug_print('Working on: %s.%s.%s' % (temp_net, temp_sta, temp_chan),
3, debug)
image = detection.select(station=temp_sta, channel=temp_chan)
if len(image) == 0 or sum(image[0].data) == 0:
print('No match in image.')
continue
if interpolate:
try:
ccc = normxcorr2(tr.data, image[0].data)
except Exception:
print('Could not calculate cc')
print('Image is %i long' % len(image[0].data))
print('Template is %i long' % len(tr.data))
continue
try:
shift, cc_max = _xcorr_interp(ccc=ccc, dt=image[0].stats.delta)
except IndexError:
print('Could not interpolate ccc, not smooth')
ccc = normxcorr2(tr.data, image[0].data)
cc_max = np.amax(ccc)
shift = np.argmax(ccc) * image[0].stats.delta
# Convert the maximum cross-correlation time to an actual time
if math.isnan(cc_max):
print('Problematic trace, no cross correlation possible')
continue
else:
picktime = image[0].stats.starttime + shift
else:
# Convert the maximum cross-correlation time to an actual time
try:
ccc = normxcorr2(tr.data, image[0].data)
except Exception:
print('Could not calculate cc')
print('Image is %i long' % len(image[0].data))
print('Template is %i long' % len(tr.data))
continue
cc_max = np.amax(ccc)
if math.isnan(cc_max):
print('Problematic trace, no cross correlation possible')
continue
else:
picktime = image[0].stats.starttime + (
np.argmax(ccc) * image[0].stats.delta)
debug_print('Maximum cross-corr=%s' % cc_max, 3, debug)
checksum += cc_max
used_chans += 1
if cc_max < min_cc:
debug_print('Correlation below threshold, not used', 3, debug)
continue
cccsum += cc_max
# Perhaps weight each pick by the cc val or cc val^2?
# weight = np.amax(ccc) ** 2
if temp_chan[-1] in vertical_chans:
phase = 'P'
# Only take the S-pick with the best correlation
elif temp_chan[-1] in horizontal_chans:
phase = 'S'
debug_print('Making S-pick on: %s.%s.%s' %
(temp_net, temp_sta, temp_chan), 4, debug)
if temp_sta not in s_stachans.keys():
s_stachans[temp_sta] = ((temp_chan, np.amax(ccc),
picktime))
elif temp_sta in s_stachans.keys():
if np.amax(ccc) > s_stachans[temp_sta][1]:
picktime = picktime
else:
continue
else:
phase = None
_waveform_id = WaveformStreamID(
network_code=temp_net, station_code=temp_sta,
channel_code=temp_chan)
event.picks.append(Pick(
waveform_id=_waveform_id, time=picktime,
method_id=ResourceIdentifier('EQcorrscan'), phase_hint=phase,
creation_info='eqcorrscan.core.lag_calc',
evaluation_mode='automatic',
comments=[Comment(text='cc_max=%s' % cc_max)]))
event.resource_id = detection_id
ccc_str = ("detect_val=%s" % cccsum)
event.comments.append(Comment(text=ccc_str))
if used_chans == detect_chans:
if pre_lag_ccsum is not None and\
checksum - pre_lag_ccsum < -(0.3 * pre_lag_ccsum):
msg = ('lag-calc has decreased cccsum from %f to %f - '
% (pre_lag_ccsum, checksum))
raise LagCalcError(msg)
else:
warnings.warn('Cannot check if cccsum is better, used %i channels '
'for detection, but %i are used here'
% (detect_chans, used_chans))
return i, event | ['def', '_channel_loop', '(', 'detection', ',', 'template', ',', 'min_cc', ',', 'detection_id', ',', 'interpolate', ',', 'i', ',', 'pre_lag_ccsum', '=', 'None', ',', 'detect_chans', '=', '0', ',', 'horizontal_chans', '=', '[', "'E'", ',', "'N'", ',', "'1'", ',', "'2'", ']', ',', 'vertical_chans', '=', '[', "'Z'", ']', ',', 'debug', '=', '0', ')', ':', 'from', 'eqcorrscan', '.', 'core', '.', 'match_filter', 'import', 'normxcorr2', 'import', 'math', 'event', '=', 'Event', '(', ')', 's_stachans', '=', '{', '}', 'cccsum', '=', '0', 'checksum', '=', '0', 'used_chans', '=', '0', 'for', 'tr', 'in', 'template', ':', 'temp_net', '=', 'tr', '.', 'stats', '.', 'network', 'temp_sta', '=', 'tr', '.', 'stats', '.', 'station', 'temp_chan', '=', 'tr', '.', 'stats', '.', 'channel', 'debug_print', '(', "'Working on: %s.%s.%s'", '%', '(', 'temp_net', ',', 'temp_sta', ',', 'temp_chan', ')', ',', '3', ',', 'debug', ')', 'image', '=', 'detection', '.', 'select', '(', 'station', '=', 'temp_sta', ',', 'channel', '=', 'temp_chan', ')', 'if', 'len', '(', 'image', ')', '==', '0', 'or', 'sum', '(', 'image', '[', '0', ']', '.', 'data', ')', '==', '0', ':', 'print', '(', "'No match in image.'", ')', 'continue', 'if', 'interpolate', ':', 'try', ':', 'ccc', '=', 'normxcorr2', '(', 'tr', '.', 'data', ',', 'image', '[', '0', ']', '.', 'data', ')', 'except', 'Exception', ':', 'print', '(', "'Could not calculate cc'", ')', 'print', '(', "'Image is %i long'", '%', 'len', '(', 'image', '[', '0', ']', '.', 'data', ')', ')', 'print', '(', "'Template is %i long'", '%', 'len', '(', 'tr', '.', 'data', ')', ')', 'continue', 'try', ':', 'shift', ',', 'cc_max', '=', '_xcorr_interp', '(', 'ccc', '=', 'ccc', ',', 'dt', '=', 'image', '[', '0', ']', '.', 'stats', '.', 'delta', ')', 'except', 'IndexError', ':', 'print', '(', "'Could not interpolate ccc, not smooth'", ')', 'ccc', '=', 'normxcorr2', '(', 'tr', '.', 'data', ',', 'image', '[', '0', ']', '.', 'data', ')', 'cc_max', '=', 'np', '.', 'amax', '(', 'ccc', ')', 'shift', '=', 'np', '.', 'argmax', '(', 'ccc', ')', '*', 'image', '[', '0', ']', '.', 'stats', '.', 'delta', '# Convert the maximum cross-correlation time to an actual time', 'if', 'math', '.', 'isnan', '(', 'cc_max', ')', ':', 'print', '(', "'Problematic trace, no cross correlation possible'", ')', 'continue', 'else', ':', 'picktime', '=', 'image', '[', '0', ']', '.', 'stats', '.', 'starttime', '+', 'shift', 'else', ':', '# Convert the maximum cross-correlation time to an actual time', 'try', ':', 'ccc', '=', 'normxcorr2', '(', 'tr', '.', 'data', ',', 'image', '[', '0', ']', '.', 'data', ')', 'except', 'Exception', ':', 'print', '(', "'Could not calculate cc'", ')', 'print', '(', "'Image is %i long'", '%', 'len', '(', 'image', '[', '0', ']', '.', 'data', ')', ')', 'print', '(', "'Template is %i long'", '%', 'len', '(', 'tr', '.', 'data', ')', ')', 'continue', 'cc_max', '=', 'np', '.', 'amax', '(', 'ccc', ')', 'if', 'math', '.', 'isnan', '(', 'cc_max', ')', ':', 'print', '(', "'Problematic trace, no cross correlation possible'", ')', 'continue', 'else', ':', 'picktime', '=', 'image', '[', '0', ']', '.', 'stats', '.', 'starttime', '+', '(', 'np', '.', 'argmax', '(', 'ccc', ')', '*', 'image', '[', '0', ']', '.', 'stats', '.', 'delta', ')', 'debug_print', '(', "'Maximum cross-corr=%s'", '%', 'cc_max', ',', '3', ',', 'debug', ')', 'checksum', '+=', 'cc_max', 'used_chans', '+=', '1', 'if', 'cc_max', '<', 'min_cc', ':', 'debug_print', '(', "'Correlation below threshold, not used'", ',', '3', ',', 'debug', ')', 
'continue', 'cccsum', '+=', 'cc_max', '# Perhaps weight each pick by the cc val or cc val^2?', '# weight = np.amax(ccc) ** 2', 'if', 'temp_chan', '[', '-', '1', ']', 'in', 'vertical_chans', ':', 'phase', '=', "'P'", '# Only take the S-pick with the best correlation', 'elif', 'temp_chan', '[', '-', '1', ']', 'in', 'horizontal_chans', ':', 'phase', '=', "'S'", 'debug_print', '(', "'Making S-pick on: %s.%s.%s'", '%', '(', 'temp_net', ',', 'temp_sta', ',', 'temp_chan', ')', ',', '4', ',', 'debug', ')', 'if', 'temp_sta', 'not', 'in', 's_stachans', '.', 'keys', '(', ')', ':', 's_stachans', '[', 'temp_sta', ']', '=', '(', '(', 'temp_chan', ',', 'np', '.', 'amax', '(', 'ccc', ')', ',', 'picktime', ')', ')', 'elif', 'temp_sta', 'in', 's_stachans', '.', 'keys', '(', ')', ':', 'if', 'np', '.', 'amax', '(', 'ccc', ')', '>', 's_stachans', '[', 'temp_sta', ']', '[', '1', ']', ':', 'picktime', '=', 'picktime', 'else', ':', 'continue', 'else', ':', 'phase', '=', 'None', '_waveform_id', '=', 'WaveformStreamID', '(', 'network_code', '=', 'temp_net', ',', 'station_code', '=', 'temp_sta', ',', 'channel_code', '=', 'temp_chan', ')', 'event', '.', 'picks', '.', 'append', '(', 'Pick', '(', 'waveform_id', '=', '_waveform_id', ',', 'time', '=', 'picktime', ',', 'method_id', '=', 'ResourceIdentifier', '(', "'EQcorrscan'", ')', ',', 'phase_hint', '=', 'phase', ',', 'creation_info', '=', "'eqcorrscan.core.lag_calc'", ',', 'evaluation_mode', '=', "'automatic'", ',', 'comments', '=', '[', 'Comment', '(', 'text', '=', "'cc_max=%s'", '%', 'cc_max', ')', ']', ')', ')', 'event', '.', 'resource_id', '=', 'detection_id', 'ccc_str', '=', '(', '"detect_val=%s"', '%', 'cccsum', ')', 'event', '.', 'comments', '.', 'append', '(', 'Comment', '(', 'text', '=', 'ccc_str', ')', ')', 'if', 'used_chans', '==', 'detect_chans', ':', 'if', 'pre_lag_ccsum', 'is', 'not', 'None', 'and', 'checksum', '-', 'pre_lag_ccsum', '<', '-', '(', '0.3', '*', 'pre_lag_ccsum', ')', ':', 'msg', '=', '(', "'lag-calc has decreased cccsum from %f to %f - '", '%', '(', 'pre_lag_ccsum', ',', 'checksum', ')', ')', 'raise', 'LagCalcError', '(', 'msg', ')', 'else', ':', 'warnings', '.', 'warn', '(', "'Cannot check if cccsum is better, used %i channels '", "'for detection, but %i are used here'", '%', '(', 'detect_chans', ',', 'used_chans', ')', ')', 'return', 'i', ',', 'event'] | Inner loop for correlating and assigning picks.
Utility function to take a stream of data for the detected event and write
maximum correlation to absolute time as picks in an obspy.core.event.Event
object.
Only outputs picks for picks above min_cc.
:type detection: obspy.core.stream.Stream
:param detection:
Stream of data for the slave event detected using template.
:type template: obspy.core.stream.Stream
:param template: Stream of data as the template for the detection.
:type min_cc: float
:param min_cc: Minimum cross-correlation value to allow a pick to be made.
:type detection_id: str
:param detection_id: Detection ID to associate the event with.
:type interpolate: bool
:param interpolate:
Interpolate the correlation function to achieve sub-sample precision.
:type i: int
:param i:
Used to track which process has occurred when running in parallel.
:type pre_lag_ccsum: float
:param pre_lag_ccsum:
Cross-correlation sum before lag-calc, will check that the
cross-correlation sum is increased by lag-calc (using all channels,
ignoring min_cc)
:type detect_chans: int
:param detect_chans:
Number of channels originally used in detections, must match the number
used here to allow for cccsum checking.
:type horizontal_chans: list
:param horizontal_chans:
List of channel endings for horizontal-channels, on which S-picks will
be made.
:type vertical_chans: list
:param vertical_chans:
List of channel endings for vertical-channels, on which P-picks will
be made.
:type debug: int
:param debug: Debug output level 0-5.
:returns:
Event object containing network, station, channel and pick information.
:rtype: :class:`obspy.core.event.Event` | ['Inner', 'loop', 'for', 'correlating', 'and', 'assigning', 'picks', '.'] | train | https://github.com/eqcorrscan/EQcorrscan/blob/3121b4aca801ee5d38f56ca297ce1c0f9515d9ff/eqcorrscan/core/lag_calc.py#L101-L254 |
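A synthetic, hedged sketch of `_channel_loop`: the template is cut straight out of the "detection" stream, so the correlation peaks exactly where it was planted. All station metadata is invented.

```python
import numpy as np
from obspy import Trace, Stream, UTCDateTime

rng = np.random.RandomState(42)
data = rng.randn(1000)
header = {'network': 'NZ', 'station': 'FOZ', 'channel': 'HHZ',
          'sampling_rate': 100.0, 'starttime': UTCDateTime(2020, 1, 1)}
detection = Stream([Trace(data=data.copy(), header=header)])
template = Stream([Trace(data=data[300:400].copy(), header=header)])

i, event = _channel_loop(
    detection, template, min_cc=0.5, detection_id='smi:local/det_1',
    interpolate=False, i=0, detect_chans=1)
# Channel ends in 'Z', so the pick is a P pick ~3 s after the trace start.
print(event.picks[0].phase_hint, event.picks[0].time)
```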
1,859 | undertherain/pycontextfree | contextfree/shapes.py | triangle | def triangle(rad=0.5):
"""Draw a triangle"""
# half_height = math.sqrt(3) * side / 6
# half_height = side / 2
ctx = _state["ctx"]
side = 3 * rad / math.sqrt(3)
ctx.move_to(0, -rad / 2)
ctx.line_to(-side / 2, -rad / 2)
ctx.line_to(0, rad)
ctx.line_to(side / 2, -rad / 2)
ctx.close_path()
ctx.fill() | python | def triangle(rad=0.5):
"""Draw a triangle"""
# half_height = math.sqrt(3) * side / 6
# half_height = side / 2
ctx = _state["ctx"]
side = 3 * rad / math.sqrt(3)
ctx.move_to(0, -rad / 2)
ctx.line_to(-side / 2, -rad / 2)
ctx.line_to(0, rad)
ctx.line_to(side / 2, -rad / 2)
ctx.close_path()
ctx.fill() | ['def', 'triangle', '(', 'rad', '=', '0.5', ')', ':', '# half_height = math.sqrt(3) * side / 6', '# half_height = side / 2', 'ctx', '=', '_state', '[', '"ctx"', ']', 'side', '=', '3', '*', 'rad', '/', 'math', '.', 'sqrt', '(', '3', ')', 'ctx', '.', 'move_to', '(', '0', ',', '-', 'rad', '/', '2', ')', 'ctx', '.', 'line_to', '(', '-', 'side', '/', '2', ',', '-', 'rad', '/', '2', ')', 'ctx', '.', 'line_to', '(', '0', ',', 'rad', ')', 'ctx', '.', 'line_to', '(', 'side', '/', '2', ',', '-', 'rad', '/', '2', ')', 'ctx', '.', 'close_path', '(', ')', 'ctx', '.', 'fill', '(', ')'] | Draw a triangle | ['Draw', 'a', 'triangle'] | train | https://github.com/undertherain/pycontextfree/blob/91505e978f6034863747c98d919ac11b029b1ac3/contextfree/shapes.py#L28-L39 |
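A standalone cairo sketch of the same path. It sidesteps the module's `_state` context (whose setup is not shown in this record) by building a context directly; the translate/scale step is an assumption about how the library maps its unit canvas to pixels.

```python
import math
import cairo

surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 100, 100)
ctx = cairo.Context(surface)
ctx.translate(50, 50)  # origin at the centre of the image
ctx.scale(50, -50)     # unit coordinates, y pointing up

rad = 0.5
side = 3 * rad / math.sqrt(3)  # side length of the equilateral triangle
ctx.move_to(0, -rad / 2)
ctx.line_to(-side / 2, -rad / 2)
ctx.line_to(0, rad)
ctx.line_to(side / 2, -rad / 2)
ctx.close_path()
ctx.fill()
surface.write_to_png('triangle.png')
```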
1,860 | inveniosoftware-contrib/invenio-classifier | invenio_classifier/engine.py | _get_singlekws | def _get_singlekws(skw_matches, spires=False):
"""Get single keywords.
:var skw_matches: dict of {keyword: [info,...]}
:keyword spires: bool, to get the spires output
:return: list of formatted keywords
"""
output = {}
for single_keyword, info in skw_matches:
output[single_keyword.output(spires)] = len(info[0])
output = [{'keyword': key, 'number': value}
for key, value in output.iteritems()]
return sorted(output, key=lambda x: x['number'], reverse=True) | python | def _get_singlekws(skw_matches, spires=False):
"""Get single keywords.
:var skw_matches: dict of {keyword: [info,...]}
:keyword spires: bool, to get the spires output
:return: list of formatted keywords
"""
output = {}
for single_keyword, info in skw_matches:
output[single_keyword.output(spires)] = len(info[0])
output = [{'keyword': key, 'number': value}
for key, value in output.iteritems()]
return sorted(output, key=lambda x: x['number'], reverse=True) | ['def', '_get_singlekws', '(', 'skw_matches', ',', 'spires', '=', 'False', ')', ':', 'output', '=', '{', '}', 'for', 'single_keyword', ',', 'info', 'in', 'skw_matches', ':', 'output', '[', 'single_keyword', '.', 'output', '(', 'spires', ')', ']', '=', 'len', '(', 'info', '[', '0', ']', ')', 'output', '=', '[', '{', "'keyword'", ':', 'key', ',', "'number'", ':', 'value', '}', 'for', 'key', ',', 'value', 'in', 'output', '.', 'iteritems', '(', ')', ']', 'return', 'sorted', '(', 'output', ',', 'key', '=', 'lambda', 'x', ':', 'x', '[', "'number'", ']', ',', 'reverse', '=', 'True', ')'] | Get single keywords.
:var skw_matches: dict of {keyword: [info,...]}
:keyword spires: bool, to get the spires output
:return: list of formatted keywords | ['Get', 'single', 'keywords', '.'] | train | https://github.com/inveniosoftware-contrib/invenio-classifier/blob/3c758cf34dca6bf0548e7da5de34e5f72e3b255e/invenio_classifier/engine.py#L348-L360 |
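A stub-driven sketch of `_get_singlekws` (Python 2, matching the module's use of `iteritems`). The `SKW` stub mimics only what the helper touches: an `output(spires)` method and `info[0]` being a sized container.

```python
class SKW(object):
    def __init__(self, name):
        self.name = name

    def output(self, spires=False):
        return self.name

matches = [(SKW('neutrino'), ([1, 2, 3],)),  # matched three times
           (SKW('quark'), ([7],))]           # matched once
print _get_singlekws(matches)
# [{'keyword': 'neutrino', 'number': 3}, {'keyword': 'quark', 'number': 1}]
```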
1,861 | tornadoweb/tornado | tornado/web.py | RequestHandler.render_linked_css | def render_linked_css(self, css_files: Iterable[str]) -> str:
"""Default method used to render the final css links for the
rendered webpage.
Override this method in a sub-classed controller to change the output.
"""
paths = []
unique_paths = set() # type: Set[str]
for path in css_files:
if not is_absolute(path):
path = self.static_url(path)
if path not in unique_paths:
paths.append(path)
unique_paths.add(path)
return "".join(
'<link href="' + escape.xhtml_escape(p) + '" '
'type="text/css" rel="stylesheet"/>'
for p in paths
) | python | def render_linked_css(self, css_files: Iterable[str]) -> str:
"""Default method used to render the final css links for the
rendered webpage.
Override this method in a sub-classed controller to change the output.
"""
paths = []
unique_paths = set() # type: Set[str]
for path in css_files:
if not is_absolute(path):
path = self.static_url(path)
if path not in unique_paths:
paths.append(path)
unique_paths.add(path)
return "".join(
'<link href="' + escape.xhtml_escape(p) + '" '
'type="text/css" rel="stylesheet"/>'
for p in paths
) | ['def', 'render_linked_css', '(', 'self', ',', 'css_files', ':', 'Iterable', '[', 'str', ']', ')', '->', 'str', ':', 'paths', '=', '[', ']', 'unique_paths', '=', 'set', '(', ')', '# type: Set[str]', 'for', 'path', 'in', 'css_files', ':', 'if', 'not', 'is_absolute', '(', 'path', ')', ':', 'path', '=', 'self', '.', 'static_url', '(', 'path', ')', 'if', 'path', 'not', 'in', 'unique_paths', ':', 'paths', '.', 'append', '(', 'path', ')', 'unique_paths', '.', 'add', '(', 'path', ')', 'return', '""', '.', 'join', '(', '\'<link href="\'', '+', 'escape', '.', 'xhtml_escape', '(', 'p', ')', '+', '\'" \'', '\'type="text/css" rel="stylesheet"/>\'', 'for', 'p', 'in', 'paths', ')'] | Default method used to render the final css links for the
rendered webpage.
Override this method in a sub-classed controller to change the output. | ['Default', 'method', 'used', 'to', 'render', 'the', 'final', 'css', 'links', 'for', 'the', 'rendered', 'webpage', '.'] | train | https://github.com/tornadoweb/tornado/blob/b8b481770bcdb333a69afde5cce7eaa449128326/tornado/web.py#L951-L971 |
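The docstring invites overriding in a subclass; a hedged sketch of one such override (the extra `media` attribute is just an example of decorating the generated tags).

```python
import tornado.web

class BrandedHandler(tornado.web.RequestHandler):
    def render_linked_css(self, css_files):
        # Reuse the de-duplicating base implementation, then decorate the tags.
        links = super().render_linked_css(css_files)
        return links.replace('rel="stylesheet"',
                             'rel="stylesheet" media="screen"')
```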
1,862 | AndrewAnnex/SpiceyPy | spiceypy/spiceypy.py | uddc | def uddc(udfunc, x, dx):
"""
SPICE private routine intended solely for the support of SPICE
routines. Users should not call this routine directly due to the
volatile nature of this routine.
This routine calculates the derivative of 'udfunc' with respect
to time for 'et', then determines if the derivative has a
negative value.
Use the @spiceypy.utils.callbacks.SpiceUDFUNS decorator to wrap
a given python function that takes one parameter (float) and
returns a float. For example::
@spiceypy.utils.callbacks.SpiceUDFUNS
def udfunc(et_in):
pos, new_et = spice.spkpos("MERCURY", et_in, "J2000", "LT+S", "MOON")
return new_et
deriv = spice.uddf(udfunc, et, 1.0)
https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/uddc_c.html
:param udfunc: Name of the routine that computes the scalar value of interest.
:type udfunc: ctypes.CFunctionType
:param x: Independent variable of 'udfunc'.
:type x: float
:param dx: Interval from 'x' for derivative calculation.
:type dx: float
:return: Boolean indicating if the derivative is negative.
:rtype: bool
"""
x = ctypes.c_double(x)
dx = ctypes.c_double(dx)
isdescr = ctypes.c_int()
libspice.uddc_c(udfunc, x, dx, ctypes.byref(isdescr))
return bool(isdescr.value) | python | def uddc(udfunc, x, dx):
"""
SPICE private routine intended solely for the support of SPICE
routines. Users should not call this routine directly due to the
volatile nature of this routine.
This routine calculates the derivative of 'udfunc' with respect
to time for 'et', then determines if the derivative has a
negative value.
Use the @spiceypy.utils.callbacks.SpiceUDFUNS decorator to wrap
a given python function that takes one parameter (float) and
returns a float. For example::
@spiceypy.utils.callbacks.SpiceUDFUNS
def udfunc(et_in):
pos, new_et = spice.spkpos("MERCURY", et_in, "J2000", "LT+S", "MOON")
return new_et
deriv = spice.uddf(udfunc, et, 1.0)
https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/uddc_c.html
:param udfunc: Name of the routine that computes the scalar value of interest.
:type udfunc: ctypes.CFunctionType
:param x: Independent variable of 'udfunc'.
:type x: float
:param dx: Interval from 'x' for derivative calculation.
:type dx: float
:return: Boolean indicating if the derivative is negative.
:rtype: bool
"""
x = ctypes.c_double(x)
dx = ctypes.c_double(dx)
isdescr = ctypes.c_int()
libspice.uddc_c(udfunc, x, dx, ctypes.byref(isdescr))
return bool(isdescr.value) | ['def', 'uddc', '(', 'udfunc', ',', 'x', ',', 'dx', ')', ':', 'x', '=', 'ctypes', '.', 'c_double', '(', 'x', ')', 'dx', '=', 'ctypes', '.', 'c_double', '(', 'dx', ')', 'isdescr', '=', 'ctypes', '.', 'c_int', '(', ')', 'libspice', '.', 'uddc_c', '(', 'udfunc', ',', 'x', ',', 'dx', ',', 'ctypes', '.', 'byref', '(', 'isdescr', ')', ')', 'return', 'bool', '(', 'isdescr', '.', 'value', ')'] | SPICE private routine intended solely for the support of SPICE
routines. Users should not call this routine directly due to the
volatile nature of this routine.
This routine calculates the derivative of 'udfunc' with respect
to time for 'et', then determines if the derivative has a
negative value.
Use the @spiceypy.utils.callbacks.SpiceUDFUNS decorator to wrap
a given python function that takes one parameter (float) and
returns a float. For example::
@spiceypy.utils.callbacks.SpiceUDFUNS
def udfunc(et_in):
pos, new_et = spice.spkpos("MERCURY", et_in, "J2000", "LT+S", "MOON")
return new_et
deriv = spice.uddf(udfunc, et, 1.0)
https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/uddc_c.html
:param udfunc: Name of the routine that computes the scalar value of interest.
:type udfunc: ctypes.CFunctionType
:param x: Independent variable of 'udfunc'.
:type x: float
:param dx: Interval from 'x' for derivative calculation.
:type dx: float
:return: Boolean indicating if the derivative is negative.
:rtype: bool | ['SPICE', 'private', 'routine', 'intended', 'solely', 'for', 'the', 'support', 'of', 'SPICE', 'routines', '.', 'Users', 'should', 'not', 'call', 'this', 'routine', 'directly', 'due', 'to', 'the', 'volatile', 'nature', 'of', 'this', 'routine', '.'] | train | https://github.com/AndrewAnnex/SpiceyPy/blob/fc20a9b9de68b58eed5b332f0c051fb343a6e335/spiceypy/spiceypy.py#L14221-L14257 |
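Continuing the decorator example in the docstring above, the sign check itself is a single call; whether the derivative is actually negative at `et` is data-dependent.

```python
# Follows on from the udfunc/et defined in the docstring's example.
is_decreasing = spice.uddc(udfunc, et, 1.0)
print(is_decreasing)  # True when d(udfunc)/dt < 0 at et
```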
1,863 | delph-in/pydelphin | delphin/mrs/query.py | select_nodeids | def select_nodeids(xmrs, iv=None, label=None, pred=None):
"""
Return the list of matching nodeids in *xmrs*.
Nodeids in *xmrs* match if their corresponding
:class:`~delphin.mrs.components.ElementaryPredication` object
matches its `intrinsic_variable` to *iv*, `label` to *label*,
and `pred` to *pred*. The *iv*, *label*, and *pred* filters are
ignored if they are `None`.
Args:
xmrs (:class:`~delphin.mrs.xmrs.Xmrs`): semantic structure to
query
iv (str, optional): intrinsic variable to match
label (str, optional): label to match
pred (str, :class:`~delphin.mrs.components.Pred`, optional):
predicate to match
Returns:
list: matching nodeids
"""
def datamatch(nid):
ep = xmrs.ep(nid)
return ((iv is None or ep.iv == iv) and
(pred is None or ep.pred == pred) and
(label is None or ep.label == label))
return list(filter(datamatch, xmrs.nodeids())) | python | def select_nodeids(xmrs, iv=None, label=None, pred=None):
"""
Return the list of matching nodeids in *xmrs*.
Nodeids in *xmrs* match if their corresponding
:class:`~delphin.mrs.components.ElementaryPredication` object
matches its `intrinsic_variable` to *iv*, `label` to *label*,
and `pred` to *pred*. The *iv*, *label*, and *pred* filters are
ignored if they are `None`.
Args:
xmrs (:class:`~delphin.mrs.xmrs.Xmrs`): semantic structure to
query
iv (str, optional): intrinsic variable to match
label (str, optional): label to match
pred (str, :class:`~delphin.mrs.components.Pred`, optional):
predicate to match
Returns:
list: matching nodeids
"""
def datamatch(nid):
ep = xmrs.ep(nid)
return ((iv is None or ep.iv == iv) and
(pred is None or ep.pred == pred) and
(label is None or ep.label == label))
return list(filter(datamatch, xmrs.nodeids())) | ['def', 'select_nodeids', '(', 'xmrs', ',', 'iv', '=', 'None', ',', 'label', '=', 'None', ',', 'pred', '=', 'None', ')', ':', 'def', 'datamatch', '(', 'nid', ')', ':', 'ep', '=', 'xmrs', '.', 'ep', '(', 'nid', ')', 'return', '(', '(', 'iv', 'is', 'None', 'or', 'ep', '.', 'iv', '==', 'iv', ')', 'and', '(', 'pred', 'is', 'None', 'or', 'ep', '.', 'pred', '==', 'pred', ')', 'and', '(', 'label', 'is', 'None', 'or', 'ep', '.', 'label', '==', 'label', ')', ')', 'return', 'list', '(', 'filter', '(', 'datamatch', ',', 'xmrs', '.', 'nodeids', '(', ')', ')', ')'] | Return the list of matching nodeids in *xmrs*.
Nodeids in *xmrs* match if their corresponding
:class:`~delphin.mrs.components.ElementaryPredication` object
matches its `intrinsic_variable` to *iv*, `label` to *label*,
and `pred` to *pred*. The *iv*, *label*, and *pred* filters are
ignored if they are `None`.
Args:
xmrs (:class:`~delphin.mrs.xmrs.Xmrs`): semantic structure to
query
iv (str, optional): intrinsic variable to match
label (str, optional): label to match
pred (str, :class:`~delphin.mrs.components.Pred`, optional):
predicate to match
Returns:
list: matching nodeids | ['Return', 'the', 'list', 'of', 'matching', 'nodeids', 'in', '*', 'xmrs', '*', '.'] | train | https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/query.py#L12-L37 |
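A short sketch of calling select_nodeids; it assumes an `Xmrs` instance `xmrs` built elsewhere (e.g. by a SimpleMRS reader), and the predicate/label values are hypothetical:

```python
from delphin.mrs.query import select_nodeids

# xmrs: a delphin.mrs.xmrs.Xmrs obtained elsewhere
dog_nids = select_nodeids(xmrs, pred='_dog_n_1')           # filter by predicate
h1_nids = select_nodeids(xmrs, label='h1')                 # filter by label
both = select_nodeids(xmrs, pred='_dog_n_1', label='h1')   # filters conjoin
print(dog_nids, h1_nids, both)
```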
1,864 | emilmont/pyStatParser | stat_parser/eval_parser.py | TreeOperations._convert_to_spans | def _convert_to_spans(self, tree, start, set, parent = None):
"Convert a tree into spans (X, i, j) and add to a set."
if len(tree) == 3:
# Binary rule.
# Remove unary collapsing.
current = self._remove_vertical_markovization(tree[0]).split("+")
split = self._convert_to_spans(tree[1], start, set, None)
end = self._convert_to_spans(tree[2], split + 1, set, current[-1])
# Add phrases to set
if current[0] != parent:
set.add((current[0], start, end))
for nt in current[1:]:
set.add((nt, start, end))
return end
elif len(tree) == 2:
# Unary rule.
# Can have a constituent if it is collapsed.
current = self._remove_vertical_markovization(tree[0]).split("+")
for nt in current[:-1]:
set.add((nt, start, start))
return start | python | def _convert_to_spans(self, tree, start, set, parent = None):
"Convert a tree into spans (X, i, j) and add to a set."
if len(tree) == 3:
# Binary rule.
# Remove unary collapsing.
current = self._remove_vertical_markovization(tree[0]).split("+")
split = self._convert_to_spans(tree[1], start, set, None)
end = self._convert_to_spans(tree[2], split + 1, set, current[-1])
# Add phrases to set
if current[0] != parent:
set.add((current[0], start, end))
for nt in current[1:]:
set.add((nt, start, end))
return end
elif len(tree) == 2:
# Unary rule.
# Can have a constituent if it is collapsed.
current = self._remove_vertical_markovization(tree[0]).split("+")
for nt in current[:-1]:
set.add((nt, start, start))
return start | ['def', '_convert_to_spans', '(', 'self', ',', 'tree', ',', 'start', ',', 'set', ',', 'parent', '=', 'None', ')', ':', 'if', 'len', '(', 'tree', ')', '==', '3', ':', '# Binary rule.', '# Remove unary collapsing.', 'current', '=', 'self', '.', '_remove_vertical_markovization', '(', 'tree', '[', '0', ']', ')', '.', 'split', '(', '"+"', ')', 'split', '=', 'self', '.', '_convert_to_spans', '(', 'tree', '[', '1', ']', ',', 'start', ',', 'set', ',', 'None', ')', 'end', '=', 'self', '.', '_convert_to_spans', '(', 'tree', '[', '2', ']', ',', 'split', '+', '1', ',', 'set', ',', 'current', '[', '-', '1', ']', ')', '# Add phrases to set', 'if', 'current', '[', '0', ']', '!=', 'parent', ':', 'set', '.', 'add', '(', '(', 'current', '[', '0', ']', ',', 'start', ',', 'end', ')', ')', 'for', 'nt', 'in', 'current', '[', '1', ':', ']', ':', 'set', '.', 'add', '(', '(', 'nt', ',', 'start', ',', 'end', ')', ')', 'return', 'end', 'elif', 'len', '(', 'tree', ')', '==', '2', ':', '# Unary rule.', '# Can have a constituent if it is collapsed.', 'current', '=', 'self', '.', '_remove_vertical_markovization', '(', 'tree', '[', '0', ']', ')', '.', 'split', '(', '"+"', ')', 'for', 'nt', 'in', 'current', '[', ':', '-', '1', ']', ':', 'set', '.', 'add', '(', '(', 'nt', ',', 'start', ',', 'start', ')', ')', 'return', 'start'] | Convert a tree into spans (X, i, j) and add to a set. | ['Convert', 'a', 'tree', 'into', 'spans', '(', 'X', 'i', 'j', ')', 'and', 'add', 'to', 'a', 'set', '.'] | train | https://github.com/emilmont/pyStatParser/blob/0e4990d7c1f0e3a0e0626ea2059ffd9030edf323/stat_parser/eval_parser.py#L30-L53 |
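The span logic above is easier to see stripped of the markovization and unary-collapse handling; below is a simplified, self-contained sketch of the same idea (not the class method itself):

```python
def tree_to_spans(tree, start, spans):
    if len(tree) == 3:                    # binary rule: (label, left, right)
        label, left, right = tree
        split = tree_to_spans(left, start, spans)
        end = tree_to_spans(right, split + 1, spans)
        spans.add((label, start, end))
        return end
    return start                          # unary/leaf rule: a one-token span

spans = set()
tree_to_spans(("S", ("NP", "dogs"), ("VP", ("V", "bark"), ("ADV", "loudly"))), 0, spans)
print(sorted(spans))                      # [('S', 0, 2), ('VP', 1, 2)]
```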
1,865 | jjgomera/iapws | iapws/humidAir.py | HumidAir._eq | def _eq(self, T, P):
"""Procedure for calculate the composition in saturation state
Parameters
----------
T : float
Temperature [K]
P : float
Pressure [MPa]
Returns
-------
Asat : float
Saturation mass fraction of dry air in humid air [kg/kg]
"""
if T <= 273.16:
ice = _Ice(T, P)
gw = ice["g"]
else:
water = IAPWS95(T=T, P=P)
gw = water.g
def f(parr):
rho, a = parr
if a > 1:
a = 1
fa = self._fav(T, rho, a)
muw = fa["fir"]+rho*fa["fird"]-a*fa["fira"]
return gw-muw, rho**2*fa["fird"]/1000-P
rinput = fsolve(f, [1, 0.95], full_output=True)
Asat = rinput[0][1]
return Asat | python | def _eq(self, T, P):
"""Procedure for calculate the composition in saturation state
Parameters
----------
T : float
Temperature [K]
P : float
Pressure [MPa]
Returns
-------
Asat : float
Saturation mass fraction of dry air in humid air [kg/kg]
"""
if T <= 273.16:
ice = _Ice(T, P)
gw = ice["g"]
else:
water = IAPWS95(T=T, P=P)
gw = water.g
def f(parr):
rho, a = parr
if a > 1:
a = 1
fa = self._fav(T, rho, a)
muw = fa["fir"]+rho*fa["fird"]-a*fa["fira"]
return gw-muw, rho**2*fa["fird"]/1000-P
rinput = fsolve(f, [1, 0.95], full_output=True)
Asat = rinput[0][1]
return Asat | ['def', '_eq', '(', 'self', ',', 'T', ',', 'P', ')', ':', 'if', 'T', '<=', '273.16', ':', 'ice', '=', '_Ice', '(', 'T', ',', 'P', ')', 'gw', '=', 'ice', '[', '"g"', ']', 'else', ':', 'water', '=', 'IAPWS95', '(', 'T', '=', 'T', ',', 'P', '=', 'P', ')', 'gw', '=', 'water', '.', 'g', 'def', 'f', '(', 'parr', ')', ':', 'rho', ',', 'a', '=', 'parr', 'if', 'a', '>', '1', ':', 'a', '=', '1', 'fa', '=', 'self', '.', '_fav', '(', 'T', ',', 'rho', ',', 'a', ')', 'muw', '=', 'fa', '[', '"fir"', ']', '+', 'rho', '*', 'fa', '[', '"fird"', ']', '-', 'a', '*', 'fa', '[', '"fira"', ']', 'return', 'gw', '-', 'muw', ',', 'rho', '**', '2', '*', 'fa', '[', '"fird"', ']', '/', '1000', '-', 'P', 'rinput', '=', 'fsolve', '(', 'f', ',', '[', '1', ',', '0.95', ']', ',', 'full_output', '=', 'True', ')', 'Asat', '=', 'rinput', '[', '0', ']', '[', '1', ']', 'return', 'Asat'] | Procedure to calculate the composition in saturation state
Parameters
----------
T : float
Temperature [K]
P : float
Pressure [MPa]
Returns
-------
Asat : float
Saturation mass fraction of dry air in humid air [kg/kg] | ['Procedure', 'to', 'calculate', 'the', 'composition', 'in', 'saturation', 'state'] | train | https://github.com/jjgomera/iapws/blob/1e5812aab38212fb8a63736f61cdcfa427d223b1/iapws/humidAir.py#L729-L761
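A hedged sketch of exercising `_eq`; it is a private helper, and the assumption here is that a bare `HumidAir()` instance is enough to call it (construct the state as usual if your iapws version requires more):

```python
from iapws.humidAir import HumidAir

ha = HumidAir()              # assumption: no prior state needed for _eq
Asat = ha._eq(300.0, 0.1)    # T = 300 K, P = 0.1 MPa
print("saturation dry-air mass fraction:", Asat)
```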
1,866 | mottosso/be | be/vendor/click/decorators.py | version_option | def version_option(version=None, *param_decls, **attrs):
"""Adds a ``--version`` option which immediately ends the program
printing out the version number. This is implemented as an eager
option that prints the version and exits the program in the callback.
:param version: the version number to show. If not provided Click
attempts an auto discovery via setuptools.
:param prog_name: the name of the program (defaults to autodetection)
:param message: custom message to show instead of the default
(``'%(prog)s, version %(version)s'``)
:param others: everything else is forwarded to :func:`option`.
"""
if version is None:
module = sys._getframe(1).f_globals.get('__name__')
def decorator(f):
prog_name = attrs.pop('prog_name', None)
message = attrs.pop('message', '%(prog)s, version %(version)s')
def callback(ctx, param, value):
if not value or ctx.resilient_parsing:
return
prog = prog_name
if prog is None:
prog = ctx.find_root().info_name
ver = version
if ver is None:
try:
import pkg_resources
except ImportError:
pass
else:
for dist in pkg_resources.working_set:
scripts = dist.get_entry_map().get('console_scripts') or {}
for script_name, entry_point in iteritems(scripts):
if entry_point.module_name == module:
ver = dist.version
break
if ver is None:
raise RuntimeError('Could not determine version')
echo(message % {
'prog': prog,
'version': ver,
}, color=ctx.color)
ctx.exit()
attrs.setdefault('is_flag', True)
attrs.setdefault('expose_value', False)
attrs.setdefault('is_eager', True)
attrs.setdefault('help', 'Show the version and exit.')
attrs['callback'] = callback
return option(*(param_decls or ('--version',)), **attrs)(f)
return decorator | python | def version_option(version=None, *param_decls, **attrs):
"""Adds a ``--version`` option which immediately ends the program
printing out the version number. This is implemented as an eager
option that prints the version and exits the program in the callback.
:param version: the version number to show. If not provided, Click
attempts auto-discovery via setuptools.
:param prog_name: the name of the program (defaults to autodetection)
:param message: custom message to show instead of the default
(``'%(prog)s, version %(version)s'``)
:param others: everything else is forwarded to :func:`option`.
"""
if version is None:
module = sys._getframe(1).f_globals.get('__name__')
def decorator(f):
prog_name = attrs.pop('prog_name', None)
message = attrs.pop('message', '%(prog)s, version %(version)s')
def callback(ctx, param, value):
if not value or ctx.resilient_parsing:
return
prog = prog_name
if prog is None:
prog = ctx.find_root().info_name
ver = version
if ver is None:
try:
import pkg_resources
except ImportError:
pass
else:
for dist in pkg_resources.working_set:
scripts = dist.get_entry_map().get('console_scripts') or {}
for script_name, entry_point in iteritems(scripts):
if entry_point.module_name == module:
ver = dist.version
break
if ver is None:
raise RuntimeError('Could not determine version')
echo(message % {
'prog': prog,
'version': ver,
}, color=ctx.color)
ctx.exit()
attrs.setdefault('is_flag', True)
attrs.setdefault('expose_value', False)
attrs.setdefault('is_eager', True)
attrs.setdefault('help', 'Show the version and exit.')
attrs['callback'] = callback
return option(*(param_decls or ('--version',)), **attrs)(f)
return decorator | ['def', 'version_option', '(', 'version', '=', 'None', ',', '*', 'param_decls', ',', '*', '*', 'attrs', ')', ':', 'if', 'version', 'is', 'None', ':', 'module', '=', 'sys', '.', '_getframe', '(', '1', ')', '.', 'f_globals', '.', 'get', '(', "'__name__'", ')', 'def', 'decorator', '(', 'f', ')', ':', 'prog_name', '=', 'attrs', '.', 'pop', '(', "'prog_name'", ',', 'None', ')', 'message', '=', 'attrs', '.', 'pop', '(', "'message'", ',', "'%(prog)s, version %(version)s'", ')', 'def', 'callback', '(', 'ctx', ',', 'param', ',', 'value', ')', ':', 'if', 'not', 'value', 'or', 'ctx', '.', 'resilient_parsing', ':', 'return', 'prog', '=', 'prog_name', 'if', 'prog', 'is', 'None', ':', 'prog', '=', 'ctx', '.', 'find_root', '(', ')', '.', 'info_name', 'ver', '=', 'version', 'if', 'ver', 'is', 'None', ':', 'try', ':', 'import', 'pkg_resources', 'except', 'ImportError', ':', 'pass', 'else', ':', 'for', 'dist', 'in', 'pkg_resources', '.', 'working_set', ':', 'scripts', '=', 'dist', '.', 'get_entry_map', '(', ')', '.', 'get', '(', "'console_scripts'", ')', 'or', '{', '}', 'for', 'script_name', ',', 'entry_point', 'in', 'iteritems', '(', 'scripts', ')', ':', 'if', 'entry_point', '.', 'module_name', '==', 'module', ':', 'ver', '=', 'dist', '.', 'version', 'break', 'if', 'ver', 'is', 'None', ':', 'raise', 'RuntimeError', '(', "'Could not determine version'", ')', 'echo', '(', 'message', '%', '{', "'prog'", ':', 'prog', ',', "'version'", ':', 'ver', ',', '}', ',', 'color', '=', 'ctx', '.', 'color', ')', 'ctx', '.', 'exit', '(', ')', 'attrs', '.', 'setdefault', '(', "'is_flag'", ',', 'True', ')', 'attrs', '.', 'setdefault', '(', "'expose_value'", ',', 'False', ')', 'attrs', '.', 'setdefault', '(', "'is_eager'", ',', 'True', ')', 'attrs', '.', 'setdefault', '(', "'help'", ',', "'Show the version and exit.'", ')', 'attrs', '[', "'callback'", ']', '=', 'callback', 'return', 'option', '(', '*', '(', 'param_decls', 'or', '(', "'--version'", ',', ')', ')', ',', '*', '*', 'attrs', ')', '(', 'f', ')', 'return', 'decorator'] | Adds a ``--version`` option which immediately ends the program
printing out the version number. This is implemented as an eager
option that prints the version and exits the program in the callback.
:param version: the version number to show. If not provided, Click
attempts auto-discovery via setuptools.
:param prog_name: the name of the program (defaults to autodetection)
:param message: custom message to show instead of the default
(``'%(prog)s, version %(version)s'``)
:param others: everything else is forwarded to :func:`option`. | ['Adds', 'a', '--', 'version', 'option', 'which', 'immediately', 'ends', 'the', 'program', 'printing', 'out', 'the', 'version', 'number', '.', 'This', 'is', 'implemented', 'as', 'an', 'eager', 'option', 'that', 'prints', 'the', 'version', 'and', 'exits', 'the', 'program', 'in', 'the', 'callback', '.'] | train | https://github.com/mottosso/be/blob/0f3d4f3597c71223f616d78c6d9b2c8dffcd8a71/be/vendor/click/decorators.py#L222-L273 |
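Typical use of the decorator, with an explicit version string so the setuptools auto-discovery path is never exercised:

```python
import click

@click.command()
@click.version_option(version="1.2.3", prog_name="mytool")
def cli():
    click.echo("running")

if __name__ == "__main__":
    cli()    # `mytool --version` prints "mytool, version 1.2.3" and exits
```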
1,867 | bcbio/bcbio-nextgen | bcbio/heterogeneity/bubbletree.py | _freqs_by_chromosome | def _freqs_by_chromosome(in_file, params, somatic_info):
"""Retrieve frequencies across each chromosome as inputs to HMM.
"""
freqs = []
coords = []
cur_chrom = None
with pysam.VariantFile(in_file) as bcf_in:
for rec in bcf_in:
if _is_biallelic_snp(rec) and _passes_plus_germline(rec) and chromhacks.is_autosomal(rec.chrom):
if cur_chrom is None or rec.chrom != cur_chrom:
if cur_chrom and len(freqs) > 0:
yield cur_chrom, freqs, coords
cur_chrom = rec.chrom
freqs = []
coords = []
stats = _tumor_normal_stats(rec, somatic_info)
if tz.get_in(["tumor", "depth"], stats, 0) > params["min_depth"]:
# not a ref only call
if len(rec.samples) == 0 or sum(rec.samples[somatic_info.tumor_name].allele_indices) > 0:
freqs.append(tz.get_in(["tumor", "freq"], stats))
coords.append(rec.start)
if cur_chrom and len(freqs) > 0:
yield cur_chrom, freqs, coords | python | def _freqs_by_chromosome(in_file, params, somatic_info):
"""Retrieve frequencies across each chromosome as inputs to HMM.
"""
freqs = []
coords = []
cur_chrom = None
with pysam.VariantFile(in_file) as bcf_in:
for rec in bcf_in:
if _is_biallelic_snp(rec) and _passes_plus_germline(rec) and chromhacks.is_autosomal(rec.chrom):
if cur_chrom is None or rec.chrom != cur_chrom:
if cur_chrom and len(freqs) > 0:
yield cur_chrom, freqs, coords
cur_chrom = rec.chrom
freqs = []
coords = []
stats = _tumor_normal_stats(rec, somatic_info)
if tz.get_in(["tumor", "depth"], stats, 0) > params["min_depth"]:
# not a ref only call
if len(rec.samples) == 0 or sum(rec.samples[somatic_info.tumor_name].allele_indices) > 0:
freqs.append(tz.get_in(["tumor", "freq"], stats))
coords.append(rec.start)
if cur_chrom and len(freqs) > 0:
yield cur_chrom, freqs, coords | ['def', '_freqs_by_chromosome', '(', 'in_file', ',', 'params', ',', 'somatic_info', ')', ':', 'freqs', '=', '[', ']', 'coords', '=', '[', ']', 'cur_chrom', '=', 'None', 'with', 'pysam', '.', 'VariantFile', '(', 'in_file', ')', 'as', 'bcf_in', ':', 'for', 'rec', 'in', 'bcf_in', ':', 'if', '_is_biallelic_snp', '(', 'rec', ')', 'and', '_passes_plus_germline', '(', 'rec', ')', 'and', 'chromhacks', '.', 'is_autosomal', '(', 'rec', '.', 'chrom', ')', ':', 'if', 'cur_chrom', 'is', 'None', 'or', 'rec', '.', 'chrom', '!=', 'cur_chrom', ':', 'if', 'cur_chrom', 'and', 'len', '(', 'freqs', ')', '>', '0', ':', 'yield', 'cur_chrom', ',', 'freqs', ',', 'coords', 'cur_chrom', '=', 'rec', '.', 'chrom', 'freqs', '=', '[', ']', 'coords', '=', '[', ']', 'stats', '=', '_tumor_normal_stats', '(', 'rec', ',', 'somatic_info', ')', 'if', 'tz', '.', 'get_in', '(', '[', '"tumor"', ',', '"depth"', ']', ',', 'stats', ',', '0', ')', '>', 'params', '[', '"min_depth"', ']', ':', '# not a ref only call', 'if', 'len', '(', 'rec', '.', 'samples', ')', '==', '0', 'or', 'sum', '(', 'rec', '.', 'samples', '[', 'somatic_info', '.', 'tumor_name', ']', '.', 'allele_indices', ')', '>', '0', ':', 'freqs', '.', 'append', '(', 'tz', '.', 'get_in', '(', '[', '"tumor"', ',', '"freq"', ']', ',', 'stats', ')', ')', 'coords', '.', 'append', '(', 'rec', '.', 'start', ')', 'if', 'cur_chrom', 'and', 'len', '(', 'freqs', ')', '>', '0', ':', 'yield', 'cur_chrom', ',', 'freqs', ',', 'coords'] | Retrieve frequencies across each chromosome as inputs to HMM. | ['Retrieve', 'frequencies', 'across', 'each', 'chromosome', 'as', 'inputs', 'to', 'HMM', '.'] | train | https://github.com/bcbio/bcbio-nextgen/blob/6a9348c0054ccd5baffd22f1bb7d0422f6978b20/bcbio/heterogeneity/bubbletree.py#L248-L270 |
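A hedged sketch of driving the generator above; the namedtuple stands in for bcbio's paired-sample object (only the sample-name attributes are assumed to matter here) and the VCF path is hypothetical:

```python
from collections import namedtuple
from bcbio.heterogeneity.bubbletree import _freqs_by_chromosome

PairedInfo = namedtuple("PairedInfo", ["tumor_name", "normal_name"])
somatic_info = PairedInfo("tumor_sample", "normal_sample")
params = {"min_depth": 15}

for chrom, freqs, coords in _freqs_by_chromosome("calls.vcf.gz", params, somatic_info):
    print(chrom, len(freqs), "usable SNP frequencies")
```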
1,868 | monarch-initiative/dipper | dipper/sources/UDP.py | UDP.fetch | def fetch(self, is_dl_forced=True):
"""
Fetches data from the UDP collaboration server;
see the top-level class comments for more information
:return:
"""
username = config.get_config()['dbauth']['udp']['user']
password = config.get_config()['dbauth']['udp']['password']
credentials = (username, password)
# Get patient map file:
patient_id_map = self.open_and_parse_yaml(self.map_files['patient_ids'])
udp_internal_ids = patient_id_map.keys()
phenotype_fields = ['Patient', 'HPID', 'Present']
# Get phenotype ids for each patient
phenotype_params = {
'method': 'search_subjects',
'subject_type': 'Phenotype',
'search_mode': 'DEEP',
'fields': 'Patient',
'conditions': 'equals',
'values': ','.join(udp_internal_ids),
'user_fields': ','.join(phenotype_fields)
}
prioritized_variants = [
'Patient', 'Gene', 'Chromosome Position', 'Variant Allele', 'Transcript']
prioritized_params = {
'method': 'search_subjects',
'subject_type': 'Variant Prioritization',
'search_mode': 'DEEP',
'fields': 'Patient',
'conditions': 'equals',
'values': ','.join(udp_internal_ids),
'user_fields': ','.join(prioritized_variants),
'format': 'json'}
variant_fields = [
'Patient', 'Family', 'Chr', 'Build', 'Chromosome Position',
'Reference Allele', 'Variant Allele', 'Parent of origin',
'Allele Type', 'Mutation Type', 'Gene', 'Transcript', 'Original Amino Acid',
'Variant Amino Acid', 'Amino Acid Change', 'Segregates with',
'Position', 'Exon', 'Inheritance model', 'Zygosity', 'dbSNP ID',
'1K Frequency', 'Number of Alleles']
variant_params = {
'method': 'search_subjects',
'subject_type': 'Exome Analysis Results',
'search_mode': 'DEEP',
'fields': 'Patient',
'conditions': 'equals',
'user_fields': ','.join(variant_fields),
'format': 'json'}
pheno_file = open(
'/'.join((self.rawdir, self.files['patient_phenotypes']['file'])), 'w')
variant_file = open(
'/'.join((self.rawdir, self.files['patient_variants']['file'])), 'w')
pheno_file.write('{0}\n'.format('\t'.join(phenotype_fields)))
variant_file.write('{0}\n'.format('\t'.join(variant_fields)))
variant_gene = self._fetch_data_from_udp(
udp_internal_ids, prioritized_params, prioritized_variants, credentials)
variant_gene_map = dict()
for line in variant_gene:
variant_gene_map.setdefault(line[0], []).append(
# Try to make a unique value based on gene-pos-variantAlele-transcript
# TODO make this a dict for readability purposes
"{0}-{1}-{2}-{3}".format(line[1], line[2], line[3], line[4]))
variant_info = self._fetch_data_from_udp(
udp_internal_ids, variant_params, variant_fields, credentials)
for line in variant_info:
variant = "{0}-{1}-{2}-{3}".format(line[10], line[4], line[6], line[11])
if variant in variant_gene_map[line[0]]:
line[0] = patient_id_map[line[0]]
line[4] = re.sub(r'\.0$', '', line[4])
variant_file.write('{0}\n'.format('\t'.join(line)))
phenotype_info = self._fetch_data_from_udp(
udp_internal_ids, phenotype_params, phenotype_fields, credentials)
for line in phenotype_info:
line[0] = patient_id_map[line[0]]
pheno_file.write('{0}\n'.format('\t'.join(line)))
variant_file.close()
pheno_file.close()
return | python | def fetch(self, is_dl_forced=True):
"""
Fetches data from the UDP collaboration server;
see the top-level class comments for more information
:return:
"""
username = config.get_config()['dbauth']['udp']['user']
password = config.get_config()['dbauth']['udp']['password']
credentials = (username, password)
# Get patient map file:
patient_id_map = self.open_and_parse_yaml(self.map_files['patient_ids'])
udp_internal_ids = patient_id_map.keys()
phenotype_fields = ['Patient', 'HPID', 'Present']
# Get phenotype ids for each patient
phenotype_params = {
'method': 'search_subjects',
'subject_type': 'Phenotype',
'search_mode': 'DEEP',
'fields': 'Patient',
'conditions': 'equals',
'values': ','.join(udp_internal_ids),
'user_fields': ','.join(phenotype_fields)
}
prioritized_variants = [
'Patient', 'Gene', 'Chromosome Position', 'Variant Allele', 'Transcript']
prioritized_params = {
'method': 'search_subjects',
'subject_type': 'Variant Prioritization',
'search_mode': 'DEEP',
'fields': 'Patient',
'conditions': 'equals',
'values': ','.join(udp_internal_ids),
'user_fields': ','.join(prioritized_variants),
'format': 'json'}
variant_fields = [
'Patient', 'Family', 'Chr', 'Build', 'Chromosome Position',
'Reference Allele', 'Variant Allele', 'Parent of origin',
'Allele Type', 'Mutation Type', 'Gene', 'Transcript', 'Original Amino Acid',
'Variant Amino Acid', 'Amino Acid Change', 'Segregates with',
'Position', 'Exon', 'Inheritance model', 'Zygosity', 'dbSNP ID',
'1K Frequency', 'Number of Alleles']
variant_params = {
'method': 'search_subjects',
'subject_type': 'Exome Analysis Results',
'search_mode': 'DEEP',
'fields': 'Patient',
'conditions': 'equals',
'user_fields': ','.join(variant_fields),
'format': 'json'}
pheno_file = open(
'/'.join((self.rawdir, self.files['patient_phenotypes']['file'])), 'w')
variant_file = open(
'/'.join((self.rawdir, self.files['patient_variants']['file'])), 'w')
pheno_file.write('{0}\n'.format('\t'.join(phenotype_fields)))
variant_file.write('{0}\n'.format('\t'.join(variant_fields)))
variant_gene = self._fetch_data_from_udp(
udp_internal_ids, prioritized_params, prioritized_variants, credentials)
variant_gene_map = dict()
for line in variant_gene:
variant_gene_map.setdefault(line[0], []).append(
# Try to make a unique value based on gene-pos-variantAlele-transcript
# TODO make this a dict for readability purposes
"{0}-{1}-{2}-{3}".format(line[1], line[2], line[3], line[4]))
variant_info = self._fetch_data_from_udp(
udp_internal_ids, variant_params, variant_fields, credentials)
for line in variant_info:
variant = "{0}-{1}-{2}-{3}".format(line[10], line[4], line[6], line[11])
if variant in variant_gene_map[line[0]]:
line[0] = patient_id_map[line[0]]
line[4] = re.sub(r'\.0$', '', line[4])
variant_file.write('{0}\n'.format('\t'.join(line)))
phenotype_info = self._fetch_data_from_udp(
udp_internal_ids, phenotype_params, phenotype_fields, credentials)
for line in phenotype_info:
line[0] = patient_id_map[line[0]]
pheno_file.write('{0}\n'.format('\t'.join(line)))
variant_file.close()
pheno_file.close()
return | ['def', 'fetch', '(', 'self', ',', 'is_dl_forced', '=', 'True', ')', ':', 'username', '=', 'config', '.', 'get_config', '(', ')', '[', "'dbauth'", ']', '[', "'udp'", ']', '[', "'user'", ']', 'password', '=', 'config', '.', 'get_config', '(', ')', '[', "'dbauth'", ']', '[', "'udp'", ']', '[', "'password'", ']', 'credentials', '=', '(', 'username', ',', 'password', ')', '# Get patient map file:', 'patient_id_map', '=', 'self', '.', 'open_and_parse_yaml', '(', 'self', '.', 'map_files', '[', "'patient_ids'", ']', ')', 'udp_internal_ids', '=', 'patient_id_map', '.', 'keys', '(', ')', 'phenotype_fields', '=', '[', "'Patient'", ',', "'HPID'", ',', "'Present'", ']', '# Get phenotype ids for each patient', 'phenotype_params', '=', '{', "'method'", ':', "'search_subjects'", ',', "'subject_type'", ':', "'Phenotype'", ',', "'search_mode'", ':', "'DEEP'", ',', "'fields'", ':', "'Patient'", ',', "'conditions'", ':', "'equals'", ',', "'values'", ':', "','", '.', 'join', '(', 'udp_internal_ids', ')', ',', "'user_fields'", ':', "','", '.', 'join', '(', 'phenotype_fields', ')', '}', 'prioritized_variants', '=', '[', "'Patient'", ',', "'Gene'", ',', "'Chromosome Position'", ',', "'Variant Allele'", ',', "'Transcript'", ']', 'prioritized_params', '=', '{', "'method'", ':', "'search_subjects'", ',', "'subject_type'", ':', "'Variant Prioritization'", ',', "'search_mode'", ':', "'DEEP'", ',', "'fields'", ':', "'Patient'", ',', "'conditions'", ':', "'equals'", ',', "'values'", ':', "','", '.', 'join', '(', 'udp_internal_ids', ')', ',', "'user_fields'", ':', "','", '.', 'join', '(', 'prioritized_variants', ')', ',', "'format'", ':', "'json'", '}', 'variant_fields', '=', '[', "'Patient'", ',', "'Family'", ',', "'Chr'", ',', "'Build'", ',', "'Chromosome Position'", ',', "'Reference Allele'", ',', "'Variant Allele'", ',', "'Parent of origin'", ',', "'Allele Type'", ',', "'Mutation Type'", ',', "'Gene'", ',', "'Transcript'", ',', "'Original Amino Acid'", ',', "'Variant Amino Acid'", ',', "'Amino Acid Change'", ',', "'Segregates with'", ',', "'Position'", ',', "'Exon'", ',', "'Inheritance model'", ',', "'Zygosity'", ',', "'dbSNP ID'", ',', "'1K Frequency'", ',', "'Number of Alleles'", ']', 'variant_params', '=', '{', "'method'", ':', "'search_subjects'", ',', "'subject_type'", ':', "'Exome Analysis Results'", ',', "'search_mode'", ':', "'DEEP'", ',', "'fields'", ':', "'Patient'", ',', "'conditions'", ':', "'equals'", ',', "'user_fields'", ':', "','", '.', 'join', '(', 'variant_fields', ')', ',', "'format'", ':', "'json'", '}', 'pheno_file', '=', 'open', '(', "'/'", '.', 'join', '(', '(', 'self', '.', 'rawdir', ',', 'self', '.', 'files', '[', "'patient_phenotypes'", ']', '[', "'file'", ']', ')', ')', ',', "'w'", ')', 'variant_file', '=', 'open', '(', "'/'", '.', 'join', '(', '(', 'self', '.', 'rawdir', ',', 'self', '.', 'files', '[', "'patient_variants'", ']', '[', "'file'", ']', ')', ')', ',', "'w'", ')', 'pheno_file', '.', 'write', '(', "'{0}\\n'", '.', 'format', '(', "'\\t'", '.', 'join', '(', 'phenotype_fields', ')', ')', ')', 'variant_file', '.', 'write', '(', "'{0}\\n'", '.', 'format', '(', "'\\t'", '.', 'join', '(', 'variant_fields', ')', ')', ')', 'variant_gene', '=', 'self', '.', '_fetch_data_from_udp', '(', 'udp_internal_ids', ',', 'prioritized_params', ',', 'prioritized_variants', ',', 'credentials', ')', 'variant_gene_map', '=', 'dict', '(', ')', 'for', 'line', 'in', 'variant_gene', ':', 'variant_gene_map', '.', 'setdefault', '(', 'line', '[', '0', ']', ',', '[', ']', ')', '.', 'append', '(', '# Try 
to make a unique value based on gene-pos-variantAlele-transcript', '# TODO make this a dict for readability purposes', '"{0}-{1}-{2}-{3}"', '.', 'format', '(', 'line', '[', '1', ']', ',', 'line', '[', '2', ']', ',', 'line', '[', '3', ']', ',', 'line', '[', '4', ']', ')', ')', 'variant_info', '=', 'self', '.', '_fetch_data_from_udp', '(', 'udp_internal_ids', ',', 'variant_params', ',', 'variant_fields', ',', 'credentials', ')', 'for', 'line', 'in', 'variant_info', ':', 'variant', '=', '"{0}-{1}-{2}-{3}"', '.', 'format', '(', 'line', '[', '10', ']', ',', 'line', '[', '4', ']', ',', 'line', '[', '6', ']', ',', 'line', '[', '11', ']', ')', 'if', 'variant', 'in', 'variant_gene_map', '[', 'line', '[', '0', ']', ']', ':', 'line', '[', '0', ']', '=', 'patient_id_map', '[', 'line', '[', '0', ']', ']', 'line', '[', '4', ']', '=', 're', '.', 'sub', '(', "r'\\.0$'", ',', "''", ',', 'line', '[', '4', ']', ')', 'variant_file', '.', 'write', '(', "'{0}\\n'", '.', 'format', '(', "'\\t'", '.', 'join', '(', 'line', ')', ')', ')', 'phenotype_info', '=', 'self', '.', '_fetch_data_from_udp', '(', 'udp_internal_ids', ',', 'phenotype_params', ',', 'phenotype_fields', ',', 'credentials', ')', 'for', 'line', 'in', 'phenotype_info', ':', 'line', '[', '0', ']', '=', 'patient_id_map', '[', 'line', '[', '0', ']', ']', 'pheno_file', '.', 'write', '(', "'{0}\\n'", '.', 'format', '(', "'\\t'", '.', 'join', '(', 'line', ')', ')', ')', 'variant_file', '.', 'close', '(', ')', 'pheno_file', '.', 'close', '(', ')', 'return'] | Fetches data from udp collaboration server,
see top level comments for class for more information
:return: | ['Fetches', 'data', 'from', 'the', 'UDP', 'collaboration', 'server', 'see', 'the', 'top-level', 'class', 'comments', 'for', 'more', 'information', ':', 'return', ':'] | train | https://github.com/monarch-initiative/dipper/blob/24cc80db355bbe15776edc5c7b41e0886959ba41/dipper/sources/UDP.py#L93-L190
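A hedged sketch of running the fetch step; it assumes dipper's configuration provides the dbauth -> udp credentials, and the constructor arguments follow dipper's usual Source convention (exact parameters may differ by version):

```python
from dipper.sources.UDP import UDP

udp = UDP('rdf_graph', True)   # assumption: (graph_type, are_bnodes_skolemized)
udp.fetch()                    # writes the phenotype/variant TSVs into udp.rawdir
```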
1,869 | ND-CSE-30151/tock | tock/machines.py | Configuration.match | def match(self, other):
"""Returns true iff self (as a pattern) matches other (as a
configuration). Note that this is asymmetric: other is allowed
to have symbols that aren't found in self."""
if len(self) != len(other):
raise ValueError()
for s1, s2 in zip(self, other):
i = s2.position - s1.position
if i < 0:
return False
n = len(s1)
while i+n > len(s2) and s1[n-1] == syntax.BLANK:
n -= 1
if s2.values[i:i+n] != s1.values[:n]:
return False
return True | python | def match(self, other):
"""Returns true iff self (as a pattern) matches other (as a
configuration). Note that this is asymmetric: other is allowed
to have symbols that aren't found in self."""
if len(self) != len(other):
raise ValueError()
for s1, s2 in zip(self, other):
i = s2.position - s1.position
if i < 0:
return False
n = len(s1)
while i+n > len(s2) and s1[n-1] == syntax.BLANK:
n -= 1
if s2.values[i:i+n] != s1.values[:n]:
return False
return True | ['def', 'match', '(', 'self', ',', 'other', ')', ':', 'if', 'len', '(', 'self', ')', '!=', 'len', '(', 'other', ')', ':', 'raise', 'ValueError', '(', ')', 'for', 's1', ',', 's2', 'in', 'zip', '(', 'self', ',', 'other', ')', ':', 'i', '=', 's2', '.', 'position', '-', 's1', '.', 'position', 'if', 'i', '<', '0', ':', 'return', 'False', 'n', '=', 'len', '(', 's1', ')', 'while', 'i', '+', 'n', '>', 'len', '(', 's2', ')', 'and', 's1', '[', 'n', '-', '1', ']', '==', 'syntax', '.', 'BLANK', ':', 'n', '-=', '1', 'if', 's2', '.', 'values', '[', 'i', ':', 'i', '+', 'n', ']', '!=', 's1', '.', 'values', '[', ':', 'n', ']', ':', 'return', 'False', 'return', 'True'] | Returns true iff self (as a pattern) matches other (as a
configuration). Note that this is asymmetric: other is allowed
to have symbols that aren't found in self. | ['Returns', 'true', 'iff', 'self', '(', 'as', 'a', 'pattern', ')', 'matches', 'other', '(', 'as', 'a', 'configuration', ')', '.', 'Note', 'that', 'this', 'is', 'asymmetric', ':', 'other', 'is', 'allowed', 'to', 'have', 'symbols', 'that', 'aren', 't', 'found', 'in', 'self', '.'] | train | https://github.com/ND-CSE-30151/tock/blob/b8d21901aaf0e6ac913c2afa855f5b5a882a16c6/tock/machines.py#L107-L123 |
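A self-contained sketch of the per-store matching rule implemented above, with "_" standing in for tock's blank symbol (the real method also offsets by each Store's head position):

```python
BLANK = "_"

def store_matches(pattern, config, offset=0):
    # offset plays the role of s2.position - s1.position and must be >= 0
    if offset < 0:
        return False
    n = len(pattern)
    while n > 0 and offset + n > len(config) and pattern[n - 1] == BLANK:
        n -= 1                  # trailing blanks in the pattern are optional
    return tuple(config[offset:offset + n]) == tuple(pattern[:n])

print(store_matches(("a", "b", BLANK), ("a", "b")))   # True
print(store_matches(("a", "c"), ("a", "b")))          # False
```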
1,870 | fhs/pyhdf | pyhdf/HDF.py | HDF.getfileversion | def getfileversion(self):
"""Get file version info.
Args:
no argument
Returns:
4-element tuple with the following components:
-major version number (int)
-minor version number (int)
-complete library version number (int)
-additional information (string)
C library equivalent : Hgetlibversion
"""
status, major_v, minor_v, release, info = _C.Hgetfileversion(self._id)
_checkErr('getfileversion', status, "cannot get file version")
return major_v, minor_v, release, info | python | def getfileversion(self):
"""Get file version info.
Args:
no argument
Returns:
4-element tuple with the following components:
-major version number (int)
-minor version number (int)
-complete library version number (int)
-additional information (string)
C library equivalent : Hgetlibversion
"""
status, major_v, minor_v, release, info = _C.Hgetfileversion(self._id)
_checkErr('getfileversion', status, "cannot get file version")
return major_v, minor_v, release, info | ['def', 'getfileversion', '(', 'self', ')', ':', 'status', ',', 'major_v', ',', 'minor_v', ',', 'release', ',', 'info', '=', '_C', '.', 'Hgetfileversion', '(', 'self', '.', '_id', ')', '_checkErr', '(', "'getfileversion'", ',', 'status', ',', '"cannot get file version"', ')', 'return', 'major_v', ',', 'minor_v', ',', 'release', ',', 'info'] | Get file version info.
Args:
no argument
Returns:
4-element tuple with the following components:
-major version number (int)
-minor version number (int)
-complete library version number (int)
-additional information (string)
C library equivalent : Hgetlibversion | ['Get', 'file', 'version', 'info', '.'] | train | https://github.com/fhs/pyhdf/blob/dbdc1810a74a38df50dcad81fe903e239d2b388d/pyhdf/HDF.py#L244-L261 |
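Straightforward use: open any HDF4 file (the path is hypothetical) and report the library version info:

```python
from pyhdf.HDF import HDF

hdf = HDF("example.hdf")
major, minor, release, info = hdf.getfileversion()
print("HDF library %d.%d (release %d): %s" % (major, minor, release, info))
hdf.close()
```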
1,871 | tensorflow/hub | tensorflow_hub/resolver.py | atomic_download | def atomic_download(handle,
download_fn,
module_dir,
lock_file_timeout_sec=10 * 60):
"""Returns the path to a Module directory for a given TF-Hub Module handle.
Args:
handle: (string) Location of a TF-Hub Module.
download_fn: Callback function that actually performs download. The callback
receives two arguments, handle and the location of a temporary
directory to download the content into.
module_dir: Directory where to download the module files to.
lock_file_timeout_sec: The amount of time we give the current holder of
the lock to make progress in downloading a module.
If no progress is made, the lock is revoked.
Returns:
A string containing the path to a TF-Hub Module directory.
Raises:
ValueError: if the Module is not found.
"""
lock_file = _lock_filename(module_dir)
task_uid = uuid.uuid4().hex
lock_contents = _lock_file_contents(task_uid)
tmp_dir = _temp_download_dir(module_dir, task_uid)
# Attempt to protect against cases of processes being cancelled with
# KeyboardInterrupt by using a try/finally clause to remove the lock
# and tmp_dir.
try:
while True:
try:
tf_utils.atomic_write_string_to_file(lock_file, lock_contents,
overwrite=False)
# Must test condition again, since another process could have created
# the module and deleted the old lock file since last test.
if tf_v1.gfile.Exists(module_dir):
# Lock file will be deleted in the finally-clause.
return module_dir
break # Proceed to downloading the module.
except tf.errors.OpError:
pass
# Wait for lock file to disappear.
_wait_for_lock_to_disappear(handle, lock_file, lock_file_timeout_sec)
# At this point we either deleted a lock or a lock got removed by the
# owner or another process. Perform one more iteration of the while-loop,
# we would either terminate due tf_v1.gfile.Exists(module_dir) or because
# we would obtain a lock ourselves, or wait again for the lock to
# disappear.
# Lock file acquired.
logging.info("Downloading TF-Hub Module '%s'.", handle)
tf_v1.gfile.MakeDirs(tmp_dir)
download_fn(handle, tmp_dir)
# Write module descriptor to capture information about which module was
# downloaded by whom and when. The file stored at the same level as a
# directory in order to keep the content of the 'model_dir' exactly as it
# was define by the module publisher.
#
# Note: The descriptor is written purely to help the end-user to identify
# which directory belongs to which module. The descriptor is not part of the
# module caching protocol and no code in the TF-Hub library reads its
# content.
_write_module_descriptor_file(handle, module_dir)
try:
tf_v1.gfile.Rename(tmp_dir, module_dir)
logging.info("Downloaded TF-Hub Module '%s'.", handle)
except tf.errors.AlreadyExistsError:
logging.warning("Module already exists in %s", module_dir)
finally:
try:
# Temp directory is owned by the current process, remove it.
tf_v1.gfile.DeleteRecursively(tmp_dir)
except tf.errors.NotFoundError:
pass
try:
contents = tf_utils.read_file_to_string(lock_file)
except tf.errors.NotFoundError:
contents = ""
if contents == lock_contents:
# Lock file exists and is owned by this process.
try:
tf_v1.gfile.Remove(lock_file)
except tf.errors.NotFoundError:
pass
return module_dir | python | def atomic_download(handle,
download_fn,
module_dir,
lock_file_timeout_sec=10 * 60):
"""Returns the path to a Module directory for a given TF-Hub Module handle.
Args:
handle: (string) Location of a TF-Hub Module.
download_fn: Callback function that actually performs download. The callback
receives two arguments, handle and the location of a temporary
directory to download the content into.
module_dir: Directory where to download the module files to.
lock_file_timeout_sec: The amount of time we give the current holder of
the lock to make progress in downloading a module.
If no progress is made, the lock is revoked.
Returns:
A string containing the path to a TF-Hub Module directory.
Raises:
ValueError: if the Module is not found.
"""
lock_file = _lock_filename(module_dir)
task_uid = uuid.uuid4().hex
lock_contents = _lock_file_contents(task_uid)
tmp_dir = _temp_download_dir(module_dir, task_uid)
# Attempt to protect against cases of processes being cancelled with
# KeyboardInterrupt by using a try/finally clause to remove the lock
# and tmp_dir.
try:
while True:
try:
tf_utils.atomic_write_string_to_file(lock_file, lock_contents,
overwrite=False)
# Must test condition again, since another process could have created
# the module and deleted the old lock file since last test.
if tf_v1.gfile.Exists(module_dir):
# Lock file will be deleted in the finally-clause.
return module_dir
break # Proceed to downloading the module.
except tf.errors.OpError:
pass
# Wait for lock file to disappear.
_wait_for_lock_to_disappear(handle, lock_file, lock_file_timeout_sec)
# At this point we either deleted a lock or a lock got removed by the
# owner or another process. Perform one more iteration of the while-loop,
# we would either terminate due to tf_v1.gfile.Exists(module_dir) or because
# we would obtain a lock ourselves, or wait again for the lock to
# disappear.
# Lock file acquired.
logging.info("Downloading TF-Hub Module '%s'.", handle)
tf_v1.gfile.MakeDirs(tmp_dir)
download_fn(handle, tmp_dir)
# Write module descriptor to capture information about which module was
# downloaded by whom and when. The file stored at the same level as a
# directory in order to keep the content of the 'model_dir' exactly as it
# was define by the module publisher.
#
# Note: The descriptor is written purely to help the end-user to identify
# which directory belongs to which module. The descriptor is not part of the
# module caching protocol and no code in the TF-Hub library reads its
# content.
_write_module_descriptor_file(handle, module_dir)
try:
tf_v1.gfile.Rename(tmp_dir, module_dir)
logging.info("Downloaded TF-Hub Module '%s'.", handle)
except tf.errors.AlreadyExistsError:
logging.warning("Module already exists in %s", module_dir)
finally:
try:
# Temp directory is owned by the current process, remove it.
tf_v1.gfile.DeleteRecursively(tmp_dir)
except tf.errors.NotFoundError:
pass
try:
contents = tf_utils.read_file_to_string(lock_file)
except tf.errors.NotFoundError:
contents = ""
if contents == lock_contents:
# Lock file exists and is owned by this process.
try:
tf_v1.gfile.Remove(lock_file)
except tf.errors.NotFoundError:
pass
return module_dir | ['def', 'atomic_download', '(', 'handle', ',', 'download_fn', ',', 'module_dir', ',', 'lock_file_timeout_sec', '=', '10', '*', '60', ')', ':', 'lock_file', '=', '_lock_filename', '(', 'module_dir', ')', 'task_uid', '=', 'uuid', '.', 'uuid4', '(', ')', '.', 'hex', 'lock_contents', '=', '_lock_file_contents', '(', 'task_uid', ')', 'tmp_dir', '=', '_temp_download_dir', '(', 'module_dir', ',', 'task_uid', ')', '# Attempt to protect against cases of processes being cancelled with', '# KeyboardInterrupt by using a try/finally clause to remove the lock', '# and tmp_dir.', 'try', ':', 'while', 'True', ':', 'try', ':', 'tf_utils', '.', 'atomic_write_string_to_file', '(', 'lock_file', ',', 'lock_contents', ',', 'overwrite', '=', 'False', ')', '# Must test condition again, since another process could have created', '# the module and deleted the old lock file since last test.', 'if', 'tf_v1', '.', 'gfile', '.', 'Exists', '(', 'module_dir', ')', ':', '# Lock file will be deleted in the finally-clause.', 'return', 'module_dir', 'break', '# Proceed to downloading the module.', 'except', 'tf', '.', 'errors', '.', 'OpError', ':', 'pass', '# Wait for lock file to disappear.', '_wait_for_lock_to_disappear', '(', 'handle', ',', 'lock_file', ',', 'lock_file_timeout_sec', ')', '# At this point we either deleted a lock or a lock got removed by the', '# owner or another process. Perform one more iteration of the while-loop,', '# we would either terminate due tf_v1.gfile.Exists(module_dir) or because', '# we would obtain a lock ourselves, or wait again for the lock to', '# disappear.', '# Lock file acquired.', 'logging', '.', 'info', '(', '"Downloading TF-Hub Module \'%s\'."', ',', 'handle', ')', 'tf_v1', '.', 'gfile', '.', 'MakeDirs', '(', 'tmp_dir', ')', 'download_fn', '(', 'handle', ',', 'tmp_dir', ')', '# Write module descriptor to capture information about which module was', '# downloaded by whom and when. The file stored at the same level as a', "# directory in order to keep the content of the 'model_dir' exactly as it", '# was define by the module publisher.', '#', '# Note: The descriptor is written purely to help the end-user to identify', '# which directory belongs to which module. The descriptor is not part of the', '# module caching protocol and no code in the TF-Hub library reads its', '# content.', '_write_module_descriptor_file', '(', 'handle', ',', 'module_dir', ')', 'try', ':', 'tf_v1', '.', 'gfile', '.', 'Rename', '(', 'tmp_dir', ',', 'module_dir', ')', 'logging', '.', 'info', '(', '"Downloaded TF-Hub Module \'%s\'."', ',', 'handle', ')', 'except', 'tf', '.', 'errors', '.', 'AlreadyExistsError', ':', 'logging', '.', 'warning', '(', '"Module already exists in %s"', ',', 'module_dir', ')', 'finally', ':', 'try', ':', '# Temp directory is owned by the current process, remove it.', 'tf_v1', '.', 'gfile', '.', 'DeleteRecursively', '(', 'tmp_dir', ')', 'except', 'tf', '.', 'errors', '.', 'NotFoundError', ':', 'pass', 'try', ':', 'contents', '=', 'tf_utils', '.', 'read_file_to_string', '(', 'lock_file', ')', 'except', 'tf', '.', 'errors', '.', 'NotFoundError', ':', 'contents', '=', '""', 'if', 'contents', '==', 'lock_contents', ':', '# Lock file exists and is owned by this process.', 'try', ':', 'tf_v1', '.', 'gfile', '.', 'Remove', '(', 'lock_file', ')', 'except', 'tf', '.', 'errors', '.', 'NotFoundError', ':', 'pass', 'return', 'module_dir'] | Returns the path to a Module directory for a given TF-Hub Module handle.
Args:
handle: (string) Location of a TF-Hub Module.
download_fn: Callback function that actually performs download. The callback
receives two arguments, handle and the location of a temporary
directory to download the content into.
module_dir: Directory where to download the module files to.
lock_file_timeout_sec: The amount of time we give the current holder of
the lock to make progress in downloading a module.
If no progress is made, the lock is revoked.
Returns:
A string containing the path to a TF-Hub Module directory.
Raises:
ValueError: if the Module is not found. | ['Returns', 'the', 'path', 'to', 'a', 'Module', 'directory', 'for', 'a', 'given', 'TF', '-', 'Hub', 'Module', 'handle', '.'] | train | https://github.com/tensorflow/hub/blob/09f45963f6787322967b6fec61459f3ac56fbb27/tensorflow_hub/resolver.py#L345-L434 |
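A hedged sketch of calling atomic_download directly (it is resolver-internal; in TF-Hub the callback fetches and unpacks the real module archive, and both paths below are hypothetical):

```python
import os
import tensorflow as tf
from tensorflow_hub.resolver import atomic_download

def download_fn(handle, tmp_dir):
    # stand-in for the real fetch-and-unpack step
    with tf.io.gfile.GFile(os.path.join(tmp_dir, "saved_model.pb"), "w") as f:
        f.write("placeholder for %s" % handle)

path = atomic_download("https://example.com/module", download_fn,
                       "/tmp/tfhub_modules/abc123")
print("module ready at", path)
```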
1,872 | LedgerHQ/btchip-python | btchip/msqr.py | legendre_symbol | def legendre_symbol(a, p):
""" Compute the Legendre symbol a|p using
Euler's criterion. p is a prime, a is
relatively prime to p (if p divides
a, then a|p = 0)
Returns 1 if a has a square root modulo
p, -1 otherwise.
"""
ls = pow(a, (p - 1) // 2, p)
return -1 if ls == p - 1 else ls | python | def legendre_symbol(a, p):
""" Compute the Legendre symbol a|p using
Euler's criterion. p is a prime, a is
relatively prime to p (if p divides
a, then a|p = 0)
Returns 1 if a has a square root modulo
p, -1 otherwise.
"""
ls = pow(a, (p - 1) // 2, p)
return -1 if ls == p - 1 else ls | ['def', 'legendre_symbol', '(', 'a', ',', 'p', ')', ':', 'ls', '=', 'pow', '(', 'a', ',', '(', 'p', '-', '1', ')', '//', '2', ',', 'p', ')', 'return', '-', '1', 'if', 'ls', '==', 'p', '-', '1', 'else', 'ls'] | Compute the Legendre symbol a|p using
Euler's criterion. p is a prime, a is
relatively prime to p (if p divides
a, then a|p = 0)
Returns 1 if a has a square root modulo
p, -1 otherwise. | ['Compute', 'the', 'Legendre', 'symbol', 'a|p', 'using', 'Euler', 's', 'criterion', '.', 'p', 'is', 'a', 'prime', 'a', 'is', 'relatively', 'prime', 'to', 'p', '(', 'if', 'p', 'divides', 'a', 'then', 'a|p', '=', '0', ')'] | train | https://github.com/LedgerHQ/btchip-python/blob/fe82d7f5638169f583a445b8e200fd1c9f3ea218/btchip/msqr.py#L84-L94 |
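A quick sanity check for the function above with p = 7, whose quadratic residues are {1, 2, 4}; note that the exponent (p - 1) // 2 must stay an integer for three-argument pow():

```python
from btchip.msqr import legendre_symbol

for a in range(1, 7):
    print(a, legendre_symbol(a, 7))
# expected: 1 -> 1, 2 -> 1, 3 -> -1, 4 -> 1, 5 -> -1, 6 -> -1
```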
1,873 | roboogle/gtkmvc3 | gtkmvco/gtkmvc3/support/metaclasses.py | ObservablePropertyMeta.get_getter | def get_getter(cls, prop_name, # @NoSelf
user_getter=None, getter_takes_name=False):
"""This implementation returns the PROP_NAME value if there
exists such a property. Otherwise there must exist a logical
getter (user_getter) which the value is taken from. If no
getter is found, None is returned (i.e. the property cannot
be created)"""
has_prop_variable = cls.has_prop_attribute(prop_name)
# WARNING! Deprecated
has_specific_getter = hasattr(cls, GET_PROP_NAME % \
{'prop_name' : prop_name})
has_general_getter = hasattr(cls, GET_GENERIC_NAME)
if not (has_prop_variable or
has_specific_getter or
has_general_getter or
user_getter):
return None
# when property variable is given, it overrides all the getters
if has_prop_variable:
if has_specific_getter or user_getter:
logger.warning("In class %s.%s ignoring custom logical getter "
"for property '%s' as a corresponding "
"attribute exists" \
% (cls.__module__, cls.__name__, prop_name))
# user_getter is ignored here, so it has not to be passed up
user_getter = None
getter_takes_name = False
else:
# uses logical getter. Sees if the getter needs to receive the
# property name (i.e. if the getter is used for multiple
# properties)
if user_getter:
pass
else:
if has_specific_getter:
_getter = getattr(cls, GET_PROP_NAME % \
{'prop_name' : prop_name})
_deps = type(cls)._get_old_style_getter_deps(cls,
prop_name,
_getter)
# this is done to delay getter call, to have
# bound methods to allow overloading of getter in
# derived classes
def __getter(self, deps=_deps):
_getter = getattr(self, GET_PROP_NAME % \
{'prop_name' : prop_name})
return _getter()
user_getter = __getter
getter_takes_name = False
else:
assert has_general_getter
_getter = getattr(cls, GET_GENERIC_NAME)
_deps = type(cls)._get_old_style_getter_deps(cls,
prop_name,
_getter)
def __getter(self, name, deps=_deps):
_getter = getattr(self, GET_GENERIC_NAME)
return _getter(name)
user_getter = __getter
getter_takes_name = True
return PropertyMeta.get_getter(cls, prop_name, user_getter,
getter_takes_name) | python | def get_getter(cls, prop_name, # @NoSelf
user_getter=None, getter_takes_name=False):
"""This implementation returns the PROP_NAME value if there
exists such a property. Otherwise there must exist a logical
getter (user_getter) which the value is taken from. If no
getter is found, None is returned (i.e. the property cannot
be created)"""
has_prop_variable = cls.has_prop_attribute(prop_name)
# WARNING! Deprecated
has_specific_getter = hasattr(cls, GET_PROP_NAME % \
{'prop_name' : prop_name})
has_general_getter = hasattr(cls, GET_GENERIC_NAME)
if not (has_prop_variable or
has_specific_getter or
has_general_getter or
user_getter):
return None
# when property variable is given, it overrides all the getters
if has_prop_variable:
if has_specific_getter or user_getter:
logger.warning("In class %s.%s ignoring custom logical getter "
"for property '%s' as a corresponding "
"attribute exists" \
% (cls.__module__, cls.__name__, prop_name))
# user_getter is ignored here, so it has not to be passed up
user_getter = None
getter_takes_name = False
else:
# uses logical getter. Sees if the getter needs to receive the
# property name (i.e. if the getter is used for multiple
# properties)
if user_getter:
pass
else:
if has_specific_getter:
_getter = getattr(cls, GET_PROP_NAME % \
{'prop_name' : prop_name})
_deps = type(cls)._get_old_style_getter_deps(cls,
prop_name,
_getter)
# this is done to delay getter call, to have
# bound methods to allow overloading of getter in
# derived classes
def __getter(self, deps=_deps):
_getter = getattr(self, GET_PROP_NAME % \
{'prop_name' : prop_name})
return _getter()
user_getter = __getter
getter_takes_name = False
else:
assert has_general_getter
_getter = getattr(cls, GET_GENERIC_NAME)
_deps = type(cls)._get_old_style_getter_deps(cls,
prop_name,
_getter)
def __getter(self, name, deps=_deps):
_getter = getattr(self, GET_GENERIC_NAME)
return _getter(name)
user_getter = __getter
getter_takes_name = True
return PropertyMeta.get_getter(cls, prop_name, user_getter,
getter_takes_name) | ['def', 'get_getter', '(', 'cls', ',', 'prop_name', ',', '# @NoSelf', 'user_getter', '=', 'None', ',', 'getter_takes_name', '=', 'False', ')', ':', 'has_prop_variable', '=', 'cls', '.', 'has_prop_attribute', '(', 'prop_name', ')', '# WARNING! Deprecated', 'has_specific_getter', '=', 'hasattr', '(', 'cls', ',', 'GET_PROP_NAME', '%', '{', "'prop_name'", ':', 'prop_name', '}', ')', 'has_general_getter', '=', 'hasattr', '(', 'cls', ',', 'GET_GENERIC_NAME', ')', 'if', 'not', '(', 'has_prop_variable', 'or', 'has_specific_getter', 'or', 'has_general_getter', 'or', 'user_getter', ')', ':', 'return', 'None', '# when property variable is given, it overrides all the getters', 'if', 'has_prop_variable', ':', 'if', 'has_specific_getter', 'or', 'user_getter', ':', 'logger', '.', 'warning', '(', '"In class %s.%s ignoring custom logical getter "', '"for property \'%s\' as a corresponding "', '"attribute exists"', '%', '(', 'cls', '.', '__module__', ',', 'cls', '.', '__name__', ',', 'prop_name', ')', ')', '# user_getter is ignored here, so it has not to be passed up', 'user_getter', '=', 'None', 'getter_takes_name', '=', 'False', 'else', ':', '# uses logical getter. Sees if the getter needs to receive the', '# property name (i.e. if the getter is used for multiple', '# properties)', 'if', 'user_getter', ':', 'pass', 'else', ':', 'if', 'has_specific_getter', ':', '_getter', '=', 'getattr', '(', 'cls', ',', 'GET_PROP_NAME', '%', '{', "'prop_name'", ':', 'prop_name', '}', ')', '_deps', '=', 'type', '(', 'cls', ')', '.', '_get_old_style_getter_deps', '(', 'cls', ',', 'prop_name', ',', '_getter', ')', '# this is done to delay getter call, to have', '# bound methods to allow overloading of getter in', '# derived classes', 'def', '__getter', '(', 'self', ',', 'deps', '=', '_deps', ')', ':', '_getter', '=', 'getattr', '(', 'self', ',', 'GET_PROP_NAME', '%', '{', "'prop_name'", ':', 'prop_name', '}', ')', 'return', '_getter', '(', ')', 'user_getter', '=', '__getter', 'getter_takes_name', '=', 'False', 'else', ':', 'assert', 'has_general_getter', '_getter', '=', 'getattr', '(', 'cls', ',', 'GET_GENERIC_NAME', ')', '_deps', '=', 'type', '(', 'cls', ')', '.', '_get_old_style_getter_deps', '(', 'cls', ',', 'prop_name', ',', '_getter', ')', 'def', '__getter', '(', 'self', ',', 'name', ',', 'deps', '=', '_deps', ')', ':', '_getter', '=', 'getattr', '(', 'self', ',', 'GET_GENERIC_NAME', ')', 'return', '_getter', '(', 'name', ')', 'user_getter', '=', '__getter', 'getter_takes_name', '=', 'True', 'return', 'PropertyMeta', '.', 'get_getter', '(', 'cls', ',', 'prop_name', ',', 'user_getter', ',', 'getter_takes_name', ')'] | This implementation returns the PROP_NAME value if there
exists such a property. Otherwise there must exist a logical
getter (user_getter) which the value is taken from. If no
getter is found, None is returned (i.e. the property cannot
be created) | ['This', 'implementation', 'returns', 'the', 'PROP_NAME', 'value', 'if', 'there', 'exists', 'such', 'property', '.', 'Otherwise', 'there', 'must', 'exist', 'a', 'logical', 'getter', '(', 'user_getter', ')', 'which', 'the', 'value', 'is', 'taken', 'from', '.', 'If', 'no', 'getter', 'is', 'found', 'None', 'is', 'returned', '(', 'i', '.', 'e', '.', 'the', 'property', 'cannot', 'be', 'created', ')'] | train | https://github.com/roboogle/gtkmvc3/blob/63405fd8d2056be26af49103b13a8d5e57fe4dff/gtkmvco/gtkmvc3/support/metaclasses.py#L555-L626 |
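This method is metaclass plumbing; its visible effect is the precedence between a concrete attribute and a logical getter. A hedged sketch using gtkmvc3's public Model API (assumes a working PyGObject environment; names are illustrative):

```python
from gtkmvc3 import Model

class MyModel(Model):
    __observables__ = ('stored', 'computed')
    stored = 42                  # concrete attribute: logical getters are ignored

    @Model.getter
    def computed(self):          # logical property: the value comes from here
        return "derived value"

m = MyModel()
print(m.stored, m.computed)      # 42 derived value
```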
1,874 | RaRe-Technologies/smart_open | smart_open/http.py | open | def open(uri, mode, kerberos=False, user=None, password=None):
"""Implement streamed reader from a web site.
Supports Kerberos and Basic HTTP authentication.
Parameters
----------
uri: str
The URL to open.
mode: str
The mode to open with.
kerberos: boolean, optional
If True, will attempt to use the local Kerberos credentials
user: str, optional
The username for authenticating over HTTP
password: str, optional
The password for authenticating over HTTP
Note
----
If neither kerberos nor (user, password) is set, will connect unauthenticated.
"""
if mode == 'rb':
return BufferedInputBase(uri, mode, kerberos=kerberos, user=user, password=password)
else:
raise NotImplementedError('http support for mode %r not implemented' % mode) | python | def open(uri, mode, kerberos=False, user=None, password=None):
"""Implement streamed reader from a web site.
Supports Kerberos and Basic HTTP authentication.
Parameters
----------
uri: str
The URL to open.
mode: str
The mode to open with.
kerberos: boolean, optional
If True, will attempt to use the local Kerberos credentials
user: str, optional
The username for authenticating over HTTP
password: str, optional
The password for authenticating over HTTP
Note
----
If neither kerberos nor (user, password) is set, will connect unauthenticated.
"""
if mode == 'rb':
return BufferedInputBase(uri, mode, kerberos=kerberos, user=user, password=password)
else:
raise NotImplementedError('http support for mode %r not implemented' % mode) | ['def', 'open', '(', 'uri', ',', 'mode', ',', 'kerberos', '=', 'False', ',', 'user', '=', 'None', ',', 'password', '=', 'None', ')', ':', 'if', 'mode', '==', "'rb'", ':', 'return', 'BufferedInputBase', '(', 'uri', ',', 'mode', ',', 'kerberos', '=', 'kerberos', ',', 'user', '=', 'user', ',', 'password', '=', 'password', ')', 'else', ':', 'raise', 'NotImplementedError', '(', "'http support for mode %r not implemented'", '%', 'mode', ')'] | Implement streamed reader from a web site.
Supports Kerberos and Basic HTTP authentication.
Parameters
----------
uri: str
The URL to open.
mode: str
The mode to open with.
kerberos: boolean, optional
If True, will attempt to use the local Kerberos credentials
user: str, optional
The username for authenticating over HTTP
password: str, optional
The password for authenticating over HTTP
Note
----
If neither kerberos nor (user, password) is set, will connect unauthenticated. | ['Implement', 'streamed', 'reader', 'from', 'a', 'web', 'site', '.'] | train | https://github.com/RaRe-Technologies/smart_open/blob/2dc8d60f223fc7b00a2000c56362a7bd6cd0850e/smart_open/http.py#L25-L51
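A minimal sketch of reading over HTTP through this module (the URL is hypothetical; in normal use smart_open.open() dispatches here for http/https URIs):

```python
from smart_open.http import open as http_open

with http_open("https://example.com/data.txt", "rb") as fin:
    head = fin.read(64)          # unauthenticated; pass user=/password= if needed
print(head)
```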
1,875 | jorgenschaefer/elpy | elpy/jedibackend.py | linecol_to_pos | def linecol_to_pos(text, line, col):
"""Return the offset of this line and column in text.
Lines are one-based, columns zero-based.
This is how Jedi wants it. Don't ask me why.
"""
nth_newline_offset = 0
for i in range(line - 1):
new_offset = text.find("\n", nth_newline_offset)
if new_offset < 0:
raise ValueError("Text does not have {0} lines."
.format(line))
nth_newline_offset = new_offset + 1
offset = nth_newline_offset + col
if offset > len(text):
raise ValueError("Line {0} column {1} is not within the text"
.format(line, col))
return offset | python | def linecol_to_pos(text, line, col):
"""Return the offset of this line and column in text.
Lines are one-based, columns zero-based.
This is how Jedi wants it. Don't ask me why.
"""
nth_newline_offset = 0
for i in range(line - 1):
new_offset = text.find("\n", nth_newline_offset)
if new_offset < 0:
raise ValueError("Text does not have {0} lines."
.format(line))
nth_newline_offset = new_offset + 1
offset = nth_newline_offset + col
if offset > len(text):
raise ValueError("Line {0} column {1} is not within the text"
.format(line, col))
return offset | ['def', 'linecol_to_pos', '(', 'text', ',', 'line', ',', 'col', ')', ':', 'nth_newline_offset', '=', '0', 'for', 'i', 'in', 'range', '(', 'line', '-', '1', ')', ':', 'new_offset', '=', 'text', '.', 'find', '(', '"\\n"', ',', 'nth_newline_offset', ')', 'if', 'new_offset', '<', '0', ':', 'raise', 'ValueError', '(', '"Text does not have {0} lines."', '.', 'format', '(', 'line', ')', ')', 'nth_newline_offset', '=', 'new_offset', '+', '1', 'offset', '=', 'nth_newline_offset', '+', 'col', 'if', 'offset', '>', 'len', '(', 'text', ')', ':', 'raise', 'ValueError', '(', '"Line {0} column {1} is not within the text"', '.', 'format', '(', 'line', ',', 'col', ')', ')', 'return', 'offset'] | Return the offset of this line and column in text.
Lines are one-based, columns zero-based.
This is how Jedi wants it. Don't ask me why. | ['Return', 'the', 'offset', 'of', 'this', 'line', 'and', 'column', 'in', 'text', '.'] | train | https://github.com/jorgenschaefer/elpy/blob/ffd982f829b11e53f2be187c7b770423341f29bc/elpy/jedibackend.py#L286-L305 |
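A quick worked example of the offset arithmetic above (one-based line, zero-based column):

from elpy.jedibackend import linecol_to_pos  # import path per the row above

text = "abc\ndef\n"
# Line 2 starts after "abc\n" (offset 4); column 1 lands on 'e' at offset 5.
assert linecol_to_pos(text, 2, 1) == 5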
1,876 | gabrielfalcao/dominic | dominic/xpath/expr.py | Function.function | def function(minargs, maxargs, implicit=False, first=False, convert=None):
"""Function decorator.
minargs -- Minimum number of arguments taken by the function.
maxargs -- Maximum number of arguments taken by the function.
implicit -- True for functions which operate on a nodeset consisting
of the current context node when passed no argument.
(e.g., string() and number().)
convert -- When non-None, a function used to filter function arguments.
"""
def decorator(f):
def new_f(self, node, pos, size, context):
if implicit and len(self.args) == 0:
args = [[node]]
else:
args = [x.evaluate(node, pos, size, context)
for x in self.args]
if first:
args[0] = nodeset(args[0])
if len(args[0]) > 0:
args[0] = args[0][0]
else:
args[0] = None
if convert is not None:
args = [convert(x) for x in args]
return f(self, node, pos, size, context, *args)
new_f.minargs = minargs
new_f.maxargs = maxargs
new_f.__name__ = f.__name__
new_f.__doc__ = f.__doc__
return new_f
return decorator | python | def function(minargs, maxargs, implicit=False, first=False, convert=None):
"""Function decorator.
minargs -- Minimum number of arguments taken by the function.
maxargs -- Maximum number of arguments taken by the function.
implicit -- True for functions which operate on a nodeset consisting
of the current context node when passed no argument.
(e.g., string() and number().)
convert -- When non-None, a function used to filter function arguments.
"""
def decorator(f):
def new_f(self, node, pos, size, context):
if implicit and len(self.args) == 0:
args = [[node]]
else:
args = [x.evaluate(node, pos, size, context)
for x in self.args]
if first:
args[0] = nodeset(args[0])
if len(args[0]) > 0:
args[0] = args[0][0]
else:
args[0] = None
if convert is not None:
args = [convert(x) for x in args]
return f(self, node, pos, size, context, *args)
new_f.minargs = minargs
new_f.maxargs = maxargs
new_f.__name__ = f.__name__
new_f.__doc__ = f.__doc__
return new_f
return decorator | ['def', 'function', '(', 'minargs', ',', 'maxargs', ',', 'implicit', '=', 'False', ',', 'first', '=', 'False', ',', 'convert', '=', 'None', ')', ':', 'def', 'decorator', '(', 'f', ')', ':', 'def', 'new_f', '(', 'self', ',', 'node', ',', 'pos', ',', 'size', ',', 'context', ')', ':', 'if', 'implicit', 'and', 'len', '(', 'self', '.', 'args', ')', '==', '0', ':', 'args', '=', '[', '[', 'node', ']', ']', 'else', ':', 'args', '=', '[', 'x', '.', 'evaluate', '(', 'node', ',', 'pos', ',', 'size', ',', 'context', ')', 'for', 'x', 'in', 'self', '.', 'args', ']', 'if', 'first', ':', 'args', '[', '0', ']', '=', 'nodeset', '(', 'args', '[', '0', ']', ')', 'if', 'len', '(', 'args', '[', '0', ']', ')', '>', '0', ':', 'args', '[', '0', ']', '=', 'args', '[', '0', ']', '[', '0', ']', 'else', ':', 'args', '[', '0', ']', '=', 'None', 'if', 'convert', 'is', 'not', 'None', ':', 'args', '=', '[', 'convert', '(', 'x', ')', 'for', 'x', 'in', 'args', ']', 'return', 'f', '(', 'self', ',', 'node', ',', 'pos', ',', 'size', ',', 'context', ',', '*', 'args', ')', 'new_f', '.', 'minargs', '=', 'minargs', 'new_f', '.', 'maxargs', '=', 'maxargs', 'new_f', '.', '__name__', '=', 'f', '.', '__name__', 'new_f', '.', '__doc__', '=', 'f', '.', '__doc__', 'return', 'new_f', 'return', 'decorator'] | Function decorator.
minargs -- Minimum number of arguments taken by the function.
maxargs -- Maximum number of arguments taken by the function.
implicit -- True for functions which operate on a nodeset consisting
of the current context node when passed no argument.
(e.g., string() and number().)
convert -- When non-None, a function used to filter function arguments. | ['Function', 'decorator', '.'] | train | https://github.com/gabrielfalcao/dominic/blob/a42f418fc288f3b70cb95847b405eaf7b83bb3a0/dominic/xpath/expr.py#L344-L376 |
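A hedged sketch of declaring an XPath function with this decorator; the class and method below are illustrative stand-ins, and the decorator plus the module's nodeset() helper are assumed to be in scope:

class EchoString:
    # Mirrors the shape the decorator expects: an expression node with .args.
    def __init__(self, args):
        self.args = args

    @function(0, 1, implicit=True, first=True)
    def evaluate(self, node, pos, size, context, arg):
        # With implicit=True and no args, arg defaults to the context node;
        # with first=True it is reduced to a single node (or None).
        return str(arg)

# EchoString([]).evaluate(some_node, 1, 1, {}) would return str(some_node).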
1,877 | MolSSI-BSE/basis_set_exchange | basis_set_exchange/curate/readers/turbomole.py | read_turbomole | def read_turbomole(basis_lines, fname):
'''Reads turbomole-formatted file data and converts it to a dictionary with the
usual BSE fields
Note that the turbomole format does not store all the fields we
have, so some fields are left blank
'''
skipchars = '*#$'
basis_lines = [l for l in basis_lines if l and not l[0] in skipchars]
bs_data = create_skel('component')
i = 0
while i < len(basis_lines):
line = basis_lines[i]
elementsym = line.split()[0]
element_Z = lut.element_Z_from_sym(elementsym)
element_Z = str(element_Z)
if not element_Z in bs_data['elements']:
bs_data['elements'][element_Z] = {}
element_data = bs_data['elements'][element_Z]
if "ecp" in line.lower():
if not 'ecp_potentials' in element_data:
element_data['ecp_potentials'] = []
i += 1
line = basis_lines[i]
lsplt = line.split('=')
maxam = int(lsplt[2])
n_elec = int(lsplt[1].split()[0])
amlist = [maxam]
amlist.extend(list(range(0, maxam)))
i += 1
for shell_am in amlist:
shell_am2 = lut.amchar_to_int(basis_lines[i][0])[0]
if shell_am2 != shell_am:
raise RuntimeError("AM not in expected order?")
i += 1
ecp_shell = {
'ecp_type': 'scalar_ecp',
'angular_momentum': [shell_am],
}
ecp_exponents = []
ecp_rexponents = []
ecp_coefficients = []
while i < len(basis_lines) and basis_lines[i][0].isalpha() is False:
lsplt = basis_lines[i].split()
ecp_exponents.append(lsplt[2])
ecp_rexponents.append(int(lsplt[1]))
ecp_coefficients.append(lsplt[0])
i += 1
ecp_shell['r_exponents'] = ecp_rexponents
ecp_shell['gaussian_exponents'] = ecp_exponents
ecp_shell['coefficients'] = [ecp_coefficients]
element_data['ecp_potentials'].append(ecp_shell)
element_data['ecp_electrons'] = n_elec
else:
if not 'electron_shells' in element_data:
element_data['electron_shells'] = []
i += 1
while i < len(basis_lines) and basis_lines[i][0].isalpha() == False:
lsplt = basis_lines[i].split()
shell_am = lut.amchar_to_int(lsplt[1])
nprim = int(lsplt[0])
if max(shell_am) <= 1:
func_type = 'gto'
else:
func_type = 'gto_spherical'
shell = {
'function_type': func_type,
'region': '',
'angular_momentum': shell_am
}
exponents = []
coefficients = []
i += 1
for j in range(nprim):
line = basis_lines[i].replace('D', 'E')
line = line.replace('d', 'E')
lsplt = line.split()
exponents.append(lsplt[0])
coefficients.append(lsplt[1:])
i += 1
shell['exponents'] = exponents
# We need to transpose the coefficient matrix
# (we store a matrix with primitives being the column index and
# general contraction being the row index)
shell['coefficients'] = list(map(list, zip(*coefficients)))
element_data['electron_shells'].append(shell)
return bs_data | python | def read_turbomole(basis_lines, fname):
'''Reads turbomole-formatted file data and converts it to a dictionary with the
usual BSE fields
Note that the turbomole format does not store all the fields we
have, so some fields are left blank
'''
skipchars = '*#$'
basis_lines = [l for l in basis_lines if l and not l[0] in skipchars]
bs_data = create_skel('component')
i = 0
while i < len(basis_lines):
line = basis_lines[i]
elementsym = line.split()[0]
element_Z = lut.element_Z_from_sym(elementsym)
element_Z = str(element_Z)
if not element_Z in bs_data['elements']:
bs_data['elements'][element_Z] = {}
element_data = bs_data['elements'][element_Z]
if "ecp" in line.lower():
if not 'ecp_potentials' in element_data:
element_data['ecp_potentials'] = []
i += 1
line = basis_lines[i]
lsplt = line.split('=')
maxam = int(lsplt[2])
n_elec = int(lsplt[1].split()[0])
amlist = [maxam]
amlist.extend(list(range(0, maxam)))
i += 1
for shell_am in amlist:
shell_am2 = lut.amchar_to_int(basis_lines[i][0])[0]
if shell_am2 != shell_am:
raise RuntimeError("AM not in expected order?")
i += 1
ecp_shell = {
'ecp_type': 'scalar_ecp',
'angular_momentum': [shell_am],
}
ecp_exponents = []
ecp_rexponents = []
ecp_coefficients = []
while i < len(basis_lines) and basis_lines[i][0].isalpha() is False:
lsplt = basis_lines[i].split()
ecp_exponents.append(lsplt[2])
ecp_rexponents.append(int(lsplt[1]))
ecp_coefficients.append(lsplt[0])
i += 1
ecp_shell['r_exponents'] = ecp_rexponents
ecp_shell['gaussian_exponents'] = ecp_exponents
ecp_shell['coefficients'] = [ecp_coefficients]
element_data['ecp_potentials'].append(ecp_shell)
element_data['ecp_electrons'] = n_elec
else:
if not 'electron_shells' in element_data:
element_data['electron_shells'] = []
i += 1
while i < len(basis_lines) and basis_lines[i][0].isalpha() == False:
lsplt = basis_lines[i].split()
shell_am = lut.amchar_to_int(lsplt[1])
nprim = int(lsplt[0])
if max(shell_am) <= 1:
func_type = 'gto'
else:
func_type = 'gto_spherical'
shell = {
'function_type': func_type,
'region': '',
'angular_momentum': shell_am
}
exponents = []
coefficients = []
i += 1
for j in range(nprim):
line = basis_lines[i].replace('D', 'E')
line = line.replace('d', 'E')
lsplt = line.split()
exponents.append(lsplt[0])
coefficients.append(lsplt[1:])
i += 1
shell['exponents'] = exponents
# We need to transpose the coefficient matrix
# (we store a matrix with primitives being the column index and
# general contraction being the row index)
shell['coefficients'] = list(map(list, zip(*coefficients)))
element_data['electron_shells'].append(shell)
return bs_data | ['def', 'read_turbomole', '(', 'basis_lines', ',', 'fname', ')', ':', 'skipchars', '=', "'*#$'", 'basis_lines', '=', '[', 'l', 'for', 'l', 'in', 'basis_lines', 'if', 'l', 'and', 'not', 'l', '[', '0', ']', 'in', 'skipchars', ']', 'bs_data', '=', 'create_skel', '(', "'component'", ')', 'i', '=', '0', 'while', 'i', '<', 'len', '(', 'basis_lines', ')', ':', 'line', '=', 'basis_lines', '[', 'i', ']', 'elementsym', '=', 'line', '.', 'split', '(', ')', '[', '0', ']', 'element_Z', '=', 'lut', '.', 'element_Z_from_sym', '(', 'elementsym', ')', 'element_Z', '=', 'str', '(', 'element_Z', ')', 'if', 'not', 'element_Z', 'in', 'bs_data', '[', "'elements'", ']', ':', 'bs_data', '[', "'elements'", ']', '[', 'element_Z', ']', '=', '{', '}', 'element_data', '=', 'bs_data', '[', "'elements'", ']', '[', 'element_Z', ']', 'if', '"ecp"', 'in', 'line', '.', 'lower', '(', ')', ':', 'if', 'not', "'ecp_potentials'", 'in', 'element_data', ':', 'element_data', '[', "'ecp_potentials'", ']', '=', '[', ']', 'i', '+=', '1', 'line', '=', 'basis_lines', '[', 'i', ']', 'lsplt', '=', 'line', '.', 'split', '(', "'='", ')', 'maxam', '=', 'int', '(', 'lsplt', '[', '2', ']', ')', 'n_elec', '=', 'int', '(', 'lsplt', '[', '1', ']', '.', 'split', '(', ')', '[', '0', ']', ')', 'amlist', '=', '[', 'maxam', ']', 'amlist', '.', 'extend', '(', 'list', '(', 'range', '(', '0', ',', 'maxam', ')', ')', ')', 'i', '+=', '1', 'for', 'shell_am', 'in', 'amlist', ':', 'shell_am2', '=', 'lut', '.', 'amchar_to_int', '(', 'basis_lines', '[', 'i', ']', '[', '0', ']', ')', '[', '0', ']', 'if', 'shell_am2', '!=', 'shell_am', ':', 'raise', 'RuntimeError', '(', '"AM not in expected order?"', ')', 'i', '+=', '1', 'ecp_shell', '=', '{', "'ecp_type'", ':', "'scalar_ecp'", ',', "'angular_momentum'", ':', '[', 'shell_am', ']', ',', '}', 'ecp_exponents', '=', '[', ']', 'ecp_rexponents', '=', '[', ']', 'ecp_coefficients', '=', '[', ']', 'while', 'i', '<', 'len', '(', 'basis_lines', ')', 'and', 'basis_lines', '[', 'i', ']', '[', '0', ']', '.', 'isalpha', '(', ')', 'is', 'False', ':', 'lsplt', '=', 'basis_lines', '[', 'i', ']', '.', 'split', '(', ')', 'ecp_exponents', '.', 'append', '(', 'lsplt', '[', '2', ']', ')', 'ecp_rexponents', '.', 'append', '(', 'int', '(', 'lsplt', '[', '1', ']', ')', ')', 'ecp_coefficients', '.', 'append', '(', 'lsplt', '[', '0', ']', ')', 'i', '+=', '1', 'ecp_shell', '[', "'r_exponents'", ']', '=', 'ecp_rexponents', 'ecp_shell', '[', "'gaussian_exponents'", ']', '=', 'ecp_exponents', 'ecp_shell', '[', "'coefficients'", ']', '=', '[', 'ecp_coefficients', ']', 'element_data', '[', "'ecp_potentials'", ']', '.', 'append', '(', 'ecp_shell', ')', 'element_data', '[', "'ecp_electrons'", ']', '=', 'n_elec', 'else', ':', 'if', 'not', "'electron_shells'", 'in', 'element_data', ':', 'element_data', '[', "'electron_shells'", ']', '=', '[', ']', 'i', '+=', '1', 'while', 'i', '<', 'len', '(', 'basis_lines', ')', 'and', 'basis_lines', '[', 'i', ']', '[', '0', ']', '.', 'isalpha', '(', ')', '==', 'False', ':', 'lsplt', '=', 'basis_lines', '[', 'i', ']', '.', 'split', '(', ')', 'shell_am', '=', 'lut', '.', 'amchar_to_int', '(', 'lsplt', '[', '1', ']', ')', 'nprim', '=', 'int', '(', 'lsplt', '[', '0', ']', ')', 'if', 'max', '(', 'shell_am', ')', '<=', '1', ':', 'func_type', '=', "'gto'", 'else', ':', 'func_type', '=', "'gto_spherical'", 'shell', '=', '{', "'function_type'", ':', 'func_type', ',', "'region'", ':', "''", ',', "'angular_momentum'", ':', 'shell_am', '}', 'exponents', '=', '[', ']', 'coefficients', '=', '[', ']', 'i', '+=', 
'1', 'for', 'j', 'in', 'range', '(', 'nprim', ')', ':', 'line', '=', 'basis_lines', '[', 'i', ']', '.', 'replace', '(', "'D'", ',', "'E'", ')', 'line', '=', 'line', '.', 'replace', '(', "'d'", ',', "'E'", ')', 'lsplt', '=', 'line', '.', 'split', '(', ')', 'exponents', '.', 'append', '(', 'lsplt', '[', '0', ']', ')', 'coefficients', '.', 'append', '(', 'lsplt', '[', '1', ':', ']', ')', 'i', '+=', '1', 'shell', '[', "'exponents'", ']', '=', 'exponents', '# We need to transpose the coefficient matrix', '# (we store a matrix with primitives being the column index and', '# general contraction being the row index)', 'shell', '[', "'coefficients'", ']', '=', 'list', '(', 'map', '(', 'list', ',', 'zip', '(', '*', 'coefficients', ')', ')', ')', 'element_data', '[', "'electron_shells'", ']', '.', 'append', '(', 'shell', ')', 'return', 'bs_data'] | Reads turbomole-formatted file data and converts it to a dictionary with the
usual BSE fields
Note that the turbomole format does not store all the fields we
have, so some fields are left blank | ['Reads', 'turbomole', '-', 'formatted', 'file', 'data', 'and', 'converts', 'it', 'to', 'a', 'dictionary', 'with', 'the', 'usual', 'BSE', 'fields'] | train | https://github.com/MolSSI-BSE/basis_set_exchange/blob/e79110aaeb65f392ed5032420322dee3336948f7/basis_set_exchange/curate/readers/turbomole.py#L5-L117 |
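A small sketch of feeding the reader an in-memory basis-set fragment; the exponent and coefficient values are made up, and lut.element_Z_from_sym is assumed to accept the lowercase symbol:

lines = [
    'h def-SV(P)',
    '   3  s',
    '  13.010701    0.019682158',
    '   1.9622572   0.13796524',
    '   0.44453796  0.47831935',
]
bs = read_turbomole(lines, 'example.tbs')  # fname is unused by the parser
shell = bs['elements']['1']['electron_shells'][0]
print(shell['angular_momentum'], len(shell['exponents']))  # [0] 3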
1,878 | dogoncouch/logdissect | logdissect/output/log.py | OutputModule.write_output | def write_output(self, data, args=None, filename=None, label=None):
"""Write log data to a log file"""
if args:
if not args.outlog:
return 0
if not filename: filename=args.outlog
if not label: label = args.label  # fall back to the parsed CLI option
lastpath = ''
with open(str(filename), 'w') as output_file:
for entry in data['entries']:
if label:
if entry['source_path'] == lastpath:
output_file.write(entry['raw_text'] + '\n')
elif label == 'fname':
output_file.write('======== ' + \
entry['source_path'].split('/')[-1] + \
' >>>>\n' + entry['raw_text'] + '\n')
elif label == 'fpath':
output_file.write('======== ' + \
entry['source_path'] + \
' >>>>\n' + entry['raw_text'] + '\n')
else: output_file.write(entry['raw_text'] + '\n')
lastpath = entry['source_path'] | python | def write_output(self, data, args=None, filename=None, label=None):
"""Write log data to a log file"""
if args:
if not args.outlog:
return 0
if not filename: filename=args.outlog
if not label: label = args.label  # fall back to the parsed CLI option
lastpath = ''
with open(str(filename), 'w') as output_file:
for entry in data['entries']:
if label:
if entry['source_path'] == lastpath:
output_file.write(entry['raw_text'] + '\n')
elif label == 'fname':
output_file.write('======== ' + \
entry['source_path'].split('/')[-1] + \
' >>>>\n' + entry['raw_text'] + '\n')
elif label == 'fpath':
output_file.write('======== ' + \
entry['source_path'] + \
' >>>>\n' + entry['raw_text'] + '\n')
else: output_file.write(entry['raw_text'] + '\n')
lastpath = entry['source_path'] | ['def', 'write_output', '(', 'self', ',', 'data', ',', 'args', '=', 'None', ',', 'filename', '=', 'None', ',', 'label', '=', 'None', ')', ':', 'if', 'args', ':', 'if', 'not', 'args', '.', 'outlog', ':', 'return', '0', 'if', 'not', 'filename', ':', 'filename', '=', 'args', '.', 'outlog', 'lastpath', '=', "''", 'with', 'open', '(', 'str', '(', 'filename', ')', ',', "'w'", ')', 'as', 'output_file', ':', 'for', 'entry', 'in', 'data', '[', "'entries'", ']', ':', 'if', 'args', '.', 'label', ':', 'if', 'entry', '[', "'source_path'", ']', '==', 'lastpath', ':', 'output_file', '.', 'write', '(', 'entry', '[', "'raw_text'", ']', '+', "'\\n'", ')', 'elif', 'args', '.', 'label', '==', "'fname'", ':', 'output_file', '.', 'write', '(', "'======== '", '+', 'entry', '[', "'source_path'", ']', '.', 'split', '(', "'/'", ')', '[', '-', '1', ']', '+', "' >>>>\\n'", '+', 'entry', '[', "'raw_text'", ']', '+', "'\\n'", ')', 'elif', 'args', '.', 'label', '==', "'fpath'", ':', 'output_file', '.', 'write', '(', "'======== '", '+', 'entry', '[', "'source_path'", ']', '+', "' >>>>\\n'", '+', 'entry', '[', "'raw_text'", ']', '+', "'\\n'", ')', 'else', ':', 'output_file', '.', 'write', '(', 'entry', '[', "'raw_text'", ']', '+', "'\\n'", ')', 'lastpath', '=', 'entry', '[', "'source_path'", ']'] | Write log data to a log file | ['Write', 'log', 'data', 'to', 'a', 'log', 'file'] | train | https://github.com/dogoncouch/logdissect/blob/426b50264cbfa9665c86df3781e1e415ba8dbbd3/logdissect/output/log.py#L37-L58 |
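With the label fallback added above, the writer can be driven with plain keywords; a minimal sketch, assuming OutputModule() needs no constructor arguments:

data = {'entries': [
    {'source_path': '/var/log/syslog', 'raw_text': 'Jan 1 00:00:00 host test'},
    {'source_path': '/var/log/auth.log', 'raw_text': 'Jan 1 00:00:01 host sshd ok'},
]}
mod = OutputModule()
mod.write_output(data, filename='out.log', label='fname')  # writes '======== syslog >>>>' headers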
1,879 | brocade/pynos | pynos/versions/ver_7/ver_7_1_0/yang/brocade_vcs.py | brocade_vcs.get_last_config_update_time_for_xpaths_output_last_config_update_time_for_xpaths_last_config_update_time | def get_last_config_update_time_for_xpaths_output_last_config_update_time_for_xpaths_last_config_update_time(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_last_config_update_time_for_xpaths = ET.Element("get_last_config_update_time_for_xpaths")
config = get_last_config_update_time_for_xpaths
output = ET.SubElement(get_last_config_update_time_for_xpaths, "output")
last_config_update_time_for_xpaths = ET.SubElement(output, "last-config-update-time-for-xpaths")
xpath_string_key = ET.SubElement(last_config_update_time_for_xpaths, "xpath-string")
xpath_string_key.text = kwargs.pop('xpath_string')
last_config_update_time = ET.SubElement(last_config_update_time_for_xpaths, "last-config-update-time")
last_config_update_time.text = kwargs.pop('last_config_update_time')
callback = kwargs.pop('callback', self._callback)
return callback(config) | python | def get_last_config_update_time_for_xpaths_output_last_config_update_time_for_xpaths_last_config_update_time(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_last_config_update_time_for_xpaths = ET.Element("get_last_config_update_time_for_xpaths")
config = get_last_config_update_time_for_xpaths
output = ET.SubElement(get_last_config_update_time_for_xpaths, "output")
last_config_update_time_for_xpaths = ET.SubElement(output, "last-config-update-time-for-xpaths")
xpath_string_key = ET.SubElement(last_config_update_time_for_xpaths, "xpath-string")
xpath_string_key.text = kwargs.pop('xpath_string')
last_config_update_time = ET.SubElement(last_config_update_time_for_xpaths, "last-config-update-time")
last_config_update_time.text = kwargs.pop('last_config_update_time')
callback = kwargs.pop('callback', self._callback)
return callback(config) | ['def', 'get_last_config_update_time_for_xpaths_output_last_config_update_time_for_xpaths_last_config_update_time', '(', 'self', ',', '*', '*', 'kwargs', ')', ':', 'config', '=', 'ET', '.', 'Element', '(', '"config"', ')', 'get_last_config_update_time_for_xpaths', '=', 'ET', '.', 'Element', '(', '"get_last_config_update_time_for_xpaths"', ')', 'config', '=', 'get_last_config_update_time_for_xpaths', 'output', '=', 'ET', '.', 'SubElement', '(', 'get_last_config_update_time_for_xpaths', ',', '"output"', ')', 'last_config_update_time_for_xpaths', '=', 'ET', '.', 'SubElement', '(', 'output', ',', '"last-config-update-time-for-xpaths"', ')', 'xpath_string_key', '=', 'ET', '.', 'SubElement', '(', 'last_config_update_time_for_xpaths', ',', '"xpath-string"', ')', 'xpath_string_key', '.', 'text', '=', 'kwargs', '.', 'pop', '(', "'xpath_string'", ')', 'last_config_update_time', '=', 'ET', '.', 'SubElement', '(', 'last_config_update_time_for_xpaths', ',', '"last-config-update-time"', ')', 'last_config_update_time', '.', 'text', '=', 'kwargs', '.', 'pop', '(', "'last_config_update_time'", ')', 'callback', '=', 'kwargs', '.', 'pop', '(', "'callback'", ',', 'self', '.', '_callback', ')', 'return', 'callback', '(', 'config', ')'] | Auto Generated Code | ['Auto', 'Generated', 'Code'] | train | https://github.com/brocade/pynos/blob/bd8a34e98f322de3fc06750827d8bbc3a0c00380/pynos/versions/ver_7/ver_7_1_0/yang/brocade_vcs.py#L716-L730 |
1,880 | Capitains/MyCapytain | MyCapytain/resources/collections/cts.py | XmlCtsCitation.ingest | def ingest(cls, resource, element=None, xpath="ti:citation"):
""" Ingest xml to create a citation
:param resource: XML on which to do xpath
:param element: Element where the citation should be stored
:param xpath: XPath to use to retrieve citation
:return: XmlCtsCitation
"""
# Reuse of find citation
results = resource.xpath(xpath, namespaces=XPATH_NAMESPACES)
if len(results) > 0:
citation = cls(
name=results[0].get("label"),
xpath=results[0].get("xpath"),
scope=results[0].get("scope")
)
if isinstance(element, cls):
element.child = citation
cls.ingest(
resource=results[0],
element=element.child
)
else:
element = citation
cls.ingest(
resource=results[0],
element=element
)
return citation
return None | python | def ingest(cls, resource, element=None, xpath="ti:citation"):
""" Ingest xml to create a citation
:param resource: XML on which to do xpath
:param element: Element where the citation should be stored
:param xpath: XPath to use to retrieve citation
:return: XmlCtsCitation
"""
# Reuse of find citation
results = resource.xpath(xpath, namespaces=XPATH_NAMESPACES)
if len(results) > 0:
citation = cls(
name=results[0].get("label"),
xpath=results[0].get("xpath"),
scope=results[0].get("scope")
)
if isinstance(element, cls):
element.child = citation
cls.ingest(
resource=results[0],
element=element.child
)
else:
element = citation
cls.ingest(
resource=results[0],
element=element
)
return citation
return None | ['def', 'ingest', '(', 'cls', ',', 'resource', ',', 'element', '=', 'None', ',', 'xpath', '=', '"ti:citation"', ')', ':', '# Reuse of find citation', 'results', '=', 'resource', '.', 'xpath', '(', 'xpath', ',', 'namespaces', '=', 'XPATH_NAMESPACES', ')', 'if', 'len', '(', 'results', ')', '>', '0', ':', 'citation', '=', 'cls', '(', 'name', '=', 'results', '[', '0', ']', '.', 'get', '(', '"label"', ')', ',', 'xpath', '=', 'results', '[', '0', ']', '.', 'get', '(', '"xpath"', ')', ',', 'scope', '=', 'results', '[', '0', ']', '.', 'get', '(', '"scope"', ')', ')', 'if', 'isinstance', '(', 'element', ',', 'cls', ')', ':', 'element', '.', 'child', '=', 'citation', 'cls', '.', 'ingest', '(', 'resource', '=', 'results', '[', '0', ']', ',', 'element', '=', 'element', '.', 'child', ')', 'else', ':', 'element', '=', 'citation', 'cls', '.', 'ingest', '(', 'resource', '=', 'results', '[', '0', ']', ',', 'element', '=', 'element', ')', 'return', 'citation', 'return', 'None'] | Ingest xml to create a citation
:param resource: XML on which to do xpath
:param element: Element where the citation should be stored
:param xpath: XPath to use to retrieve citation
:return: XmlCtsCitation | ['Ingest', 'xml', 'to', 'create', 'a', 'citation'] | train | https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/resources/collections/cts.py#L43-L76 |
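A hedged sketch of ingesting a nested citation scheme from a TextInventory fragment; the ti: namespace URI below is an assumption (the module's XPATH_NAMESPACES holds the authoritative mapping), and ingest is assumed to be callable as a classmethod:

from lxml import etree

TI = 'http://chs.harvard.edu/xmlns/cts'  # assumed CTS TextInventory namespace
xml = etree.fromstring(
    '<online xmlns="%s">'
    '<citation label="book" scope="/tei:TEI/tei:text/tei:body" xpath="/tei:div[@n=\'?\']">'
    '<citation label="line" scope="..." xpath="/tei:l[@n=\'?\']"/>'
    '</citation></online>' % TI)
citation = XmlCtsCitation.ingest(xml)
print(citation.name, citation.child.name)  # book line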
1,881 | oceanprotocol/squid-py | squid_py/aquarius/aquarius.py | Aquarius.get_asset_ddo | def get_asset_ddo(self, did):
"""
Retrieve asset ddo for a given did.
:param did: Asset DID string
:return: DDO instance
"""
response = self.requests_session.get(f'{self.url}/{did}').content
if not response:
return {}
try:
parsed_response = json.loads(response)
except TypeError:
parsed_response = None
except ValueError:
raise ValueError(response.decode('UTF-8'))
if parsed_response is None:
return {}
return Asset(dictionary=parsed_response) | python | def get_asset_ddo(self, did):
"""
Retrieve asset ddo for a given did.
:param did: Asset DID string
:return: DDO instance
"""
response = self.requests_session.get(f'{self.url}/{did}').content
if not response:
return {}
try:
parsed_response = json.loads(response)
except TypeError:
parsed_response = None
except ValueError:
raise ValueError(response.decode('UTF-8'))
if parsed_response is None:
return {}
return Asset(dictionary=parsed_response) | ['def', 'get_asset_ddo', '(', 'self', ',', 'did', ')', ':', 'response', '=', 'self', '.', 'requests_session', '.', 'get', '(', "f'{self.url}/{did}'", ')', '.', 'content', 'if', 'not', 'response', ':', 'return', '{', '}', 'try', ':', 'parsed_response', '=', 'json', '.', 'loads', '(', 'response', ')', 'except', 'TypeError', ':', 'parsed_response', '=', 'None', 'except', 'ValueError', ':', 'raise', 'ValueError', '(', 'response', '.', 'decode', '(', "'UTF-8'", ')', ')', 'if', 'parsed_response', 'is', 'None', ':', 'return', '{', '}', 'return', 'Asset', '(', 'dictionary', '=', 'parsed_response', ')'] | Retrieve asset ddo for a given did.
:param did: Asset DID string
:return: DDO instance | ['Retrieve', 'asset', 'ddo', 'for', 'a', 'given', 'did', '.'] | train | https://github.com/oceanprotocol/squid-py/blob/43a5b7431627e4c9ab7382ed9eb8153e96ed4483/squid_py/aquarius/aquarius.py#L79-L97 |
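A usage sketch; the base URL, the DID, and the constructor signature are assumptions here:

aquarius = Aquarius('http://localhost:5000/api/v1/aquarius/assets/ddo')
asset = aquarius.get_asset_ddo('did:op:0123456789abcdef')
print(asset.did if asset else 'DDO not found')  # {} is returned when nothing matches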
1,882 | opengridcc/opengrid | opengrid/library/regression.py | MultiVarLinReg._do_analysis_cross_validation | def _do_analysis_cross_validation(self):
"""
Find the best model (fit) based on cross-validation (leave one out)
"""
assert len(self.df) < 15, "Cross-validation is not implemented if your sample contains more than 15 datapoints"
# initialization: first model is the mean, but compute cv correctly.
errors = []
response_term = [Term([LookupFactor(self.y)])]
model_terms = [Term([])] # empty term is the intercept
model_desc = ModelDesc(response_term, model_terms)
for i in self.df.index:
# make new_fit, compute cross-validation and store error
df_ = self.df.drop(i, axis=0)
fit = fm.ols(model_desc, data=df_).fit()
cross_prediction = self._predict(fit=fit, df=self.df.loc[[i], :])
errors.append(cross_prediction['predicted'] - cross_prediction[self.y])
self._list_of_fits = [fm.ols(model_desc, data=self.df).fit()]
self.list_of_cverrors = [np.mean(np.abs(np.array(errors)))]
# try to improve the model until no improvements can be found
all_model_terms_dict = {x: Term([LookupFactor(x)]) for x in self.list_of_x}
while all_model_terms_dict:
# import pdb;pdb.set_trace()
# try each x in all_exog and overwrite if we find a better one
# at the end of iteration (and not earlier), save the best of the iteration
better_model_found = False
best = dict(fit=self._list_of_fits[-1], cverror=self.list_of_cverrors[-1])
for x, term in all_model_terms_dict.items():
model_desc = ModelDesc(response_term, self._list_of_fits[-1].model.formula.rhs_termlist + [term])
# cross_validation, currently only implemented for monthly data
# compute the mean error for a given formula based on leave-one-out.
errors = []
for i in self.df.index:
# make new_fit, compute cross-validation and store error
df_ = self.df.drop(i, axis=0)
fit = fm.ols(model_desc, data=df_).fit()
cross_prediction = self._predict(fit=fit, df=self.df.loc[[i], :])
errors.append(cross_prediction['predicted'] - cross_prediction[self.y])
cverror = np.mean(np.abs(np.array(errors)))
# compare the model with the current fit
if cverror < best['cverror']:
# better model, keep it
# first, reidentify using all the datapoints
best['fit'] = fm.ols(model_desc, data=self.df).fit()
best['cverror'] = cverror
better_model_found = True
best_x = x
if better_model_found:
self._list_of_fits.append(best['fit'])
self.list_of_cverrors.append(best['cverror'])
else:
# if we did not find a better model, exit
break
# next iteration with the found exog removed
all_model_terms_dict.pop(best_x)
self._fit = self._list_of_fits[-1] | python | def _do_analysis_cross_validation(self):
"""
Find the best model (fit) based on cross-validation (leave one out)
"""
assert len(self.df) < 15, "Cross-validation is not implemented if your sample contains more than 15 datapoints"
# initialization: first model is the mean, but compute cv correctly.
errors = []
response_term = [Term([LookupFactor(self.y)])]
model_terms = [Term([])] # empty term is the intercept
model_desc = ModelDesc(response_term, model_terms)
for i in self.df.index:
# make new_fit, compute cross-validation and store error
df_ = self.df.drop(i, axis=0)
fit = fm.ols(model_desc, data=df_).fit()
cross_prediction = self._predict(fit=fit, df=self.df.loc[[i], :])
errors.append(cross_prediction['predicted'] - cross_prediction[self.y])
self._list_of_fits = [fm.ols(model_desc, data=self.df).fit()]
self.list_of_cverrors = [np.mean(np.abs(np.array(errors)))]
# try to improve the model until no improvements can be found
all_model_terms_dict = {x: Term([LookupFactor(x)]) for x in self.list_of_x}
while all_model_terms_dict:
# import pdb;pdb.set_trace()
# try each x in all_exog and overwrite if we find a better one
# at the end of iteration (and not earlier), save the best of the iteration
better_model_found = False
best = dict(fit=self._list_of_fits[-1], cverror=self.list_of_cverrors[-1])
for x, term in all_model_terms_dict.items():
model_desc = ModelDesc(response_term, self._list_of_fits[-1].model.formula.rhs_termlist + [term])
# cross_validation, currently only implemented for monthly data
# compute the mean error for a given formula based on leave-one-out.
errors = []
for i in self.df.index:
# make new_fit, compute cross-validation and store error
df_ = self.df.drop(i, axis=0)
fit = fm.ols(model_desc, data=df_).fit()
cross_prediction = self._predict(fit=fit, df=self.df.loc[[i], :])
errors.append(cross_prediction['predicted'] - cross_prediction[self.y])
cverror = np.mean(np.abs(np.array(errors)))
# compare the model with the current fit
if cverror < best['cverror']:
# better model, keep it
# first, reidentify using all the datapoints
best['fit'] = fm.ols(model_desc, data=self.df).fit()
best['cverror'] = cverror
better_model_found = True
best_x = x
if better_model_found:
self._list_of_fits.append(best['fit'])
self.list_of_cverrors.append(best['cverror'])
else:
# if we did not find a better model, exit
break
# next iteration with the found exog removed
all_model_terms_dict.pop(best_x)
self._fit = self._list_of_fits[-1] | ['def', '_do_analysis_cross_validation', '(', 'self', ')', ':', 'assert', 'len', '(', 'self', '.', 'df', ')', '<', '15', ',', '"Cross-validation is not implemented if your sample contains more than 15 datapoints"', '# initialization: first model is the mean, but compute cv correctly.', 'errors', '=', '[', ']', 'response_term', '=', '[', 'Term', '(', '[', 'LookupFactor', '(', 'self', '.', 'y', ')', ']', ')', ']', 'model_terms', '=', '[', 'Term', '(', '[', ']', ')', ']', '# empty term is the intercept', 'model_desc', '=', 'ModelDesc', '(', 'response_term', ',', 'model_terms', ')', 'for', 'i', 'in', 'self', '.', 'df', '.', 'index', ':', '# make new_fit, compute cross-validation and store error', 'df_', '=', 'self', '.', 'df', '.', 'drop', '(', 'i', ',', 'axis', '=', '0', ')', 'fit', '=', 'fm', '.', 'ols', '(', 'model_desc', ',', 'data', '=', 'df_', ')', '.', 'fit', '(', ')', 'cross_prediction', '=', 'self', '.', '_predict', '(', 'fit', '=', 'fit', ',', 'df', '=', 'self', '.', 'df', '.', 'loc', '[', '[', 'i', ']', ',', ':', ']', ')', 'errors', '.', 'append', '(', 'cross_prediction', '[', "'predicted'", ']', '-', 'cross_prediction', '[', 'self', '.', 'y', ']', ')', 'self', '.', '_list_of_fits', '=', '[', 'fm', '.', 'ols', '(', 'model_desc', ',', 'data', '=', 'self', '.', 'df', ')', '.', 'fit', '(', ')', ']', 'self', '.', 'list_of_cverrors', '=', '[', 'np', '.', 'mean', '(', 'np', '.', 'abs', '(', 'np', '.', 'array', '(', 'errors', ')', ')', ')', ']', '# try to improve the model until no improvements can be found', 'all_model_terms_dict', '=', '{', 'x', ':', 'Term', '(', '[', 'LookupFactor', '(', 'x', ')', ']', ')', 'for', 'x', 'in', 'self', '.', 'list_of_x', '}', 'while', 'all_model_terms_dict', ':', '# import pdb;pdb.set_trace()', '# try each x in all_exog and overwrite if we find a better one', '# at the end of iteration (and not earlier), save the best of the iteration', 'better_model_found', '=', 'False', 'best', '=', 'dict', '(', 'fit', '=', 'self', '.', '_list_of_fits', '[', '-', '1', ']', ',', 'cverror', '=', 'self', '.', 'list_of_cverrors', '[', '-', '1', ']', ')', 'for', 'x', ',', 'term', 'in', 'all_model_terms_dict', '.', 'items', '(', ')', ':', 'model_desc', '=', 'ModelDesc', '(', 'response_term', ',', 'self', '.', '_list_of_fits', '[', '-', '1', ']', '.', 'model', '.', 'formula', '.', 'rhs_termlist', '+', '[', 'term', ']', ')', '# cross_validation, currently only implemented for monthly data', '# compute the mean error for a given formula based on leave-one-out.', 'errors', '=', '[', ']', 'for', 'i', 'in', 'self', '.', 'df', '.', 'index', ':', '# make new_fit, compute cross-validation and store error', 'df_', '=', 'self', '.', 'df', '.', 'drop', '(', 'i', ',', 'axis', '=', '0', ')', 'fit', '=', 'fm', '.', 'ols', '(', 'model_desc', ',', 'data', '=', 'df_', ')', '.', 'fit', '(', ')', 'cross_prediction', '=', 'self', '.', '_predict', '(', 'fit', '=', 'fit', ',', 'df', '=', 'self', '.', 'df', '.', 'loc', '[', '[', 'i', ']', ',', ':', ']', ')', 'errors', '.', 'append', '(', 'cross_prediction', '[', "'predicted'", ']', '-', 'cross_prediction', '[', 'self', '.', 'y', ']', ')', 'cverror', '=', 'np', '.', 'mean', '(', 'np', '.', 'abs', '(', 'np', '.', 'array', '(', 'errors', ')', ')', ')', '# compare the model with the current fit', 'if', 'cverror', '<', 'best', '[', "'cverror'", ']', ':', '# better model, keep it', '# first, reidentify using all the datapoints', 'best', '[', "'fit'", ']', '=', 'fm', '.', 'ols', '(', 'model_desc', ',', 'data', '=', 
'self', '.', 'df', ')', '.', 'fit', '(', ')', 'best', '[', "'cverror'", ']', '=', 'cverror', 'better_model_found', '=', 'True', 'best_x', '=', 'x', 'if', 'better_model_found', ':', 'self', '.', '_list_of_fits', '.', 'append', '(', 'best', '[', "'fit'", ']', ')', 'self', '.', 'list_of_cverrors', '.', 'append', '(', 'best', '[', "'cverror'", ']', ')', 'else', ':', '# if we did not find a better model, exit', 'break', '# next iteration with the found exog removed', 'all_model_terms_dict', '.', 'pop', '(', 'best_x', ')', 'self', '.', '_fit', '=', 'self', '.', '_list_of_fits', '[', '-', '1', ']'] | Find the best model (fit) based on cross-validation (leave one out) | ['Find', 'the', 'best', 'model', '(', 'fit', ')', 'based', 'on', 'cross', '-', 'validation', '(', 'leave', 'one', 'out', ')'] | train | https://github.com/opengridcc/opengrid/blob/69b8da3c8fcea9300226c45ef0628cd6d4307651/opengrid/library/regression.py#L157-L219 |
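The leave-one-out loop above can be reproduced standalone; this sketch computes the same mean absolute cross-validation error for one fixed formula on synthetic data (statsmodels' formula API, imported as fm in this module, is assumed):

import numpy as np
import pandas as pd
import statsmodels.formula.api as fm

df = pd.DataFrame({'y': [3.1, 4.9, 7.2, 8.8, 11.1],
                   'x': [1.0, 2.0, 3.0, 4.0, 5.0]})
errors = []
for i in df.index:  # refit with one observation held out each time
    fit = fm.ols('y ~ x', data=df.drop(i, axis=0)).fit()
    pred = fit.predict(df.loc[[i], :])
    errors.append(float(pred.iloc[0]) - df.loc[i, 'y'])
print(np.mean(np.abs(np.array(errors))))  # the cverror for this formula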
1,883 | andycasey/sick | sick/models/create.py | create | def create(output_prefix, grid_flux_filename, wavelength_filenames,
clobber=False, grid_flux_filename_format="csv", **kwargs):
"""
Create a new *sick* model from files describing the parameter names, fluxes,
and wavelengths.
"""
if not clobber:
# Check to make sure the output files won't exist already.
output_suffixes = (".yaml", ".pkl", "-wavelengths.memmap",
"-intensities.memmap")
for path in [output_prefix + suffix for suffix in output_suffixes]:
if os.path.exists(path):
raise IOError("output filename {} already exists".format(path))
# Read the grid_flux filename.
# param1 param2 param3 param4 channelname1 channelname2
kwds = kwargs.pop("__grid_flux_filename_kwargs", {})
kwds.update({"format": grid_flux_filename_format})
grid_flux_tbl = Table.read(grid_flux_filename, **kwds)
# Distinguish column names between parameters (real numbers) and filenames
str_columns = \
np.array([_[1].startswith("|S") for _ in grid_flux_tbl.dtype.descr])
# Check the number of channels provided.
if str_columns.sum() != len(wavelength_filenames):
raise ValueError("expected {0} wavelength filenames because {1} has {0}"
" string columns ({2}) but found {3} wavelength filenames".format(
sum(str_columns), grid_flux_filename,
", ".join(np.array(grid_flux_tbl.colnames)[str_columns]),
len(wavelength_filenames)))
# Create a record array of the grid points.
grid_points = \
grid_flux_tbl.as_array()[np.array(grid_flux_tbl.colnames)[~str_columns]]
# To-do: make sure they are all floats.
# Sort the grid points.
grid_indices = grid_points.argsort(order=grid_points.dtype.names)
grid_points = grid_points[grid_indices]
grid_flux_tbl = grid_flux_tbl[grid_indices]
# Check the wavelength filenames.
channel_wavelengths = np.array(map(load_simple_data, wavelength_filenames))
# Sort the channels by starting wavelength.
c_indices = np.argsort([each.min() for each in channel_wavelengths])
channel_names = np.array(grid_flux_tbl.colnames)[str_columns][c_indices]
channel_wavelengths = channel_wavelengths[c_indices]
channel_sizes = [len(_) for _ in channel_wavelengths]
num_pixels = sum(channel_sizes)
# Create the model YAML file.
with open(output_prefix + ".yaml", "w") as fp:
header = "\n".join([
"# Model created on {0}".format(strftime("%Y-%m-%d %H:%M:%S")),
"# Grid parameters: {0}".format(", ".join(grid_points.dtype.names)),
"# Channel names: {0}".format(", ".join(channel_names))
])
fp.write(header + "\n" + yaml.safe_dump({ "model_grid": {
"grid_points": output_prefix + ".pkl",
"intensities": output_prefix + "-intensities.memmap",
"wavelengths": output_prefix + "-wavelengths.memmap"
}}, stream=None, allow_unicode=True, default_flow_style=False))
# Create the pickled model file, with meta data.
metadata = {
"grid_flux_filename": grid_flux_filename,
"wavelength_filenames": wavelength_filenames,
"channel_names": channel_names,
"channel_sizes": channel_sizes,
"channel_resolutions": [float("inf")] * len(channel_names),
"sick_version": sick_version
}
logger.debug("Dumping grid points and metadata to file")
with open(output_prefix + ".pkl", "wb") as fp:
pickle.dump((grid_points, metadata), fp, -1)
# Create the memory-mapped dispersion file.
logger.debug("Creating memory-mapped dispersion file.")
wavelengths_memmap = np.memmap(output_prefix + "-wavelengths.memmap",
dtype="float32", mode="w+", shape=(num_pixels, ))
wavelengths_memmap[:] = np.hstack(channel_wavelengths)
wavelengths_memmap.flush()
del wavelengths_memmap
# Create the memory-mapped intensities file.
logger.debug("Creating memory-mapped intensities file.")
intensities_memmap = np.memmap(output_prefix + "-intensities.memmap",
shape=(grid_points.size, num_pixels), dtype="float32",
mode="w+")
n = len(grid_flux_tbl)
for i, row in enumerate(grid_flux_tbl):
logger.debug("Loading point {0}/{1} into the intensities map"\
.format(i + 1, n))
j = 0
for channel_name in channel_names:
try:
data = load_simple_data(row[channel_name])
except:
logger.exception("Could not load data from {0} for channel {1}"\
.format(row[channel_name], channel_name))
raise
intensities_memmap[i, j:j + data.size] = data
j += data.size
intensities_memmap.flush()
del intensities_memmap
return True | python | def create(output_prefix, grid_flux_filename, wavelength_filenames,
clobber=False, grid_flux_filename_format="csv", **kwargs):
"""
Create a new *sick* model from files describing the parameter names, fluxes,
and wavelengths.
"""
if not clobber:
# Check to make sure the output files won't exist already.
output_suffixes = (".yaml", ".pkl", "-wavelengths.memmap",
"-intensities.memmap")
for path in [output_prefix + suffix for suffix in output_suffixes]:
if os.path.exists(path):
raise IOError("output filename {} already exists".format(path))
# Read the grid_flux filename.
# param1 param2 param3 param4 channelname1 channelname2
kwds = kwargs.pop("__grid_flux_filename_kwargs", {})
kwds.update({"format": grid_flux_filename_format})
grid_flux_tbl = Table.read(grid_flux_filename, **kwds)
# Distinguish column names between parameters (real numbers) and filenames
str_columns = \
np.array([_[1].startswith("|S") for _ in grid_flux_tbl.dtype.descr])
# Check the number of channels provided.
if str_columns.sum() != len(wavelength_filenames):
raise ValueError("expected {0} wavelength filenames because {1} has {0}"
" string columns ({2}) but found {3} wavelength filenames".format(
sum(str_columns), grid_flux_filename,
", ".join(np.array(grid_flux_tbl.colnames)[str_columns]),
len(wavelength_filenames)))
# Create a record array of the grid points.
grid_points = \
grid_flux_tbl.as_array()[np.array(grid_flux_tbl.colnames)[~str_columns]]
# To-do: make sure they are all floats.
# Sort the grid points.
grid_indices = grid_points.argsort(order=grid_points.dtype.names)
grid_points = grid_points[grid_indices]
grid_flux_tbl = grid_flux_tbl[grid_indices]
# Check the wavelength filenames.
channel_wavelengths = np.array(map(load_simple_data, wavelength_filenames))
# Sort the channels by starting wavelength.
c_indices = np.argsort([each.min() for each in channel_wavelengths])
channel_names = np.array(grid_flux_tbl.colnames)[str_columns][c_indices]
channel_wavelengths = channel_wavelengths[c_indices]
channel_sizes = [len(_) for _ in channel_wavelengths]
num_pixels = sum(channel_sizes)
# Create the model YAML file.
with open(output_prefix + ".yaml", "w") as fp:
header = "\n".join([
"# Model created on {0}".format(strftime("%Y-%m-%d %H:%M:%S")),
"# Grid parameters: {0}".format(", ".join(grid_points.dtype.names)),
"# Channel names: {0}".format(", ".join(channel_names))
])
fp.write(header + "\n" + yaml.safe_dump({ "model_grid": {
"grid_points": output_prefix + ".pkl",
"intensities": output_prefix + "-intensities.memmap",
"wavelengths": output_prefix + "-wavelengths.memmap"
}}, stream=None, allow_unicode=True, default_flow_style=False))
# Create the pickled model file, with meta data.
metadata = {
"grid_flux_filename": grid_flux_filename,
"wavelength_filenames": wavelength_filenames,
"channel_names": channel_names,
"channel_sizes": channel_sizes,
"channel_resolutions": [float("inf")] * len(channel_names),
"sick_version": sick_version
}
logger.debug("Dumping grid points and metadata to file")
with open(output_prefix + ".pkl", "wb") as fp:
pickle.dump((grid_points, metadata), fp, -1)
# Create the memory-mapped dispersion file.
logger.debug("Creating memory-mapped dispersion file.")
wavelengths_memmap = np.memmap(output_prefix + "-wavelengths.memmap",
dtype="float32", mode="w+", shape=(num_pixels, ))
wavelengths_memmap[:] = np.hstack(channel_wavelengths)
wavelengths_memmap.flush()
del wavelengths_memmap
# Create the memory-mapped intensities file.
logger.debug("Creating memory-mapped intensities file.")
intensities_memmap = np.memmap(output_prefix + "-intensities.memmap",
shape=(grid_points.size, num_pixels), dtype="float32",
mode="w+")
n = len(grid_flux_tbl)
for i, row in enumerate(grid_flux_tbl):
logger.debug("Loading point {0}/{1} into the intensities map"\
.format(i + 1, n))
j = 0
for channel_name in channel_names:
try:
data = load_simple_data(row[channel_name])
except:
logger.exception("Could not load data from {0} for channel {1}"\
.format(row[channel_name], channel_name))
raise
intensities_memmap[i, j:j + data.size] = data
j += data.size
intensities_memmap.flush()
del intensities_memmap
return True | ['def', 'create', '(', 'output_prefix', ',', 'grid_flux_filename', ',', 'wavelength_filenames', ',', 'clobber', '=', 'False', ',', 'grid_flux_filename_format', '=', '"csv"', ',', '*', '*', 'kwargs', ')', ':', 'if', 'not', 'clobber', ':', "# Check to make sure the output files won't exist already.", 'output_suffixes', '=', '(', '".yaml"', ',', '".pkl"', ',', '"-wavelengths.memmap"', ',', '"-intensities.memmap"', ')', 'for', 'path', 'in', '[', 'output_prefix', '+', 'suffix', 'for', 'suffix', 'in', 'output_suffixes', ']', ':', 'if', 'os', '.', 'path', '.', 'exists', '(', 'path', ')', ':', 'raise', 'IOError', '(', '"output filename {} already exists"', '.', 'format', '(', 'path', ')', ')', '# Read the grid_flux filename.', '# param1 param2 param3 param4 channelname1 channelname2', 'kwds', '=', 'kwargs', '.', 'pop', '(', '"__grid_flux_filename_kwargs"', ',', '{', '}', ')', 'kwds', '.', 'update', '(', '{', '"format"', ':', 'grid_flux_filename_format', '}', ')', 'grid_flux_tbl', '=', 'Table', '.', 'read', '(', 'grid_flux_filename', ',', '*', '*', 'kwds', ')', '# Distinguish column names between parameters (real numbers) and filenames', 'str_columns', '=', 'np', '.', 'array', '(', '[', '_', '[', '1', ']', '.', 'startswith', '(', '"|S"', ')', 'for', '_', 'in', 'grid_flux_tbl', '.', 'dtype', '.', 'descr', ']', ')', '# Check the number of channels provided.', 'if', 'str_columns', '.', 'sum', '(', ')', '!=', 'len', '(', 'wavelength_filenames', ')', ':', 'raise', 'ValueError', '(', '"expected {0} wavelength filenames because {1} has {0}"', '" string columns ({2}) but found {3} wavelength filenames"', '.', 'format', '(', 'sum', '(', 'str_columns', ')', ',', 'grid_flux_filename', ',', '", "', '.', 'join', '(', 'np', '.', 'array', '(', 'grid_flux_tbl', '.', 'colnames', ')', '[', 'str_columns', ']', ')', ',', 'len', '(', 'wavelength_filenames', ')', ')', ')', '# Create a record array of the grid points.', 'grid_points', '=', 'grid_flux_tbl', '.', 'as_array', '(', ')', '[', 'np', '.', 'array', '(', 'grid_flux_tbl', '.', 'colnames', ')', '[', '~', 'str_columns', ']', ']', '# To-do: make sure they are all floats.', '# Sort the grid points.', 'grid_indices', '=', 'grid_points', '.', 'argsort', '(', 'order', '=', 'grid_points', '.', 'dtype', '.', 'names', ')', 'grid_points', '=', 'grid_points', '[', 'grid_indices', ']', 'grid_flux_tbl', '=', 'grid_flux_tbl', '[', 'grid_indices', ']', '# Check the wavelength filenames.', 'channel_wavelengths', '=', 'np', '.', 'array', '(', 'map', '(', 'load_simple_data', ',', 'wavelength_filenames', ')', ')', '# Sort the channels by starting wavelength.', 'c_indices', '=', 'np', '.', 'argsort', '(', '[', 'each', '.', 'min', '(', ')', 'for', 'each', 'in', 'channel_wavelengths', ']', ')', 'channel_names', '=', 'np', '.', 'array', '(', 'grid_flux_tbl', '.', 'colnames', ')', '[', 'str_columns', ']', '[', 'c_indices', ']', 'channel_wavelengths', '=', 'channel_wavelengths', '[', 'c_indices', ']', 'channel_sizes', '=', '[', 'len', '(', '_', ')', 'for', '_', 'in', 'channel_wavelengths', ']', 'num_pixels', '=', 'sum', '(', 'channel_sizes', ')', '# Create the model YAML file.', 'with', 'open', '(', 'output_prefix', '+', '".yaml"', ',', '"w"', ')', 'as', 'fp', ':', 'header', '=', '"\\n"', '.', 'join', '(', '[', '"# Model created on {0}"', '.', 'format', '(', 'strftime', '(', '"%Y-%m-%d %H:%M:%S"', ')', ')', ',', '"# Grid parameters: {0}"', '.', 'format', '(', '", "', '.', 'join', '(', 'grid_points', '.', 'dtype', '.', 'names', ')', ')', ',', '"# Channel names: {0}"', '.', 
'format', '(', '", "', '.', 'join', '(', 'channel_names', ')', ')', ']', ')', 'fp', '.', 'write', '(', 'header', '+', '"\\n"', '+', 'yaml', '.', 'safe_dump', '(', '{', '"model_grid"', ':', '{', '"grid_points"', ':', 'output_prefix', '+', '".pkl"', ',', '"intensities"', ':', 'output_prefix', '+', '"-intensities.memmap"', ',', '"wavelengths"', ':', 'output_prefix', '+', '"-wavelengths.memmap"', '}', '}', ',', 'stream', '=', 'None', ',', 'allow_unicode', '=', 'True', ',', 'default_flow_style', '=', 'False', ')', ')', '# Create the pickled model file, with meta data.', 'metadata', '=', '{', '"grid_flux_filename"', ':', 'grid_flux_filename', ',', '"wavelength_filenames"', ':', 'wavelength_filenames', ',', '"channel_names"', ':', 'channel_names', ',', '"channel_sizes"', ':', 'channel_sizes', ',', '"channel_resolutions"', ':', '[', 'float', '(', '"inf"', ')', ']', '*', 'len', '(', 'channel_names', ')', ',', '"sick_version"', ':', 'sick_version', '}', 'logger', '.', 'debug', '(', '"Dumping grid points and metadata to file"', ')', 'with', 'open', '(', 'output_prefix', '+', '".pkl"', ',', '"wb"', ')', 'as', 'fp', ':', 'pickle', '.', 'dump', '(', '(', 'grid_points', ',', 'metadata', ')', ',', 'fp', ',', '-', '1', ')', '# Create the memory-mapped dispersion file.', 'logger', '.', 'debug', '(', '"Creating memory-mapped dispersion file."', ')', 'wavelengths_memmap', '=', 'np', '.', 'memmap', '(', 'output_prefix', '+', '"-wavelengths.memmap"', ',', 'dtype', '=', '"float32"', ',', 'mode', '=', '"w+"', ',', 'shape', '=', '(', 'num_pixels', ',', ')', ')', 'wavelengths_memmap', '[', ':', ']', '=', 'np', '.', 'hstack', '(', 'channel_wavelengths', ')', 'wavelengths_memmap', '.', 'flush', '(', ')', 'del', 'wavelengths_memmap', '# Create the memory-mapped intensities file.', 'logger', '.', 'debug', '(', '"Creating memory-mapped intensities file."', ')', 'intensities_memmap', '=', 'np', '.', 'memmap', '(', 'output_prefix', '+', '"-intensities.memmap"', ',', 'shape', '=', '(', 'grid_points', '.', 'size', ',', 'num_pixels', ')', ',', 'dtype', '=', '"float32"', ',', 'mode', '=', '"w+"', ')', 'n', '=', 'len', '(', 'grid_flux_tbl', ')', 'for', 'i', ',', 'row', 'in', 'enumerate', '(', 'grid_flux_tbl', ')', ':', 'logger', '.', 'debug', '(', '"Loading point {0}/{1} into the intensities map"', '.', 'format', '(', 'i', '+', '1', ',', 'n', ')', ')', 'j', '=', '0', 'for', 'channel_name', 'in', 'channel_names', ':', 'try', ':', 'data', '=', 'load_simple_data', '(', 'row', '[', 'channel_name', ']', ')', 'except', ':', 'logger', '.', 'exception', '(', '"Could not load data from {0} for channel {1}"', '.', 'format', '(', 'row', '[', 'channel_name', ']', ',', 'channel_name', ')', ')', 'raise', 'intensities_memmap', '[', 'i', ',', 'j', ':', 'j', '+', 'data', '.', 'size', ']', '=', 'data', 'j', '+=', 'data', '.', 'size', 'intensities_memmap', '.', 'flush', '(', ')', 'del', 'intensities_memmap', 'return', 'True'] | Create a new *sick* model from files describing the parameter names, fluxes,
and wavelengths. | ['Create', 'a', 'new', '*', 'sick', '*', 'model', 'from', 'files', 'describing', 'the', 'parameter', 'names', 'fluxes', 'and', 'wavelengths', '.'] | train | https://github.com/andycasey/sick/blob/6c37686182794c4cafea45abf7062b30b789b1a2/sick/models/create.py#L49-L161 |
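A call sketch matching the signature and outputs above; every filename is hypothetical:

# grid.csv: float columns (teff, logg, feh) plus string columns (blue, red)
# holding per-point flux filenames; the *.txt files hold wavelength arrays.
create('my-model', 'grid.csv',
       ['blue-wavelengths.txt', 'red-wavelengths.txt'], clobber=True)
# Writes my-model.yaml, my-model.pkl, my-model-wavelengths.memmap,
# and my-model-intensities.memmap.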
1,884 | googleapis/google-cloud-python | error_reporting/google/cloud/error_reporting/_logging.py | _ErrorReportingLoggingAPI.report_error_event | def report_error_event(self, error_report):
"""Report error payload.
:type error_report: dict
:param error_report:
dict payload of the error report formatted according to
https://cloud.google.com/error-reporting/docs/formatting-error-messages
This object should be built using
:meth:~`google.cloud.error_reporting.client._build_error_report`
"""
logger = self.logging_client.logger("errors")
logger.log_struct(error_report) | python | def report_error_event(self, error_report):
"""Report error payload.
:type error_report: dict
:param error_report:
dict payload of the error report formatted according to
https://cloud.google.com/error-reporting/docs/formatting-error-messages
This object should be built using
:meth:~`google.cloud.error_reporting.client._build_error_report`
"""
logger = self.logging_client.logger("errors")
logger.log_struct(error_report) | ['def', 'report_error_event', '(', 'self', ',', 'error_report', ')', ':', 'logger', '=', 'self', '.', 'logging_client', '.', 'logger', '(', '"errors"', ')', 'logger', '.', 'log_struct', '(', 'error_report', ')'] | Report error payload.
:type error_report: dict
:param error_report:
dict payload of the error report formatted according to
https://cloud.google.com/error-reporting/docs/formatting-error-messages
This object should be built using
:meth:~`google.cloud.error_reporting.client._build_error_report` | ['Report', 'error', 'payload', '.'] | train | https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/error_reporting/google/cloud/error_reporting/_logging.py#L55-L66 |
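A payload sketch following the linked formatting guide; the constructor argument is an assumption, since it is not shown in this row:

api = _ErrorReportingLoggingAPI('my-project')  # assumed: project id as first arg
api.report_error_event({
    'serviceContext': {'service': 'my-service', 'version': '1.0'},
    'message': 'Traceback (most recent call last): ...',
})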
1,885 | tensorflow/tensor2tensor | tensor2tensor/data_generators/problem.py | Problem.get_hparams | def get_hparams(self, model_hparams=None):
"""Returns problem_hparams."""
if self._hparams is not None:
return self._hparams
if model_hparams is None:
model_hparams = default_model_hparams()
if self._encoders is None:
data_dir = (model_hparams and hasattr(model_hparams, "data_dir") and
model_hparams.data_dir) or None
self.get_feature_encoders(data_dir)
hp = _default_hparams()
ret = self.hparams(hp, model_hparams)
if ret is not None:
raise ValueError("The Problem subclass hparams function should mutate "
"the defaults passed in and return None.")
hp.add_hparam("vocabulary", self._encoders)
hp.add_hparam("was_reversed", self._was_reversed)
hp.add_hparam("was_copy", self._was_copy)
if self._was_reversed:
_reverse_problem_hparams(hp)
if self._was_copy:
_copy_problem_hparams(hp)
self._hparams = hp
return self._hparams | python | def get_hparams(self, model_hparams=None):
"""Returns problem_hparams."""
if self._hparams is not None:
return self._hparams
if model_hparams is None:
model_hparams = default_model_hparams()
if self._encoders is None:
data_dir = (model_hparams and hasattr(model_hparams, "data_dir") and
model_hparams.data_dir) or None
self.get_feature_encoders(data_dir)
hp = _default_hparams()
ret = self.hparams(hp, model_hparams)
if ret is not None:
raise ValueError("The Problem subclass hparams function should mutate "
"the defaults passed in and return None.")
hp.add_hparam("vocabulary", self._encoders)
hp.add_hparam("was_reversed", self._was_reversed)
hp.add_hparam("was_copy", self._was_copy)
if self._was_reversed:
_reverse_problem_hparams(hp)
if self._was_copy:
_copy_problem_hparams(hp)
self._hparams = hp
return self._hparams | ['def', 'get_hparams', '(', 'self', ',', 'model_hparams', '=', 'None', ')', ':', 'if', 'self', '.', '_hparams', 'is', 'not', 'None', ':', 'return', 'self', '.', '_hparams', 'if', 'model_hparams', 'is', 'None', ':', 'model_hparams', '=', 'default_model_hparams', '(', ')', 'if', 'self', '.', '_encoders', 'is', 'None', ':', 'data_dir', '=', '(', 'model_hparams', 'and', 'hasattr', '(', 'model_hparams', ',', '"data_dir"', ')', 'and', 'model_hparams', '.', 'data_dir', ')', 'or', 'None', 'self', '.', 'get_feature_encoders', '(', 'data_dir', ')', 'hp', '=', '_default_hparams', '(', ')', 'ret', '=', 'self', '.', 'hparams', '(', 'hp', ',', 'model_hparams', ')', 'if', 'ret', 'is', 'not', 'None', ':', 'raise', 'ValueError', '(', '"The Problem subclass hparams function should mutate "', '"the defaults passed in and return None."', ')', 'hp', '.', 'add_hparam', '(', '"vocabulary"', ',', 'self', '.', '_encoders', ')', 'hp', '.', 'add_hparam', '(', '"was_reversed"', ',', 'self', '.', '_was_reversed', ')', 'hp', '.', 'add_hparam', '(', '"was_copy"', ',', 'self', '.', '_was_copy', ')', 'if', 'self', '.', '_was_reversed', ':', '_reverse_problem_hparams', '(', 'hp', ')', 'if', 'self', '.', '_was_copy', ':', '_copy_problem_hparams', '(', 'hp', ')', 'self', '.', '_hparams', '=', 'hp', 'return', 'self', '.', '_hparams'] | Returns problem_hparams. | ['Returns', 'problem_hparams', '.'] | train | https://github.com/tensorflow/tensor2tensor/blob/272500b6efe353aeb638d2745ed56e519462ca31/tensor2tensor/data_generators/problem.py#L513-L542 |
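A sketch of the subclass contract enforced above: hparams() must mutate the passed defaults in place and return None.

class MyProblem(Problem):
    def hparams(self, defaults, model_hparams):
        # Mutate in place, e.g. via add_hparam() as get_hparams() itself does;
        # the implicit None return satisfies the check above.
        defaults.add_hparam('my_problem_flag', True)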
1,886 | cozy/python_cozy_management | cozy_management/weboob.py | install | def install():
'''
Install weboob system-wide
'''
tmp_weboob_dir = '/tmp/weboob'
# Check that the directory does not already exists
while (os.path.exists(tmp_weboob_dir)):
tmp_weboob_dir += '1'
# Clone the repository
print 'Fetching sources in temporary dir {}'.format(tmp_weboob_dir)
result = cmd_exec('git clone {} {}'.format(WEBOOB_REPO, tmp_weboob_dir))
if (result['error']):
print result['stderr']
print 'Weboob installation failed: could not clone repository'
exit()
print 'Sources fetched, will now process to installation'
# Launch the installation
result = cmd_exec('cd {} && ./setup.py install'.format(tmp_weboob_dir))
# Remove the weboob directory
shutil.rmtree(tmp_weboob_dir)
if (result['error']):
print result['stderr']
print 'Weboob installation failed: setup failed'
exit()
print result['stdout']
# Check weboob version
weboob_version = get_weboob_version()
if (not weboob_version):
print 'Weboob installation failed: version not detected'
exit()
print 'Weboob (version: {}) installation succeeded'.format(weboob_version)
update() | python | def install():
'''
Install weboob system-wide
'''
tmp_weboob_dir = '/tmp/weboob'
# Check that the directory does not already exists
while (os.path.exists(tmp_weboob_dir)):
tmp_weboob_dir += '1'
# Clone the repository
print 'Fetching sources in temporary dir {}'.format(tmp_weboob_dir)
result = cmd_exec('git clone {} {}'.format(WEBOOB_REPO, tmp_weboob_dir))
if (result['error']):
print result['stderr']
print 'Weboob installation failed: could not clone repository'
exit()
print 'Sources fetched, will now process to installation'
# Launch the installation
result = cmd_exec('cd {} && ./setup.py install'.format(tmp_weboob_dir))
# Remove the weboob directory
shutil.rmtree(tmp_weboob_dir)
if (result['error']):
print result['stderr']
print 'Weboob installation failed: setup failed'
exit()
print result['stdout']
# Check weboob version
weboob_version = get_weboob_version()
if (not weboob_version):
print 'Weboob installation failed: version not detected'
exit()
print 'Weboob (version: {}) installation succeeded'.format(weboob_version)
update() | ['def', 'install', '(', ')', ':', 'tmp_weboob_dir', '=', "'/tmp/weboob'", '# Check that the directory does not already exists', 'while', '(', 'os', '.', 'path', '.', 'exists', '(', 'tmp_weboob_dir', ')', ')', ':', 'tmp_weboob_dir', '+=', "'1'", '# Clone the repository', 'print', "'Fetching sources in temporary dir {}'", '.', 'format', '(', 'tmp_weboob_dir', ')', 'result', '=', 'cmd_exec', '(', "'git clone {} {}'", '.', 'format', '(', 'WEBOOB_REPO', ',', 'tmp_weboob_dir', ')', ')', 'if', '(', 'result', '[', "'error'", ']', ')', ':', 'print', 'result', '[', "'stderr'", ']', 'print', "'Weboob installation failed: could not clone repository'", 'exit', '(', ')', 'print', "'Sources fetched, will now process to installation'", '# Launch the installation', 'result', '=', 'cmd_exec', '(', "'cd {} && ./setup.py install'", '.', 'format', '(', 'tmp_weboob_dir', ')', ')', '# Remove the weboob directory', 'shutil', '.', 'rmtree', '(', 'tmp_weboob_dir', ')', 'if', '(', 'result', '[', "'error'", ']', ')', ':', 'print', 'result', '[', "'stderr'", ']', 'print', "'Weboob installation failed: setup failed'", 'exit', '(', ')', 'print', 'result', '[', "'stdout'", ']', '# Check weboob version', 'weboob_version', '=', 'get_weboob_version', '(', ')', 'if', '(', 'not', 'weboob_version', ')', ':', 'print', "'Weboob installation failed: version not detected'", 'exit', '(', ')', 'print', "'Weboob (version: {}) installation succeeded'", '.', 'format', '(', 'weboob_version', ')', 'update', '(', ')'] | Install weboob system-wide | ['Install', 'weboob', 'system', '-', 'wide'] | train | https://github.com/cozy/python_cozy_management/blob/820cea58458ae3e067fa8cc2da38edbda4681dac/cozy_management/weboob.py#L32-L72 |
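
A hypothetical call (module path inferred from the repository layout); the function shells out to git and setup.py and uses Python 2 print syntax, so it needs a Python 2 interpreter, git on PATH, and write access to /tmp:

```python
# Assumed usage; network access is required to clone WEBOOB_REPO.
from cozy_management import weboob

weboob.install()  # clone, setup.py install, version check, then update()
```
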
1,887 | ConsenSys/mythril-classic | mythril/laser/smt/__init__.py | _SmtSymbolFactory.BitVecSym | def BitVecSym(name: str, size: int, annotations: Annotations = None) -> BitVec:
"""Creates a new bit vector with a symbolic value."""
raw = z3.BitVec(name, size)
return BitVec(raw, annotations) | python | def BitVecSym(name: str, size: int, annotations: Annotations = None) -> BitVec:
"""Creates a new bit vector with a symbolic value."""
raw = z3.BitVec(name, size)
return BitVec(raw, annotations) | ['def', 'BitVecSym', '(', 'name', ':', 'str', ',', 'size', ':', 'int', ',', 'annotations', ':', 'Annotations', '=', 'None', ')', '->', 'BitVec', ':', 'raw', '=', 'z3', '.', 'BitVec', '(', 'name', ',', 'size', ')', 'return', 'BitVec', '(', 'raw', ',', 'annotations', ')'] | Creates a new bit vector with a symbolic value. | ['Creates', 'a', 'new', 'bit', 'vector', 'with', 'a', 'symbolic', 'value', '.'] | train | https://github.com/ConsenSys/mythril-classic/blob/27af71c34b2ce94f4fae5613ec457f93df1a8f56/mythril/laser/smt/__init__.py#L132-L135 |
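
A minimal sketch, assuming mythril is installed; `symbol_factory` is the module-level factory instance that exposes this method:

```python
from mythril.laser.smt import symbol_factory

x = symbol_factory.BitVecSym("x", 256)  # fresh symbolic 256-bit vector
y = symbol_factory.BitVecVal(1, 256)    # concrete counterpart for contrast
s = x + y                               # arithmetic stays symbolic
```
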
1,888 | SeanOC/sharpy | sharpy/product.py | PricingPlan.initial_bill_date | def initial_bill_date(self):
'''
An estimated initial bill date for an account created today,
based on available plan info.
'''
time_to_start = None
if self.initial_bill_count_unit == 'months':
time_to_start = relativedelta(months=self.initial_bill_count)
else:
time_to_start = relativedelta(days=self.initial_bill_count)
initial_bill_date = datetime.utcnow().date() + time_to_start
return initial_bill_date | python | def initial_bill_date(self):
'''
An estimated initial bill date for an account created today,
based on available plan info.
'''
time_to_start = None
if self.initial_bill_count_unit == 'months':
time_to_start = relativedelta(months=self.initial_bill_count)
else:
time_to_start = relativedelta(days=self.initial_bill_count)
initial_bill_date = datetime.utcnow().date() + time_to_start
return initial_bill_date | ['def', 'initial_bill_date', '(', 'self', ')', ':', 'time_to_start', '=', 'None', 'if', 'self', '.', 'initial_bill_count_unit', '==', "'months'", ':', 'time_to_start', '=', 'relativedelta', '(', 'months', '=', 'self', '.', 'initial_bill_count', ')', 'else', ':', 'time_to_start', '=', 'relativedelta', '(', 'days', '=', 'self', '.', 'initial_bill_count', ')', 'initial_bill_date', '=', 'datetime', '.', 'utcnow', '(', ')', '.', 'date', '(', ')', '+', 'time_to_start', 'return', 'initial_bill_date'] | An estimated initial bill date for an account created today,
based on available plan info. | ['An', 'estimated', 'initial', 'bill', 'date', 'for', 'an', 'account', 'created', 'today', 'based', 'on', 'available', 'plan', 'info', '.'] | train | https://github.com/SeanOC/sharpy/blob/935943ca86034255f0a93c1a84734814be176ed4/sharpy/product.py#L331-L345 |
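
The date arithmetic is plain `dateutil`; a standalone check with assumed plan values:

```python
from datetime import datetime
from dateutil.relativedelta import relativedelta

initial_bill_count, unit = 1, 'months'   # hypothetical plan settings
delta = (relativedelta(months=initial_bill_count) if unit == 'months'
         else relativedelta(days=initial_bill_count))
print(datetime.utcnow().date() + delta)  # estimated first bill date
```
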
1,889 | mixmastamyk/console | console/utils.py | reset_terminal | def reset_terminal():
''' Reset the terminal/console screen. (Also aliased to cls.)
Greater than a fullscreen terminal clear, also clears the scrollback
buffer. May expose bugs in dumb terminals.
'''
if os.name == 'nt':
from .windows import cls
cls()
else:
text = sc.reset
_write(text)
return text | python | def reset_terminal():
''' Reset the terminal/console screen. (Also aliased to cls.)
Greater than a fullscreen terminal clear, also clears the scrollback
buffer. May expose bugs in dumb terminals.
'''
if os.name == 'nt':
from .windows import cls
cls()
else:
text = sc.reset
_write(text)
return text | ['def', 'reset_terminal', '(', ')', ':', 'if', 'os', '.', 'name', '==', "'nt'", ':', 'from', '.', 'windows', 'import', 'cls', 'cls', '(', ')', 'else', ':', 'text', '=', 'sc', '.', 'reset', '_write', '(', 'text', ')', 'return', 'text'] | Reset the terminal/console screen. (Also aliased to cls.)
Greater than a fullscreen terminal clear, also clears the scrollback
buffer. May expose bugs in dumb terminals. | ['Reset', 'the', 'terminal', '/', 'console', 'screen', '.', '(', 'Also', 'aliased', 'to', 'cls', '.', ')'] | train | https://github.com/mixmastamyk/console/blob/afe6c95d5a7b83d85376f450454e3769e4a5c3d0/console/utils.py#L88-L100 |
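
A one-line usage sketch; `cls` is the documented alias:

```python
from console.utils import reset_terminal

reset_terminal()  # POSIX: writes the reset (RIS) sequence; Windows: calls cls()
```
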
1,890 | qwiglydee/drf-mongo-filters | drf_mongo_filters/filters.py | Filter.filter_params | def filter_params(self, value):
""" return filtering params """
if value is None:
return {}
key = self.target
if self.lookup_type is not None:
key += '__' + self.lookup_type
return { key: value } | python | def filter_params(self, value):
""" return filtering params """
if value is None:
return {}
key = self.target
if self.lookup_type is not None:
key += '__' + self.lookup_type
return { key: value } | ['def', 'filter_params', '(', 'self', ',', 'value', ')', ':', 'if', 'value', 'is', 'None', ':', 'return', '{', '}', 'key', '=', 'self', '.', 'target', 'if', 'self', '.', 'lookup_type', 'is', 'not', 'None', ':', 'key', '+=', "'__'", '+', 'self', '.', 'lookup_type', 'return', '{', 'key', ':', 'value', '}'] | return filtering params | ['return', 'filtering', 'params'] | train | https://github.com/qwiglydee/drf-mongo-filters/blob/f7e397c329bac6d7b8cbb1df70d96eccdcfbc1ec/drf_mongo_filters/filters.py#L78-L86 |
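
The method only composes a mongoengine-style lookup key; a framework-free rendition with assumed attribute values (the Filter construction itself is omitted):

```python
target, lookup_type = 'created', 'gte'   # hypothetical filter attributes
key = target + ('__' + lookup_type if lookup_type is not None else '')
print({key: '2024-01-01'})               # {'created__gte': '2024-01-01'}
```
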
1,891 | rigetti/pyquil | pyquil/_parser/PyQuilListener.py | CustomErrorListener.get_expected_tokens | def get_expected_tokens(self, parser, interval_set):
# type: (QuilParser, IntervalSet) -> Iterator
"""
Like the default getExpectedTokens method except that it will fallback to the rule name if the token isn't a
literal. For instance, instead of <INVALID> for integer it will return the rule name: INT
"""
for tok in interval_set:
literal_name = parser.literalNames[tok]
symbolic_name = parser.symbolicNames[tok]
if literal_name != '<INVALID>':
yield literal_name
else:
yield symbolic_name | python | def get_expected_tokens(self, parser, interval_set):
# type: (QuilParser, IntervalSet) -> Iterator
"""
Like the default getExpectedTokens method except that it will fallback to the rule name if the token isn't a
literal. For instance, instead of <INVALID> for integer it will return the rule name: INT
"""
for tok in interval_set:
literal_name = parser.literalNames[tok]
symbolic_name = parser.symbolicNames[tok]
if literal_name != '<INVALID>':
yield literal_name
else:
yield symbolic_name | ['def', 'get_expected_tokens', '(', 'self', ',', 'parser', ',', 'interval_set', ')', ':', '# type: (QuilParser, IntervalSet) -> Iterator', 'for', 'tok', 'in', 'interval_set', ':', 'literal_name', '=', 'parser', '.', 'literalNames', '[', 'tok', ']', 'symbolic_name', '=', 'parser', '.', 'symbolicNames', '[', 'tok', ']', 'if', 'literal_name', '!=', "'<INVALID>'", ':', 'yield', 'literal_name', 'else', ':', 'yield', 'symbolic_name'] | Like the default getExpectedTokens method except that it will fallback to the rule name if the token isn't a
literal. For instance, instead of <INVALID> for integer it will return the rule name: INT | ['Like', 'the', 'default', 'getExpectedTokens', 'method', 'except', 'that', 'it', 'will', 'fallback', 'to', 'the', 'rule', 'name', 'if', 'the', 'token', 'isn', 't', 'a', 'literal', '.', 'For', 'instance', 'instead', 'of', '<INVALID', '>', 'for', 'integer', 'it', 'will', 'return', 'the', 'rule', 'name', ':', 'INT'] | train | https://github.com/rigetti/pyquil/blob/ec98e453084b0037d69d8c3245f6822a5422593d/pyquil/_parser/PyQuilListener.py#L87-L100 |
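
A standalone rendition of the fallback rule with faked parser tables (the real names come from the ANTLR-generated QuilParser):

```python
literal_names = ['<INVALID>', "'DEFGATE'"]
symbolic_names = ['INT', 'DEFGATE']

def expected(tokens):
    for tok in tokens:
        lit = literal_names[tok]
        yield lit if lit != '<INVALID>' else symbolic_names[tok]

print(list(expected([0, 1])))  # ['INT', "'DEFGATE'"]
```
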
1,892 | orbingol/NURBS-Python | geomdl/_operations.py | normal_surface_single | def normal_surface_single(obj, uv, normalize):
""" Evaluates the surface normal vector at the given (u, v) parameter pair.
The output returns a list containing the starting point (i.e. origin) of the vector and the vector itself.
:param obj: input surface
:type obj: abstract.Surface
:param uv: (u,v) parameter pair
:type uv: list or tuple
:param normalize: if True, the returned normal vector is converted to a unit vector
:type normalize: bool
:return: a list in the order of "surface point" and "normal vector"
:rtype: list
"""
# Take the 1st derivative of the surface
skl = obj.derivatives(uv[0], uv[1], 1)
point = skl[0][0]
vector = linalg.vector_cross(skl[1][0], skl[0][1])
vector = linalg.vector_normalize(vector) if normalize else vector
return tuple(point), tuple(vector) | python | def normal_surface_single(obj, uv, normalize):
""" Evaluates the surface normal vector at the given (u, v) parameter pair.
The output returns a list containing the starting point (i.e. origin) of the vector and the vector itself.
:param obj: input surface
:type obj: abstract.Surface
:param uv: (u,v) parameter pair
:type uv: list or tuple
:param normalize: if True, the returned normal vector is converted to a unit vector
:type normalize: bool
:return: a list in the order of "surface point" and "normal vector"
:rtype: list
"""
# Take the 1st derivative of the surface
skl = obj.derivatives(uv[0], uv[1], 1)
point = skl[0][0]
vector = linalg.vector_cross(skl[1][0], skl[0][1])
vector = linalg.vector_normalize(vector) if normalize else vector
return tuple(point), tuple(vector) | ['def', 'normal_surface_single', '(', 'obj', ',', 'uv', ',', 'normalize', ')', ':', '# Take the 1st derivative of the surface', 'skl', '=', 'obj', '.', 'derivatives', '(', 'uv', '[', '0', ']', ',', 'uv', '[', '1', ']', ',', '1', ')', 'point', '=', 'skl', '[', '0', ']', '[', '0', ']', 'vector', '=', 'linalg', '.', 'vector_cross', '(', 'skl', '[', '1', ']', '[', '0', ']', ',', 'skl', '[', '0', ']', '[', '1', ']', ')', 'vector', '=', 'linalg', '.', 'vector_normalize', '(', 'vector', ')', 'if', 'normalize', 'else', 'vector', 'return', 'tuple', '(', 'point', ')', ',', 'tuple', '(', 'vector', ')'] | Evaluates the surface normal vector at the given (u, v) parameter pair.
The output returns a list containing the starting point (i.e. origin) of the vector and the vector itself.
:param obj: input surface
:type obj: abstract.Surface
:param uv: (u,v) parameter pair
:type uv: list or tuple
:param normalize: if True, the returned normal vector is converted to a unit vector
:type normalize: bool
:return: a list in the order of "surface point" and "normal vector"
:rtype: list | ['Evaluates', 'the', 'surface', 'normal', 'vector', 'at', 'the', 'given', '(', 'u', 'v', ')', 'parameter', 'pair', '.'] | train | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/_operations.py#L191-L212 |
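
A hedged sketch via geomdl's public `operations.normal`, which wraps this helper; the bilinear surface below is an assumed toy example:

```python
from geomdl import BSpline, operations

surf = BSpline.Surface()
surf.degree_u, surf.degree_v = 1, 1
surf.ctrlpts2d = [[[0.0, 0.0, 0.0], [0.0, 1.0, 0.0]],
                  [[1.0, 0.0, 0.0], [1.0, 1.0, 1.0]]]
surf.knotvector_u = [0, 0, 1, 1]
surf.knotvector_v = [0, 0, 1, 1]

point, vector = operations.normal(surf, (0.5, 0.5))  # origin and unit normal
```
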
1,893 | joeyespo/gitpress | gitpress/config.py | set_value | def set_value(repo_directory, key, value, strict=True):
"""Sets the value of a particular key in the config file. This has no effect when setting to the same value."""
if value is None:
raise ValueError('Argument "value" must not be None.')
# Read values and do nothing if not making any changes
config = read_config(repo_directory)
old = config.get(key)
if old == value:
return old
# Check schema
if strict and old is not None and not isinstance(old, type(value)):
raise ConfigSchemaError('Expected config variable %s to be type %s, got %s'
% (repr(key), repr(type(value)), repr(type(old))))
# Set new value and save results
config[key] = value
write_config(repo_directory, config)
return old | python | def set_value(repo_directory, key, value, strict=True):
"""Sets the value of a particular key in the config file. This has no effect when setting to the same value."""
if value is None:
raise ValueError('Argument "value" must not be None.')
# Read values and do nothing if not making any changes
config = read_config(repo_directory)
old = config.get(key)
if old == value:
return old
# Check schema
if strict and old is not None and not isinstance(old, type(value)):
raise ConfigSchemaError('Expected config variable %s to be type %s, got %s'
% (repr(key), repr(type(value)), repr(type(old))))
# Set new value and save results
config[key] = value
write_config(repo_directory, config)
return old | ['def', 'set_value', '(', 'repo_directory', ',', 'key', ',', 'value', ',', 'strict', '=', 'True', ')', ':', 'if', 'value', 'is', 'None', ':', 'raise', 'ValueError', '(', '\'Argument "value" must not be None.\'', ')', '# Read values and do nothing if not making any changes', 'config', '=', 'read_config', '(', 'repo_directory', ')', 'old', '=', 'config', '.', 'get', '(', 'key', ')', 'if', 'old', '==', 'value', ':', 'return', 'old', '# Check schema', 'if', 'strict', 'and', 'old', 'is', 'not', 'None', 'and', 'not', 'isinstance', '(', 'old', ',', 'type', '(', 'value', ')', ')', ':', 'raise', 'ConfigSchemaError', '(', "'Expected config variable %s to be type %s, got %s'", '%', '(', 'repr', '(', 'key', ')', ',', 'repr', '(', 'type', '(', 'value', ')', ')', ',', 'repr', '(', 'type', '(', 'old', ')', ')', ')', ')', '# Set new value and save results', 'config', '[', 'key', ']', '=', 'value', 'write_config', '(', 'repo_directory', ',', 'config', ')', 'return', 'old'] | Sets the value of a particular key in the config file. This has no effect when setting to the same value. | ['Sets', 'the', 'value', 'of', 'a', 'particular', 'key', 'in', 'the', 'config', 'file', '.', 'This', 'has', 'no', 'effect', 'when', 'setting', 'to', 'the', 'same', 'value', '.'] | train | https://github.com/joeyespo/gitpress/blob/a23edb80b6e4a113d167217475344a01c92b5c6d/gitpress/config.py#L62-L81 |
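
A hypothetical call; the directory and key are placeholders, and the strict check compares the new value's type against the existing one's:

```python
from gitpress.config import set_value

old = set_value('/path/to/presentation', 'theme', 'dark')  # returns prior value
# set_value(..., 'theme', 123) would raise ConfigSchemaError if 'theme' was a str
```
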
1,894 | F5Networks/f5-common-python | f5/bigip/mixins.py | CheckExistenceMixin._return_object | def _return_object(self, container, item_name):
"""Helper method to retrieve the object"""
coll = container.get_collection()
for item in coll:
if item.name == item_name:
return item | python | def _return_object(self, container, item_name):
"""Helper method to retrieve the object"""
coll = container.get_collection()
for item in coll:
if item.name == item_name:
return item | ['def', '_return_object', '(', 'self', ',', 'container', ',', 'item_name', ')', ':', 'coll', '=', 'container', '.', 'get_collection', '(', ')', 'for', 'item', 'in', 'coll', ':', 'if', 'item', '.', 'name', '==', 'item_name', ':', 'return', 'item'] | Helper method to retrieve the object | ['Helper', 'method', 'to', 'retrieve', 'the', 'object'] | train | https://github.com/F5Networks/f5-common-python/blob/7e67d5acd757a60e3d5f8c88c534bd72208f5494/f5/bigip/mixins.py#L463-L468 |
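
A framework-free sketch of the same linear scan (the container and items are faked stand-ins for BIG-IP resources):

```python
class Item:
    def __init__(self, name):
        self.name = name

class Container:
    def get_collection(self):
        return [Item('pool_a'), Item('pool_b')]

def return_object(container, item_name):
    for item in container.get_collection():
        if item.name == item_name:
            return item  # falls through to None when nothing matches

print(return_object(Container(), 'pool_b').name)  # pool_b
```
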
1,895 | pypa/pipenv | pipenv/vendor/distlib/database.py | InstalledDistribution.read_exports | def read_exports(self):
"""
Read exports data from a file in .ini format.
:return: A dictionary of exports, mapping an export category to a list
of :class:`ExportEntry` instances describing the individual
export entries.
"""
result = {}
r = self.get_distinfo_resource(EXPORTS_FILENAME)
if r:
with contextlib.closing(r.as_stream()) as stream:
result = read_exports(stream)
return result | python | def read_exports(self):
"""
Read exports data from a file in .ini format.
:return: A dictionary of exports, mapping an export category to a list
of :class:`ExportEntry` instances describing the individual
export entries.
"""
result = {}
r = self.get_distinfo_resource(EXPORTS_FILENAME)
if r:
with contextlib.closing(r.as_stream()) as stream:
result = read_exports(stream)
return result | ['def', 'read_exports', '(', 'self', ')', ':', 'result', '=', '{', '}', 'r', '=', 'self', '.', 'get_distinfo_resource', '(', 'EXPORTS_FILENAME', ')', 'if', 'r', ':', 'with', 'contextlib', '.', 'closing', '(', 'r', '.', 'as_stream', '(', ')', ')', 'as', 'stream', ':', 'result', '=', 'read_exports', '(', 'stream', ')', 'return', 'result'] | Read exports data from a file in .ini format.
:return: A dictionary of exports, mapping an export category to a list
of :class:`ExportEntry` instances describing the individual
export entries. | ['Read', 'exports', 'data', 'from', 'a', 'file', 'in', '.', 'ini', 'format', '.'] | train | https://github.com/pypa/pipenv/blob/cae8d76c210b9777e90aab76e9c4b0e53bb19cde/pipenv/vendor/distlib/database.py#L617-L630 |
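
A hedged usage sketch; requires distlib plus an installed distribution, and the result is simply `{}` when no EXPORTS resource exists:

```python
from distlib.database import DistributionPath

dist = DistributionPath().get_distribution('pip')  # any installed dist name
if dist is not None:
    print(dist.read_exports())  # {category: [ExportEntry, ...]} or {}
```
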
1,896 | bitesofcode/projexui | projexui/widgets/xorbcolumnnavigator.py | XOrbColumnItem.setCurrentSchemaPath | def setCurrentSchemaPath(self, path):
"""
Sets the current item based on the input column.
:param path | <str>
"""
if not path:
return False
parts = path.split('.')
name = parts[0]
next = parts[1:]
if name == self.text(0):
if next:
self.load()
path = '.'.join(next)
for c in range(self.childCount()):
if self.child(c).setCurrentSchemaPath(path):
self.setExpanded(True)
return True
return False
else:
self.treeWidget().setCurrentItem(self)
return True
return False | python | def setCurrentSchemaPath(self, path):
"""
Sets the current item based on the input column.
:param path | <str>
"""
if not path:
return False
parts = path.split('.')
name = parts[0]
next = parts[1:]
if name == self.text(0):
if next:
self.load()
path = '.'.join(next)
for c in range(self.childCount()):
if self.child(c).setCurrentSchemaPath(path):
self.setExpanded(True)
return True
return False
else:
self.treeWidget().setCurrentItem(self)
return True
return False | ['def', 'setCurrentSchemaPath', '(', 'self', ',', 'path', ')', ':', 'if', 'not', 'path', ':', 'return', 'False', 'parts', '=', 'path', '.', 'split', '(', "'.'", ')', 'name', '=', 'parts', '[', '0', ']', 'next', '=', 'parts', '[', '1', ':', ']', 'if', 'name', '==', 'self', '.', 'text', '(', '0', ')', ':', 'if', 'next', ':', 'self', '.', 'load', '(', ')', 'path', '=', "'.'", '.', 'join', '(', 'next', ')', 'for', 'c', 'in', 'range', '(', 'self', '.', 'childCount', '(', ')', ')', ':', 'if', 'self', '.', 'child', '(', 'c', ')', '.', 'setCurrentSchemaPath', '(', 'path', ')', ':', 'self', '.', 'setExpanded', '(', 'True', ')', 'return', 'True', 'return', 'False', 'else', ':', 'self', '.', 'treeWidget', '(', ')', '.', 'setCurrentItem', '(', 'self', ')', 'return', 'True', 'return', 'False'] | Sets the current item based on the input column.
:param path | <str> | ['Sets', 'the', 'current', 'item', 'based', 'on', 'the', 'input', 'column', '.', ':', 'param', 'path', '|', '<str', '>'] | train | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xorbcolumnnavigator.py#L96-L121 |
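
A Qt-free rendition of the dotted-path recursion (tree nodes are faked as dicts; the real code selects the matching tree-widget item):

```python
def set_current(node, path):
    name, _, rest = path.partition('.')
    if name != node['name']:
        return False
    if not rest:
        return True  # the widget code calls setCurrentItem here
    return any(set_current(child, rest) for child in node['children'])

tree = {'name': 'user', 'children': [{'name': 'address', 'children': []}]}
print(set_current(tree, 'user.address'))  # True
```
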
1,897 | fprimex/zdesk | zdesk/zdesk_api.py | ZendeskAPI.ticket_metrics | def ticket_metrics(self, ticket_id, **kwargs):
"https://developer.zendesk.com/rest_api/docs/core/ticket_metrics#show-ticket-metrics"
api_path = "/api/v2/tickets/{ticket_id}/metrics.json"
api_path = api_path.format(ticket_id=ticket_id)
return self.call(api_path, **kwargs) | python | def ticket_metrics(self, ticket_id, **kwargs):
"https://developer.zendesk.com/rest_api/docs/core/ticket_metrics#show-ticket-metrics"
api_path = "/api/v2/tickets/{ticket_id}/metrics.json"
api_path = api_path.format(ticket_id=ticket_id)
return self.call(api_path, **kwargs) | ['def', 'ticket_metrics', '(', 'self', ',', 'ticket_id', ',', '*', '*', 'kwargs', ')', ':', 'api_path', '=', '"/api/v2/tickets/{ticket_id}/metrics.json"', 'api_path', '=', 'api_path', '.', 'format', '(', 'ticket_id', '=', 'ticket_id', ')', 'return', 'self', '.', 'call', '(', 'api_path', ',', '*', '*', 'kwargs', ')'] | https://developer.zendesk.com/rest_api/docs/core/ticket_metrics#show-ticket-metrics | ['https', ':', '//', 'developer', '.', 'zendesk', '.', 'com', '/', 'rest_api', '/', 'docs', '/', 'core', '/', 'ticket_metrics#show', '-', 'ticket', '-', 'metrics'] | train | https://github.com/fprimex/zdesk/blob/851611c13b4d530e9df31390b3ec709baf0a0188/zdesk/zdesk_api.py#L3551-L3555 |
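
A hypothetical call; URL, credentials, and ticket id are placeholders, and the constructor arguments are assumed to follow zdesk's token-auth pattern:

```python
from zdesk import Zendesk

zd = Zendesk('https://example.zendesk.com', 'me@example.com',
             'API_TOKEN', True)  # True -> treat the password as an API token
metrics = zd.ticket_metrics(ticket_id=123)
```
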
1,898 | deontologician/restnavigator | restnavigator/utils.py | LinkList.get_by | def get_by(self, prop, val, raise_exc=False):
'''Retrieve an item from the dictionary with the given metadata
properties. If there is no such item, None will be returned, if there
are multiple such items, the first will be returned.'''
try:
val = self.serialize(val)
return self._meta[prop][val][0]
except (KeyError, IndexError):
if raise_exc:
raise
else:
return None | python | def get_by(self, prop, val, raise_exc=False):
'''Retrieve an item from the dictionary with the given metadata
properties. If there is no such item, None will be returned, if there
are multiple such items, the first will be returned.'''
try:
val = self.serialize(val)
return self._meta[prop][val][0]
except (KeyError, IndexError):
if raise_exc:
raise
else:
return None | ['def', 'get_by', '(', 'self', ',', 'prop', ',', 'val', ',', 'raise_exc', '=', 'False', ')', ':', 'try', ':', 'val', '=', 'self', '.', 'serialize', '(', 'val', ')', 'return', 'self', '.', '_meta', '[', 'prop', ']', '[', 'val', ']', '[', '0', ']', 'except', '(', 'KeyError', ',', 'IndexError', ')', ':', 'if', 'raise_exc', ':', 'raise', 'else', ':', 'return', 'None'] | Retrieve an item from the dictionary with the given metadata
properties. If there is no such item, None will be returned, if there
are multiple such items, the first will be returned. | ['Retrieve', 'an', 'item', 'from', 'the', 'dictionary', 'with', 'the', 'given', 'metadata', 'properties', '.', 'If', 'there', 'is', 'no', 'such', 'item', 'None', 'will', 'be', 'returned', 'if', 'there', 'are', 'multiple', 'such', 'items', 'the', 'first', 'will', 'be', 'returned', '.'] | train | https://github.com/deontologician/restnavigator/blob/453b9de4e70e602009d3e3ffafcf77d23c8b07c5/restnavigator/utils.py#L205-L216 |
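
A standalone sketch of the lookup semantics over a faked `_meta` index (value serialization omitted):

```python
meta = {'name': {'a': [1], 'b': [2]}}

def get_by(prop, val, raise_exc=False):
    try:
        return meta[prop][val][0]
    except (KeyError, IndexError):
        if raise_exc:
            raise
        return None

print(get_by('name', 'a'), get_by('name', 'z'))  # 1 None
```
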
1,899 | mbedmicro/pyOCD | pyocd/utility/conversion.py | u16le_list_to_byte_list | def u16le_list_to_byte_list(data):
"""! @brief Convert a halfword array into a byte array"""
byteData = []
for h in data:
byteData.extend([h & 0xff, (h >> 8) & 0xff])
return byteData | python | def u16le_list_to_byte_list(data):
"""! @brief Convert a halfword array into a byte array"""
byteData = []
for h in data:
byteData.extend([h & 0xff, (h >> 8) & 0xff])
return byteData | ['def', 'u16le_list_to_byte_list', '(', 'data', ')', ':', 'byteData', '=', '[', ']', 'for', 'h', 'in', 'data', ':', 'byteData', '.', 'extend', '(', '[', 'h', '&', '0xff', ',', '(', 'h', '>>', '8', ')', '&', '0xff', ']', ')', 'return', 'byteData'] | ! @brief Convert a halfword array into a byte array | ['!'] | train | https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/utility/conversion.py#L49-L54 |
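
A quick little-endian sanity check of the conversion:

```python
def u16le_list_to_byte_list(data):
    out = []
    for h in data:
        out.extend([h & 0xff, (h >> 8) & 0xff])
    return out

assert u16le_list_to_byte_list([0x1234, 0xABCD]) == [0x34, 0x12, 0xCD, 0xAB]
```
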