repo (string, 7-55 chars) | path (string, 4-127 chars) | func_name (string, 1-88 chars) | original_string (string, 75-19.8k chars) | language (string, 1 class) | code (string, 75-19.8k chars) | code_tokens (list) | docstring (string, 3-17.3k chars) | docstring_tokens (list) | sha (string, 40 chars) | url (string, 87-242 chars) | partition (string, 1 class) |
---|---|---|---|---|---|---|---|---|---|---|---|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py
|
__ensure_suffix_stem
|
def __ensure_suffix_stem(t, suffix):
    """ Ensure that the target t has the given suffix, and return the file's stem. """
    tpath = str(t)
    if not tpath.endswith(suffix):
        stem = tpath
        tpath += suffix
        return tpath, stem
    else:
        stem, ext = os.path.splitext(tpath)
    return t, stem
|
python
|
def __ensure_suffix_stem(t, suffix):
    """ Ensure that the target t has the given suffix, and return the file's stem. """
    tpath = str(t)
    if not tpath.endswith(suffix):
        stem = tpath
        tpath += suffix
        return tpath, stem
    else:
        stem, ext = os.path.splitext(tpath)
    return t, stem
|
[
"def",
"__ensure_suffix_stem",
"(",
"t",
",",
"suffix",
")",
":",
"tpath",
"=",
"str",
"(",
"t",
")",
"if",
"not",
"tpath",
".",
"endswith",
"(",
"suffix",
")",
":",
"stem",
"=",
"tpath",
"tpath",
"+=",
"suffix",
"return",
"tpath",
",",
"stem",
"else",
":",
"stem",
",",
"ext",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"tpath",
")",
"return",
"t",
",",
"stem"
] |
Ensure that the target t has the given suffix, and return the file's stem.
|
[
"Ensure",
"that",
"the",
"target",
"t",
"has",
"the",
"given",
"suffix",
"and",
"return",
"the",
"file",
"s",
"stem",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py#L121-L132
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py
|
__create_output_dir
|
def __create_output_dir(base_dir):
    """ Ensure that the output directory base_dir exists. """
    root, tail = os.path.split(base_dir)
    dir = None
    if tail:
        if base_dir.endswith('/'):
            dir = base_dir
        else:
            dir = root
    else:
        if base_dir.endswith('/'):
            dir = base_dir
    if dir and not os.path.isdir(dir):
        os.makedirs(dir)
|
python
|
def __create_output_dir(base_dir):
    """ Ensure that the output directory base_dir exists. """
    root, tail = os.path.split(base_dir)
    dir = None
    if tail:
        if base_dir.endswith('/'):
            dir = base_dir
        else:
            dir = root
    else:
        if base_dir.endswith('/'):
            dir = base_dir
    if dir and not os.path.isdir(dir):
        os.makedirs(dir)
|
[
"def",
"__create_output_dir",
"(",
"base_dir",
")",
":",
"root",
",",
"tail",
"=",
"os",
".",
"path",
".",
"split",
"(",
"base_dir",
")",
"dir",
"=",
"None",
"if",
"tail",
":",
"if",
"base_dir",
".",
"endswith",
"(",
"'/'",
")",
":",
"dir",
"=",
"base_dir",
"else",
":",
"dir",
"=",
"root",
"else",
":",
"if",
"base_dir",
".",
"endswith",
"(",
"'/'",
")",
":",
"dir",
"=",
"base_dir",
"if",
"dir",
"and",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"dir",
")",
":",
"os",
".",
"makedirs",
"(",
"dir",
")"
] |
Ensure that the output directory base_dir exists.
|
[
"Ensure",
"that",
"the",
"output",
"directory",
"base_dir",
"exists",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py#L142-L156
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py
|
__detect_cl_tool
|
def __detect_cl_tool(env, chainkey, cdict, cpriority=None):
    """
    Helper function, picks a command line tool from the list
    and initializes its environment variables.
    """
    if env.get(chainkey,'') == '':
        clpath = ''
        if cpriority is None:
            cpriority = cdict.keys()
        for cltool in cpriority:
            if __debug_tool_location:
                print("DocBook: Looking for %s"%cltool)
            clpath = env.WhereIs(cltool)
            if clpath:
                if __debug_tool_location:
                    print("DocBook: Found:%s"%cltool)
                env[chainkey] = clpath
                if not env[chainkey + 'COM']:
                    env[chainkey + 'COM'] = cdict[cltool]
                break
|
python
|
def __detect_cl_tool(env, chainkey, cdict, cpriority=None):
    """
    Helper function, picks a command line tool from the list
    and initializes its environment variables.
    """
    if env.get(chainkey,'') == '':
        clpath = ''
        if cpriority is None:
            cpriority = cdict.keys()
        for cltool in cpriority:
            if __debug_tool_location:
                print("DocBook: Looking for %s"%cltool)
            clpath = env.WhereIs(cltool)
            if clpath:
                if __debug_tool_location:
                    print("DocBook: Found:%s"%cltool)
                env[chainkey] = clpath
                if not env[chainkey + 'COM']:
                    env[chainkey + 'COM'] = cdict[cltool]
                break
|
[
"def",
"__detect_cl_tool",
"(",
"env",
",",
"chainkey",
",",
"cdict",
",",
"cpriority",
"=",
"None",
")",
":",
"if",
"env",
".",
"get",
"(",
"chainkey",
",",
"''",
")",
"==",
"''",
":",
"clpath",
"=",
"''",
"if",
"cpriority",
"is",
"None",
":",
"cpriority",
"=",
"cdict",
".",
"keys",
"(",
")",
"for",
"cltool",
"in",
"cpriority",
":",
"if",
"__debug_tool_location",
":",
"print",
"(",
"\"DocBook: Looking for %s\"",
"%",
"cltool",
")",
"clpath",
"=",
"env",
".",
"WhereIs",
"(",
"cltool",
")",
"if",
"clpath",
":",
"if",
"__debug_tool_location",
":",
"print",
"(",
"\"DocBook: Found:%s\"",
"%",
"cltool",
")",
"env",
"[",
"chainkey",
"]",
"=",
"clpath",
"if",
"not",
"env",
"[",
"chainkey",
"+",
"'COM'",
"]",
":",
"env",
"[",
"chainkey",
"+",
"'COM'",
"]",
"=",
"cdict",
"[",
"cltool",
"]",
"break"
] |
Helper function, picks a command line tool from the list
and initializes its environment variables.
|
[
"Helper",
"function",
"picks",
"a",
"command",
"line",
"tool",
"from",
"the",
"list",
"and",
"initializes",
"its",
"environment",
"variables",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py#L176-L196
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py
|
_detect
|
def _detect(env):
    """
    Detect all the command line tools that we might need for creating
    the requested output formats.
    """
    global prefer_xsltproc
    if env.get('DOCBOOK_PREFER_XSLTPROC',''):
        prefer_xsltproc = True
    if ((not has_libxml2 and not has_lxml) or (prefer_xsltproc)):
        # Try to find the XSLT processors
        __detect_cl_tool(env, 'DOCBOOK_XSLTPROC', xsltproc_com, xsltproc_com_priority)
        __detect_cl_tool(env, 'DOCBOOK_XMLLINT', xmllint_com)
    __detect_cl_tool(env, 'DOCBOOK_FOP', fop_com, ['fop','xep','jw'])
|
python
|
def _detect(env):
    """
    Detect all the command line tools that we might need for creating
    the requested output formats.
    """
    global prefer_xsltproc
    if env.get('DOCBOOK_PREFER_XSLTPROC',''):
        prefer_xsltproc = True
    if ((not has_libxml2 and not has_lxml) or (prefer_xsltproc)):
        # Try to find the XSLT processors
        __detect_cl_tool(env, 'DOCBOOK_XSLTPROC', xsltproc_com, xsltproc_com_priority)
        __detect_cl_tool(env, 'DOCBOOK_XMLLINT', xmllint_com)
    __detect_cl_tool(env, 'DOCBOOK_FOP', fop_com, ['fop','xep','jw'])
|
[
"def",
"_detect",
"(",
"env",
")",
":",
"global",
"prefer_xsltproc",
"if",
"env",
".",
"get",
"(",
"'DOCBOOK_PREFER_XSLTPROC'",
",",
"''",
")",
":",
"prefer_xsltproc",
"=",
"True",
"if",
"(",
"(",
"not",
"has_libxml2",
"and",
"not",
"has_lxml",
")",
"or",
"(",
"prefer_xsltproc",
")",
")",
":",
"# Try to find the XSLT processors",
"__detect_cl_tool",
"(",
"env",
",",
"'DOCBOOK_XSLTPROC'",
",",
"xsltproc_com",
",",
"xsltproc_com_priority",
")",
"__detect_cl_tool",
"(",
"env",
",",
"'DOCBOOK_XMLLINT'",
",",
"xmllint_com",
")",
"__detect_cl_tool",
"(",
"env",
",",
"'DOCBOOK_FOP'",
",",
"fop_com",
",",
"[",
"'fop'",
",",
"'xep'",
",",
"'jw'",
"]",
")"
] |
Detect all the command line tools that we might need for creating
the requested output formats.
|
[
"Detect",
"all",
"the",
"command",
"line",
"tools",
"that",
"we",
"might",
"need",
"for",
"creating",
"the",
"requested",
"output",
"formats",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py#L198-L213
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py
|
__xml_scan
|
def __xml_scan(node, env, path, arg):
    """ Simple XML file scanner, detecting local images and XIncludes as implicit dependencies. """
    # Does the node exist yet?
    if not os.path.isfile(str(node)):
        return []
    if env.get('DOCBOOK_SCANENT',''):
        # Use simple pattern matching for system entities..., no support
        # for recursion yet.
        contents = node.get_text_contents()
        return sentity_re.findall(contents)
    xsl_file = os.path.join(scriptpath,'utils','xmldepend.xsl')
    if not has_libxml2 or prefer_xsltproc:
        if has_lxml and not prefer_xsltproc:
            from lxml import etree
            xsl_tree = etree.parse(xsl_file)
            doc = etree.parse(str(node))
            result = doc.xslt(xsl_tree)
            depfiles = [x.strip() for x in str(result).splitlines() if x.strip() != "" and not x.startswith("<?xml ")]
            return depfiles
        else:
            # Try to call xsltproc
            xsltproc = env.subst("$DOCBOOK_XSLTPROC")
            if xsltproc and xsltproc.endswith('xsltproc'):
                result = env.backtick(' '.join([xsltproc, xsl_file, str(node)]))
                depfiles = [x.strip() for x in str(result).splitlines() if x.strip() != "" and not x.startswith("<?xml ")]
                return depfiles
            else:
                # Use simple pattern matching, there is currently no support
                # for xi:includes...
                contents = node.get_text_contents()
                return include_re.findall(contents)
    styledoc = libxml2.parseFile(xsl_file)
    style = libxslt.parseStylesheetDoc(styledoc)
    doc = libxml2.readFile(str(node), None, libxml2.XML_PARSE_NOENT)
    result = style.applyStylesheet(doc, None)
    depfiles = []
    for x in str(result).splitlines():
        if x.strip() != "" and not x.startswith("<?xml "):
            depfiles.extend(x.strip().split())
    style.freeStylesheet()
    doc.freeDoc()
    result.freeDoc()
    return depfiles
|
python
|
def __xml_scan(node, env, path, arg):
    """ Simple XML file scanner, detecting local images and XIncludes as implicit dependencies. """
    # Does the node exist yet?
    if not os.path.isfile(str(node)):
        return []
    if env.get('DOCBOOK_SCANENT',''):
        # Use simple pattern matching for system entities..., no support
        # for recursion yet.
        contents = node.get_text_contents()
        return sentity_re.findall(contents)
    xsl_file = os.path.join(scriptpath,'utils','xmldepend.xsl')
    if not has_libxml2 or prefer_xsltproc:
        if has_lxml and not prefer_xsltproc:
            from lxml import etree
            xsl_tree = etree.parse(xsl_file)
            doc = etree.parse(str(node))
            result = doc.xslt(xsl_tree)
            depfiles = [x.strip() for x in str(result).splitlines() if x.strip() != "" and not x.startswith("<?xml ")]
            return depfiles
        else:
            # Try to call xsltproc
            xsltproc = env.subst("$DOCBOOK_XSLTPROC")
            if xsltproc and xsltproc.endswith('xsltproc'):
                result = env.backtick(' '.join([xsltproc, xsl_file, str(node)]))
                depfiles = [x.strip() for x in str(result).splitlines() if x.strip() != "" and not x.startswith("<?xml ")]
                return depfiles
            else:
                # Use simple pattern matching, there is currently no support
                # for xi:includes...
                contents = node.get_text_contents()
                return include_re.findall(contents)
    styledoc = libxml2.parseFile(xsl_file)
    style = libxslt.parseStylesheetDoc(styledoc)
    doc = libxml2.readFile(str(node), None, libxml2.XML_PARSE_NOENT)
    result = style.applyStylesheet(doc, None)
    depfiles = []
    for x in str(result).splitlines():
        if x.strip() != "" and not x.startswith("<?xml "):
            depfiles.extend(x.strip().split())
    style.freeStylesheet()
    doc.freeDoc()
    result.freeDoc()
    return depfiles
|
[
"def",
"__xml_scan",
"(",
"node",
",",
"env",
",",
"path",
",",
"arg",
")",
":",
"# Does the node exist yet?",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"str",
"(",
"node",
")",
")",
":",
"return",
"[",
"]",
"if",
"env",
".",
"get",
"(",
"'DOCBOOK_SCANENT'",
",",
"''",
")",
":",
"# Use simple pattern matching for system entities..., no support ",
"# for recursion yet.",
"contents",
"=",
"node",
".",
"get_text_contents",
"(",
")",
"return",
"sentity_re",
".",
"findall",
"(",
"contents",
")",
"xsl_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"scriptpath",
",",
"'utils'",
",",
"'xmldepend.xsl'",
")",
"if",
"not",
"has_libxml2",
"or",
"prefer_xsltproc",
":",
"if",
"has_lxml",
"and",
"not",
"prefer_xsltproc",
":",
"from",
"lxml",
"import",
"etree",
"xsl_tree",
"=",
"etree",
".",
"parse",
"(",
"xsl_file",
")",
"doc",
"=",
"etree",
".",
"parse",
"(",
"str",
"(",
"node",
")",
")",
"result",
"=",
"doc",
".",
"xslt",
"(",
"xsl_tree",
")",
"depfiles",
"=",
"[",
"x",
".",
"strip",
"(",
")",
"for",
"x",
"in",
"str",
"(",
"result",
")",
".",
"splitlines",
"(",
")",
"if",
"x",
".",
"strip",
"(",
")",
"!=",
"\"\"",
"and",
"not",
"x",
".",
"startswith",
"(",
"\"<?xml \"",
")",
"]",
"return",
"depfiles",
"else",
":",
"# Try to call xsltproc",
"xsltproc",
"=",
"env",
".",
"subst",
"(",
"\"$DOCBOOK_XSLTPROC\"",
")",
"if",
"xsltproc",
"and",
"xsltproc",
".",
"endswith",
"(",
"'xsltproc'",
")",
":",
"result",
"=",
"env",
".",
"backtick",
"(",
"' '",
".",
"join",
"(",
"[",
"xsltproc",
",",
"xsl_file",
",",
"str",
"(",
"node",
")",
"]",
")",
")",
"depfiles",
"=",
"[",
"x",
".",
"strip",
"(",
")",
"for",
"x",
"in",
"str",
"(",
"result",
")",
".",
"splitlines",
"(",
")",
"if",
"x",
".",
"strip",
"(",
")",
"!=",
"\"\"",
"and",
"not",
"x",
".",
"startswith",
"(",
"\"<?xml \"",
")",
"]",
"return",
"depfiles",
"else",
":",
"# Use simple pattern matching, there is currently no support",
"# for xi:includes...",
"contents",
"=",
"node",
".",
"get_text_contents",
"(",
")",
"return",
"include_re",
".",
"findall",
"(",
"contents",
")",
"styledoc",
"=",
"libxml2",
".",
"parseFile",
"(",
"xsl_file",
")",
"style",
"=",
"libxslt",
".",
"parseStylesheetDoc",
"(",
"styledoc",
")",
"doc",
"=",
"libxml2",
".",
"readFile",
"(",
"str",
"(",
"node",
")",
",",
"None",
",",
"libxml2",
".",
"XML_PARSE_NOENT",
")",
"result",
"=",
"style",
".",
"applyStylesheet",
"(",
"doc",
",",
"None",
")",
"depfiles",
"=",
"[",
"]",
"for",
"x",
"in",
"str",
"(",
"result",
")",
".",
"splitlines",
"(",
")",
":",
"if",
"x",
".",
"strip",
"(",
")",
"!=",
"\"\"",
"and",
"not",
"x",
".",
"startswith",
"(",
"\"<?xml \"",
")",
":",
"depfiles",
".",
"extend",
"(",
"x",
".",
"strip",
"(",
")",
".",
"split",
"(",
")",
")",
"style",
".",
"freeStylesheet",
"(",
")",
"doc",
".",
"freeDoc",
"(",
")",
"result",
".",
"freeDoc",
"(",
")",
"return",
"depfiles"
] |
Simple XML file scanner, detecting local images and XIncludes as implicit dependencies.
|
[
"Simple",
"XML",
"file",
"scanner",
"detecting",
"local",
"images",
"and",
"XIncludes",
"as",
"implicit",
"dependencies",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py#L221-L272
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py
|
__xinclude_libxml2
|
def __xinclude_libxml2(target, source, env):
    """
    Resolving XIncludes, using the libxml2 module.
    """
    doc = libxml2.readFile(str(source[0]), None, libxml2.XML_PARSE_NOENT)
    doc.xincludeProcessFlags(libxml2.XML_PARSE_NOENT)
    doc.saveFile(str(target[0]))
    doc.freeDoc()
    return None
|
python
|
def __xinclude_libxml2(target, source, env):
    """
    Resolving XIncludes, using the libxml2 module.
    """
    doc = libxml2.readFile(str(source[0]), None, libxml2.XML_PARSE_NOENT)
    doc.xincludeProcessFlags(libxml2.XML_PARSE_NOENT)
    doc.saveFile(str(target[0]))
    doc.freeDoc()
    return None
|
[
"def",
"__xinclude_libxml2",
"(",
"target",
",",
"source",
",",
"env",
")",
":",
"doc",
"=",
"libxml2",
".",
"readFile",
"(",
"str",
"(",
"source",
"[",
"0",
"]",
")",
",",
"None",
",",
"libxml2",
".",
"XML_PARSE_NOENT",
")",
"doc",
".",
"xincludeProcessFlags",
"(",
"libxml2",
".",
"XML_PARSE_NOENT",
")",
"doc",
".",
"saveFile",
"(",
"str",
"(",
"target",
"[",
"0",
"]",
")",
")",
"doc",
".",
"freeDoc",
"(",
")",
"return",
"None"
] |
Resolving XIncludes, using the libxml2 module.
|
[
"Resolving",
"XIncludes",
"using",
"the",
"libxml2",
"module",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py#L361-L370
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py
|
__xinclude_lxml
|
def __xinclude_lxml(target, source, env):
    """
    Resolving XIncludes, using the lxml module.
    """
    from lxml import etree
    doc = etree.parse(str(source[0]))
    doc.xinclude()
    try:
        doc.write(str(target[0]), xml_declaration=True,
                  encoding="UTF-8", pretty_print=True)
    except:
        pass
    return None
|
python
|
def __xinclude_lxml(target, source, env):
    """
    Resolving XIncludes, using the lxml module.
    """
    from lxml import etree
    doc = etree.parse(str(source[0]))
    doc.xinclude()
    try:
        doc.write(str(target[0]), xml_declaration=True,
                  encoding="UTF-8", pretty_print=True)
    except:
        pass
    return None
|
[
"def",
"__xinclude_lxml",
"(",
"target",
",",
"source",
",",
"env",
")",
":",
"from",
"lxml",
"import",
"etree",
"doc",
"=",
"etree",
".",
"parse",
"(",
"str",
"(",
"source",
"[",
"0",
"]",
")",
")",
"doc",
".",
"xinclude",
"(",
")",
"try",
":",
"doc",
".",
"write",
"(",
"str",
"(",
"target",
"[",
"0",
"]",
")",
",",
"xml_declaration",
"=",
"True",
",",
"encoding",
"=",
"\"UTF-8\"",
",",
"pretty_print",
"=",
"True",
")",
"except",
":",
"pass",
"return",
"None"
] |
Resolving XIncludes, using the lxml module.
|
[
"Resolving",
"XIncludes",
"using",
"the",
"lxml",
"module",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py#L372-L386
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py
|
DocbookHtml
|
def DocbookHtml(env, target, source=None, *args, **kw):
    """
    A pseudo-Builder, providing a Docbook toolchain for HTML output.
    """
    # Init list of targets/sources
    target, source = __extend_targets_sources(target, source)
    # Init XSL stylesheet
    __init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_HTML', ['html','docbook.xsl'])
    # Setup builder
    __builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
    # Create targets
    result = []
    for t,s in zip(target,source):
        r = __builder.__call__(env, __ensure_suffix(t,'.html'), s, **kw)
        env.Depends(r, kw['DOCBOOK_XSL'])
        result.extend(r)
    return result
|
python
|
def DocbookHtml(env, target, source=None, *args, **kw):
    """
    A pseudo-Builder, providing a Docbook toolchain for HTML output.
    """
    # Init list of targets/sources
    target, source = __extend_targets_sources(target, source)
    # Init XSL stylesheet
    __init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_HTML', ['html','docbook.xsl'])
    # Setup builder
    __builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
    # Create targets
    result = []
    for t,s in zip(target,source):
        r = __builder.__call__(env, __ensure_suffix(t,'.html'), s, **kw)
        env.Depends(r, kw['DOCBOOK_XSL'])
        result.extend(r)
    return result
|
[
"def",
"DocbookHtml",
"(",
"env",
",",
"target",
",",
"source",
"=",
"None",
",",
"*",
"args",
",",
"*",
"*",
"kw",
")",
":",
"# Init list of targets/sources",
"target",
",",
"source",
"=",
"__extend_targets_sources",
"(",
"target",
",",
"source",
")",
"# Init XSL stylesheet",
"__init_xsl_stylesheet",
"(",
"kw",
",",
"env",
",",
"'$DOCBOOK_DEFAULT_XSL_HTML'",
",",
"[",
"'html'",
",",
"'docbook.xsl'",
"]",
")",
"# Setup builder",
"__builder",
"=",
"__select_builder",
"(",
"__lxml_builder",
",",
"__libxml2_builder",
",",
"__xsltproc_builder",
")",
"# Create targets",
"result",
"=",
"[",
"]",
"for",
"t",
",",
"s",
"in",
"zip",
"(",
"target",
",",
"source",
")",
":",
"r",
"=",
"__builder",
".",
"__call__",
"(",
"env",
",",
"__ensure_suffix",
"(",
"t",
",",
"'.html'",
")",
",",
"s",
",",
"*",
"*",
"kw",
")",
"env",
".",
"Depends",
"(",
"r",
",",
"kw",
"[",
"'DOCBOOK_XSL'",
"]",
")",
"result",
".",
"extend",
"(",
"r",
")",
"return",
"result"
] |
A pseudo-Builder, providing a Docbook toolchain for HTML output.
|
[
"A",
"pseudo",
"-",
"Builder",
"providing",
"a",
"Docbook",
"toolchain",
"for",
"HTML",
"output",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py#L550-L570
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py
|
DocbookMan
|
def DocbookMan(env, target, source=None, *args, **kw):
    """
    A pseudo-Builder, providing a Docbook toolchain for Man page output.
    """
    # Init list of targets/sources
    target, source = __extend_targets_sources(target, source)
    # Init XSL stylesheet
    __init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_MAN', ['manpages','docbook.xsl'])
    # Setup builder
    __builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
    # Create targets
    result = []
    for t,s in zip(target,source):
        volnum = "1"
        outfiles = []
        srcfile = __ensure_suffix(str(s),'.xml')
        if os.path.isfile(srcfile):
            try:
                import xml.dom.minidom
                dom = xml.dom.minidom.parse(__ensure_suffix(str(s),'.xml'))
                # Extract volume number, default is 1
                for node in dom.getElementsByTagName('refmeta'):
                    for vol in node.getElementsByTagName('manvolnum'):
                        volnum = __get_xml_text(vol)
                # Extract output filenames
                for node in dom.getElementsByTagName('refnamediv'):
                    for ref in node.getElementsByTagName('refname'):
                        outfiles.append(__get_xml_text(ref)+'.'+volnum)
            except:
                # Use simple regex parsing
                f = open(__ensure_suffix(str(s),'.xml'), 'r')
                content = f.read()
                f.close()
                for m in re_manvolnum.finditer(content):
                    volnum = m.group(1)
                for m in re_refname.finditer(content):
                    outfiles.append(m.group(1)+'.'+volnum)
            if not outfiles:
                # Use stem of the source file
                spath = str(s)
                if not spath.endswith('.xml'):
                    outfiles.append(spath+'.'+volnum)
                else:
                    stem, ext = os.path.splitext(spath)
                    outfiles.append(stem+'.'+volnum)
        else:
            # We have to completely rely on the given target name
            outfiles.append(t)
        __builder.__call__(env, outfiles[0], s, **kw)
        env.Depends(outfiles[0], kw['DOCBOOK_XSL'])
        result.append(outfiles[0])
        if len(outfiles) > 1:
            env.Clean(outfiles[0], outfiles[1:])
    return result
|
python
|
def DocbookMan(env, target, source=None, *args, **kw):
    """
    A pseudo-Builder, providing a Docbook toolchain for Man page output.
    """
    # Init list of targets/sources
    target, source = __extend_targets_sources(target, source)
    # Init XSL stylesheet
    __init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_MAN', ['manpages','docbook.xsl'])
    # Setup builder
    __builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
    # Create targets
    result = []
    for t,s in zip(target,source):
        volnum = "1"
        outfiles = []
        srcfile = __ensure_suffix(str(s),'.xml')
        if os.path.isfile(srcfile):
            try:
                import xml.dom.minidom
                dom = xml.dom.minidom.parse(__ensure_suffix(str(s),'.xml'))
                # Extract volume number, default is 1
                for node in dom.getElementsByTagName('refmeta'):
                    for vol in node.getElementsByTagName('manvolnum'):
                        volnum = __get_xml_text(vol)
                # Extract output filenames
                for node in dom.getElementsByTagName('refnamediv'):
                    for ref in node.getElementsByTagName('refname'):
                        outfiles.append(__get_xml_text(ref)+'.'+volnum)
            except:
                # Use simple regex parsing
                f = open(__ensure_suffix(str(s),'.xml'), 'r')
                content = f.read()
                f.close()
                for m in re_manvolnum.finditer(content):
                    volnum = m.group(1)
                for m in re_refname.finditer(content):
                    outfiles.append(m.group(1)+'.'+volnum)
            if not outfiles:
                # Use stem of the source file
                spath = str(s)
                if not spath.endswith('.xml'):
                    outfiles.append(spath+'.'+volnum)
                else:
                    stem, ext = os.path.splitext(spath)
                    outfiles.append(stem+'.'+volnum)
        else:
            # We have to completely rely on the given target name
            outfiles.append(t)
        __builder.__call__(env, outfiles[0], s, **kw)
        env.Depends(outfiles[0], kw['DOCBOOK_XSL'])
        result.append(outfiles[0])
        if len(outfiles) > 1:
            env.Clean(outfiles[0], outfiles[1:])
    return result
|
[
"def",
"DocbookMan",
"(",
"env",
",",
"target",
",",
"source",
"=",
"None",
",",
"*",
"args",
",",
"*",
"*",
"kw",
")",
":",
"# Init list of targets/sources",
"target",
",",
"source",
"=",
"__extend_targets_sources",
"(",
"target",
",",
"source",
")",
"# Init XSL stylesheet",
"__init_xsl_stylesheet",
"(",
"kw",
",",
"env",
",",
"'$DOCBOOK_DEFAULT_XSL_MAN'",
",",
"[",
"'manpages'",
",",
"'docbook.xsl'",
"]",
")",
"# Setup builder",
"__builder",
"=",
"__select_builder",
"(",
"__lxml_builder",
",",
"__libxml2_builder",
",",
"__xsltproc_builder",
")",
"# Create targets",
"result",
"=",
"[",
"]",
"for",
"t",
",",
"s",
"in",
"zip",
"(",
"target",
",",
"source",
")",
":",
"volnum",
"=",
"\"1\"",
"outfiles",
"=",
"[",
"]",
"srcfile",
"=",
"__ensure_suffix",
"(",
"str",
"(",
"s",
")",
",",
"'.xml'",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"srcfile",
")",
":",
"try",
":",
"import",
"xml",
".",
"dom",
".",
"minidom",
"dom",
"=",
"xml",
".",
"dom",
".",
"minidom",
".",
"parse",
"(",
"__ensure_suffix",
"(",
"str",
"(",
"s",
")",
",",
"'.xml'",
")",
")",
"# Extract volume number, default is 1",
"for",
"node",
"in",
"dom",
".",
"getElementsByTagName",
"(",
"'refmeta'",
")",
":",
"for",
"vol",
"in",
"node",
".",
"getElementsByTagName",
"(",
"'manvolnum'",
")",
":",
"volnum",
"=",
"__get_xml_text",
"(",
"vol",
")",
"# Extract output filenames",
"for",
"node",
"in",
"dom",
".",
"getElementsByTagName",
"(",
"'refnamediv'",
")",
":",
"for",
"ref",
"in",
"node",
".",
"getElementsByTagName",
"(",
"'refname'",
")",
":",
"outfiles",
".",
"append",
"(",
"__get_xml_text",
"(",
"ref",
")",
"+",
"'.'",
"+",
"volnum",
")",
"except",
":",
"# Use simple regex parsing ",
"f",
"=",
"open",
"(",
"__ensure_suffix",
"(",
"str",
"(",
"s",
")",
",",
"'.xml'",
")",
",",
"'r'",
")",
"content",
"=",
"f",
".",
"read",
"(",
")",
"f",
".",
"close",
"(",
")",
"for",
"m",
"in",
"re_manvolnum",
".",
"finditer",
"(",
"content",
")",
":",
"volnum",
"=",
"m",
".",
"group",
"(",
"1",
")",
"for",
"m",
"in",
"re_refname",
".",
"finditer",
"(",
"content",
")",
":",
"outfiles",
".",
"append",
"(",
"m",
".",
"group",
"(",
"1",
")",
"+",
"'.'",
"+",
"volnum",
")",
"if",
"not",
"outfiles",
":",
"# Use stem of the source file",
"spath",
"=",
"str",
"(",
"s",
")",
"if",
"not",
"spath",
".",
"endswith",
"(",
"'.xml'",
")",
":",
"outfiles",
".",
"append",
"(",
"spath",
"+",
"'.'",
"+",
"volnum",
")",
"else",
":",
"stem",
",",
"ext",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"spath",
")",
"outfiles",
".",
"append",
"(",
"stem",
"+",
"'.'",
"+",
"volnum",
")",
"else",
":",
"# We have to completely rely on the given target name",
"outfiles",
".",
"append",
"(",
"t",
")",
"__builder",
".",
"__call__",
"(",
"env",
",",
"outfiles",
"[",
"0",
"]",
",",
"s",
",",
"*",
"*",
"kw",
")",
"env",
".",
"Depends",
"(",
"outfiles",
"[",
"0",
"]",
",",
"kw",
"[",
"'DOCBOOK_XSL'",
"]",
")",
"result",
".",
"append",
"(",
"outfiles",
"[",
"0",
"]",
")",
"if",
"len",
"(",
"outfiles",
")",
">",
"1",
":",
"env",
".",
"Clean",
"(",
"outfiles",
"[",
"0",
"]",
",",
"outfiles",
"[",
"1",
":",
"]",
")",
"return",
"result"
] |
A pseudo-Builder, providing a Docbook toolchain for Man page output.
|
[
"A",
"pseudo",
"-",
"Builder",
"providing",
"a",
"Docbook",
"toolchain",
"for",
"Man",
"page",
"output",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py#L666-L731
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py
|
DocbookSlidesPdf
|
def DocbookSlidesPdf(env, target, source=None, *args, **kw):
    """
    A pseudo-Builder, providing a Docbook toolchain for PDF slides output.
    """
    # Init list of targets/sources
    target, source = __extend_targets_sources(target, source)
    # Init XSL stylesheet
    __init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_SLIDESPDF', ['slides','fo','plain.xsl'])
    # Setup builder
    __builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
    # Create targets
    result = []
    for t,s in zip(target,source):
        t, stem = __ensure_suffix_stem(t, '.pdf')
        xsl = __builder.__call__(env, stem+'.fo', s, **kw)
        env.Depends(xsl, kw['DOCBOOK_XSL'])
        result.extend(xsl)
        result.extend(__fop_builder.__call__(env, t, xsl, **kw))
    return result
|
python
|
def DocbookSlidesPdf(env, target, source=None, *args, **kw):
    """
    A pseudo-Builder, providing a Docbook toolchain for PDF slides output.
    """
    # Init list of targets/sources
    target, source = __extend_targets_sources(target, source)
    # Init XSL stylesheet
    __init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_SLIDESPDF', ['slides','fo','plain.xsl'])
    # Setup builder
    __builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
    # Create targets
    result = []
    for t,s in zip(target,source):
        t, stem = __ensure_suffix_stem(t, '.pdf')
        xsl = __builder.__call__(env, stem+'.fo', s, **kw)
        env.Depends(xsl, kw['DOCBOOK_XSL'])
        result.extend(xsl)
        result.extend(__fop_builder.__call__(env, t, xsl, **kw))
    return result
|
[
"def",
"DocbookSlidesPdf",
"(",
"env",
",",
"target",
",",
"source",
"=",
"None",
",",
"*",
"args",
",",
"*",
"*",
"kw",
")",
":",
"# Init list of targets/sources",
"target",
",",
"source",
"=",
"__extend_targets_sources",
"(",
"target",
",",
"source",
")",
"# Init XSL stylesheet",
"__init_xsl_stylesheet",
"(",
"kw",
",",
"env",
",",
"'$DOCBOOK_DEFAULT_XSL_SLIDESPDF'",
",",
"[",
"'slides'",
",",
"'fo'",
",",
"'plain.xsl'",
"]",
")",
"# Setup builder",
"__builder",
"=",
"__select_builder",
"(",
"__lxml_builder",
",",
"__libxml2_builder",
",",
"__xsltproc_builder",
")",
"# Create targets",
"result",
"=",
"[",
"]",
"for",
"t",
",",
"s",
"in",
"zip",
"(",
"target",
",",
"source",
")",
":",
"t",
",",
"stem",
"=",
"__ensure_suffix_stem",
"(",
"t",
",",
"'.pdf'",
")",
"xsl",
"=",
"__builder",
".",
"__call__",
"(",
"env",
",",
"stem",
"+",
"'.fo'",
",",
"s",
",",
"*",
"*",
"kw",
")",
"env",
".",
"Depends",
"(",
"xsl",
",",
"kw",
"[",
"'DOCBOOK_XSL'",
"]",
")",
"result",
".",
"extend",
"(",
"xsl",
")",
"result",
".",
"extend",
"(",
"__fop_builder",
".",
"__call__",
"(",
"env",
",",
"t",
",",
"xsl",
",",
"*",
"*",
"kw",
")",
")",
"return",
"result"
] |
A pseudo-Builder, providing a Docbook toolchain for PDF slides output.
|
[
"A",
"pseudo",
"-",
"Builder",
"providing",
"a",
"Docbook",
"toolchain",
"for",
"PDF",
"slides",
"output",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py#L733-L755
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py
|
DocbookSlidesHtml
|
def DocbookSlidesHtml(env, target, source=None, *args, **kw):
    """
    A pseudo-Builder, providing a Docbook toolchain for HTML slides output.
    """
    # Init list of targets/sources
    if not SCons.Util.is_List(target):
        target = [target]
    if not source:
        source = target
        target = ['index.html']
    elif not SCons.Util.is_List(source):
        source = [source]
    # Init XSL stylesheet
    __init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_SLIDESHTML', ['slides','html','plain.xsl'])
    # Setup builder
    __builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
    # Detect base dir
    base_dir = kw.get('base_dir', '')
    if base_dir:
        __create_output_dir(base_dir)
    # Create targets
    result = []
    r = __builder.__call__(env, __ensure_suffix(str(target[0]), '.html'), source[0], **kw)
    env.Depends(r, kw['DOCBOOK_XSL'])
    result.extend(r)
    # Add supporting files for cleanup
    env.Clean(r, [os.path.join(base_dir, 'toc.html')] +
                 glob.glob(os.path.join(base_dir, 'foil*.html')))
    return result
|
python
|
def DocbookSlidesHtml(env, target, source=None, *args, **kw):
    """
    A pseudo-Builder, providing a Docbook toolchain for HTML slides output.
    """
    # Init list of targets/sources
    if not SCons.Util.is_List(target):
        target = [target]
    if not source:
        source = target
        target = ['index.html']
    elif not SCons.Util.is_List(source):
        source = [source]
    # Init XSL stylesheet
    __init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_SLIDESHTML', ['slides','html','plain.xsl'])
    # Setup builder
    __builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
    # Detect base dir
    base_dir = kw.get('base_dir', '')
    if base_dir:
        __create_output_dir(base_dir)
    # Create targets
    result = []
    r = __builder.__call__(env, __ensure_suffix(str(target[0]), '.html'), source[0], **kw)
    env.Depends(r, kw['DOCBOOK_XSL'])
    result.extend(r)
    # Add supporting files for cleanup
    env.Clean(r, [os.path.join(base_dir, 'toc.html')] +
                 glob.glob(os.path.join(base_dir, 'foil*.html')))
    return result
|
[
"def",
"DocbookSlidesHtml",
"(",
"env",
",",
"target",
",",
"source",
"=",
"None",
",",
"*",
"args",
",",
"*",
"*",
"kw",
")",
":",
"# Init list of targets/sources",
"if",
"not",
"SCons",
".",
"Util",
".",
"is_List",
"(",
"target",
")",
":",
"target",
"=",
"[",
"target",
"]",
"if",
"not",
"source",
":",
"source",
"=",
"target",
"target",
"=",
"[",
"'index.html'",
"]",
"elif",
"not",
"SCons",
".",
"Util",
".",
"is_List",
"(",
"source",
")",
":",
"source",
"=",
"[",
"source",
"]",
"# Init XSL stylesheet",
"__init_xsl_stylesheet",
"(",
"kw",
",",
"env",
",",
"'$DOCBOOK_DEFAULT_XSL_SLIDESHTML'",
",",
"[",
"'slides'",
",",
"'html'",
",",
"'plain.xsl'",
"]",
")",
"# Setup builder",
"__builder",
"=",
"__select_builder",
"(",
"__lxml_builder",
",",
"__libxml2_builder",
",",
"__xsltproc_builder",
")",
"# Detect base dir",
"base_dir",
"=",
"kw",
".",
"get",
"(",
"'base_dir'",
",",
"''",
")",
"if",
"base_dir",
":",
"__create_output_dir",
"(",
"base_dir",
")",
"# Create targets",
"result",
"=",
"[",
"]",
"r",
"=",
"__builder",
".",
"__call__",
"(",
"env",
",",
"__ensure_suffix",
"(",
"str",
"(",
"target",
"[",
"0",
"]",
")",
",",
"'.html'",
")",
",",
"source",
"[",
"0",
"]",
",",
"*",
"*",
"kw",
")",
"env",
".",
"Depends",
"(",
"r",
",",
"kw",
"[",
"'DOCBOOK_XSL'",
"]",
")",
"result",
".",
"extend",
"(",
"r",
")",
"# Add supporting files for cleanup",
"env",
".",
"Clean",
"(",
"r",
",",
"[",
"os",
".",
"path",
".",
"join",
"(",
"base_dir",
",",
"'toc.html'",
")",
"]",
"+",
"glob",
".",
"glob",
"(",
"os",
".",
"path",
".",
"join",
"(",
"base_dir",
",",
"'foil*.html'",
")",
")",
")",
"return",
"result"
] |
A pseudo-Builder, providing a Docbook toolchain for HTML slides output.
|
[
"A",
"pseudo",
"-",
"Builder",
"providing",
"a",
"Docbook",
"toolchain",
"for",
"HTML",
"slides",
"output",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py#L757-L790
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py
|
DocbookXInclude
|
def DocbookXInclude(env, target, source, *args, **kw):
    """
    A pseudo-Builder, for resolving XIncludes in a separate processing step.
    """
    # Init list of targets/sources
    target, source = __extend_targets_sources(target, source)
    # Setup builder
    __builder = __select_builder(__xinclude_lxml_builder,__xinclude_libxml2_builder,__xmllint_builder)
    # Create targets
    result = []
    for t,s in zip(target,source):
        result.extend(__builder.__call__(env, t, s, **kw))
    return result
|
python
|
def DocbookXInclude(env, target, source, *args, **kw):
    """
    A pseudo-Builder, for resolving XIncludes in a separate processing step.
    """
    # Init list of targets/sources
    target, source = __extend_targets_sources(target, source)
    # Setup builder
    __builder = __select_builder(__xinclude_lxml_builder,__xinclude_libxml2_builder,__xmllint_builder)
    # Create targets
    result = []
    for t,s in zip(target,source):
        result.extend(__builder.__call__(env, t, s, **kw))
    return result
|
[
"def",
"DocbookXInclude",
"(",
"env",
",",
"target",
",",
"source",
",",
"*",
"args",
",",
"*",
"*",
"kw",
")",
":",
"# Init list of targets/sources",
"target",
",",
"source",
"=",
"__extend_targets_sources",
"(",
"target",
",",
"source",
")",
"# Setup builder",
"__builder",
"=",
"__select_builder",
"(",
"__xinclude_lxml_builder",
",",
"__xinclude_libxml2_builder",
",",
"__xmllint_builder",
")",
"# Create targets",
"result",
"=",
"[",
"]",
"for",
"t",
",",
"s",
"in",
"zip",
"(",
"target",
",",
"source",
")",
":",
"result",
".",
"extend",
"(",
"__builder",
".",
"__call__",
"(",
"env",
",",
"t",
",",
"s",
",",
"*",
"*",
"kw",
")",
")",
"return",
"result"
] |
A pseudo-Builder, for resolving XIncludes in a separate processing step.
|
[
"A",
"pseudo",
"-",
"Builder",
"for",
"resolving",
"XIncludes",
"in",
"a",
"separate",
"processing",
"step",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py#L792-L807
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py
|
DocbookXslt
|
def DocbookXslt(env, target, source=None, *args, **kw):
    """
    A pseudo-Builder, applying a simple XSL transformation to the input file.
    """
    # Init list of targets/sources
    target, source = __extend_targets_sources(target, source)
    # Init XSL stylesheet
    kw['DOCBOOK_XSL'] = kw.get('xsl', 'transform.xsl')
    # Setup builder
    __builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
    # Create targets
    result = []
    for t,s in zip(target,source):
        r = __builder.__call__(env, t, s, **kw)
        env.Depends(r, kw['DOCBOOK_XSL'])
        result.extend(r)
    return result
|
python
|
def DocbookXslt(env, target, source=None, *args, **kw):
    """
    A pseudo-Builder, applying a simple XSL transformation to the input file.
    """
    # Init list of targets/sources
    target, source = __extend_targets_sources(target, source)
    # Init XSL stylesheet
    kw['DOCBOOK_XSL'] = kw.get('xsl', 'transform.xsl')
    # Setup builder
    __builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
    # Create targets
    result = []
    for t,s in zip(target,source):
        r = __builder.__call__(env, t, s, **kw)
        env.Depends(r, kw['DOCBOOK_XSL'])
        result.extend(r)
    return result
|
[
"def",
"DocbookXslt",
"(",
"env",
",",
"target",
",",
"source",
"=",
"None",
",",
"*",
"args",
",",
"*",
"*",
"kw",
")",
":",
"# Init list of targets/sources",
"target",
",",
"source",
"=",
"__extend_targets_sources",
"(",
"target",
",",
"source",
")",
"# Init XSL stylesheet",
"kw",
"[",
"'DOCBOOK_XSL'",
"]",
"=",
"kw",
".",
"get",
"(",
"'xsl'",
",",
"'transform.xsl'",
")",
"# Setup builder",
"__builder",
"=",
"__select_builder",
"(",
"__lxml_builder",
",",
"__libxml2_builder",
",",
"__xsltproc_builder",
")",
"# Create targets",
"result",
"=",
"[",
"]",
"for",
"t",
",",
"s",
"in",
"zip",
"(",
"target",
",",
"source",
")",
":",
"r",
"=",
"__builder",
".",
"__call__",
"(",
"env",
",",
"t",
",",
"s",
",",
"*",
"*",
"kw",
")",
"env",
".",
"Depends",
"(",
"r",
",",
"kw",
"[",
"'DOCBOOK_XSL'",
"]",
")",
"result",
".",
"extend",
"(",
"r",
")",
"return",
"result"
] |
A pseudo-Builder, applying a simple XSL transformation to the input file.
|
[
"A",
"pseudo",
"-",
"Builder",
"applying",
"a",
"simple",
"XSL",
"transformation",
"to",
"the",
"input",
"file",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py#L809-L829
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py
|
generate
|
def generate(env):
    """Add Builders and construction variables for docbook to an Environment."""
    env.SetDefault(
        # Default names for customized XSL stylesheets
        DOCBOOK_DEFAULT_XSL_EPUB = '',
        DOCBOOK_DEFAULT_XSL_HTML = '',
        DOCBOOK_DEFAULT_XSL_HTMLCHUNKED = '',
        DOCBOOK_DEFAULT_XSL_HTMLHELP = '',
        DOCBOOK_DEFAULT_XSL_PDF = '',
        DOCBOOK_DEFAULT_XSL_MAN = '',
        DOCBOOK_DEFAULT_XSL_SLIDESPDF = '',
        DOCBOOK_DEFAULT_XSL_SLIDESHTML = '',
        # Paths to the detected executables
        DOCBOOK_XSLTPROC = '',
        DOCBOOK_XMLLINT = '',
        DOCBOOK_FOP = '',
        # Additional flags for the text processors
        DOCBOOK_XSLTPROCFLAGS = SCons.Util.CLVar(''),
        DOCBOOK_XMLLINTFLAGS = SCons.Util.CLVar(''),
        DOCBOOK_FOPFLAGS = SCons.Util.CLVar(''),
        DOCBOOK_XSLTPROCPARAMS = SCons.Util.CLVar(''),
        # Default command lines for the detected executables
        DOCBOOK_XSLTPROCCOM = xsltproc_com['xsltproc'],
        DOCBOOK_XMLLINTCOM = xmllint_com['xmllint'],
        DOCBOOK_FOPCOM = fop_com['fop'],
        # Screen output for the text processors
        DOCBOOK_XSLTPROCCOMSTR = None,
        DOCBOOK_XMLLINTCOMSTR = None,
        DOCBOOK_FOPCOMSTR = None,
        )
    _detect(env)
    env.AddMethod(DocbookEpub, "DocbookEpub")
    env.AddMethod(DocbookHtml, "DocbookHtml")
    env.AddMethod(DocbookHtmlChunked, "DocbookHtmlChunked")
    env.AddMethod(DocbookHtmlhelp, "DocbookHtmlhelp")
    env.AddMethod(DocbookPdf, "DocbookPdf")
    env.AddMethod(DocbookMan, "DocbookMan")
    env.AddMethod(DocbookSlidesPdf, "DocbookSlidesPdf")
    env.AddMethod(DocbookSlidesHtml, "DocbookSlidesHtml")
    env.AddMethod(DocbookXInclude, "DocbookXInclude")
    env.AddMethod(DocbookXslt, "DocbookXslt")
|
python
|
def generate(env):
    """Add Builders and construction variables for docbook to an Environment."""
    env.SetDefault(
        # Default names for customized XSL stylesheets
        DOCBOOK_DEFAULT_XSL_EPUB = '',
        DOCBOOK_DEFAULT_XSL_HTML = '',
        DOCBOOK_DEFAULT_XSL_HTMLCHUNKED = '',
        DOCBOOK_DEFAULT_XSL_HTMLHELP = '',
        DOCBOOK_DEFAULT_XSL_PDF = '',
        DOCBOOK_DEFAULT_XSL_MAN = '',
        DOCBOOK_DEFAULT_XSL_SLIDESPDF = '',
        DOCBOOK_DEFAULT_XSL_SLIDESHTML = '',
        # Paths to the detected executables
        DOCBOOK_XSLTPROC = '',
        DOCBOOK_XMLLINT = '',
        DOCBOOK_FOP = '',
        # Additional flags for the text processors
        DOCBOOK_XSLTPROCFLAGS = SCons.Util.CLVar(''),
        DOCBOOK_XMLLINTFLAGS = SCons.Util.CLVar(''),
        DOCBOOK_FOPFLAGS = SCons.Util.CLVar(''),
        DOCBOOK_XSLTPROCPARAMS = SCons.Util.CLVar(''),
        # Default command lines for the detected executables
        DOCBOOK_XSLTPROCCOM = xsltproc_com['xsltproc'],
        DOCBOOK_XMLLINTCOM = xmllint_com['xmllint'],
        DOCBOOK_FOPCOM = fop_com['fop'],
        # Screen output for the text processors
        DOCBOOK_XSLTPROCCOMSTR = None,
        DOCBOOK_XMLLINTCOMSTR = None,
        DOCBOOK_FOPCOMSTR = None,
        )
    _detect(env)
    env.AddMethod(DocbookEpub, "DocbookEpub")
    env.AddMethod(DocbookHtml, "DocbookHtml")
    env.AddMethod(DocbookHtmlChunked, "DocbookHtmlChunked")
    env.AddMethod(DocbookHtmlhelp, "DocbookHtmlhelp")
    env.AddMethod(DocbookPdf, "DocbookPdf")
    env.AddMethod(DocbookMan, "DocbookMan")
    env.AddMethod(DocbookSlidesPdf, "DocbookSlidesPdf")
    env.AddMethod(DocbookSlidesHtml, "DocbookSlidesHtml")
    env.AddMethod(DocbookXInclude, "DocbookXInclude")
    env.AddMethod(DocbookXslt, "DocbookXslt")
|
[
"def",
"generate",
"(",
"env",
")",
":",
"env",
".",
"SetDefault",
"(",
"# Default names for customized XSL stylesheets",
"DOCBOOK_DEFAULT_XSL_EPUB",
"=",
"''",
",",
"DOCBOOK_DEFAULT_XSL_HTML",
"=",
"''",
",",
"DOCBOOK_DEFAULT_XSL_HTMLCHUNKED",
"=",
"''",
",",
"DOCBOOK_DEFAULT_XSL_HTMLHELP",
"=",
"''",
",",
"DOCBOOK_DEFAULT_XSL_PDF",
"=",
"''",
",",
"DOCBOOK_DEFAULT_XSL_MAN",
"=",
"''",
",",
"DOCBOOK_DEFAULT_XSL_SLIDESPDF",
"=",
"''",
",",
"DOCBOOK_DEFAULT_XSL_SLIDESHTML",
"=",
"''",
",",
"# Paths to the detected executables",
"DOCBOOK_XSLTPROC",
"=",
"''",
",",
"DOCBOOK_XMLLINT",
"=",
"''",
",",
"DOCBOOK_FOP",
"=",
"''",
",",
"# Additional flags for the text processors",
"DOCBOOK_XSLTPROCFLAGS",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"''",
")",
",",
"DOCBOOK_XMLLINTFLAGS",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"''",
")",
",",
"DOCBOOK_FOPFLAGS",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"''",
")",
",",
"DOCBOOK_XSLTPROCPARAMS",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"''",
")",
",",
"# Default command lines for the detected executables",
"DOCBOOK_XSLTPROCCOM",
"=",
"xsltproc_com",
"[",
"'xsltproc'",
"]",
",",
"DOCBOOK_XMLLINTCOM",
"=",
"xmllint_com",
"[",
"'xmllint'",
"]",
",",
"DOCBOOK_FOPCOM",
"=",
"fop_com",
"[",
"'fop'",
"]",
",",
"# Screen output for the text processors",
"DOCBOOK_XSLTPROCCOMSTR",
"=",
"None",
",",
"DOCBOOK_XMLLINTCOMSTR",
"=",
"None",
",",
"DOCBOOK_FOPCOMSTR",
"=",
"None",
",",
")",
"_detect",
"(",
"env",
")",
"env",
".",
"AddMethod",
"(",
"DocbookEpub",
",",
"\"DocbookEpub\"",
")",
"env",
".",
"AddMethod",
"(",
"DocbookHtml",
",",
"\"DocbookHtml\"",
")",
"env",
".",
"AddMethod",
"(",
"DocbookHtmlChunked",
",",
"\"DocbookHtmlChunked\"",
")",
"env",
".",
"AddMethod",
"(",
"DocbookHtmlhelp",
",",
"\"DocbookHtmlhelp\"",
")",
"env",
".",
"AddMethod",
"(",
"DocbookPdf",
",",
"\"DocbookPdf\"",
")",
"env",
".",
"AddMethod",
"(",
"DocbookMan",
",",
"\"DocbookMan\"",
")",
"env",
".",
"AddMethod",
"(",
"DocbookSlidesPdf",
",",
"\"DocbookSlidesPdf\"",
")",
"env",
".",
"AddMethod",
"(",
"DocbookSlidesHtml",
",",
"\"DocbookSlidesHtml\"",
")",
"env",
".",
"AddMethod",
"(",
"DocbookXInclude",
",",
"\"DocbookXInclude\"",
")",
"env",
".",
"AddMethod",
"(",
"DocbookXslt",
",",
"\"DocbookXslt\"",
")"
] |
Add Builders and construction variables for docbook to an Environment.
|
[
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"docbook",
"to",
"an",
"Environment",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py#L832-L879
|
train
|
iotile/coretools
|
iotilecore/iotile/core/utilities/rcfile.py
|
RCFile.save
|
def save(self):
    """Update the configuration file on disk with the current contents of self.contents.
    Previous contents are overwritten.
    """
    try:
        with open(self.path, "w") as f:
            f.writelines(self.contents)
    except IOError as e:
        raise InternalError("Could not write RCFile contents", name=self.name, path=self.path, error_message=str(e))
|
python
|
def save(self):
    """Update the configuration file on disk with the current contents of self.contents.
    Previous contents are overwritten.
    """
    try:
        with open(self.path, "w") as f:
            f.writelines(self.contents)
    except IOError as e:
        raise InternalError("Could not write RCFile contents", name=self.name, path=self.path, error_message=str(e))
|
[
"def",
"save",
"(",
"self",
")",
":",
"try",
":",
"with",
"open",
"(",
"self",
".",
"path",
",",
"\"w\"",
")",
"as",
"f",
":",
"f",
".",
"writelines",
"(",
"self",
".",
"contents",
")",
"except",
"IOError",
"as",
"e",
":",
"raise",
"InternalError",
"(",
"\"Could not write RCFile contents\"",
",",
"name",
"=",
"self",
".",
"name",
",",
"path",
"=",
"self",
".",
"path",
",",
"error_message",
"=",
"str",
"(",
"e",
")",
")"
] |
Update the configuration file on disk with the current contents of self.contents.
Previous contents are overwritten.
|
[
"Update",
"the",
"configuration",
"file",
"on",
"disk",
"with",
"the",
"current",
"contents",
"of",
"self",
".",
"contents",
".",
"Previous",
"contents",
"are",
"overwritten",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/rcfile.py#L52-L61
|
train
|
iotile/coretools
|
transport_plugins/websocket/iotile_transport_websocket/device_server.py
|
WebSocketDeviceServer.probe_message
|
async def probe_message(self, _message, context):
    """Handle a probe message.
    See :meth:`AbstractDeviceAdapter.probe`.
    """
    client_id = context.user_data
    await self.probe(client_id)
|
python
|
async def probe_message(self, _message, context):
    """Handle a probe message.
    See :meth:`AbstractDeviceAdapter.probe`.
    """
    client_id = context.user_data
    await self.probe(client_id)
|
[
"async",
"def",
"probe_message",
"(",
"self",
",",
"_message",
",",
"context",
")",
":",
"client_id",
"=",
"context",
".",
"user_data",
"await",
"self",
".",
"probe",
"(",
"client_id",
")"
] |
Handle a probe message.
See :meth:`AbstractDeviceAdapter.probe`.
|
[
"Handle",
"a",
"probe",
"message",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/device_server.py#L111-L118
|
train
|
iotile/coretools
|
transport_plugins/websocket/iotile_transport_websocket/device_server.py
|
WebSocketDeviceServer.connect_message
|
async def connect_message(self, message, context):
    """Handle a connect message.
    See :meth:`AbstractDeviceAdapter.connect`.
    """
    conn_string = message.get('connection_string')
    client_id = context.user_data
    await self.connect(client_id, conn_string)
|
python
|
async def connect_message(self, message, context):
    """Handle a connect message.
    See :meth:`AbstractDeviceAdapter.connect`.
    """
    conn_string = message.get('connection_string')
    client_id = context.user_data
    await self.connect(client_id, conn_string)
|
[
"async",
"def",
"connect_message",
"(",
"self",
",",
"message",
",",
"context",
")",
":",
"conn_string",
"=",
"message",
".",
"get",
"(",
"'connection_string'",
")",
"client_id",
"=",
"context",
".",
"user_data",
"await",
"self",
".",
"connect",
"(",
"client_id",
",",
"conn_string",
")"
] |
Handle a connect message.
See :meth:`AbstractDeviceAdapter.connect`.
|
[
"Handle",
"a",
"connect",
"message",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/device_server.py#L120-L128
|
train
|
iotile/coretools
|
transport_plugins/websocket/iotile_transport_websocket/device_server.py
|
WebSocketDeviceServer.disconnect_message
|
async def disconnect_message(self, message, context):
    """Handle a disconnect message.
    See :meth:`AbstractDeviceAdapter.disconnect`.
    """
    conn_string = message.get('connection_string')
    client_id = context.user_data
    await self.disconnect(client_id, conn_string)
|
python
|
async def disconnect_message(self, message, context):
    """Handle a disconnect message.
    See :meth:`AbstractDeviceAdapter.disconnect`.
    """
    conn_string = message.get('connection_string')
    client_id = context.user_data
    await self.disconnect(client_id, conn_string)
|
[
"async",
"def",
"disconnect_message",
"(",
"self",
",",
"message",
",",
"context",
")",
":",
"conn_string",
"=",
"message",
".",
"get",
"(",
"'connection_string'",
")",
"client_id",
"=",
"context",
".",
"user_data",
"await",
"self",
".",
"disconnect",
"(",
"client_id",
",",
"conn_string",
")"
] |
Handle a disconnect message.
See :meth:`AbstractDeviceAdapter.disconnect`.
|
[
"Handle",
"a",
"disconnect",
"message",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/device_server.py#L130-L139
|
train
|
iotile/coretools
|
transport_plugins/websocket/iotile_transport_websocket/device_server.py
|
WebSocketDeviceServer.open_interface_message
|
async def open_interface_message(self, message, context):
    """Handle an open_interface message.
    See :meth:`AbstractDeviceAdapter.open_interface`.
    """
    conn_string = message.get('connection_string')
    interface = message.get('interface')
    client_id = context.user_data
    await self.open_interface(client_id, conn_string, interface)
|
python
|
async def open_interface_message(self, message, context):
    """Handle an open_interface message.
    See :meth:`AbstractDeviceAdapter.open_interface`.
    """
    conn_string = message.get('connection_string')
    interface = message.get('interface')
    client_id = context.user_data
    await self.open_interface(client_id, conn_string, interface)
|
[
"async",
"def",
"open_interface_message",
"(",
"self",
",",
"message",
",",
"context",
")",
":",
"conn_string",
"=",
"message",
".",
"get",
"(",
"'connection_string'",
")",
"interface",
"=",
"message",
".",
"get",
"(",
"'interface'",
")",
"client_id",
"=",
"context",
".",
"user_data",
"await",
"self",
".",
"open_interface",
"(",
"client_id",
",",
"conn_string",
",",
"interface",
")"
] |
Handle an open_interface message.
See :meth:`AbstractDeviceAdapter.open_interface`.
|
[
"Handle",
"an",
"open_interface",
"message",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/device_server.py#L141-L151
|
train
|
iotile/coretools
|
transport_plugins/websocket/iotile_transport_websocket/device_server.py
|
WebSocketDeviceServer.close_interface_message
|
async def close_interface_message(self, message, context):
    """Handle a close_interface message.
    See :meth:`AbstractDeviceAdapter.close_interface`.
    """
    conn_string = message.get('connection_string')
    interface = message.get('interface')
    client_id = context.user_data
    await self.close_interface(client_id, conn_string, interface)
|
python
|
async def close_interface_message(self, message, context):
    """Handle a close_interface message.
    See :meth:`AbstractDeviceAdapter.close_interface`.
    """
    conn_string = message.get('connection_string')
    interface = message.get('interface')
    client_id = context.user_data
    await self.close_interface(client_id, conn_string, interface)
|
[
"async",
"def",
"close_interface_message",
"(",
"self",
",",
"message",
",",
"context",
")",
":",
"conn_string",
"=",
"message",
".",
"get",
"(",
"'connection_string'",
")",
"interface",
"=",
"message",
".",
"get",
"(",
"'interface'",
")",
"client_id",
"=",
"context",
".",
"user_data",
"await",
"self",
".",
"close_interface",
"(",
"client_id",
",",
"conn_string",
",",
"interface",
")"
] |
Handle a close_interface message.
See :meth:`AbstractDeviceAdapter.close_interface`.
|
[
"Handle",
"a",
"close_interface",
"message",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/device_server.py#L153-L163
|
train
|
iotile/coretools
|
transport_plugins/websocket/iotile_transport_websocket/device_server.py
|
WebSocketDeviceServer.send_rpc_message
|
async def send_rpc_message(self, message, context):
    """Handle a send_rpc message.
    See :meth:`AbstractDeviceAdapter.send_rpc`.
    """
    conn_string = message.get('connection_string')
    rpc_id = message.get('rpc_id')
    address = message.get('address')
    timeout = message.get('timeout')
    payload = message.get('payload')
    client_id = context.user_data
    self._logger.debug("Calling RPC %d:0x%04X with payload %s on %s",
                       address, rpc_id, payload, conn_string)
    response = bytes()
    err = None
    try:
        response = await self.send_rpc(client_id, conn_string, address, rpc_id, payload, timeout=timeout)
    except VALID_RPC_EXCEPTIONS as internal_err:
        err = internal_err
    except (DeviceAdapterError, DeviceServerError):
        raise
    except Exception as internal_err:
        self._logger.warning("Unexpected exception calling RPC %d:0x%04x", address, rpc_id, exc_info=True)
        raise ServerCommandError('send_rpc', str(internal_err)) from internal_err
    status, response = pack_rpc_response(response, err)
    return {
        'status': status,
        'payload': base64.b64encode(response)
    }
|
python
|
async def send_rpc_message(self, message, context):
    """Handle a send_rpc message.
    See :meth:`AbstractDeviceAdapter.send_rpc`.
    """
    conn_string = message.get('connection_string')
    rpc_id = message.get('rpc_id')
    address = message.get('address')
    timeout = message.get('timeout')
    payload = message.get('payload')
    client_id = context.user_data
    self._logger.debug("Calling RPC %d:0x%04X with payload %s on %s",
                       address, rpc_id, payload, conn_string)
    response = bytes()
    err = None
    try:
        response = await self.send_rpc(client_id, conn_string, address, rpc_id, payload, timeout=timeout)
    except VALID_RPC_EXCEPTIONS as internal_err:
        err = internal_err
    except (DeviceAdapterError, DeviceServerError):
        raise
    except Exception as internal_err:
        self._logger.warning("Unexpected exception calling RPC %d:0x%04x", address, rpc_id, exc_info=True)
        raise ServerCommandError('send_rpc', str(internal_err)) from internal_err
    status, response = pack_rpc_response(response, err)
    return {
        'status': status,
        'payload': base64.b64encode(response)
    }
|
[
"async",
"def",
"send_rpc_message",
"(",
"self",
",",
"message",
",",
"context",
")",
":",
"conn_string",
"=",
"message",
".",
"get",
"(",
"'connection_string'",
")",
"rpc_id",
"=",
"message",
".",
"get",
"(",
"'rpc_id'",
")",
"address",
"=",
"message",
".",
"get",
"(",
"'address'",
")",
"timeout",
"=",
"message",
".",
"get",
"(",
"'timeout'",
")",
"payload",
"=",
"message",
".",
"get",
"(",
"'payload'",
")",
"client_id",
"=",
"context",
".",
"user_data",
"self",
".",
"_logger",
".",
"debug",
"(",
"\"Calling RPC %d:0x%04X with payload %s on %s\"",
",",
"address",
",",
"rpc_id",
",",
"payload",
",",
"conn_string",
")",
"response",
"=",
"bytes",
"(",
")",
"err",
"=",
"None",
"try",
":",
"response",
"=",
"await",
"self",
".",
"send_rpc",
"(",
"client_id",
",",
"conn_string",
",",
"address",
",",
"rpc_id",
",",
"payload",
",",
"timeout",
"=",
"timeout",
")",
"except",
"VALID_RPC_EXCEPTIONS",
"as",
"internal_err",
":",
"err",
"=",
"internal_err",
"except",
"(",
"DeviceAdapterError",
",",
"DeviceServerError",
")",
":",
"raise",
"except",
"Exception",
"as",
"internal_err",
":",
"self",
".",
"_logger",
".",
"warning",
"(",
"\"Unexpected exception calling RPC %d:0x%04x\"",
",",
"address",
",",
"rpc_id",
",",
"exc_info",
"=",
"True",
")",
"raise",
"ServerCommandError",
"(",
"'send_rpc'",
",",
"str",
"(",
"internal_err",
")",
")",
"from",
"internal_err",
"status",
",",
"response",
"=",
"pack_rpc_response",
"(",
"response",
",",
"err",
")",
"return",
"{",
"'status'",
":",
"status",
",",
"'payload'",
":",
"base64",
".",
"b64encode",
"(",
"response",
")",
"}"
] |
Handle a send_rpc message.
See :meth:`AbstractDeviceAdapter.send_rpc`.
|
[
"Handle",
"a",
"send_rpc",
"message",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/device_server.py#L165-L197
|
train
|
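Because send_rpc_message (record above) base64-encodes the RPC response before returning it, a caller has to decode the 'payload' field and interpret it together with 'status'. The sketch below shows the request fields the handler reads and the decoding step on the reply; the concrete values, the connection-string format, and whether the request payload is raw bytes or pre-encoded on the wire are assumptions, not taken from this record.

import base64

# Hypothetical send_rpc request; keys mirror what send_rpc_message() reads.
request = {
    'connection_string': 'device/0x1234',  # assumed format
    'address': 8,                          # tile address
    'rpc_id': 0x8000,                      # RPC identifier
    'payload': b'',                        # packed RPC arguments (raw bytes assumed)
    'timeout': 1.0,                        # seconds
}

# Shape of the handler's reply: a status code plus a base64 payload.
reply = {'status': 0xC0, 'payload': base64.b64encode(b'\x01\x02')}
raw = base64.b64decode(reply['payload'])   # undo the handler's encoding
print(reply['status'], raw)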
iotile/coretools
|
transport_plugins/websocket/iotile_transport_websocket/device_server.py
|
WebSocketDeviceServer.send_script_message
|
async def send_script_message(self, message, context):
"""Handle a send_script message.
See :meth:`AbstractDeviceAdapter.send_script`.
"""
script = message.get('script')
conn_string = message.get('connection_string')
client_id = context.user_data
if message.get('fragment_count') != 1:
raise DeviceServerError(client_id, conn_string, 'send_script', 'fragmented scripts are not yet supported')
await self.send_script(client_id, conn_string, script)
|
python
|
async def send_script_message(self, message, context):
"""Handle a send_script message.
See :meth:`AbstractDeviceAdapter.send_script`.
"""
script = message.get('script')
conn_string = message.get('connection_string')
client_id = context.user_data
if message.get('fragment_count') != 1:
raise DeviceServerError(client_id, conn_string, 'send_script', 'fragmented scripts are not yet supported')
await self.send_script(client_id, conn_string, script)
|
[
"async",
"def",
"send_script_message",
"(",
"self",
",",
"message",
",",
"context",
")",
":",
"script",
"=",
"message",
".",
"get",
"(",
"'script'",
")",
"conn_string",
"=",
"message",
".",
"get",
"(",
"'connection_string'",
")",
"client_id",
"=",
"context",
".",
"user_data",
"if",
"message",
".",
"get",
"(",
"'fragment_count'",
")",
"!=",
"1",
":",
"raise",
"DeviceServerError",
"(",
"client_id",
",",
"conn_string",
",",
"'send_script'",
",",
"'fragmented scripts are not yet supported'",
")",
"await",
"self",
".",
"send_script",
"(",
"client_id",
",",
"conn_string",
",",
"script",
")"
] |
Handle a send_script message.
See :meth:`AbstractDeviceAdapter.send_script`.
|
[
"Handle",
"a",
"send_script",
"message",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/device_server.py#L199-L212
|
train
|
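send_script_message (record above) rejects any message whose fragment_count is not 1, so the whole script must arrive in a single message. A minimal sketch using the field names the handler reads; the connection string and script bytes are placeholders.

# Hypothetical single-fragment send_script message.
send_script_message = {
    'connection_string': 'device/0x1234',  # assumed format
    'fragment_count': 1,                   # anything else raises DeviceServerError
    'script': b'\xde\xad\xbe\xef',         # raw script bytes (encoding assumed)
}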
iotile/coretools
|
transport_plugins/websocket/iotile_transport_websocket/device_server.py
|
WebSocketDeviceServer.debug_command_message
|
async def debug_command_message(self, message, context):
"""Handle a debug message.
See :meth:`AbstractDeviceAdapter.debug`.
"""
conn_string = message.get('connection_string')
command = message.get('command')
args = message.get('args')
client_id = context.user_data
result = await self.debug(client_id, conn_string, command, args)
return result
|
python
|
async def debug_command_message(self, message, context):
"""Handle a debug message.
See :meth:`AbstractDeviceAdapter.debug`.
"""
conn_string = message.get('connection_string')
command = message.get('command')
args = message.get('args')
client_id = context.user_data
result = await self.debug(client_id, conn_string, command, args)
return result
|
[
"async",
"def",
"debug_command_message",
"(",
"self",
",",
"message",
",",
"context",
")",
":",
"conn_string",
"=",
"message",
".",
"get",
"(",
"'connection_string'",
")",
"command",
"=",
"message",
".",
"get",
"(",
"'command'",
")",
"args",
"=",
"message",
".",
"get",
"(",
"'args'",
")",
"client_id",
"=",
"context",
".",
"user_data",
"result",
"=",
"await",
"self",
".",
"debug",
"(",
"client_id",
",",
"conn_string",
",",
"command",
",",
"args",
")",
"return",
"result"
] |
Handle a debug message.
See :meth:`AbstractDeviceAdapter.debug`.
|
[
"Handle",
"a",
"debug",
"message",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/device_server.py#L214-L226
|
train
|
iotile/coretools
|
transport_plugins/websocket/iotile_transport_websocket/device_server.py
|
WebSocketDeviceServer.client_event_handler
|
async def client_event_handler(self, client_id, event_tuple, user_data):
"""Forward an event on behalf of a client.
This method is called by StandardDeviceServer when it has an event that
should be sent to a client.
Args:
client_id (str): The client that we should send this event to
event_tuple (tuple): The conn_string, event_name and event
object passed from the call to notify_event.
user_data (object): The user data passed in the call to
:meth:`setup_client`.
"""
#TODO: Support sending disconnection events
conn_string, event_name, event = event_tuple
if event_name == 'report':
report = event.serialize()
report['encoded_report'] = base64.b64encode(report['encoded_report'])
msg_payload = dict(connection_string=conn_string, serialized_report=report)
msg_name = OPERATIONS.NOTIFY_REPORT
elif event_name == 'trace':
encoded_payload = base64.b64encode(event)
msg_payload = dict(connection_string=conn_string, payload=encoded_payload)
msg_name = OPERATIONS.NOTIFY_TRACE
elif event_name == 'progress':
msg_payload = dict(connection_string=conn_string, operation=event.get('operation'),
done_count=event.get('finished'), total_count=event.get('total'))
msg_name = OPERATIONS.NOTIFY_PROGRESS
elif event_name == 'device_seen':
msg_payload = event
msg_name = OPERATIONS.NOTIFY_DEVICE_FOUND
elif event_name == 'broadcast':
report = event.serialize()
report['encoded_report'] = base64.b64encode(report['encoded_report'])
msg_payload = dict(connection_string=conn_string, serialized_report=report)
msg_name = OPERATIONS.NOTIFY_BROADCAST
else:
self._logger.debug("Not forwarding unknown event over websockets: %s", event_tuple)
return
try:
self._logger.debug("Sending event %s: %s", msg_name, msg_payload)
await self.server.send_event(user_data, msg_name, msg_payload)
except websockets.exceptions.ConnectionClosed:
self._logger.debug("Could not send notification because connection was closed for client %s", client_id)
|
python
|
async def client_event_handler(self, client_id, event_tuple, user_data):
"""Forward an event on behalf of a client.
This method is called by StandardDeviceServer when it has an event that
should be sent to a client.
Args:
client_id (str): The client that we should send this event to
event_tuple (tuple): The conn_string, event_name and event
object passed from the call to notify_event.
user_data (object): The user data passed in the call to
:meth:`setup_client`.
"""
#TODO: Support sending disconnection events
conn_string, event_name, event = event_tuple
if event_name == 'report':
report = event.serialize()
report['encoded_report'] = base64.b64encode(report['encoded_report'])
msg_payload = dict(connection_string=conn_string, serialized_report=report)
msg_name = OPERATIONS.NOTIFY_REPORT
elif event_name == 'trace':
encoded_payload = base64.b64encode(event)
msg_payload = dict(connection_string=conn_string, payload=encoded_payload)
msg_name = OPERATIONS.NOTIFY_TRACE
elif event_name == 'progress':
msg_payload = dict(connection_string=conn_string, operation=event.get('operation'),
done_count=event.get('finished'), total_count=event.get('total'))
msg_name = OPERATIONS.NOTIFY_PROGRESS
elif event_name == 'device_seen':
msg_payload = event
msg_name = OPERATIONS.NOTIFY_DEVICE_FOUND
elif event_name == 'broadcast':
report = event.serialize()
report['encoded_report'] = base64.b64encode(report['encoded_report'])
msg_payload = dict(connection_string=conn_string, serialized_report=report)
msg_name = OPERATIONS.NOTIFY_BROADCAST
else:
self._logger.debug("Not forwarding unknown event over websockets: %s", event_tuple)
return
try:
self._logger.debug("Sending event %s: %s", msg_name, msg_payload)
await self.server.send_event(user_data, msg_name, msg_payload)
except websockets.exceptions.ConnectionClosed:
self._logger.debug("Could not send notification because connection was closed for client %s", client_id)
|
[
"async",
"def",
"client_event_handler",
"(",
"self",
",",
"client_id",
",",
"event_tuple",
",",
"user_data",
")",
":",
"#TODO: Support sending disconnection events",
"conn_string",
",",
"event_name",
",",
"event",
"=",
"event_tuple",
"if",
"event_name",
"==",
"'report'",
":",
"report",
"=",
"event",
".",
"serialize",
"(",
")",
"report",
"[",
"'encoded_report'",
"]",
"=",
"base64",
".",
"b64encode",
"(",
"report",
"[",
"'encoded_report'",
"]",
")",
"msg_payload",
"=",
"dict",
"(",
"connection_string",
"=",
"conn_string",
",",
"serialized_report",
"=",
"report",
")",
"msg_name",
"=",
"OPERATIONS",
".",
"NOTIFY_REPORT",
"elif",
"event_name",
"==",
"'trace'",
":",
"encoded_payload",
"=",
"base64",
".",
"b64encode",
"(",
"event",
")",
"msg_payload",
"=",
"dict",
"(",
"connection_string",
"=",
"conn_string",
",",
"payload",
"=",
"encoded_payload",
")",
"msg_name",
"=",
"OPERATIONS",
".",
"NOTIFY_TRACE",
"elif",
"event_name",
"==",
"'progress'",
":",
"msg_payload",
"=",
"dict",
"(",
"connection_string",
"=",
"conn_string",
",",
"operation",
"=",
"event",
".",
"get",
"(",
"'operation'",
")",
",",
"done_count",
"=",
"event",
".",
"get",
"(",
"'finished'",
")",
",",
"total_count",
"=",
"event",
".",
"get",
"(",
"'total'",
")",
")",
"msg_name",
"=",
"OPERATIONS",
".",
"NOTIFY_PROGRESS",
"elif",
"event_name",
"==",
"'device_seen'",
":",
"msg_payload",
"=",
"event",
"msg_name",
"=",
"OPERATIONS",
".",
"NOTIFY_DEVICE_FOUND",
"elif",
"event_name",
"==",
"'broadcast'",
":",
"report",
"=",
"event",
".",
"serialize",
"(",
")",
"report",
"[",
"'encoded_report'",
"]",
"=",
"base64",
".",
"b64encode",
"(",
"report",
"[",
"'encoded_report'",
"]",
")",
"msg_payload",
"=",
"dict",
"(",
"connection_string",
"=",
"conn_string",
",",
"serialized_report",
"=",
"report",
")",
"msg_name",
"=",
"OPERATIONS",
".",
"NOTIFY_BROADCAST",
"else",
":",
"self",
".",
"_logger",
".",
"debug",
"(",
"\"Not forwarding unknown event over websockets: %s\"",
",",
"event_tuple",
")",
"return",
"try",
":",
"self",
".",
"_logger",
".",
"debug",
"(",
"\"Sending event %s: %s\"",
",",
"msg_name",
",",
"msg_payload",
")",
"await",
"self",
".",
"server",
".",
"send_event",
"(",
"user_data",
",",
"msg_name",
",",
"msg_payload",
")",
"except",
"websockets",
".",
"exceptions",
".",
"ConnectionClosed",
":",
"self",
".",
"_logger",
".",
"debug",
"(",
"\"Could not send notification because connection was closed for client %s\"",
",",
"client_id",
")"
] |
Forward an event on behalf of a client.
This method is called by StandardDeviceServer when it has an event that
should be sent to a client.
Args:
client_id (str): The client that we should send this event to
event_tuple (tuple): The conn_string, event_name and event
object passed from the call to notify_event.
user_data (object): The user data passed in the call to
:meth:`setup_client`.
|
[
"Forward",
"an",
"event",
"on",
"behalf",
"of",
"a",
"client",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/device_server.py#L228-L275
|
train
|
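client_event_handler (record above) turns each (conn_string, event_name, event) tuple into exactly one websocket notification. The sketch below replays the 'progress' branch with a hand-built event tuple; the event dictionary contents are assumptions, while the key mapping (finished -> done_count, total -> total_count) comes straight from the handler.

# Hypothetical progress event as it would reach client_event_handler().
event_tuple = ('device/0x1234', 'progress',
               {'operation': 'script', 'finished': 10, 'total': 100})

conn_string, event_name, event = event_tuple
if event_name == 'progress':
    # Same field mapping the handler performs before sending NOTIFY_PROGRESS.
    msg_payload = dict(connection_string=conn_string,
                       operation=event.get('operation'),
                       done_count=event.get('finished'),
                       total_count=event.get('total'))
    print(msg_payload)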
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/sunf90.py
|
generate
|
def generate(env):
"""Add Builders and construction variables for sun f90 compiler to an
Environment."""
add_all_to_env(env)
fcomp = env.Detect(compilers) or 'f90'
env['FORTRAN'] = fcomp
env['F90'] = fcomp
env['SHFORTRAN'] = '$FORTRAN'
env['SHF90'] = '$F90'
env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS -KPIC')
env['SHF90FLAGS'] = SCons.Util.CLVar('$F90FLAGS -KPIC')
|
python
|
def generate(env):
"""Add Builders and construction variables for sun f90 compiler to an
Environment."""
add_all_to_env(env)
fcomp = env.Detect(compilers) or 'f90'
env['FORTRAN'] = fcomp
env['F90'] = fcomp
env['SHFORTRAN'] = '$FORTRAN'
env['SHF90'] = '$F90'
env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS -KPIC')
env['SHF90FLAGS'] = SCons.Util.CLVar('$F90FLAGS -KPIC')
|
[
"def",
"generate",
"(",
"env",
")",
":",
"add_all_to_env",
"(",
"env",
")",
"fcomp",
"=",
"env",
".",
"Detect",
"(",
"compilers",
")",
"or",
"'f90'",
"env",
"[",
"'FORTRAN'",
"]",
"=",
"fcomp",
"env",
"[",
"'F90'",
"]",
"=",
"fcomp",
"env",
"[",
"'SHFORTRAN'",
"]",
"=",
"'$FORTRAN'",
"env",
"[",
"'SHF90'",
"]",
"=",
"'$F90'",
"env",
"[",
"'SHFORTRANFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"'$FORTRANFLAGS -KPIC'",
")",
"env",
"[",
"'SHF90FLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"'$F90FLAGS -KPIC'",
")"
] |
Add Builders and construction variables for sun f90 compiler to an
Environment.
|
[
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"sun",
"f90",
"compiler",
"to",
"an",
"Environment",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/sunf90.py#L42-L55
|
train
|
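The sunf90 generate() function (record above) is normally invoked indirectly, by naming the tool when an Environment is constructed; it then detects the compiler and seeds the FORTRAN/F90 construction variables. A minimal SConstruct sketch, with a placeholder source file:

# SConstruct sketch: load the Sun f90 tool explicitly and build a program.
env = Environment(tools=['default', 'sunf90'])
env.Program('hello', 'hello.f90')   # 'hello.f90' is a placeholder source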
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Builder.py
|
Builder
|
def Builder(**kw):
"""A factory for builder objects."""
composite = None
if 'generator' in kw:
if 'action' in kw:
raise UserError("You must not specify both an action and a generator.")
kw['action'] = SCons.Action.CommandGeneratorAction(kw['generator'], {})
del kw['generator']
elif 'action' in kw:
source_ext_match = kw.get('source_ext_match', 1)
if 'source_ext_match' in kw:
del kw['source_ext_match']
if SCons.Util.is_Dict(kw['action']):
composite = DictCmdGenerator(kw['action'], source_ext_match)
kw['action'] = SCons.Action.CommandGeneratorAction(composite, {})
kw['src_suffix'] = composite.src_suffixes()
else:
kw['action'] = SCons.Action.Action(kw['action'])
if 'emitter' in kw:
emitter = kw['emitter']
if SCons.Util.is_String(emitter):
# This allows users to pass in an Environment
# variable reference (like "$FOO") as an emitter.
# We will look in that Environment variable for
# a callable to use as the actual emitter.
var = SCons.Util.get_environment_var(emitter)
if not var:
raise UserError("Supplied emitter '%s' does not appear to refer to an Environment variable" % emitter)
kw['emitter'] = EmitterProxy(var)
elif SCons.Util.is_Dict(emitter):
kw['emitter'] = DictEmitter(emitter)
elif SCons.Util.is_List(emitter):
kw['emitter'] = ListEmitter(emitter)
result = BuilderBase(**kw)
if not composite is None:
result = CompositeBuilder(result, composite)
return result
|
python
|
def Builder(**kw):
"""A factory for builder objects."""
composite = None
if 'generator' in kw:
if 'action' in kw:
raise UserError("You must not specify both an action and a generator.")
kw['action'] = SCons.Action.CommandGeneratorAction(kw['generator'], {})
del kw['generator']
elif 'action' in kw:
source_ext_match = kw.get('source_ext_match', 1)
if 'source_ext_match' in kw:
del kw['source_ext_match']
if SCons.Util.is_Dict(kw['action']):
composite = DictCmdGenerator(kw['action'], source_ext_match)
kw['action'] = SCons.Action.CommandGeneratorAction(composite, {})
kw['src_suffix'] = composite.src_suffixes()
else:
kw['action'] = SCons.Action.Action(kw['action'])
if 'emitter' in kw:
emitter = kw['emitter']
if SCons.Util.is_String(emitter):
# This allows users to pass in an Environment
# variable reference (like "$FOO") as an emitter.
# We will look in that Environment variable for
# a callable to use as the actual emitter.
var = SCons.Util.get_environment_var(emitter)
if not var:
raise UserError("Supplied emitter '%s' does not appear to refer to an Environment variable" % emitter)
kw['emitter'] = EmitterProxy(var)
elif SCons.Util.is_Dict(emitter):
kw['emitter'] = DictEmitter(emitter)
elif SCons.Util.is_List(emitter):
kw['emitter'] = ListEmitter(emitter)
result = BuilderBase(**kw)
if not composite is None:
result = CompositeBuilder(result, composite)
return result
|
[
"def",
"Builder",
"(",
"*",
"*",
"kw",
")",
":",
"composite",
"=",
"None",
"if",
"'generator'",
"in",
"kw",
":",
"if",
"'action'",
"in",
"kw",
":",
"raise",
"UserError",
"(",
"\"You must not specify both an action and a generator.\"",
")",
"kw",
"[",
"'action'",
"]",
"=",
"SCons",
".",
"Action",
".",
"CommandGeneratorAction",
"(",
"kw",
"[",
"'generator'",
"]",
",",
"{",
"}",
")",
"del",
"kw",
"[",
"'generator'",
"]",
"elif",
"'action'",
"in",
"kw",
":",
"source_ext_match",
"=",
"kw",
".",
"get",
"(",
"'source_ext_match'",
",",
"1",
")",
"if",
"'source_ext_match'",
"in",
"kw",
":",
"del",
"kw",
"[",
"'source_ext_match'",
"]",
"if",
"SCons",
".",
"Util",
".",
"is_Dict",
"(",
"kw",
"[",
"'action'",
"]",
")",
":",
"composite",
"=",
"DictCmdGenerator",
"(",
"kw",
"[",
"'action'",
"]",
",",
"source_ext_match",
")",
"kw",
"[",
"'action'",
"]",
"=",
"SCons",
".",
"Action",
".",
"CommandGeneratorAction",
"(",
"composite",
",",
"{",
"}",
")",
"kw",
"[",
"'src_suffix'",
"]",
"=",
"composite",
".",
"src_suffixes",
"(",
")",
"else",
":",
"kw",
"[",
"'action'",
"]",
"=",
"SCons",
".",
"Action",
".",
"Action",
"(",
"kw",
"[",
"'action'",
"]",
")",
"if",
"'emitter'",
"in",
"kw",
":",
"emitter",
"=",
"kw",
"[",
"'emitter'",
"]",
"if",
"SCons",
".",
"Util",
".",
"is_String",
"(",
"emitter",
")",
":",
"# This allows users to pass in an Environment",
"# variable reference (like \"$FOO\") as an emitter.",
"# We will look in that Environment variable for",
"# a callable to use as the actual emitter.",
"var",
"=",
"SCons",
".",
"Util",
".",
"get_environment_var",
"(",
"emitter",
")",
"if",
"not",
"var",
":",
"raise",
"UserError",
"(",
"\"Supplied emitter '%s' does not appear to refer to an Environment variable\"",
"%",
"emitter",
")",
"kw",
"[",
"'emitter'",
"]",
"=",
"EmitterProxy",
"(",
"var",
")",
"elif",
"SCons",
".",
"Util",
".",
"is_Dict",
"(",
"emitter",
")",
":",
"kw",
"[",
"'emitter'",
"]",
"=",
"DictEmitter",
"(",
"emitter",
")",
"elif",
"SCons",
".",
"Util",
".",
"is_List",
"(",
"emitter",
")",
":",
"kw",
"[",
"'emitter'",
"]",
"=",
"ListEmitter",
"(",
"emitter",
")",
"result",
"=",
"BuilderBase",
"(",
"*",
"*",
"kw",
")",
"if",
"not",
"composite",
"is",
"None",
":",
"result",
"=",
"CompositeBuilder",
"(",
"result",
",",
"composite",
")",
"return",
"result"
] |
A factory for builder objects.
|
[
"A",
"factory",
"for",
"builder",
"objects",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Builder.py#L240-L280
|
train
|
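The Builder() factory (record above) accepts a plain action, a dict mapping source suffixes to actions (which yields a CompositeBuilder via DictCmdGenerator), or a generator, and an emitter given as a string is resolved through a construction variable (EmitterProxy). A sketch of the dict-action and string-emitter paths in SConstruct form; the command strings, variable name, and emitter behaviour are placeholders:

def add_manifest(target, source, env):
    # Hypothetical emitter: declare an extra .manifest output per target.
    return target + [str(t) + '.manifest' for t in target], source

env = Environment()
env['MY_EMITTER'] = add_manifest   # looked up because the emitter below is a string

obj_builder = Builder(action={'.c': 'cc -c -o $TARGET $SOURCE',
                              '.cpp': 'c++ -c -o $TARGET $SOURCE'},
                      suffix='.o',
                      emitter='$MY_EMITTER')
env.Append(BUILDERS={'MyObject': obj_builder})
# env.MyObject('foo.c') would now produce foo.o plus foo.o.manifest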
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Builder.py
|
_node_errors
|
def _node_errors(builder, env, tlist, slist):
"""Validate that the lists of target and source nodes are
legal for this builder and environment. Raise errors or
issue warnings as appropriate.
"""
# First, figure out if there are any errors in the way the targets
# were specified.
for t in tlist:
if t.side_effect:
raise UserError("Multiple ways to build the same target were specified for: %s" % t)
if t.has_explicit_builder():
# Check for errors when the environments are different
# No error if environments are the same Environment instance
if (not t.env is None and not t.env is env and
# Check OverrideEnvironment case - no error if wrapped Environments
# are the same instance, and overrides lists match
not (getattr(t.env, '__subject', 0) is getattr(env, '__subject', 1) and
getattr(t.env, 'overrides', 0) == getattr(env, 'overrides', 1) and
not builder.multi)):
action = t.builder.action
t_contents = t.builder.action.get_contents(tlist, slist, t.env)
contents = builder.action.get_contents(tlist, slist, env)
if t_contents == contents:
msg = "Two different environments were specified for target %s,\n\tbut they appear to have the same action: %s" % (t, action.genstring(tlist, slist, t.env))
SCons.Warnings.warn(SCons.Warnings.DuplicateEnvironmentWarning, msg)
else:
try:
msg = "Two environments with different actions were specified for the same target: %s\n(action 1: %s)\n(action 2: %s)" % (t,t_contents.decode('utf-8'),contents.decode('utf-8'))
except UnicodeDecodeError as e:
msg = "Two environments with different actions were specified for the same target: %s"%t
raise UserError(msg)
if builder.multi:
if t.builder != builder:
msg = "Two different builders (%s and %s) were specified for the same target: %s" % (t.builder.get_name(env), builder.get_name(env), t)
raise UserError(msg)
# TODO(batch): list constructed each time!
if t.get_executor().get_all_targets() != tlist:
msg = "Two different target lists have a target in common: %s (from %s and from %s)" % (t, list(map(str, t.get_executor().get_all_targets())), list(map(str, tlist)))
raise UserError(msg)
elif t.sources != slist:
msg = "Multiple ways to build the same target were specified for: %s (from %s and from %s)" % (t, list(map(str, t.sources)), list(map(str, slist)))
raise UserError(msg)
if builder.single_source:
if len(slist) > 1:
raise UserError("More than one source given for single-source builder: targets=%s sources=%s" % (list(map(str,tlist)), list(map(str,slist))))
|
python
|
def _node_errors(builder, env, tlist, slist):
"""Validate that the lists of target and source nodes are
legal for this builder and environment. Raise errors or
issue warnings as appropriate.
"""
# First, figure out if there are any errors in the way the targets
# were specified.
for t in tlist:
if t.side_effect:
raise UserError("Multiple ways to build the same target were specified for: %s" % t)
if t.has_explicit_builder():
# Check for errors when the environments are different
# No error if environments are the same Environment instance
if (not t.env is None and not t.env is env and
# Check OverrideEnvironment case - no error if wrapped Environments
# are the same instance, and overrides lists match
not (getattr(t.env, '__subject', 0) is getattr(env, '__subject', 1) and
getattr(t.env, 'overrides', 0) == getattr(env, 'overrides', 1) and
not builder.multi)):
action = t.builder.action
t_contents = t.builder.action.get_contents(tlist, slist, t.env)
contents = builder.action.get_contents(tlist, slist, env)
if t_contents == contents:
msg = "Two different environments were specified for target %s,\n\tbut they appear to have the same action: %s" % (t, action.genstring(tlist, slist, t.env))
SCons.Warnings.warn(SCons.Warnings.DuplicateEnvironmentWarning, msg)
else:
try:
msg = "Two environments with different actions were specified for the same target: %s\n(action 1: %s)\n(action 2: %s)" % (t,t_contents.decode('utf-8'),contents.decode('utf-8'))
except UnicodeDecodeError as e:
msg = "Two environments with different actions were specified for the same target: %s"%t
raise UserError(msg)
if builder.multi:
if t.builder != builder:
msg = "Two different builders (%s and %s) were specified for the same target: %s" % (t.builder.get_name(env), builder.get_name(env), t)
raise UserError(msg)
# TODO(batch): list constructed each time!
if t.get_executor().get_all_targets() != tlist:
msg = "Two different target lists have a target in common: %s (from %s and from %s)" % (t, list(map(str, t.get_executor().get_all_targets())), list(map(str, tlist)))
raise UserError(msg)
elif t.sources != slist:
msg = "Multiple ways to build the same target were specified for: %s (from %s and from %s)" % (t, list(map(str, t.sources)), list(map(str, slist)))
raise UserError(msg)
if builder.single_source:
if len(slist) > 1:
raise UserError("More than one source given for single-source builder: targets=%s sources=%s" % (list(map(str,tlist)), list(map(str,slist))))
|
[
"def",
"_node_errors",
"(",
"builder",
",",
"env",
",",
"tlist",
",",
"slist",
")",
":",
"# First, figure out if there are any errors in the way the targets",
"# were specified.",
"for",
"t",
"in",
"tlist",
":",
"if",
"t",
".",
"side_effect",
":",
"raise",
"UserError",
"(",
"\"Multiple ways to build the same target were specified for: %s\"",
"%",
"t",
")",
"if",
"t",
".",
"has_explicit_builder",
"(",
")",
":",
"# Check for errors when the environments are different",
"# No error if environments are the same Environment instance",
"if",
"(",
"not",
"t",
".",
"env",
"is",
"None",
"and",
"not",
"t",
".",
"env",
"is",
"env",
"and",
"# Check OverrideEnvironment case - no error if wrapped Environments",
"# are the same instance, and overrides lists match",
"not",
"(",
"getattr",
"(",
"t",
".",
"env",
",",
"'__subject'",
",",
"0",
")",
"is",
"getattr",
"(",
"env",
",",
"'__subject'",
",",
"1",
")",
"and",
"getattr",
"(",
"t",
".",
"env",
",",
"'overrides'",
",",
"0",
")",
"==",
"getattr",
"(",
"env",
",",
"'overrides'",
",",
"1",
")",
"and",
"not",
"builder",
".",
"multi",
")",
")",
":",
"action",
"=",
"t",
".",
"builder",
".",
"action",
"t_contents",
"=",
"t",
".",
"builder",
".",
"action",
".",
"get_contents",
"(",
"tlist",
",",
"slist",
",",
"t",
".",
"env",
")",
"contents",
"=",
"builder",
".",
"action",
".",
"get_contents",
"(",
"tlist",
",",
"slist",
",",
"env",
")",
"if",
"t_contents",
"==",
"contents",
":",
"msg",
"=",
"\"Two different environments were specified for target %s,\\n\\tbut they appear to have the same action: %s\"",
"%",
"(",
"t",
",",
"action",
".",
"genstring",
"(",
"tlist",
",",
"slist",
",",
"t",
".",
"env",
")",
")",
"SCons",
".",
"Warnings",
".",
"warn",
"(",
"SCons",
".",
"Warnings",
".",
"DuplicateEnvironmentWarning",
",",
"msg",
")",
"else",
":",
"try",
":",
"msg",
"=",
"\"Two environments with different actions were specified for the same target: %s\\n(action 1: %s)\\n(action 2: %s)\"",
"%",
"(",
"t",
",",
"t_contents",
".",
"decode",
"(",
"'utf-8'",
")",
",",
"contents",
".",
"decode",
"(",
"'utf-8'",
")",
")",
"except",
"UnicodeDecodeError",
"as",
"e",
":",
"msg",
"=",
"\"Two environments with different actions were specified for the same target: %s\"",
"%",
"t",
"raise",
"UserError",
"(",
"msg",
")",
"if",
"builder",
".",
"multi",
":",
"if",
"t",
".",
"builder",
"!=",
"builder",
":",
"msg",
"=",
"\"Two different builders (%s and %s) were specified for the same target: %s\"",
"%",
"(",
"t",
".",
"builder",
".",
"get_name",
"(",
"env",
")",
",",
"builder",
".",
"get_name",
"(",
"env",
")",
",",
"t",
")",
"raise",
"UserError",
"(",
"msg",
")",
"# TODO(batch): list constructed each time!",
"if",
"t",
".",
"get_executor",
"(",
")",
".",
"get_all_targets",
"(",
")",
"!=",
"tlist",
":",
"msg",
"=",
"\"Two different target lists have a target in common: %s (from %s and from %s)\"",
"%",
"(",
"t",
",",
"list",
"(",
"map",
"(",
"str",
",",
"t",
".",
"get_executor",
"(",
")",
".",
"get_all_targets",
"(",
")",
")",
")",
",",
"list",
"(",
"map",
"(",
"str",
",",
"tlist",
")",
")",
")",
"raise",
"UserError",
"(",
"msg",
")",
"elif",
"t",
".",
"sources",
"!=",
"slist",
":",
"msg",
"=",
"\"Multiple ways to build the same target were specified for: %s (from %s and from %s)\"",
"%",
"(",
"t",
",",
"list",
"(",
"map",
"(",
"str",
",",
"t",
".",
"sources",
")",
")",
",",
"list",
"(",
"map",
"(",
"str",
",",
"slist",
")",
")",
")",
"raise",
"UserError",
"(",
"msg",
")",
"if",
"builder",
".",
"single_source",
":",
"if",
"len",
"(",
"slist",
")",
">",
"1",
":",
"raise",
"UserError",
"(",
"\"More than one source given for single-source builder: targets=%s sources=%s\"",
"%",
"(",
"list",
"(",
"map",
"(",
"str",
",",
"tlist",
")",
")",
",",
"list",
"(",
"map",
"(",
"str",
",",
"slist",
")",
")",
")",
")"
] |
Validate that the lists of target and source nodes are
legal for this builder and environment. Raise errors or
issue warnings as appropriate.
|
[
"Validate",
"that",
"the",
"lists",
"of",
"target",
"and",
"source",
"nodes",
"are",
"legal",
"for",
"this",
"builder",
"and",
"environment",
".",
"Raise",
"errors",
"or",
"issue",
"warnings",
"as",
"appropriate",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Builder.py#L282-L329
|
train
|
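_node_errors (record above) is where SCons raises the familiar "Multiple ways to build the same target" error when the same target node is wired to two different source lists by a non-multi builder. A minimal SConstruct sketch that would trip that check; the file names are placeholders and the exact message text may vary between SCons versions:

# Two conflicting definitions of the same target.
env = Environment()
env.Command('out.txt', 'a.in', Copy('$TARGET', '$SOURCE'))
# A second call with a different source list is rejected by _node_errors():
env.Command('out.txt', 'b.in', Copy('$TARGET', '$SOURCE'))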
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Builder.py
|
is_a_Builder
|
def is_a_Builder(obj):
    """Returns True if the specified obj is one of our Builder classes.
The test is complicated a bit by the fact that CompositeBuilder
is a proxy, not a subclass of BuilderBase.
"""
return (isinstance(obj, BuilderBase)
or isinstance(obj, CompositeBuilder)
or callable(obj))
|
python
|
def is_a_Builder(obj):
    """Returns True if the specified obj is one of our Builder classes.
The test is complicated a bit by the fact that CompositeBuilder
is a proxy, not a subclass of BuilderBase.
"""
return (isinstance(obj, BuilderBase)
or isinstance(obj, CompositeBuilder)
or callable(obj))
|
[
"def",
"is_a_Builder",
"(",
"obj",
")",
":",
"return",
"(",
"isinstance",
"(",
"obj",
",",
"BuilderBase",
")",
"or",
"isinstance",
"(",
"obj",
",",
"CompositeBuilder",
")",
"or",
"callable",
"(",
"obj",
")",
")"
] |
Returns True if the specified obj is one of our Builder classes.
The test is complicated a bit by the fact that CompositeBuilder
is a proxy, not a subclass of BuilderBase.
|
[
"Returns",
"True",
"if",
"the",
"specified",
"obj",
"is",
"one",
"of",
"our",
"Builder",
"classes",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Builder.py#L874-L882
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Builder.py
|
BuilderBase.get_name
|
def get_name(self, env):
"""Attempts to get the name of the Builder.
Look at the BUILDERS variable of env, expecting it to be a
dictionary containing this Builder, and return the key of the
dictionary. If there's no key, then return a directly-configured
name (if there is one) or the name of the class (by default)."""
try:
index = list(env['BUILDERS'].values()).index(self)
return list(env['BUILDERS'].keys())[index]
except (AttributeError, KeyError, TypeError, ValueError):
try:
return self.name
except AttributeError:
return str(self.__class__)
|
python
|
def get_name(self, env):
"""Attempts to get the name of the Builder.
Look at the BUILDERS variable of env, expecting it to be a
dictionary containing this Builder, and return the key of the
dictionary. If there's no key, then return a directly-configured
name (if there is one) or the name of the class (by default)."""
try:
index = list(env['BUILDERS'].values()).index(self)
return list(env['BUILDERS'].keys())[index]
except (AttributeError, KeyError, TypeError, ValueError):
try:
return self.name
except AttributeError:
return str(self.__class__)
|
[
"def",
"get_name",
"(",
"self",
",",
"env",
")",
":",
"try",
":",
"index",
"=",
"list",
"(",
"env",
"[",
"'BUILDERS'",
"]",
".",
"values",
"(",
")",
")",
".",
"index",
"(",
"self",
")",
"return",
"list",
"(",
"env",
"[",
"'BUILDERS'",
"]",
".",
"keys",
"(",
")",
")",
"[",
"index",
"]",
"except",
"(",
"AttributeError",
",",
"KeyError",
",",
"TypeError",
",",
"ValueError",
")",
":",
"try",
":",
"return",
"self",
".",
"name",
"except",
"AttributeError",
":",
"return",
"str",
"(",
"self",
".",
"__class__",
")"
] |
Attempts to get the name of the Builder.
Look at the BUILDERS variable of env, expecting it to be a
dictionary containing this Builder, and return the key of the
dictionary. If there's no key, then return a directly-configured
name (if there is one) or the name of the class (by default).
|
[
"Attempts",
"to",
"get",
"the",
"name",
"of",
"the",
"Builder",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Builder.py#L443-L458
|
train
|
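get_name (record above) reports the key under which a builder is registered in env['BUILDERS'], falling back to a configured name or the class name; that key is what shows up in builder-related error messages. A small SConstruct sketch with placeholder names:

env = Environment()
bld = Builder(action='tool $SOURCE > $TARGET', suffix='.out')
env.Append(BUILDERS={'MyTool': bld})
print(bld.get_name(env))   # -> 'MyTool'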
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Builder.py
|
BuilderBase._create_nodes
|
def _create_nodes(self, env, target = None, source = None):
"""Create and return lists of target and source nodes.
"""
src_suf = self.get_src_suffix(env)
target_factory = env.get_factory(self.target_factory)
source_factory = env.get_factory(self.source_factory)
source = self._adjustixes(source, None, src_suf)
slist = env.arg2nodes(source, source_factory)
pre = self.get_prefix(env, slist)
suf = self.get_suffix(env, slist)
if target is None:
try:
t_from_s = slist[0].target_from_source
except AttributeError:
raise UserError("Do not know how to create a target from source `%s'" % slist[0])
except IndexError:
tlist = []
else:
splitext = lambda S: self.splitext(S,env)
tlist = [ t_from_s(pre, suf, splitext) ]
else:
target = self._adjustixes(target, pre, suf, self.ensure_suffix)
tlist = env.arg2nodes(target, target_factory, target=target, source=source)
if self.emitter:
# The emitter is going to do str(node), but because we're
# being called *from* a builder invocation, the new targets
# don't yet have a builder set on them and will look like
# source files. Fool the emitter's str() calls by setting
# up a temporary builder on the new targets.
new_targets = []
for t in tlist:
if not t.is_derived():
t.builder_set(self)
new_targets.append(t)
orig_tlist = tlist[:]
orig_slist = slist[:]
target, source = self.emitter(target=tlist, source=slist, env=env)
# Now delete the temporary builders that we attached to any
# new targets, so that _node_errors() doesn't do weird stuff
# to them because it thinks they already have builders.
for t in new_targets:
if t.builder is self:
# Only delete the temporary builder if the emitter
# didn't change it on us.
t.builder_set(None)
# Have to call arg2nodes yet again, since it is legal for
# emitters to spit out strings as well as Node instances.
tlist = env.arg2nodes(target, target_factory,
target=orig_tlist, source=orig_slist)
slist = env.arg2nodes(source, source_factory,
target=orig_tlist, source=orig_slist)
return tlist, slist
|
python
|
def _create_nodes(self, env, target = None, source = None):
"""Create and return lists of target and source nodes.
"""
src_suf = self.get_src_suffix(env)
target_factory = env.get_factory(self.target_factory)
source_factory = env.get_factory(self.source_factory)
source = self._adjustixes(source, None, src_suf)
slist = env.arg2nodes(source, source_factory)
pre = self.get_prefix(env, slist)
suf = self.get_suffix(env, slist)
if target is None:
try:
t_from_s = slist[0].target_from_source
except AttributeError:
raise UserError("Do not know how to create a target from source `%s'" % slist[0])
except IndexError:
tlist = []
else:
splitext = lambda S: self.splitext(S,env)
tlist = [ t_from_s(pre, suf, splitext) ]
else:
target = self._adjustixes(target, pre, suf, self.ensure_suffix)
tlist = env.arg2nodes(target, target_factory, target=target, source=source)
if self.emitter:
# The emitter is going to do str(node), but because we're
# being called *from* a builder invocation, the new targets
# don't yet have a builder set on them and will look like
# source files. Fool the emitter's str() calls by setting
# up a temporary builder on the new targets.
new_targets = []
for t in tlist:
if not t.is_derived():
t.builder_set(self)
new_targets.append(t)
orig_tlist = tlist[:]
orig_slist = slist[:]
target, source = self.emitter(target=tlist, source=slist, env=env)
# Now delete the temporary builders that we attached to any
# new targets, so that _node_errors() doesn't do weird stuff
# to them because it thinks they already have builders.
for t in new_targets:
if t.builder is self:
# Only delete the temporary builder if the emitter
# didn't change it on us.
t.builder_set(None)
# Have to call arg2nodes yet again, since it is legal for
# emitters to spit out strings as well as Node instances.
tlist = env.arg2nodes(target, target_factory,
target=orig_tlist, source=orig_slist)
slist = env.arg2nodes(source, source_factory,
target=orig_tlist, source=orig_slist)
return tlist, slist
|
[
"def",
"_create_nodes",
"(",
"self",
",",
"env",
",",
"target",
"=",
"None",
",",
"source",
"=",
"None",
")",
":",
"src_suf",
"=",
"self",
".",
"get_src_suffix",
"(",
"env",
")",
"target_factory",
"=",
"env",
".",
"get_factory",
"(",
"self",
".",
"target_factory",
")",
"source_factory",
"=",
"env",
".",
"get_factory",
"(",
"self",
".",
"source_factory",
")",
"source",
"=",
"self",
".",
"_adjustixes",
"(",
"source",
",",
"None",
",",
"src_suf",
")",
"slist",
"=",
"env",
".",
"arg2nodes",
"(",
"source",
",",
"source_factory",
")",
"pre",
"=",
"self",
".",
"get_prefix",
"(",
"env",
",",
"slist",
")",
"suf",
"=",
"self",
".",
"get_suffix",
"(",
"env",
",",
"slist",
")",
"if",
"target",
"is",
"None",
":",
"try",
":",
"t_from_s",
"=",
"slist",
"[",
"0",
"]",
".",
"target_from_source",
"except",
"AttributeError",
":",
"raise",
"UserError",
"(",
"\"Do not know how to create a target from source `%s'\"",
"%",
"slist",
"[",
"0",
"]",
")",
"except",
"IndexError",
":",
"tlist",
"=",
"[",
"]",
"else",
":",
"splitext",
"=",
"lambda",
"S",
":",
"self",
".",
"splitext",
"(",
"S",
",",
"env",
")",
"tlist",
"=",
"[",
"t_from_s",
"(",
"pre",
",",
"suf",
",",
"splitext",
")",
"]",
"else",
":",
"target",
"=",
"self",
".",
"_adjustixes",
"(",
"target",
",",
"pre",
",",
"suf",
",",
"self",
".",
"ensure_suffix",
")",
"tlist",
"=",
"env",
".",
"arg2nodes",
"(",
"target",
",",
"target_factory",
",",
"target",
"=",
"target",
",",
"source",
"=",
"source",
")",
"if",
"self",
".",
"emitter",
":",
"# The emitter is going to do str(node), but because we're",
"# being called *from* a builder invocation, the new targets",
"# don't yet have a builder set on them and will look like",
"# source files. Fool the emitter's str() calls by setting",
"# up a temporary builder on the new targets.",
"new_targets",
"=",
"[",
"]",
"for",
"t",
"in",
"tlist",
":",
"if",
"not",
"t",
".",
"is_derived",
"(",
")",
":",
"t",
".",
"builder_set",
"(",
"self",
")",
"new_targets",
".",
"append",
"(",
"t",
")",
"orig_tlist",
"=",
"tlist",
"[",
":",
"]",
"orig_slist",
"=",
"slist",
"[",
":",
"]",
"target",
",",
"source",
"=",
"self",
".",
"emitter",
"(",
"target",
"=",
"tlist",
",",
"source",
"=",
"slist",
",",
"env",
"=",
"env",
")",
"# Now delete the temporary builders that we attached to any",
"# new targets, so that _node_errors() doesn't do weird stuff",
"# to them because it thinks they already have builders.",
"for",
"t",
"in",
"new_targets",
":",
"if",
"t",
".",
"builder",
"is",
"self",
":",
"# Only delete the temporary builder if the emitter",
"# didn't change it on us.",
"t",
".",
"builder_set",
"(",
"None",
")",
"# Have to call arg2nodes yet again, since it is legal for",
"# emitters to spit out strings as well as Node instances.",
"tlist",
"=",
"env",
".",
"arg2nodes",
"(",
"target",
",",
"target_factory",
",",
"target",
"=",
"orig_tlist",
",",
"source",
"=",
"orig_slist",
")",
"slist",
"=",
"env",
".",
"arg2nodes",
"(",
"source",
",",
"source_factory",
",",
"target",
"=",
"orig_tlist",
",",
"source",
"=",
"orig_slist",
")",
"return",
"tlist",
",",
"slist"
] |
Create and return lists of target and source nodes.
|
[
"Create",
"and",
"return",
"lists",
"of",
"target",
"and",
"source",
"nodes",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Builder.py#L485-L546
|
train
|
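_create_nodes (record above) is why a builder call without an explicit target still works: the source gets src_suffix appended, and the target name is derived from the first source using the builder's prefix and suffix. A sketch in SConstruct form; the builder name, command, and suffixes are placeholders:

env = Environment()
gen = Builder(action='gen $SOURCE > $TARGET', suffix='.out', src_suffix='.in')
env.Append(BUILDERS={'Gen': gen})
nodes = env.Gen('data')              # source becomes data.in, target is derived
print([str(n) for n in nodes])       # -> ['data.out']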
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Builder.py
|
BuilderBase._get_sdict
|
def _get_sdict(self, env):
"""
Returns a dictionary mapping all of the source suffixes of all
src_builders of this Builder to the underlying Builder that
should be called first.
This dictionary is used for each target specified, so we save a
lot of extra computation by memoizing it for each construction
environment.
Note that this is re-computed each time, not cached, because there
might be changes to one of our source Builders (or one of their
source Builders, and so on, and so on...) that we can't "see."
The underlying methods we call cache their computed values,
though, so we hope repeatedly aggregating them into a dictionary
like this won't be too big a hit. We may need to look for a
better way to do this if performance data show this has turned
into a significant bottleneck.
"""
sdict = {}
for bld in self.get_src_builders(env):
for suf in bld.src_suffixes(env):
sdict[suf] = bld
return sdict
|
python
|
def _get_sdict(self, env):
"""
Returns a dictionary mapping all of the source suffixes of all
src_builders of this Builder to the underlying Builder that
should be called first.
This dictionary is used for each target specified, so we save a
lot of extra computation by memoizing it for each construction
environment.
Note that this is re-computed each time, not cached, because there
might be changes to one of our source Builders (or one of their
source Builders, and so on, and so on...) that we can't "see."
The underlying methods we call cache their computed values,
though, so we hope repeatedly aggregating them into a dictionary
like this won't be too big a hit. We may need to look for a
better way to do this if performance data show this has turned
into a significant bottleneck.
"""
sdict = {}
for bld in self.get_src_builders(env):
for suf in bld.src_suffixes(env):
sdict[suf] = bld
return sdict
|
[
"def",
"_get_sdict",
"(",
"self",
",",
"env",
")",
":",
"sdict",
"=",
"{",
"}",
"for",
"bld",
"in",
"self",
".",
"get_src_builders",
"(",
"env",
")",
":",
"for",
"suf",
"in",
"bld",
".",
"src_suffixes",
"(",
"env",
")",
":",
"sdict",
"[",
"suf",
"]",
"=",
"bld",
"return",
"sdict"
] |
Returns a dictionary mapping all of the source suffixes of all
src_builders of this Builder to the underlying Builder that
should be called first.
This dictionary is used for each target specified, so we save a
lot of extra computation by memoizing it for each construction
environment.
Note that this is re-computed each time, not cached, because there
might be changes to one of our source Builders (or one of their
source Builders, and so on, and so on...) that we can't "see."
The underlying methods we call cache their computed values,
though, so we hope repeatedly aggregating them into a dictionary
like this won't be too big a hit. We may need to look for a
better way to do this if performance data show this has turned
into a significant bottleneck.
|
[
"Returns",
"a",
"dictionary",
"mapping",
"all",
"of",
"the",
"source",
"suffixes",
"of",
"all",
"src_builders",
"of",
"this",
"Builder",
"to",
"the",
"underlying",
"Builder",
"that",
"should",
"be",
"called",
"first",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Builder.py#L703-L727
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Builder.py
|
BuilderBase.get_src_builders
|
def get_src_builders(self, env):
"""
Returns the list of source Builders for this Builder.
This exists mainly to look up Builders referenced as
        strings in the 'BUILDERS' variable of the construction
environment and cache the result.
"""
memo_key = id(env)
try:
memo_dict = self._memo['get_src_builders']
except KeyError:
memo_dict = {}
self._memo['get_src_builders'] = memo_dict
else:
try:
return memo_dict[memo_key]
except KeyError:
pass
builders = []
for bld in self.src_builder:
if SCons.Util.is_String(bld):
try:
bld = env['BUILDERS'][bld]
except KeyError:
continue
builders.append(bld)
memo_dict[memo_key] = builders
return builders
|
python
|
def get_src_builders(self, env):
"""
Returns the list of source Builders for this Builder.
This exists mainly to look up Builders referenced as
        strings in the 'BUILDERS' variable of the construction
environment and cache the result.
"""
memo_key = id(env)
try:
memo_dict = self._memo['get_src_builders']
except KeyError:
memo_dict = {}
self._memo['get_src_builders'] = memo_dict
else:
try:
return memo_dict[memo_key]
except KeyError:
pass
builders = []
for bld in self.src_builder:
if SCons.Util.is_String(bld):
try:
bld = env['BUILDERS'][bld]
except KeyError:
continue
builders.append(bld)
memo_dict[memo_key] = builders
return builders
|
[
"def",
"get_src_builders",
"(",
"self",
",",
"env",
")",
":",
"memo_key",
"=",
"id",
"(",
"env",
")",
"try",
":",
"memo_dict",
"=",
"self",
".",
"_memo",
"[",
"'get_src_builders'",
"]",
"except",
"KeyError",
":",
"memo_dict",
"=",
"{",
"}",
"self",
".",
"_memo",
"[",
"'get_src_builders'",
"]",
"=",
"memo_dict",
"else",
":",
"try",
":",
"return",
"memo_dict",
"[",
"memo_key",
"]",
"except",
"KeyError",
":",
"pass",
"builders",
"=",
"[",
"]",
"for",
"bld",
"in",
"self",
".",
"src_builder",
":",
"if",
"SCons",
".",
"Util",
".",
"is_String",
"(",
"bld",
")",
":",
"try",
":",
"bld",
"=",
"env",
"[",
"'BUILDERS'",
"]",
"[",
"bld",
"]",
"except",
"KeyError",
":",
"continue",
"builders",
".",
"append",
"(",
"bld",
")",
"memo_dict",
"[",
"memo_key",
"]",
"=",
"builders",
"return",
"builders"
] |
Returns the list of source Builders for this Builder.
This exists mainly to look up Builders referenced as
strings in the 'BUILDERS' variable of the construction
environment and cache the result.
|
[
"Returns",
"the",
"list",
"of",
"source",
"Builders",
"for",
"this",
"Builder",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Builder.py#L776-L806
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Builder.py
|
BuilderBase.subst_src_suffixes
|
def subst_src_suffixes(self, env):
"""
The suffix list may contain construction variable expansions,
so we have to evaluate the individual strings. To avoid doing
this over and over, we memoize the results for each construction
environment.
"""
memo_key = id(env)
try:
memo_dict = self._memo['subst_src_suffixes']
except KeyError:
memo_dict = {}
self._memo['subst_src_suffixes'] = memo_dict
else:
try:
return memo_dict[memo_key]
except KeyError:
pass
suffixes = [env.subst(x) for x in self.src_suffix]
memo_dict[memo_key] = suffixes
return suffixes
|
python
|
def subst_src_suffixes(self, env):
"""
The suffix list may contain construction variable expansions,
so we have to evaluate the individual strings. To avoid doing
this over and over, we memoize the results for each construction
environment.
"""
memo_key = id(env)
try:
memo_dict = self._memo['subst_src_suffixes']
except KeyError:
memo_dict = {}
self._memo['subst_src_suffixes'] = memo_dict
else:
try:
return memo_dict[memo_key]
except KeyError:
pass
suffixes = [env.subst(x) for x in self.src_suffix]
memo_dict[memo_key] = suffixes
return suffixes
|
[
"def",
"subst_src_suffixes",
"(",
"self",
",",
"env",
")",
":",
"memo_key",
"=",
"id",
"(",
"env",
")",
"try",
":",
"memo_dict",
"=",
"self",
".",
"_memo",
"[",
"'subst_src_suffixes'",
"]",
"except",
"KeyError",
":",
"memo_dict",
"=",
"{",
"}",
"self",
".",
"_memo",
"[",
"'subst_src_suffixes'",
"]",
"=",
"memo_dict",
"else",
":",
"try",
":",
"return",
"memo_dict",
"[",
"memo_key",
"]",
"except",
"KeyError",
":",
"pass",
"suffixes",
"=",
"[",
"env",
".",
"subst",
"(",
"x",
")",
"for",
"x",
"in",
"self",
".",
"src_suffix",
"]",
"memo_dict",
"[",
"memo_key",
"]",
"=",
"suffixes",
"return",
"suffixes"
] |
The suffix list may contain construction variable expansions,
so we have to evaluate the individual strings. To avoid doing
this over and over, we memoize the results for each construction
environment.
|
[
"The",
"suffix",
"list",
"may",
"contain",
"construction",
"variable",
"expansions",
"so",
"we",
"have",
"to",
"evaluate",
"the",
"individual",
"strings",
".",
"To",
"avoid",
"doing",
"this",
"over",
"and",
"over",
"we",
"memoize",
"the",
"results",
"for",
"each",
"construction",
"environment",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Builder.py#L812-L832
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Builder.py
|
BuilderBase.src_suffixes
|
def src_suffixes(self, env):
"""
Returns the list of source suffixes for all src_builders of this
Builder.
This is essentially a recursive descent of the src_builder "tree."
(This value isn't cached because there may be changes in a
src_builder many levels deep that we can't see.)
"""
sdict = {}
suffixes = self.subst_src_suffixes(env)
for s in suffixes:
sdict[s] = 1
for builder in self.get_src_builders(env):
for s in builder.src_suffixes(env):
if s not in sdict:
sdict[s] = 1
suffixes.append(s)
return suffixes
|
python
|
def src_suffixes(self, env):
"""
Returns the list of source suffixes for all src_builders of this
Builder.
This is essentially a recursive descent of the src_builder "tree."
(This value isn't cached because there may be changes in a
src_builder many levels deep that we can't see.)
"""
sdict = {}
suffixes = self.subst_src_suffixes(env)
for s in suffixes:
sdict[s] = 1
for builder in self.get_src_builders(env):
for s in builder.src_suffixes(env):
if s not in sdict:
sdict[s] = 1
suffixes.append(s)
return suffixes
|
[
"def",
"src_suffixes",
"(",
"self",
",",
"env",
")",
":",
"sdict",
"=",
"{",
"}",
"suffixes",
"=",
"self",
".",
"subst_src_suffixes",
"(",
"env",
")",
"for",
"s",
"in",
"suffixes",
":",
"sdict",
"[",
"s",
"]",
"=",
"1",
"for",
"builder",
"in",
"self",
".",
"get_src_builders",
"(",
"env",
")",
":",
"for",
"s",
"in",
"builder",
".",
"src_suffixes",
"(",
"env",
")",
":",
"if",
"s",
"not",
"in",
"sdict",
":",
"sdict",
"[",
"s",
"]",
"=",
"1",
"suffixes",
".",
"append",
"(",
"s",
")",
"return",
"suffixes"
] |
Returns the list of source suffixes for all src_builders of this
Builder.
This is essentially a recursive descent of the src_builder "tree."
(This value isn't cached because there may be changes in a
src_builder many levels deep that we can't see.)
|
[
"Returns",
"the",
"list",
"of",
"source",
"suffixes",
"for",
"all",
"src_builders",
"of",
"this",
"Builder",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Builder.py#L834-L852
|
train
|
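The three preceding Builder records work together: subst_src_suffixes() expands construction variables in a builder's own src_suffix list, get_src_builders() resolves src_builder entries (by name through env['BUILDERS'] when they are strings), and src_suffixes() folds in the suffixes of those builders recursively, own suffixes first. A sketch of a two-stage chain; the builder names, commands, and the $MYSUF variable are placeholders:

env = Environment(MYSUF='.foo')

pre = Builder(action='preprocess $SOURCE > $TARGET',
              suffix='.mid', src_suffix='$MYSUF')   # expanded by subst_src_suffixes()
final = Builder(action='compile $SOURCE > $TARGET',
                suffix='.out', src_suffix='.mid',
                src_builder=[pre])                  # walked by get_src_builders()

env.Append(BUILDERS={'Pre': pre, 'Final': final})
print(final.src_suffixes(env))   # -> ['.mid', '.foo']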
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/aixlink.py
|
generate
|
def generate(env):
"""
Add Builders and construction variables for Visual Age linker to
an Environment.
"""
link.generate(env)
env['SMARTLINKFLAGS'] = smart_linkflags
env['LINKFLAGS'] = SCons.Util.CLVar('$SMARTLINKFLAGS')
env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -qmkshrobj -qsuppress=1501-218')
env['SHLIBSUFFIX'] = '.a'
|
python
|
def generate(env):
"""
Add Builders and construction variables for Visual Age linker to
an Environment.
"""
link.generate(env)
env['SMARTLINKFLAGS'] = smart_linkflags
env['LINKFLAGS'] = SCons.Util.CLVar('$SMARTLINKFLAGS')
env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -qmkshrobj -qsuppress=1501-218')
env['SHLIBSUFFIX'] = '.a'
|
[
"def",
"generate",
"(",
"env",
")",
":",
"link",
".",
"generate",
"(",
"env",
")",
"env",
"[",
"'SMARTLINKFLAGS'",
"]",
"=",
"smart_linkflags",
"env",
"[",
"'LINKFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"'$SMARTLINKFLAGS'",
")",
"env",
"[",
"'SHLINKFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"'$LINKFLAGS -qmkshrobj -qsuppress=1501-218'",
")",
"env",
"[",
"'SHLIBSUFFIX'",
"]",
"=",
"'.a'"
] |
Add Builders and construction variables for Visual Age linker to
an Environment.
|
[
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"Visual",
"Age",
"linker",
"to",
"an",
"Environment",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/aixlink.py#L55-L65
|
train
|
iotile/coretools
|
iotilecore/iotile/core/hw/update/records/reflash_tile.py
|
_parse_target
|
def _parse_target(target):
"""Parse a binary targeting information structure.
This function only supports extracting the slot number or controller from
the target and will raise an ArgumentError if more complicated targeting
is desired.
Args:
target (bytes): The binary targeting data blob.
Returns:
dict: The parsed targeting data
"""
if len(target) != 8:
raise ArgumentError("Invalid targeting data length", expected=8, length=len(target))
slot, match_op = struct.unpack("<B6xB", target)
if match_op == _MATCH_CONTROLLER:
return {'controller': True, 'slot': 0}
elif match_op == _MATCH_SLOT:
return {'controller': False, 'slot': slot}
raise ArgumentError("Unsupported complex targeting specified", match_op=match_op)
|
python
|
def _parse_target(target):
"""Parse a binary targeting information structure.
This function only supports extracting the slot number or controller from
the target and will raise an ArgumentError if more complicated targeting
is desired.
Args:
target (bytes): The binary targeting data blob.
Returns:
dict: The parsed targeting data
"""
if len(target) != 8:
raise ArgumentError("Invalid targeting data length", expected=8, length=len(target))
slot, match_op = struct.unpack("<B6xB", target)
if match_op == _MATCH_CONTROLLER:
return {'controller': True, 'slot': 0}
elif match_op == _MATCH_SLOT:
return {'controller': False, 'slot': slot}
raise ArgumentError("Unsupported complex targeting specified", match_op=match_op)
|
[
"def",
"_parse_target",
"(",
"target",
")",
":",
"if",
"len",
"(",
"target",
")",
"!=",
"8",
":",
"raise",
"ArgumentError",
"(",
"\"Invalid targeting data length\"",
",",
"expected",
"=",
"8",
",",
"length",
"=",
"len",
"(",
"target",
")",
")",
"slot",
",",
"match_op",
"=",
"struct",
".",
"unpack",
"(",
"\"<B6xB\"",
",",
"target",
")",
"if",
"match_op",
"==",
"_MATCH_CONTROLLER",
":",
"return",
"{",
"'controller'",
":",
"True",
",",
"'slot'",
":",
"0",
"}",
"elif",
"match_op",
"==",
"_MATCH_SLOT",
":",
"return",
"{",
"'controller'",
":",
"False",
",",
"'slot'",
":",
"slot",
"}",
"raise",
"ArgumentError",
"(",
"\"Unsupported complex targeting specified\"",
",",
"match_op",
"=",
"match_op",
")"
] |
Parse a binary targeting information structure.
This function only supports extracting the slot number or controller from
the target and will raise an ArgumentError if more complicated targeting
is desired.
Args:
target (bytes): The binary targeting data blob.
Returns:
dict: The parsed targeting data
|
[
"Parse",
"a",
"binary",
"targeting",
"information",
"structure",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/update/records/reflash_tile.py#L165-L188
|
train
|
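_parse_target (record above) expects an 8-byte blob: the slot number in the first byte, six pad bytes, and a match operation in the last byte ("<B6xB"). The round-trip sketch below packs and re-parses such a blob; the numeric values of _MATCH_CONTROLLER and _MATCH_SLOT are module constants not shown in this record, so the values used here are assumptions for illustration only.

import struct

MATCH_CONTROLLER = 1   # assumed value; see reflash_tile.py for the real constant
MATCH_SLOT = 2         # assumed value

def make_target(slot=0, controller=False):
    # Pack an 8-byte targeting blob in the "<B6xB" layout parsed above.
    if controller:
        return struct.pack("<B6xB", 0, MATCH_CONTROLLER)
    return struct.pack("<B6xB", slot, MATCH_SLOT)

blob = make_target(slot=3)
slot, match_op = struct.unpack("<B6xB", blob)
print(len(blob), slot, match_op)   # -> 8 3 2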
iotile/coretools
|
iotileemulate/iotile/emulate/internal/rpc_queue.py
|
RPCQueue.put_task
|
def put_task(self, func, args, response):
"""Place a task onto the RPC queue.
This temporary functionality will go away but it lets you run a
task synchronously with RPC dispatch by placing it onto the
        RPC queue.
Args:
func (callable): The function to execute
args (iterable): The function arguments
response (GenericResponse): The response object to signal the
result on.
"""
self._rpc_queue.put_nowait((func, args, response))
|
python
|
def put_task(self, func, args, response):
"""Place a task onto the RPC queue.
This temporary functionality will go away but it lets you run a
task synchronously with RPC dispatch by placing it onto the
        RPC queue.
Args:
func (callable): The function to execute
args (iterable): The function arguments
response (GenericResponse): The response object to signal the
result on.
"""
self._rpc_queue.put_nowait((func, args, response))
|
[
"def",
"put_task",
"(",
"self",
",",
"func",
",",
"args",
",",
"response",
")",
":",
"self",
".",
"_rpc_queue",
".",
"put_nowait",
"(",
"(",
"func",
",",
"args",
",",
"response",
")",
")"
] |
Place a task onto the RPC queue.
This temporary functionality will go away but it lets you run a
task synchronously with RPC dispatch by placing it onto the
RPC queue.
Args:
func (callable): The function to execute
args (iterable): The function arguments
response (GenericResponse): The response object to signal the
result on.
|
[
"Place",
"a",
"task",
"onto",
"the",
"RPC",
"queue",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/internal/rpc_queue.py#L44-L58
|
train
|
iotile/coretools
|
iotileemulate/iotile/emulate/internal/rpc_queue.py
|
RPCQueue.put_rpc
|
def put_rpc(self, address, rpc_id, arg_payload, response):
"""Place an RPC onto the RPC queue.
The rpc will be dispatched asynchronously by the background dispatch
task. This method must be called from the event loop. This method
does not block.
Args:
address (int): The address of the tile with the RPC
rpc_id (int): The id of the rpc you want to call
arg_payload (bytes): The RPC payload
            response (GenericResponse): The object to use to signal the result.
"""
self._rpc_queue.put_nowait((address, rpc_id, arg_payload, response))
|
python
|
def put_rpc(self, address, rpc_id, arg_payload, response):
"""Place an RPC onto the RPC queue.
The rpc will be dispatched asynchronously by the background dispatch
task. This method must be called from the event loop. This method
does not block.
Args:
address (int): The address of the tile with the RPC
rpc_id (int): The id of the rpc you want to call
arg_payload (bytes): The RPC payload
            response (GenericResponse): The object to use to signal the result.
"""
self._rpc_queue.put_nowait((address, rpc_id, arg_payload, response))
|
[
"def",
"put_rpc",
"(",
"self",
",",
"address",
",",
"rpc_id",
",",
"arg_payload",
",",
"response",
")",
":",
"self",
".",
"_rpc_queue",
".",
"put_nowait",
"(",
"(",
"address",
",",
"rpc_id",
",",
"arg_payload",
",",
"response",
")",
")"
] |
Place an RPC onto the RPC queue.
The rpc will be dispatched asynchronously by the background dispatch
task. This method must be called from the event loop. This method
does not block.
Args:
address (int): The address of the tile with the RPC
rpc_id (int): The id of the rpc you want to call
arg_payload (bytes): The RPC payload
            response (GenericResponse): The object to use to signal the result.
|
[
"Place",
"an",
"RPC",
"onto",
"the",
"RPC",
"queue",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/internal/rpc_queue.py#L60-L74
|
train
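The queue itself is the whole implementation here; a sketch of the consuming side might look like the loop below. The call_rpc coroutine and the set_result/set_exception response API are assumptions for illustration, not the actual iotileemulate dispatch code.

import asyncio

async def dispatch_loop(rpc_queue, call_rpc):
    # Drain (address, rpc_id, arg_payload, response) tuples queued by put_rpc.
    while True:
        address, rpc_id, arg_payload, response = await rpc_queue.get()
        try:
            result = await call_rpc(address, rpc_id, arg_payload)
            response.set_result(result)
        except Exception as err:
            response.set_exception(err)
        finally:
            rpc_queue.task_done()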
|
iotile/coretools
|
iotileemulate/iotile/emulate/internal/rpc_queue.py
|
RPCQueue.stop
|
async def stop(self):
"""Stop the rpc queue from inside the event loop."""
if self._rpc_task is not None:
self._rpc_task.cancel()
try:
await self._rpc_task
except asyncio.CancelledError:
pass
self._rpc_task = None
|
python
|
async def stop(self):
"""Stop the rpc queue from inside the event loop."""
if self._rpc_task is not None:
self._rpc_task.cancel()
try:
await self._rpc_task
except asyncio.CancelledError:
pass
self._rpc_task = None
|
[
"async",
"def",
"stop",
"(",
"self",
")",
":",
"if",
"self",
".",
"_rpc_task",
"is",
"not",
"None",
":",
"self",
".",
"_rpc_task",
".",
"cancel",
"(",
")",
"try",
":",
"await",
"self",
".",
"_rpc_task",
"except",
"asyncio",
".",
"CancelledError",
":",
"pass",
"self",
".",
"_rpc_task",
"=",
"None"
] |
Stop the rpc queue from inside the event loop.
|
[
"Stop",
"the",
"rpc",
"queue",
"from",
"inside",
"the",
"event",
"loop",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/internal/rpc_queue.py#L152-L163
|
train
|
iotile/coretools
|
iotilecore/iotile/core/hw/debug/sparse_memory.py
|
SparseMemory.add_segment
|
def add_segment(self, address, data, overwrite=False):
"""Add a contiguous segment of data to this memory map
        If the segment overlaps with a segment already added, an
ArgumentError is raised unless the overwrite flag is True.
Params:
address (int): The starting address for this segment
data (bytearray): The data to add
overwrite (bool): Overwrite data if this segment overlaps
with one previously added.
"""
seg_type = self._classify_segment(address, len(data))
if not isinstance(seg_type, DisjointSegment):
raise ArgumentError("Unsupported segment type")
segment = MemorySegment(address, address+len(data)-1, len(data), bytearray(data))
self._segments.append(segment)
|
python
|
def add_segment(self, address, data, overwrite=False):
"""Add a contiguous segment of data to this memory map
        If the segment overlaps with a segment already added, an
ArgumentError is raised unless the overwrite flag is True.
Params:
address (int): The starting address for this segment
data (bytearray): The data to add
overwrite (bool): Overwrite data if this segment overlaps
with one previously added.
"""
seg_type = self._classify_segment(address, len(data))
if not isinstance(seg_type, DisjointSegment):
raise ArgumentError("Unsupported segment type")
segment = MemorySegment(address, address+len(data)-1, len(data), bytearray(data))
self._segments.append(segment)
|
[
"def",
"add_segment",
"(",
"self",
",",
"address",
",",
"data",
",",
"overwrite",
"=",
"False",
")",
":",
"seg_type",
"=",
"self",
".",
"_classify_segment",
"(",
"address",
",",
"len",
"(",
"data",
")",
")",
"if",
"not",
"isinstance",
"(",
"seg_type",
",",
"DisjointSegment",
")",
":",
"raise",
"ArgumentError",
"(",
"\"Unsupported segment type\"",
")",
"segment",
"=",
"MemorySegment",
"(",
"address",
",",
"address",
"+",
"len",
"(",
"data",
")",
"-",
"1",
",",
"len",
"(",
"data",
")",
",",
"bytearray",
"(",
"data",
")",
")",
"self",
".",
"_segments",
".",
"append",
"(",
"segment",
")"
] |
Add a contiguous segment of data to this memory map
        If the segment overlaps with a segment already added, an
ArgumentError is raised unless the overwrite flag is True.
Params:
address (int): The starting address for this segment
data (bytearray): The data to add
overwrite (bool): Overwrite data if this segment overlaps
with one previously added.
|
[
"Add",
"a",
"contiguous",
"segment",
"of",
"data",
"to",
"this",
"memory",
"map"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/debug/sparse_memory.py#L24-L42
|
train
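A reduced, standalone sketch of the bookkeeping add_segment performs, with MemorySegment approximated by a namedtuple; the real MemorySegment and DisjointSegment types come from iotilecore and may differ.

from collections import namedtuple

MemorySegment = namedtuple("MemorySegment", ["start_address", "end_address", "length", "data"])

segments = []

def add_segment(address, data):
    # Mirrors the bookkeeping above: inclusive end address and a private copy of the data.
    segment = MemorySegment(address, address + len(data) - 1, len(data), bytearray(data))
    segments.append(segment)

add_segment(0x1000, b"\x01\x02\x03\x04")
assert segments[0].end_address == 0x1003 and segments[0].length == 4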
|
iotile/coretools
|
iotilecore/iotile/core/hw/debug/sparse_memory.py
|
SparseMemory._create_slice
|
def _create_slice(self, key):
"""Create a slice in a memory segment corresponding to a key."""
if isinstance(key, slice):
step = key.step
if step is None:
step = 1
if step != 1:
raise ArgumentError("You cannot slice with a step that is not equal to 1", step=key.step)
start_address = key.start
end_address = key.stop - 1
start_i, start_seg = self._find_address(start_address)
end_i, _end_seg = self._find_address(end_address)
if start_seg is None or start_i != end_i:
raise ArgumentError("Slice would span invalid data in memory",
start_address=start_address, end_address=end_address)
block_offset = start_address - start_seg.start_address
block_length = end_address - start_address + 1
return start_seg, block_offset, block_offset + block_length
elif isinstance(key, int):
start_i, start_seg = self._find_address(key)
if start_seg is None:
raise ArgumentError("Requested invalid address", address=key)
return start_seg, key - start_seg.start_address, None
else:
raise ArgumentError("Unknown type of address key", address=key)
|
python
|
def _create_slice(self, key):
"""Create a slice in a memory segment corresponding to a key."""
if isinstance(key, slice):
step = key.step
if step is None:
step = 1
if step != 1:
raise ArgumentError("You cannot slice with a step that is not equal to 1", step=key.step)
start_address = key.start
end_address = key.stop - 1
start_i, start_seg = self._find_address(start_address)
end_i, _end_seg = self._find_address(end_address)
if start_seg is None or start_i != end_i:
raise ArgumentError("Slice would span invalid data in memory",
start_address=start_address, end_address=end_address)
block_offset = start_address - start_seg.start_address
block_length = end_address - start_address + 1
return start_seg, block_offset, block_offset + block_length
elif isinstance(key, int):
start_i, start_seg = self._find_address(key)
if start_seg is None:
raise ArgumentError("Requested invalid address", address=key)
return start_seg, key - start_seg.start_address, None
else:
raise ArgumentError("Unknown type of address key", address=key)
|
[
"def",
"_create_slice",
"(",
"self",
",",
"key",
")",
":",
"if",
"isinstance",
"(",
"key",
",",
"slice",
")",
":",
"step",
"=",
"key",
".",
"step",
"if",
"step",
"is",
"None",
":",
"step",
"=",
"1",
"if",
"step",
"!=",
"1",
":",
"raise",
"ArgumentError",
"(",
"\"You cannot slice with a step that is not equal to 1\"",
",",
"step",
"=",
"key",
".",
"step",
")",
"start_address",
"=",
"key",
".",
"start",
"end_address",
"=",
"key",
".",
"stop",
"-",
"1",
"start_i",
",",
"start_seg",
"=",
"self",
".",
"_find_address",
"(",
"start_address",
")",
"end_i",
",",
"_end_seg",
"=",
"self",
".",
"_find_address",
"(",
"end_address",
")",
"if",
"start_seg",
"is",
"None",
"or",
"start_i",
"!=",
"end_i",
":",
"raise",
"ArgumentError",
"(",
"\"Slice would span invalid data in memory\"",
",",
"start_address",
"=",
"start_address",
",",
"end_address",
"=",
"end_address",
")",
"block_offset",
"=",
"start_address",
"-",
"start_seg",
".",
"start_address",
"block_length",
"=",
"end_address",
"-",
"start_address",
"+",
"1",
"return",
"start_seg",
",",
"block_offset",
",",
"block_offset",
"+",
"block_length",
"elif",
"isinstance",
"(",
"key",
",",
"int",
")",
":",
"start_i",
",",
"start_seg",
"=",
"self",
".",
"_find_address",
"(",
"key",
")",
"if",
"start_seg",
"is",
"None",
":",
"raise",
"ArgumentError",
"(",
"\"Requested invalid address\"",
",",
"address",
"=",
"key",
")",
"return",
"start_seg",
",",
"key",
"-",
"start_seg",
".",
"start_address",
",",
"None",
"else",
":",
"raise",
"ArgumentError",
"(",
"\"Unknown type of address key\"",
",",
"address",
"=",
"key",
")"
] |
Create a slice in a memory segment corresponding to a key.
|
[
"Create",
"a",
"slice",
"in",
"a",
"memory",
"segment",
"corresponding",
"to",
"a",
"key",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/debug/sparse_memory.py#L44-L76
|
train
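The slice bookkeeping above reduces to simple offset arithmetic; the worked numbers below assume a hypothetical segment starting at 0x1000 and a slice key of 0x1002:0x1006.

start_address, stop = 0x1002, 0x1006
segment_start = 0x1000

end_address = stop - 1                          # inclusive end, as in _create_slice
block_offset = start_address - segment_start    # 2
block_length = end_address - start_address + 1  # 4
assert (block_offset, block_offset + block_length) == (2, 6)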
|
iotile/coretools
|
iotilecore/iotile/core/hw/debug/sparse_memory.py
|
SparseMemory._classify_segment
|
def _classify_segment(self, address, length):
"""Determine how a new data segment fits into our existing world
Params:
address (int): The address we wish to classify
length (int): The length of the segment
Returns:
int: One of SparseMemoryMap.prepended
"""
end_address = address + length - 1
_, start_seg = self._find_address(address)
_, end_seg = self._find_address(end_address)
if start_seg is not None or end_seg is not None:
raise ArgumentError("Overlapping segments are not yet supported", address=address, length=length)
return DisjointSegment()
|
python
|
def _classify_segment(self, address, length):
"""Determine how a new data segment fits into our existing world
Params:
address (int): The address we wish to classify
length (int): The length of the segment
Returns:
            DisjointSegment: Currently always a DisjointSegment; overlapping segments raise an ArgumentError.
"""
end_address = address + length - 1
_, start_seg = self._find_address(address)
_, end_seg = self._find_address(end_address)
if start_seg is not None or end_seg is not None:
raise ArgumentError("Overlapping segments are not yet supported", address=address, length=length)
return DisjointSegment()
|
[
"def",
"_classify_segment",
"(",
"self",
",",
"address",
",",
"length",
")",
":",
"end_address",
"=",
"address",
"+",
"length",
"-",
"1",
"_",
",",
"start_seg",
"=",
"self",
".",
"_find_address",
"(",
"address",
")",
"_",
",",
"end_seg",
"=",
"self",
".",
"_find_address",
"(",
"end_address",
")",
"if",
"start_seg",
"is",
"not",
"None",
"or",
"end_seg",
"is",
"not",
"None",
":",
"raise",
"ArgumentError",
"(",
"\"Overlapping segments are not yet supported\"",
",",
"address",
"=",
"address",
",",
"length",
"=",
"length",
")",
"return",
"DisjointSegment",
"(",
")"
] |
Determine how a new data segment fits into our existing world
Params:
address (int): The address we wish to classify
length (int): The length of the segment
Returns:
            DisjointSegment: Currently always a DisjointSegment; overlapping segments raise an ArgumentError.
|
[
"Determine",
"how",
"a",
"new",
"data",
"segment",
"fits",
"into",
"our",
"existing",
"world"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/debug/sparse_memory.py#L105-L124
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/ifort.py
|
generate
|
def generate(env):
"""Add Builders and construction variables for ifort to an Environment."""
# ifort supports Fortran 90 and Fortran 95
# Additionally, ifort recognizes more file extensions.
fscan = FortranScan("FORTRANPATH")
SCons.Tool.SourceFileScanner.add_scanner('.i', fscan)
SCons.Tool.SourceFileScanner.add_scanner('.i90', fscan)
if 'FORTRANFILESUFFIXES' not in env:
env['FORTRANFILESUFFIXES'] = ['.i']
else:
env['FORTRANFILESUFFIXES'].append('.i')
if 'F90FILESUFFIXES' not in env:
env['F90FILESUFFIXES'] = ['.i90']
else:
env['F90FILESUFFIXES'].append('.i90')
add_all_to_env(env)
fc = 'ifort'
for dialect in ['F77', 'F90', 'FORTRAN', 'F95']:
env['%s' % dialect] = fc
env['SH%s' % dialect] = '$%s' % dialect
if env['PLATFORM'] == 'posix':
env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS -fPIC' % dialect)
if env['PLATFORM'] == 'win32':
# On Windows, the ifort compiler specifies the object on the
# command line with -object:, not -o. Massage the necessary
# command-line construction variables.
for dialect in ['F77', 'F90', 'FORTRAN', 'F95']:
for var in ['%sCOM' % dialect, '%sPPCOM' % dialect,
'SH%sCOM' % dialect, 'SH%sPPCOM' % dialect]:
env[var] = env[var].replace('-o $TARGET', '-object:$TARGET')
env['FORTRANMODDIRPREFIX'] = "/module:"
else:
env['FORTRANMODDIRPREFIX'] = "-module "
|
python
|
def generate(env):
"""Add Builders and construction variables for ifort to an Environment."""
# ifort supports Fortran 90 and Fortran 95
# Additionally, ifort recognizes more file extensions.
fscan = FortranScan("FORTRANPATH")
SCons.Tool.SourceFileScanner.add_scanner('.i', fscan)
SCons.Tool.SourceFileScanner.add_scanner('.i90', fscan)
if 'FORTRANFILESUFFIXES' not in env:
env['FORTRANFILESUFFIXES'] = ['.i']
else:
env['FORTRANFILESUFFIXES'].append('.i')
if 'F90FILESUFFIXES' not in env:
env['F90FILESUFFIXES'] = ['.i90']
else:
env['F90FILESUFFIXES'].append('.i90')
add_all_to_env(env)
fc = 'ifort'
for dialect in ['F77', 'F90', 'FORTRAN', 'F95']:
env['%s' % dialect] = fc
env['SH%s' % dialect] = '$%s' % dialect
if env['PLATFORM'] == 'posix':
env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS -fPIC' % dialect)
if env['PLATFORM'] == 'win32':
# On Windows, the ifort compiler specifies the object on the
# command line with -object:, not -o. Massage the necessary
# command-line construction variables.
for dialect in ['F77', 'F90', 'FORTRAN', 'F95']:
for var in ['%sCOM' % dialect, '%sPPCOM' % dialect,
'SH%sCOM' % dialect, 'SH%sPPCOM' % dialect]:
env[var] = env[var].replace('-o $TARGET', '-object:$TARGET')
env['FORTRANMODDIRPREFIX'] = "/module:"
else:
env['FORTRANMODDIRPREFIX'] = "-module "
|
[
"def",
"generate",
"(",
"env",
")",
":",
"# ifort supports Fortran 90 and Fortran 95",
"# Additionally, ifort recognizes more file extensions.",
"fscan",
"=",
"FortranScan",
"(",
"\"FORTRANPATH\"",
")",
"SCons",
".",
"Tool",
".",
"SourceFileScanner",
".",
"add_scanner",
"(",
"'.i'",
",",
"fscan",
")",
"SCons",
".",
"Tool",
".",
"SourceFileScanner",
".",
"add_scanner",
"(",
"'.i90'",
",",
"fscan",
")",
"if",
"'FORTRANFILESUFFIXES'",
"not",
"in",
"env",
":",
"env",
"[",
"'FORTRANFILESUFFIXES'",
"]",
"=",
"[",
"'.i'",
"]",
"else",
":",
"env",
"[",
"'FORTRANFILESUFFIXES'",
"]",
".",
"append",
"(",
"'.i'",
")",
"if",
"'F90FILESUFFIXES'",
"not",
"in",
"env",
":",
"env",
"[",
"'F90FILESUFFIXES'",
"]",
"=",
"[",
"'.i90'",
"]",
"else",
":",
"env",
"[",
"'F90FILESUFFIXES'",
"]",
".",
"append",
"(",
"'.i90'",
")",
"add_all_to_env",
"(",
"env",
")",
"fc",
"=",
"'ifort'",
"for",
"dialect",
"in",
"[",
"'F77'",
",",
"'F90'",
",",
"'FORTRAN'",
",",
"'F95'",
"]",
":",
"env",
"[",
"'%s'",
"%",
"dialect",
"]",
"=",
"fc",
"env",
"[",
"'SH%s'",
"%",
"dialect",
"]",
"=",
"'$%s'",
"%",
"dialect",
"if",
"env",
"[",
"'PLATFORM'",
"]",
"==",
"'posix'",
":",
"env",
"[",
"'SH%sFLAGS'",
"%",
"dialect",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"'$%sFLAGS -fPIC'",
"%",
"dialect",
")",
"if",
"env",
"[",
"'PLATFORM'",
"]",
"==",
"'win32'",
":",
"# On Windows, the ifort compiler specifies the object on the",
"# command line with -object:, not -o. Massage the necessary",
"# command-line construction variables.",
"for",
"dialect",
"in",
"[",
"'F77'",
",",
"'F90'",
",",
"'FORTRAN'",
",",
"'F95'",
"]",
":",
"for",
"var",
"in",
"[",
"'%sCOM'",
"%",
"dialect",
",",
"'%sPPCOM'",
"%",
"dialect",
",",
"'SH%sCOM'",
"%",
"dialect",
",",
"'SH%sPPCOM'",
"%",
"dialect",
"]",
":",
"env",
"[",
"var",
"]",
"=",
"env",
"[",
"var",
"]",
".",
"replace",
"(",
"'-o $TARGET'",
",",
"'-object:$TARGET'",
")",
"env",
"[",
"'FORTRANMODDIRPREFIX'",
"]",
"=",
"\"/module:\"",
"else",
":",
"env",
"[",
"'FORTRANMODDIRPREFIX'",
"]",
"=",
"\"-module \""
] |
Add Builders and construction variables for ifort to an Environment.
|
[
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"ifort",
"to",
"an",
"Environment",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/ifort.py#L41-L79
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Job.py
|
Jobs.run
|
def run(self, postfunc=lambda: None):
"""Run the jobs.
        postfunc() will be invoked after the jobs have run. It will be
invoked even if the jobs are interrupted by a keyboard
interrupt (well, in fact by a signal such as either SIGINT,
SIGTERM or SIGHUP). The execution of postfunc() is protected
against keyboard interrupts and is guaranteed to run to
completion."""
self._setup_sig_handler()
try:
self.job.start()
finally:
postfunc()
self._reset_sig_handler()
|
python
|
def run(self, postfunc=lambda: None):
"""Run the jobs.
        postfunc() will be invoked after the jobs have run. It will be
invoked even if the jobs are interrupted by a keyboard
interrupt (well, in fact by a signal such as either SIGINT,
SIGTERM or SIGHUP). The execution of postfunc() is protected
against keyboard interrupts and is guaranteed to run to
completion."""
self._setup_sig_handler()
try:
self.job.start()
finally:
postfunc()
self._reset_sig_handler()
|
[
"def",
"run",
"(",
"self",
",",
"postfunc",
"=",
"lambda",
":",
"None",
")",
":",
"self",
".",
"_setup_sig_handler",
"(",
")",
"try",
":",
"self",
".",
"job",
".",
"start",
"(",
")",
"finally",
":",
"postfunc",
"(",
")",
"self",
".",
"_reset_sig_handler",
"(",
")"
] |
Run the jobs.
        postfunc() will be invoked after the jobs have run. It will be
invoked even if the jobs are interrupted by a keyboard
interrupt (well, in fact by a signal such as either SIGINT,
SIGTERM or SIGHUP). The execution of postfunc() is protected
against keyboard interrupts and is guaranteed to run to
completion.
|
[
"Run",
"the",
"jobs",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Job.py#L100-L114
|
train
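The guarantee described in the docstring boils down to a try/finally around the work; a stripped-down sketch (names are illustrative, not SCons API):

def run_with_postfunc(work, postfunc=lambda: None):
    # postfunc() runs even if work() raises or is interrupted by KeyboardInterrupt.
    try:
        work()
    finally:
        postfunc()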
|
iotile/coretools
|
transport_plugins/native_ble/iotile_transport_native_ble/connection_manager.py
|
ConnectionAction.expired
|
def expired(self):
"""Boolean property if this action has expired
"""
if self.timeout is None:
return False
return monotonic() - self.start_time > self.timeout
|
python
|
def expired(self):
"""Boolean property if this action has expired
"""
if self.timeout is None:
return False
return monotonic() - self.start_time > self.timeout
|
[
"def",
"expired",
"(",
"self",
")",
":",
"if",
"self",
".",
"timeout",
"is",
"None",
":",
"return",
"False",
"return",
"monotonic",
"(",
")",
"-",
"self",
".",
"start_time",
">",
"self",
".",
"timeout"
] |
Boolean property if this action has expired
|
[
"Boolean",
"property",
"if",
"this",
"action",
"has",
"expired"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/native_ble/iotile_transport_native_ble/connection_manager.py#L37-L43
|
train
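A self-contained stand-in for the expiry check, assuming expired is exposed as a property and start_time is recorded with monotonic() at construction (both consistent with the code above but not shown in this record):

from time import monotonic, sleep

class TimedAction:
    def __init__(self, timeout=None):
        self.timeout = timeout
        self.start_time = monotonic()

    @property
    def expired(self):
        if self.timeout is None:
            return False
        return monotonic() - self.start_time > self.timeout

action = TimedAction(timeout=0.01)
sleep(0.02)
assert action.expired and not TimedAction(timeout=None).expired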
|
iotile/coretools
|
transport_plugins/native_ble/iotile_transport_native_ble/connection_manager.py
|
ConnectionManager.begin_connection
|
def begin_connection(self, connection_id, internal_id, callback, context, timeout):
"""Asynchronously begin a connection attempt
Args:
connection_id (int): The external connection id
internal_id (string): An internal identifier for the connection
callback (callable): The function to be called when the connection
attempt finishes
context (dict): Additional information to associate with this context
timeout (float): How long to allow this connection attempt to proceed
without timing it out
"""
data = {
'callback': callback,
'connection_id': connection_id,
'internal_id': internal_id,
'context': context
}
action = ConnectionAction('begin_connection', data, timeout=timeout, sync=False)
self._actions.put(action)
|
python
|
def begin_connection(self, connection_id, internal_id, callback, context, timeout):
"""Asynchronously begin a connection attempt
Args:
connection_id (int): The external connection id
internal_id (string): An internal identifier for the connection
callback (callable): The function to be called when the connection
attempt finishes
context (dict): Additional information to associate with this context
timeout (float): How long to allow this connection attempt to proceed
without timing it out
"""
data = {
'callback': callback,
'connection_id': connection_id,
'internal_id': internal_id,
'context': context
}
action = ConnectionAction('begin_connection', data, timeout=timeout, sync=False)
self._actions.put(action)
|
[
"def",
"begin_connection",
"(",
"self",
",",
"connection_id",
",",
"internal_id",
",",
"callback",
",",
"context",
",",
"timeout",
")",
":",
"data",
"=",
"{",
"'callback'",
":",
"callback",
",",
"'connection_id'",
":",
"connection_id",
",",
"'internal_id'",
":",
"internal_id",
",",
"'context'",
":",
"context",
"}",
"action",
"=",
"ConnectionAction",
"(",
"'begin_connection'",
",",
"data",
",",
"timeout",
"=",
"timeout",
",",
"sync",
"=",
"False",
")",
"self",
".",
"_actions",
".",
"put",
"(",
"action",
")"
] |
Asynchronously begin a connection attempt
Args:
connection_id (int): The external connection id
internal_id (string): An internal identifier for the connection
callback (callable): The function to be called when the connection
attempt finishes
context (dict): Additional information to associate with this context
timeout (float): How long to allow this connection attempt to proceed
without timing it out
|
[
"Asynchronously",
"begin",
"a",
"connection",
"attempt"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/native_ble/iotile_transport_native_ble/connection_manager.py#L337-L358
|
train
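The callback convention is not spelled out in this docstring; elsewhere in the same module callbacks receive (connection id, adapter id, success flag, failure reason), so a hedged example handler might look like this:

def on_connection_finished(conn_id, adapter_id, success, failure_reason):
    # Assumed signature; confirm against the adapter that invokes the callback.
    if success:
        print("connection %d established" % conn_id)
    else:
        print("connection %d failed: %s" % (conn_id, failure_reason))

# Hypothetical call (manager construction not shown in this record):
# manager.begin_connection(1, "AA:BB:CC:DD:EE:FF", on_connection_finished, {}, timeout=10.0)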
|
iotile/coretools
|
transport_plugins/native_ble/iotile_transport_native_ble/connection_manager.py
|
ConnectionManager.begin_operation
|
def begin_operation(self, conn_or_internal_id, op_name, callback, timeout):
"""Begin an operation on a connection
Args:
conn_or_internal_id (string, int): Either an integer connection id or a string
internal_id
op_name (string): The name of the operation that we are starting (stored in
the connection's microstate)
            callback (callable): Callback to call when this operation either
succeeds or fails
timeout (float): How long to allow this connection attempt to proceed
without timing it out (in seconds)
"""
data = {
'id': conn_or_internal_id,
'callback': callback,
'operation_name': op_name
}
action = ConnectionAction('begin_operation', data, timeout=timeout, sync=False)
self._actions.put(action)
|
python
|
def begin_operation(self, conn_or_internal_id, op_name, callback, timeout):
"""Begin an operation on a connection
Args:
conn_or_internal_id (string, int): Either an integer connection id or a string
internal_id
op_name (string): The name of the operation that we are starting (stored in
the connection's microstate)
            callback (callable): Callback to call when this operation either
succeeds or fails
timeout (float): How long to allow this connection attempt to proceed
without timing it out (in seconds)
"""
data = {
'id': conn_or_internal_id,
'callback': callback,
'operation_name': op_name
}
action = ConnectionAction('begin_operation', data, timeout=timeout, sync=False)
self._actions.put(action)
|
[
"def",
"begin_operation",
"(",
"self",
",",
"conn_or_internal_id",
",",
"op_name",
",",
"callback",
",",
"timeout",
")",
":",
"data",
"=",
"{",
"'id'",
":",
"conn_or_internal_id",
",",
"'callback'",
":",
"callback",
",",
"'operation_name'",
":",
"op_name",
"}",
"action",
"=",
"ConnectionAction",
"(",
"'begin_operation'",
",",
"data",
",",
"timeout",
"=",
"timeout",
",",
"sync",
"=",
"False",
")",
"self",
".",
"_actions",
".",
"put",
"(",
"action",
")"
] |
Begin an operation on a connection
Args:
conn_or_internal_id (string, int): Either an integer connection id or a string
internal_id
op_name (string): The name of the operation that we are starting (stored in
the connection's microstate)
            callback (callable): Callback to call when this operation either
succeeds or fails
timeout (float): How long to allow this connection attempt to proceed
without timing it out (in seconds)
|
[
"Begin",
"an",
"operation",
"on",
"a",
"connection"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/native_ble/iotile_transport_native_ble/connection_manager.py#L611-L632
|
train
|
iotile/coretools
|
transport_plugins/native_ble/iotile_transport_native_ble/connection_manager.py
|
ConnectionManager._begin_operation_action
|
def _begin_operation_action(self, action):
"""Begin an attempted operation.
Args:
action (ConnectionAction): the action object describing what we are
operating on
"""
conn_key = action.data['id']
callback = action.data['callback']
if self._get_connection_state(conn_key) != self.Idle:
callback(conn_key, self.id, False, 'Cannot start operation, connection is not idle')
return
data = self._get_connection(conn_key)
data['state'] = self.InProgress
data['microstate'] = action.data['operation_name']
data['action'] = action
|
python
|
def _begin_operation_action(self, action):
"""Begin an attempted operation.
Args:
action (ConnectionAction): the action object describing what we are
operating on
"""
conn_key = action.data['id']
callback = action.data['callback']
if self._get_connection_state(conn_key) != self.Idle:
callback(conn_key, self.id, False, 'Cannot start operation, connection is not idle')
return
data = self._get_connection(conn_key)
data['state'] = self.InProgress
data['microstate'] = action.data['operation_name']
data['action'] = action
|
[
"def",
"_begin_operation_action",
"(",
"self",
",",
"action",
")",
":",
"conn_key",
"=",
"action",
".",
"data",
"[",
"'id'",
"]",
"callback",
"=",
"action",
".",
"data",
"[",
"'callback'",
"]",
"if",
"self",
".",
"_get_connection_state",
"(",
"conn_key",
")",
"!=",
"self",
".",
"Idle",
":",
"callback",
"(",
"conn_key",
",",
"self",
".",
"id",
",",
"False",
",",
"'Cannot start operation, connection is not idle'",
")",
"return",
"data",
"=",
"self",
".",
"_get_connection",
"(",
"conn_key",
")",
"data",
"[",
"'state'",
"]",
"=",
"self",
".",
"InProgress",
"data",
"[",
"'microstate'",
"]",
"=",
"action",
".",
"data",
"[",
"'operation_name'",
"]",
"data",
"[",
"'action'",
"]",
"=",
"action"
] |
Begin an attempted operation.
Args:
action (ConnectionAction): the action object describing what we are
operating on
|
[
"Begin",
"an",
"attempted",
"operation",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/native_ble/iotile_transport_native_ble/connection_manager.py#L634-L652
|
train
|
iotile/coretools
|
transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py
|
AsyncValidatingWSClient.allow_exception
|
def allow_exception(self, exc_class):
"""Allow raising this class of exceptions from commands.
When a command fails on the server side due to an exception, by
default it is turned into a string and raised on the client side as an
ExternalError. The original class name is sent but ignored. If you
would like to instead raise an instance of the same exception on the
client side, you can pass the exception class object to this method
and instances of that exception will be reraised.
The caveat is that the exception must be creatable with a single
string parameter and it should have a ``msg`` property.
Args:
exc_class (class): A class object with the exception that
we should allow to pass from server to client.
"""
name = exc_class.__name__
self._allowed_exceptions[name] = exc_class
|
python
|
def allow_exception(self, exc_class):
"""Allow raising this class of exceptions from commands.
When a command fails on the server side due to an exception, by
default it is turned into a string and raised on the client side as an
ExternalError. The original class name is sent but ignored. If you
would like to instead raise an instance of the same exception on the
client side, you can pass the exception class object to this method
and instances of that exception will be reraised.
The caveat is that the exception must be creatable with a single
string parameter and it should have a ``msg`` property.
Args:
exc_class (class): A class object with the exception that
we should allow to pass from server to client.
"""
name = exc_class.__name__
self._allowed_exceptions[name] = exc_class
|
[
"def",
"allow_exception",
"(",
"self",
",",
"exc_class",
")",
":",
"name",
"=",
"exc_class",
".",
"__name__",
"self",
".",
"_allowed_exceptions",
"[",
"name",
"]",
"=",
"exc_class"
] |
Allow raising this class of exceptions from commands.
When a command fails on the server side due to an exception, by
default it is turned into a string and raised on the client side as an
ExternalError. The original class name is sent but ignored. If you
would like to instead raise an instance of the same exception on the
client side, you can pass the exception class object to this method
and instances of that exception will be reraised.
The caveat is that the exception must be creatable with a single
string parameter and it should have a ``msg`` property.
Args:
exc_class (class): A class object with the exception that
we should allow to pass from server to client.
|
[
"Allow",
"raising",
"this",
"class",
"of",
"exceptions",
"from",
"commands",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py#L53-L72
|
train
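A standalone sketch of the re-raise-by-name mechanism this enables, assuming server failures arrive as a (class name, message) pair; the dictionary and helper names are illustrative, not the client's real internals.

class DeviceBusyError(Exception):
    def __init__(self, msg):
        super().__init__(msg)
        self.msg = msg

allowed_exceptions = {}

def allow_exception(exc_class):
    allowed_exceptions[exc_class.__name__] = exc_class

def raise_remote(name, msg):
    # Re-raise the registered class if we know it, otherwise fall back to a generic error.
    exc_class = allowed_exceptions.get(name)
    if exc_class is not None:
        raise exc_class(msg)
    raise RuntimeError("%s: %s" % (name, msg))

allow_exception(DeviceBusyError)
try:
    raise_remote("DeviceBusyError", "device is busy")
except DeviceBusyError as err:
    assert err.msg == "device is busy"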
|
iotile/coretools
|
transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py
|
AsyncValidatingWSClient.start
|
async def start(self, name="websocket_client"):
"""Connect to the websocket server.
This method will spawn a background task in the designated event loop
that will run until stop() is called. You can control the name of the
background task for debugging purposes using the name parameter. The
        name is not used in any way except for debug logging statements.
Args:
name (str): Optional name for the background task.
"""
self._con = await websockets.connect(self.url)
self._connection_task = self._loop.add_task(self._manage_connection(), name=name)
|
python
|
async def start(self, name="websocket_client"):
"""Connect to the websocket server.
This method will spawn a background task in the designated event loop
that will run until stop() is called. You can control the name of the
background task for debugging purposes using the name parameter. The
        name is not used in any way except for debug logging statements.
Args:
name (str): Optional name for the background task.
"""
self._con = await websockets.connect(self.url)
self._connection_task = self._loop.add_task(self._manage_connection(), name=name)
|
[
"async",
"def",
"start",
"(",
"self",
",",
"name",
"=",
"\"websocket_client\"",
")",
":",
"self",
".",
"_con",
"=",
"await",
"websockets",
".",
"connect",
"(",
"self",
".",
"url",
")",
"self",
".",
"_connection_task",
"=",
"self",
".",
"_loop",
".",
"add_task",
"(",
"self",
".",
"_manage_connection",
"(",
")",
",",
"name",
"=",
"name",
")"
] |
Connect to the websocket server.
This method will spawn a background task in the designated event loop
that will run until stop() is called. You can control the name of the
background task for debugging purposes using the name parameter. The
        name is not used in any way except for debug logging statements.
Args:
name (str): Optional name for the background task.
|
[
"Connect",
"to",
"the",
"websocket",
"server",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py#L74-L87
|
train
|
iotile/coretools
|
transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py
|
AsyncValidatingWSClient.stop
|
async def stop(self):
"""Stop this websocket client and disconnect from the server.
This method is idempotent and may be called multiple times. If called
when there is no active connection, it will simply return.
"""
if self._connection_task is None:
return
try:
await self._connection_task.stop()
finally:
self._con = None
self._connection_task = None
self._manager.clear()
|
python
|
async def stop(self):
"""Stop this websocket client and disconnect from the server.
This method is idempotent and may be called multiple times. If called
when there is no active connection, it will simply return.
"""
if self._connection_task is None:
return
try:
await self._connection_task.stop()
finally:
self._con = None
self._connection_task = None
self._manager.clear()
|
[
"async",
"def",
"stop",
"(",
"self",
")",
":",
"if",
"self",
".",
"_connection_task",
"is",
"None",
":",
"return",
"try",
":",
"await",
"self",
".",
"_connection_task",
".",
"stop",
"(",
")",
"finally",
":",
"self",
".",
"_con",
"=",
"None",
"self",
".",
"_connection_task",
"=",
"None",
"self",
".",
"_manager",
".",
"clear",
"(",
")"
] |
Stop this websocket client and disconnect from the server.
This method is idempotent and may be called multiple times. If called
when there is no active connection, it will simply return.
|
[
"Stop",
"this",
"websocket",
"client",
"and",
"disconnect",
"from",
"the",
"server",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py#L89-L104
|
train
|
iotile/coretools
|
transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py
|
AsyncValidatingWSClient.send_command
|
async def send_command(self, command, args, validator, timeout=10.0):
"""Send a command and synchronously wait for a single response.
Args:
command (string): The command name
args (dict): Optional arguments.
validator (Verifier): A SchemaVerifier to verify the response
payload.
timeout (float): The maximum time to wait for a response.
Defaults to 10 seconds.
Returns:
dict: The response payload
Raises:
ExternalError: If the server is not connected or the command
fails.
asyncio.TimeoutError: If the command times out.
ValidationError: If the response payload does not match the
given validator.
"""
if self._con is None:
raise ExternalError("No websock connection established")
cmd_uuid = str(uuid.uuid4())
msg = dict(type='command', operation=command, uuid=cmd_uuid,
payload=args)
packed = pack(msg)
# Note: register future before sending to avoid race conditions
response_future = self._manager.wait_for(type="response", uuid=cmd_uuid,
timeout=timeout)
await self._con.send(packed)
response = await response_future
if response.get('success') is False:
self._raise_error(command, response)
if validator is None:
return response.get('payload')
return validator.verify(response.get('payload'))
|
python
|
async def send_command(self, command, args, validator, timeout=10.0):
"""Send a command and synchronously wait for a single response.
Args:
command (string): The command name
args (dict): Optional arguments.
validator (Verifier): A SchemaVerifier to verify the response
payload.
timeout (float): The maximum time to wait for a response.
Defaults to 10 seconds.
Returns:
dict: The response payload
Raises:
ExternalError: If the server is not connected or the command
fails.
asyncio.TimeoutError: If the command times out.
ValidationError: If the response payload does not match the
given validator.
"""
if self._con is None:
raise ExternalError("No websock connection established")
cmd_uuid = str(uuid.uuid4())
msg = dict(type='command', operation=command, uuid=cmd_uuid,
payload=args)
packed = pack(msg)
# Note: register future before sending to avoid race conditions
response_future = self._manager.wait_for(type="response", uuid=cmd_uuid,
timeout=timeout)
await self._con.send(packed)
response = await response_future
if response.get('success') is False:
self._raise_error(command, response)
if validator is None:
return response.get('payload')
return validator.verify(response.get('payload'))
|
[
"async",
"def",
"send_command",
"(",
"self",
",",
"command",
",",
"args",
",",
"validator",
",",
"timeout",
"=",
"10.0",
")",
":",
"if",
"self",
".",
"_con",
"is",
"None",
":",
"raise",
"ExternalError",
"(",
"\"No websock connection established\"",
")",
"cmd_uuid",
"=",
"str",
"(",
"uuid",
".",
"uuid4",
"(",
")",
")",
"msg",
"=",
"dict",
"(",
"type",
"=",
"'command'",
",",
"operation",
"=",
"command",
",",
"uuid",
"=",
"cmd_uuid",
",",
"payload",
"=",
"args",
")",
"packed",
"=",
"pack",
"(",
"msg",
")",
"# Note: register future before sending to avoid race conditions",
"response_future",
"=",
"self",
".",
"_manager",
".",
"wait_for",
"(",
"type",
"=",
"\"response\"",
",",
"uuid",
"=",
"cmd_uuid",
",",
"timeout",
"=",
"timeout",
")",
"await",
"self",
".",
"_con",
".",
"send",
"(",
"packed",
")",
"response",
"=",
"await",
"response_future",
"if",
"response",
".",
"get",
"(",
"'success'",
")",
"is",
"False",
":",
"self",
".",
"_raise_error",
"(",
"command",
",",
"response",
")",
"if",
"validator",
"is",
"None",
":",
"return",
"response",
".",
"get",
"(",
"'payload'",
")",
"return",
"validator",
".",
"verify",
"(",
"response",
".",
"get",
"(",
"'payload'",
")",
")"
] |
Send a command and synchronously wait for a single response.
Args:
command (string): The command name
args (dict): Optional arguments.
validator (Verifier): A SchemaVerifier to verify the response
payload.
timeout (float): The maximum time to wait for a response.
Defaults to 10 seconds.
Returns:
dict: The response payload
Raises:
ExternalError: If the server is not connected or the command
fails.
asyncio.TimeoutError: If the command times out.
ValidationError: If the response payload does not match the
given validator.
|
[
"Send",
"a",
"command",
"and",
"synchronously",
"wait",
"for",
"a",
"single",
"response",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py#L106-L151
|
train
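The comment about registering the response future before sending is the key ordering detail; the toy below shows why it matters, using plain asyncio futures (all names are illustrative).

import asyncio

async def demo():
    waiters = {}

    def register(cmd_uuid):
        fut = asyncio.get_running_loop().create_future()
        waiters[cmd_uuid] = fut
        return fut

    def deliver(cmd_uuid, payload):
        waiters.pop(cmd_uuid).set_result(payload)

    response_future = register("cmd-1")   # register the waiter first ...
    deliver("cmd-1", {"success": True})   # ... so even an instant reply is not lost
    assert await response_future == {"success": True}

asyncio.run(demo())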
|
iotile/coretools
|
transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py
|
AsyncValidatingWSClient._manage_connection
|
async def _manage_connection(self):
"""Internal coroutine for managing the client connection."""
try:
while True:
message = await self._con.recv()
try:
unpacked = unpack(message)
except Exception: # pylint:disable=broad-except;This is a background worker
self._logger.exception("Corrupt message received")
continue
if not VALID_SERVER_MESSAGE.matches(unpacked):
self._logger.warning("Dropping invalid message from server: %s", unpacked)
continue
                # Don't block until all callbacks have finished since one of
# those callbacks may call self.send_command, which would deadlock
# since it couldn't get the response until it had already finished.
if not await self._manager.process_message(unpacked, wait=False):
self._logger.warning("No handler found for received message, message=%s", unpacked)
except asyncio.CancelledError:
self._logger.info("Closing connection to server due to stop()")
finally:
await self._manager.process_message(dict(type='event', name=self.DISCONNECT_EVENT, payload=None))
await self._con.close()
|
python
|
async def _manage_connection(self):
"""Internal coroutine for managing the client connection."""
try:
while True:
message = await self._con.recv()
try:
unpacked = unpack(message)
except Exception: # pylint:disable=broad-except;This is a background worker
self._logger.exception("Corrupt message received")
continue
if not VALID_SERVER_MESSAGE.matches(unpacked):
self._logger.warning("Dropping invalid message from server: %s", unpacked)
continue
                # Don't block until all callbacks have finished since one of
# those callbacks may call self.send_command, which would deadlock
# since it couldn't get the response until it had already finished.
if not await self._manager.process_message(unpacked, wait=False):
self._logger.warning("No handler found for received message, message=%s", unpacked)
except asyncio.CancelledError:
self._logger.info("Closing connection to server due to stop()")
finally:
await self._manager.process_message(dict(type='event', name=self.DISCONNECT_EVENT, payload=None))
await self._con.close()
|
[
"async",
"def",
"_manage_connection",
"(",
"self",
")",
":",
"try",
":",
"while",
"True",
":",
"message",
"=",
"await",
"self",
".",
"_con",
".",
"recv",
"(",
")",
"try",
":",
"unpacked",
"=",
"unpack",
"(",
"message",
")",
"except",
"Exception",
":",
"# pylint:disable=broad-except;This is a background worker",
"self",
".",
"_logger",
".",
"exception",
"(",
"\"Corrupt message received\"",
")",
"continue",
"if",
"not",
"VALID_SERVER_MESSAGE",
".",
"matches",
"(",
"unpacked",
")",
":",
"self",
".",
"_logger",
".",
"warning",
"(",
"\"Dropping invalid message from server: %s\"",
",",
"unpacked",
")",
"continue",
"# Don't block until all callbacks have finished since once of",
"# those callbacks may call self.send_command, which would deadlock",
"# since it couldn't get the response until it had already finished.",
"if",
"not",
"await",
"self",
".",
"_manager",
".",
"process_message",
"(",
"unpacked",
",",
"wait",
"=",
"False",
")",
":",
"self",
".",
"_logger",
".",
"warning",
"(",
"\"No handler found for received message, message=%s\"",
",",
"unpacked",
")",
"except",
"asyncio",
".",
"CancelledError",
":",
"self",
".",
"_logger",
".",
"info",
"(",
"\"Closing connection to server due to stop()\"",
")",
"finally",
":",
"await",
"self",
".",
"_manager",
".",
"process_message",
"(",
"dict",
"(",
"type",
"=",
"'event'",
",",
"name",
"=",
"self",
".",
"DISCONNECT_EVENT",
",",
"payload",
"=",
"None",
")",
")",
"await",
"self",
".",
"_con",
".",
"close",
"(",
")"
] |
Internal coroutine for managing the client connection.
|
[
"Internal",
"coroutine",
"for",
"managing",
"the",
"client",
"connection",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py#L164-L190
|
train
|
iotile/coretools
|
transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py
|
AsyncValidatingWSClient.register_event
|
def register_event(self, name, callback, validator):
"""Register a callback to receive events.
Every event with the matching name will have its payload validated
using validator and then will be passed to callback if validation
succeeds.
Callback must be a normal callback function, coroutines are not
allowed. If you need to run a coroutine you are free to schedule it
from your callback.
Args:
name (str): The name of the event that we are listening
for
callback (callable): The function that should be called
when a message that matches validator is received.
validator (Verifier): A schema verifier that will
validate a received message uniquely
"""
async def _validate_and_call(message):
payload = message.get('payload')
try:
payload = validator.verify(payload)
except ValidationError:
self._logger.warning("Dropping invalid payload for event %s, payload=%s",
name, payload)
return
try:
result = callback(payload)
if inspect.isawaitable(result):
await result
except: # pylint:disable=bare-except;This is a background logging routine
self._logger.error("Error calling callback for event %s, payload=%s",
name, payload, exc_info=True)
self._manager.every_match(_validate_and_call, type="event", name=name)
|
python
|
def register_event(self, name, callback, validator):
"""Register a callback to receive events.
Every event with the matching name will have its payload validated
using validator and then will be passed to callback if validation
succeeds.
Callback must be a normal callback function, coroutines are not
allowed. If you need to run a coroutine you are free to schedule it
from your callback.
Args:
name (str): The name of the event that we are listening
for
callback (callable): The function that should be called
when a message that matches validator is received.
validator (Verifier): A schema verifier that will
validate a received message uniquely
"""
async def _validate_and_call(message):
payload = message.get('payload')
try:
payload = validator.verify(payload)
except ValidationError:
self._logger.warning("Dropping invalid payload for event %s, payload=%s",
name, payload)
return
try:
result = callback(payload)
if inspect.isawaitable(result):
await result
except: # pylint:disable=bare-except;This is a background logging routine
self._logger.error("Error calling callback for event %s, payload=%s",
name, payload, exc_info=True)
self._manager.every_match(_validate_and_call, type="event", name=name)
|
[
"def",
"register_event",
"(",
"self",
",",
"name",
",",
"callback",
",",
"validator",
")",
":",
"async",
"def",
"_validate_and_call",
"(",
"message",
")",
":",
"payload",
"=",
"message",
".",
"get",
"(",
"'payload'",
")",
"try",
":",
"payload",
"=",
"validator",
".",
"verify",
"(",
"payload",
")",
"except",
"ValidationError",
":",
"self",
".",
"_logger",
".",
"warning",
"(",
"\"Dropping invalid payload for event %s, payload=%s\"",
",",
"name",
",",
"payload",
")",
"return",
"try",
":",
"result",
"=",
"callback",
"(",
"payload",
")",
"if",
"inspect",
".",
"isawaitable",
"(",
"result",
")",
":",
"await",
"result",
"except",
":",
"# pylint:disable=bare-except;This is a background logging routine",
"self",
".",
"_logger",
".",
"error",
"(",
"\"Error calling callback for event %s, payload=%s\"",
",",
"name",
",",
"payload",
",",
"exc_info",
"=",
"True",
")",
"self",
".",
"_manager",
".",
"every_match",
"(",
"_validate_and_call",
",",
"type",
"=",
"\"event\"",
",",
"name",
"=",
"name",
")"
] |
Register a callback to receive events.
Every event with the matching name will have its payload validated
using validator and then will be passed to callback if validation
succeeds.
Callback must be a normal callback function, coroutines are not
allowed. If you need to run a coroutine you are free to schedule it
from your callback.
Args:
name (str): The name of the event that we are listening
for
callback (callable): The function that should be called
when a message that matches validator is received.
validator (Verifier): A schema verifier that will
validate a received message uniquely
|
[
"Register",
"a",
"callback",
"to",
"receive",
"events",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py#L192-L230
|
train
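A reduced version of the wrapper built above, with the SchemaVerifier replaced by a plain callable that raises ValueError on bad input; only the validate-then-call-then-maybe-await shape is being illustrated.

import asyncio
import inspect

def make_handler(callback, validate):
    async def validate_and_call(message):
        try:
            payload = validate(message.get('payload'))
        except ValueError:
            return  # drop invalid payloads, as the real wrapper does
        result = callback(payload)
        if inspect.isawaitable(result):
            await result
    return validate_and_call

handler = make_handler(print, lambda payload: payload)
asyncio.run(handler({'payload': 'device connected'}))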
|
iotile/coretools
|
transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py
|
AsyncValidatingWSClient.post_command
|
def post_command(self, command, args):
"""Post a command asynchronously and don't wait for a response.
There is no notification of any error that could happen during
command execution. A log message will be generated if an error
occurred. The command's response is discarded.
        This method is thread-safe and may be called from inside or outside
of the background event loop. If there is no websockets connection,
no error will be raised (though an error will be logged).
Args:
command (string): The command name
args (dict): Optional arguments
"""
self._loop.log_coroutine(self.send_command(command, args, Verifier()))
|
python
|
def post_command(self, command, args):
"""Post a command asynchronously and don't wait for a response.
There is no notification of any error that could happen during
command execution. A log message will be generated if an error
occurred. The command's response is discarded.
        This method is thread-safe and may be called from inside or outside
of the background event loop. If there is no websockets connection,
no error will be raised (though an error will be logged).
Args:
command (string): The command name
args (dict): Optional arguments
"""
self._loop.log_coroutine(self.send_command(command, args, Verifier()))
|
[
"def",
"post_command",
"(",
"self",
",",
"command",
",",
"args",
")",
":",
"self",
".",
"_loop",
".",
"log_coroutine",
"(",
"self",
".",
"send_command",
"(",
"command",
",",
"args",
",",
"Verifier",
"(",
")",
")",
")"
] |
Post a command asynchronously and don't wait for a response.
There is no notification of any error that could happen during
command execution. A log message will be generated if an error
occurred. The command's response is discarded.
        This method is thread-safe and may be called from inside or outside
of the background event loop. If there is no websockets connection,
no error will be raised (though an error will be logged).
Args:
command (string): The command name
args (dict): Optional arguments
|
[
"Post",
"a",
"command",
"asynchronously",
"and",
"don",
"t",
"wait",
"for",
"a",
"response",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py#L232-L248
|
train
|
iotile/coretools
|
iotilesensorgraph/iotile/sg/processors.py
|
copy_all_a
|
def copy_all_a(input_a, *other_inputs, **kwargs):
"""Copy all readings in input a into the output.
All other inputs are skipped so that after this function runs there are no
readings left in any of the input walkers when the function finishes, even
if it generated no output readings.
Returns:
list(IOTileReading)
"""
output = []
while input_a.count() > 0:
output.append(input_a.pop())
for input_x in other_inputs:
input_x.skip_all()
return output
|
python
|
def copy_all_a(input_a, *other_inputs, **kwargs):
"""Copy all readings in input a into the output.
All other inputs are skipped so that after this function runs there are no
readings left in any of the input walkers when the function finishes, even
if it generated no output readings.
Returns:
list(IOTileReading)
"""
output = []
while input_a.count() > 0:
output.append(input_a.pop())
for input_x in other_inputs:
input_x.skip_all()
return output
|
[
"def",
"copy_all_a",
"(",
"input_a",
",",
"*",
"other_inputs",
",",
"*",
"*",
"kwargs",
")",
":",
"output",
"=",
"[",
"]",
"while",
"input_a",
".",
"count",
"(",
")",
">",
"0",
":",
"output",
".",
"append",
"(",
"input_a",
".",
"pop",
"(",
")",
")",
"for",
"input_x",
"in",
"other_inputs",
":",
"input_x",
".",
"skip_all",
"(",
")",
"return",
"output"
] |
Copy all readings in input a into the output.
All other inputs are skipped so that after this function runs there are no
readings left in any of the input walkers when the function finishes, even
if it generated no output readings.
Returns:
list(IOTileReading)
|
[
"Copy",
"all",
"readings",
"in",
"input",
"a",
"into",
"the",
"output",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/processors.py#L13-L31
|
train
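The walker API these processors rely on (count/pop/skip_all) can be stubbed with a list for experimentation; the class below is an assumption for illustration, not the iotilesensorgraph walker.

class ListWalker:
    def __init__(self, readings):
        self._readings = list(readings)

    def count(self):
        return len(self._readings)

    def pop(self):
        return self._readings.pop(0)

    def skip_all(self):
        self._readings = []

input_a, input_b = ListWalker([1, 2, 3]), ListWalker([9])
copied = []
while input_a.count() > 0:       # same loop as copy_all_a
    copied.append(input_a.pop())
input_b.skip_all()
assert copied == [1, 2, 3] and input_b.count() == 0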
|
iotile/coretools
|
iotilesensorgraph/iotile/sg/processors.py
|
copy_count_a
|
def copy_count_a(input_a, *other_inputs, **kwargs):
"""Copy the latest reading from input a into the output.
    All other inputs are skipped so that after this function
runs there are no readings left in any of the input walkers
even if no output is generated.
Returns:
list(IOTileReading)
"""
count = input_a.count()
    input_a.skip_all()
for input_x in other_inputs:
input_x.skip_all()
return [IOTileReading(0, 0, count)]
|
python
|
def copy_count_a(input_a, *other_inputs, **kwargs):
"""Copy the latest reading from input a into the output.
    All other inputs are skipped so that after this function
runs there are no readings left in any of the input walkers
even if no output is generated.
Returns:
list(IOTileReading)
"""
count = input_a.count()
    input_a.skip_all()
for input_x in other_inputs:
input_x.skip_all()
return [IOTileReading(0, 0, count)]
|
[
"def",
"copy_count_a",
"(",
"input_a",
",",
"*",
"other_inputs",
",",
"*",
"*",
"kwargs",
")",
":",
"count",
"=",
"input_a",
".",
"count",
"(",
")",
"input_a",
".",
"skip_all",
"(",
")",
"for",
"input_x",
"in",
"other_inputs",
":",
"input_x",
".",
"skip_all",
"(",
")",
"return",
"[",
"IOTileReading",
"(",
"0",
",",
"0",
",",
"count",
")",
"]"
] |
Copy the latest reading from input a into the output.
    All other inputs are skipped so that after this function
runs there are no readings left in any of the input walkers
even if no output is generated.
Returns:
list(IOTileReading)
|
[
"Copy",
"the",
"latest",
"reading",
"from",
"input",
"a",
"into",
"the",
"output",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/processors.py#L63-L81
|
train
|
iotile/coretools
|
iotilesensorgraph/iotile/sg/processors.py
|
call_rpc
|
def call_rpc(*inputs, **kwargs):
"""Call an RPC based on the encoded value read from input b.
The response of the RPC must be a 4 byte value that is used as
the output of this call. The encoded RPC must be a 32 bit value
encoded as "BBH":
B: ignored, should be 0
B: the address of the tile that we should call
H: The id of the RPC to call
All other readings are then skipped so that there are no
readings in any input queue when this function returns
Returns:
list(IOTileReading)
"""
rpc_executor = kwargs['rpc_executor']
output = []
try:
value = inputs[1].pop()
addr = value.value >> 16
rpc_id = value.value & 0xFFFF
reading_value = rpc_executor.rpc(addr, rpc_id)
output.append(IOTileReading(0, 0, reading_value))
except (HardwareError, StreamEmptyError):
pass
for input_x in inputs:
input_x.skip_all()
return output
|
python
|
def call_rpc(*inputs, **kwargs):
"""Call an RPC based on the encoded value read from input b.
The response of the RPC must be a 4 byte value that is used as
the output of this call. The encoded RPC must be a 32 bit value
encoded as "BBH":
B: ignored, should be 0
B: the address of the tile that we should call
H: The id of the RPC to call
All other readings are then skipped so that there are no
readings in any input queue when this function returns
Returns:
list(IOTileReading)
"""
rpc_executor = kwargs['rpc_executor']
output = []
try:
value = inputs[1].pop()
addr = value.value >> 16
rpc_id = value.value & 0xFFFF
reading_value = rpc_executor.rpc(addr, rpc_id)
output.append(IOTileReading(0, 0, reading_value))
except (HardwareError, StreamEmptyError):
pass
for input_x in inputs:
input_x.skip_all()
return output
|
[
"def",
"call_rpc",
"(",
"*",
"inputs",
",",
"*",
"*",
"kwargs",
")",
":",
"rpc_executor",
"=",
"kwargs",
"[",
"'rpc_executor'",
"]",
"output",
"=",
"[",
"]",
"try",
":",
"value",
"=",
"inputs",
"[",
"1",
"]",
".",
"pop",
"(",
")",
"addr",
"=",
"value",
".",
"value",
">>",
"16",
"rpc_id",
"=",
"value",
".",
"value",
"&",
"0xFFFF",
"reading_value",
"=",
"rpc_executor",
".",
"rpc",
"(",
"addr",
",",
"rpc_id",
")",
"output",
".",
"append",
"(",
"IOTileReading",
"(",
"0",
",",
"0",
",",
"reading_value",
")",
")",
"except",
"(",
"HardwareError",
",",
"StreamEmptyError",
")",
":",
"pass",
"for",
"input_x",
"in",
"inputs",
":",
"input_x",
".",
"skip_all",
"(",
")",
"return",
"output"
] |
Call an RPC based on the encoded value read from input b.
The response of the RPC must be a 4 byte value that is used as
the output of this call. The encoded RPC must be a 32 bit value
encoded as "BBH":
B: ignored, should be 0
B: the address of the tile that we should call
H: The id of the RPC to call
All other readings are then skipped so that there are no
readings in any input queue when this function returns
Returns:
list(IOTileReading)
|
[
"Call",
"an",
"RPC",
"based",
"on",
"the",
"encoded",
"value",
"read",
"from",
"input",
"b",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/processors.py#L84-L118
|
train
|
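A brief aside on the "BBH" encoding that call_rpc unpacks in the record above: the sketch below shows one way a caller might build the 32-bit value so that it matches the shifts in the code. The tile address, RPC id, and the big-endian struct reading are illustrative assumptions, not taken from the record.
import struct

# Hypothetical tile address and RPC id, chosen only for illustration.
address, rpc_id = 11, 0x8000

# Packing that mirrors the shifts in call_rpc: the address sits in the high
# 16 bits (below the ignored byte) and the RPC id in the low 16 bits.
encoded = (address << 16) | rpc_id
assert encoded >> 16 == address
assert encoded & 0xFFFF == rpc_id

# The same value viewed through the docstring's "BBH" layout, assuming a
# big-endian byte order (an assumption; the record does not state it).
assert encoded == struct.unpack(">L", struct.pack(">BBH", 0, address, rpc_id))[0]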
iotile/coretools
|
iotilesensorgraph/iotile/sg/processors.py
|
trigger_streamer
|
def trigger_streamer(*inputs, **kwargs):
"""Trigger a streamer based on the index read from input b.
Returns:
list(IOTileReading)
"""
streamer_marker = kwargs['mark_streamer']
try:
reading = inputs[1].pop()
except StreamEmptyError:
return []
finally:
for input_x in inputs:
input_x.skip_all()
try:
streamer_marker(reading.value)
except ArgumentError:
return []
return [IOTileReading(0, 0, 0)]
|
python
|
def trigger_streamer(*inputs, **kwargs):
"""Trigger a streamer based on the index read from input b.
Returns:
list(IOTileReading)
"""
streamer_marker = kwargs['mark_streamer']
try:
reading = inputs[1].pop()
except StreamEmptyError:
return []
finally:
for input_x in inputs:
input_x.skip_all()
try:
streamer_marker(reading.value)
except ArgumentError:
return []
return [IOTileReading(0, 0, 0)]
|
[
"def",
"trigger_streamer",
"(",
"*",
"inputs",
",",
"*",
"*",
"kwargs",
")",
":",
"streamer_marker",
"=",
"kwargs",
"[",
"'mark_streamer'",
"]",
"try",
":",
"reading",
"=",
"inputs",
"[",
"1",
"]",
".",
"pop",
"(",
")",
"except",
"StreamEmptyError",
":",
"return",
"[",
"]",
"finally",
":",
"for",
"input_x",
"in",
"inputs",
":",
"input_x",
".",
"skip_all",
"(",
")",
"try",
":",
"streamer_marker",
"(",
"reading",
".",
"value",
")",
"except",
"ArgumentError",
":",
"return",
"[",
"]",
"return",
"[",
"IOTileReading",
"(",
"0",
",",
"0",
",",
"0",
")",
"]"
] |
Trigger a streamer based on the index read from input b.
Returns:
list(IOTileReading)
|
[
"Trigger",
"a",
"streamer",
"based",
"on",
"the",
"index",
"read",
"from",
"input",
"b",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/processors.py#L121-L143
|
train
|
iotile/coretools
|
iotilesensorgraph/iotile/sg/processors.py
|
subtract_afromb
|
def subtract_afromb(*inputs, **kwargs):
"""Subtract stream a from stream b.
Returns:
list(IOTileReading)
"""
try:
value_a = inputs[0].pop()
value_b = inputs[1].pop()
return [IOTileReading(0, 0, value_b.value - value_a.value)]
except StreamEmptyError:
return []
|
python
|
def subtract_afromb(*inputs, **kwargs):
"""Subtract stream a from stream b.
Returns:
list(IOTileReading)
"""
try:
value_a = inputs[0].pop()
value_b = inputs[1].pop()
return [IOTileReading(0, 0, value_b.value - value_a.value)]
except StreamEmptyError:
return []
|
[
"def",
"subtract_afromb",
"(",
"*",
"inputs",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"value_a",
"=",
"inputs",
"[",
"0",
"]",
".",
"pop",
"(",
")",
"value_b",
"=",
"inputs",
"[",
"1",
"]",
".",
"pop",
"(",
")",
"return",
"[",
"IOTileReading",
"(",
"0",
",",
"0",
",",
"value_b",
".",
"value",
"-",
"value_a",
".",
"value",
")",
"]",
"except",
"StreamEmptyError",
":",
"return",
"[",
"]"
] |
Subtract stream a from stream b.
Returns:
list(IOTileReading)
|
[
"Subtract",
"stream",
"a",
"from",
"stream",
"b",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/processors.py#L146-L159
|
train
|
iotile/coretools
|
iotileemulate/iotile/emulate/virtual/emulation_mixin.py
|
_clean_intenum
|
def _clean_intenum(obj):
"""Remove all IntEnum classes from a map."""
if isinstance(obj, dict):
for key, value in obj.items():
if isinstance(value, IntEnum):
obj[key] = value.value
elif isinstance(value, (dict, list)):
obj[key] = _clean_intenum(value)
elif isinstance(obj, list):
for i, value in enumerate(obj):
if isinstance(value, IntEnum):
obj[i] = value.value
elif isinstance(value, (dict, list)):
obj[i] = _clean_intenum(value)
return obj
|
python
|
def _clean_intenum(obj):
"""Remove all IntEnum classes from a map."""
if isinstance(obj, dict):
for key, value in obj.items():
if isinstance(value, IntEnum):
obj[key] = value.value
elif isinstance(value, (dict, list)):
obj[key] = _clean_intenum(value)
elif isinstance(obj, list):
for i, value in enumerate(obj):
if isinstance(value, IntEnum):
obj[i] = value.value
elif isinstance(value, (dict, list)):
obj[i] = _clean_intenum(value)
return obj
|
[
"def",
"_clean_intenum",
"(",
"obj",
")",
":",
"if",
"isinstance",
"(",
"obj",
",",
"dict",
")",
":",
"for",
"key",
",",
"value",
"in",
"obj",
".",
"items",
"(",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"IntEnum",
")",
":",
"obj",
"[",
"key",
"]",
"=",
"value",
".",
"value",
"elif",
"isinstance",
"(",
"value",
",",
"(",
"dict",
",",
"list",
")",
")",
":",
"obj",
"[",
"key",
"]",
"=",
"_clean_intenum",
"(",
"value",
")",
"elif",
"isinstance",
"(",
"obj",
",",
"list",
")",
":",
"for",
"i",
",",
"value",
"in",
"enumerate",
"(",
"obj",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"IntEnum",
")",
":",
"obj",
"[",
"i",
"]",
"=",
"value",
".",
"value",
"elif",
"isinstance",
"(",
"value",
",",
"(",
"dict",
",",
"list",
")",
")",
":",
"obj",
"[",
"i",
"]",
"=",
"_clean_intenum",
"(",
"value",
")",
"return",
"obj"
] |
Remove all IntEnum classes from a map.
|
[
"Remove",
"all",
"IntEnum",
"classes",
"from",
"a",
"map",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/virtual/emulation_mixin.py#L177-L193
|
train
|
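As a usage sketch for the _clean_intenum helper in the record above: the TileState enum is invented, and the import path is inferred from the record's file path (iotile.emulate.virtual.emulation_mixin), so both are assumptions rather than guarantees.
import json
from enum import IntEnum

# Import path inferred from the record's file path; adjust if the package
# layout differs in practice.
from iotile.emulate.virtual.emulation_mixin import _clean_intenum

class TileState(IntEnum):  # hypothetical enum, purely for illustration
    IDLE = 0
    RUNNING = 1

state = {"tile": {"status": TileState.RUNNING}, "history": [TileState.IDLE]}

# The helper walks nested dicts and lists, replacing each IntEnum member with
# its plain integer value so json.dump also works with the enum34 backport.
cleaned = _clean_intenum(state)
print(json.dumps(cleaned))  # {"tile": {"status": 1}, "history": [0]}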
iotile/coretools
|
iotileemulate/iotile/emulate/virtual/emulation_mixin.py
|
EmulationMixin._track_change
|
def _track_change(self, name, value, formatter=None):
"""Track that a change happened.
This function is only needed for manually recording changes that are
not captured by changes to properties of this object that are tracked
automatically. Classes that inherit from `emulation_mixin` should
use this function to record interesting changes in their internal
state or events that happen.
The `value` parameter that you pass here should be a native python
object best representing what the value of the property that changed
is. When saved to disk, it will be converted to a string using:
`str(value)`. If you do not like the string that would result from
such a call, you can pass a custom formatter that will be called as
`formatter(value)` and must return a string.
Args:
name (str): The name of the property that changed.
value (object): The new value of the property.
formatter (callable): Optional function to convert value to a
string. This function will only be called if track_changes()
is enabled and `name` is on the whitelist for properties that
should be tracked. If `formatter` is not passed or is None,
it will default to `str`
"""
self._emulation_log.track_change(self._emulation_address, name, value, formatter)
|
python
|
def _track_change(self, name, value, formatter=None):
"""Track that a change happened.
This function is only needed for manually recording changes that are
not captured by changes to properties of this object that are tracked
automatically. Classes that inherit from `emulation_mixin` should
use this function to record interesting changes in their internal
state or events that happen.
The `value` parameter that you pass here should be a native python
object best representing what the value of the property that changed
is. When saved to disk, it will be converted to a string using:
`str(value)`. If you do not like the string that would result from
such a call, you can pass a custom formatter that will be called as
`formatter(value)` and must return a string.
Args:
name (str): The name of the property that changed.
value (object): The new value of the property.
formatter (callable): Optional function to convert value to a
string. This function will only be called if track_changes()
is enabled and `name` is on the whitelist for properties that
should be tracked. If `formatter` is not passed or is None,
it will default to `str`
"""
self._emulation_log.track_change(self._emulation_address, name, value, formatter)
|
[
"def",
"_track_change",
"(",
"self",
",",
"name",
",",
"value",
",",
"formatter",
"=",
"None",
")",
":",
"self",
".",
"_emulation_log",
".",
"track_change",
"(",
"self",
".",
"_emulation_address",
",",
"name",
",",
"value",
",",
"formatter",
")"
] |
Track that a change happened.
This function is only needed for manually recording changes that are
not captured by changes to properties of this object that are tracked
automatically. Classes that inherit from `emulation_mixin` should
use this function to record interesting changes in their internal
state or events that happen.
The `value` parameter that you pass here should be a native python
object best representing what the value of the property that changed
is. When saved to disk, it will be converted to a string using:
`str(value)`. If you do not like the string that would result from
such a call, you can pass a custom formatter that will be called as
`formatter(value)` and must return a string.
Args:
name (str): The name of the property that changed.
value (object): The new value of the property.
formatter (callable): Optional function to convert value to a
string. This function will only be called if track_changes()
is enabled and `name` is on the whitelist for properties that
should be tracked. If `formatter` is not passed or is None,
it will default to `str`
|
[
"Track",
"that",
"a",
"change",
"happened",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/virtual/emulation_mixin.py#L50-L76
|
train
|
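To show how a subclass might call _track_change with a custom formatter, here is a self-contained sketch; the stub log, the property name, and the hex formatter are invented for illustration and stand in for the real EmulationMixin wiring.
# Minimal stand-ins so the example runs without the real emulation log.
class _StubLog:
    def track_change(self, address, name, value, formatter):
        rendered = formatter(value) if formatter is not None else str(value)
        print("address %d: %s -> %s" % (address, name, rendered))

class EmulatedSensor:
    _emulation_address = 8
    _emulation_log = _StubLog()

    def _track_change(self, name, value, formatter=None):
        # Same delegation as the record above.
        self._emulation_log.track_change(self._emulation_address, name, value, formatter)

    def set_threshold(self, raw):
        self._threshold = raw
        # The formatter only affects how the tracked value is rendered.
        self._track_change('sensor.threshold', raw, formatter=lambda v: "0x%08X" % v)

EmulatedSensor().set_threshold(4096)  # prints: address 8: sensor.threshold -> 0x00001000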
iotile/coretools
|
iotileemulate/iotile/emulate/virtual/emulation_mixin.py
|
EmulationMixin.save_state
|
def save_state(self, out_path):
"""Save the current state of this emulated object to a file.
Args:
out_path (str): The path to save the dumped state of this emulated
object.
"""
state = self.dump_state()
# Remove all IntEnums from state since they cannot be json-serialized on python 2.7
# See https://bitbucket.org/stoneleaf/enum34/issues/17/difference-between-enum34-and-enum-json
state = _clean_intenum(state)
with open(out_path, "w") as outfile:
json.dump(state, outfile, indent=4)
|
python
|
def save_state(self, out_path):
"""Save the current state of this emulated object to a file.
Args:
out_path (str): The path to save the dumped state of this emulated
object.
"""
state = self.dump_state()
# Remove all IntEnums from state since they cannot be json-serialized on python 2.7
# See https://bitbucket.org/stoneleaf/enum34/issues/17/difference-between-enum34-and-enum-json
state = _clean_intenum(state)
with open(out_path, "w") as outfile:
json.dump(state, outfile, indent=4)
|
[
"def",
"save_state",
"(",
"self",
",",
"out_path",
")",
":",
"state",
"=",
"self",
".",
"dump_state",
"(",
")",
"# Remove all IntEnums from state since they cannot be json-serialized on python 2.7",
"# See https://bitbucket.org/stoneleaf/enum34/issues/17/difference-between-enum34-and-enum-json",
"state",
"=",
"_clean_intenum",
"(",
"state",
")",
"with",
"open",
"(",
"out_path",
",",
"\"w\"",
")",
"as",
"outfile",
":",
"json",
".",
"dump",
"(",
"state",
",",
"outfile",
",",
"indent",
"=",
"4",
")"
] |
Save the current state of this emulated object to a file.
Args:
out_path (str): The path to save the dumped state of this emulated
object.
|
[
"Save",
"the",
"current",
"state",
"of",
"this",
"emulated",
"object",
"to",
"a",
"file",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/virtual/emulation_mixin.py#L96-L111
|
train
|
iotile/coretools
|
iotileemulate/iotile/emulate/virtual/emulation_mixin.py
|
EmulationMixin.load_state
|
def load_state(self, in_path):
"""Load the current state of this emulated object from a file.
The file should have been produced by a previous call to save_state.
Args:
in_path (str): The path to the saved state dump that you wish
to load.
"""
with open(in_path, "r") as infile:
state = json.load(infile)
self.restore_state(state)
|
python
|
def load_state(self, in_path):
"""Load the current state of this emulated object from a file.
The file should have been produced by a previous call to save_state.
Args:
in_path (str): The path to the saved state dump that you wish
to load.
"""
with open(in_path, "r") as infile:
state = json.load(infile)
self.restore_state(state)
|
[
"def",
"load_state",
"(",
"self",
",",
"in_path",
")",
":",
"with",
"open",
"(",
"in_path",
",",
"\"r\"",
")",
"as",
"infile",
":",
"state",
"=",
"json",
".",
"load",
"(",
"infile",
")",
"self",
".",
"restore_state",
"(",
"state",
")"
] |
Load the current state of this emulated object from a file.
The file should have been produced by a previous call to save_state.
Args:
in_path (str): The path to the saved state dump that you wish
to load.
|
[
"Load",
"the",
"current",
"state",
"of",
"this",
"emulated",
"object",
"from",
"a",
"file",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/virtual/emulation_mixin.py#L113-L126
|
train
|
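A round-trip sketch of the save_state/load_state pair from the two records above; StubDevice and its trivial dump_state/restore_state are invented so the example runs on its own.
import json

class StubDevice:
    """Stand-in with the same save/load surface as the records above."""

    def __init__(self):
        self.counter = 0

    def dump_state(self):
        return {"counter": self.counter}

    def restore_state(self, state):
        self.counter = state["counter"]

    def save_state(self, out_path):
        with open(out_path, "w") as outfile:
            json.dump(self.dump_state(), outfile, indent=4)

    def load_state(self, in_path):
        with open(in_path, "r") as infile:
            self.restore_state(json.load(infile))

first = StubDevice()
first.counter = 42
first.save_state("device_state.json")  # arbitrary file name

second = StubDevice()
second.load_state("device_state.json")
assert second.counter == 42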
iotile/coretools
|
iotileemulate/iotile/emulate/virtual/emulation_mixin.py
|
EmulationMixin.load_scenario
|
def load_scenario(self, scenario_name, **kwargs):
"""Load a scenario into the emulated object.
Scenarios are specific states of an object that can be customized
with keyword parameters. Typical examples are:
- data logger with full storage
- device with low battery indication on
Args:
scenario_name (str): The name of the scenario that we wish to
load.
**kwargs: Any arguments that should be passed to configure
the scenario. These arguments will be passed directly
to the scenario handler.
"""
scenario = self._known_scenarios.get(scenario_name)
if scenario is None:
raise ArgumentError("Unknown scenario %s" % scenario_name, known_scenarios=list(self._known_scenarios))
scenario(**kwargs)
|
python
|
def load_scenario(self, scenario_name, **kwargs):
"""Load a scenario into the emulated object.
Scenarios are specific states of an object that can be customized
with keyword parameters. Typical examples are:
- data logger with full storage
- device with low battery indication on
Args:
scenario_name (str): The name of the scenario that we wish to
load.
**kwargs: Any arguments that should be passed to configure
the scenario. These arguments will be passed directly
to the scenario handler.
"""
scenario = self._known_scenarios.get(scenario_name)
if scenario is None:
raise ArgumentError("Unknown scenario %s" % scenario_name, known_scenarios=list(self._known_scenarios))
scenario(**kwargs)
|
[
"def",
"load_scenario",
"(",
"self",
",",
"scenario_name",
",",
"*",
"*",
"kwargs",
")",
":",
"scenario",
"=",
"self",
".",
"_known_scenarios",
".",
"get",
"(",
"scenario_name",
")",
"if",
"scenario",
"is",
"None",
":",
"raise",
"ArgumentError",
"(",
"\"Unknown scenario %s\"",
"%",
"scenario_name",
",",
"known_scenarios",
"=",
"list",
"(",
"self",
".",
"_known_scenarios",
")",
")",
"scenario",
"(",
"*",
"*",
"kwargs",
")"
] |
Load a scenario into the emulated object.
Scenarios are specific states of an object that can be customized
with keyword parameters. Typical examples are:
- data logger with full storage
- device with low battery indication on
Args:
scenario_name (str): The name of the scenario that we wish to
load.
**kwargs: Any arguments that should be passed to configure
the scenario. These arguments will be passed directly
to the scenario handler.
|
[
"Load",
"a",
"scenario",
"into",
"the",
"emulated",
"object",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/virtual/emulation_mixin.py#L128-L149
|
train
|
iotile/coretools
|
iotileemulate/iotile/emulate/virtual/emulation_mixin.py
|
EmulationMixin.register_scenario
|
def register_scenario(self, scenario_name, handler):
"""Register a scenario handler for this object.
Scenario handlers are callable functions with no positional arguments
that can be called by name with the load_scenario function and should
prepare the emulated object into a known state. The purpose of a
scenario is to make it easy to get a device into a specific state for
testing purposes that may otherwise be difficult or time consuming to
prepare on the physical, non-emulated device.
Args:
scenario_name (str): The name of this scenario that can be passed to
load_scenario later in order to invoke the scenario.
handler (callable): A callable function that takes no positional
arguments and can prepare this object into the given scenario
state. It may take required or optional keyword arguments that
may be passed to `load_scenario` if needed.
"""
if scenario_name in self._known_scenarios:
raise ArgumentError("Attempted to add the same scenario name twice", scenario_name=scenario_name,
previous_handler=self._known_scenarios[scenario_name])
self._known_scenarios[scenario_name] = handler
|
python
|
def register_scenario(self, scenario_name, handler):
"""Register a scenario handler for this object.
Scenario handlers are callable functions with no positional arguments
that can be called by name with the load_scenario function and should
prepare the emulated object into a known state. The purpose of a
scenario is to make it easy to get a device into a specific state for
testing purposes that may otherwise be difficult or time consuming to
prepare on the physical, non-emulated device.
Args:
scenario_name (str): The name of this scenario that can be passed to
load_scenario later in order to invoke the scenario.
handler (callable): A callable function that takes no positional
arguments and can prepare this object into the given scenario
state. It may take required or optional keyword arguments that
may be passed to `load_scenario` if needed.
"""
if scenario_name in self._known_scenarios:
raise ArgumentError("Attempted to add the same scenario name twice", scenario_name=scenario_name,
previous_handler=self._known_scenarios[scenario_name])
self._known_scenarios[scenario_name] = handler
|
[
"def",
"register_scenario",
"(",
"self",
",",
"scenario_name",
",",
"handler",
")",
":",
"if",
"scenario_name",
"in",
"self",
".",
"_known_scenarios",
":",
"raise",
"ArgumentError",
"(",
"\"Attempted to add the same scenario name twice\"",
",",
"scenario_name",
"=",
"scenario_name",
",",
"previous_handler",
"=",
"self",
".",
"_known_scenarios",
"[",
"scenario_name",
"]",
")",
"self",
".",
"_known_scenarios",
"[",
"scenario_name",
"]",
"=",
"handler"
] |
Register a scenario handler for this object.
Scenario handlers are callable functions with no positional arguments
that can be called by name with the load_scenario function and should
prepare the emulated object into a known state. The purpose of a
scenario is to make it easy to get a device into a specific state for
testing purposes that may otherwise be difficult or time consuming to
prepare on the physical, non-emulated device.
Args:
scenario_name (str): The name of this scenario that can be passed to
load_scenario later in order to invoke the scenario.
handler (callable): A callable function that takes no positional
arguments and can prepare this object into the given scenario
state. It may take required or optional keyword arguments that
may be passed to `load_scenario` if needed.
|
[
"Register",
"a",
"scenario",
"handler",
"for",
"this",
"object",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/virtual/emulation_mixin.py#L151-L174
|
train
|
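The scenario registry behind register_scenario/load_scenario reduces to a name-to-callable map; the sketch below restates it with plain functions and an invented 'loaded_storage' scenario so it runs standalone (it is not the mixin's actual API surface).
known_scenarios = {}

def register_scenario(name, handler):
    # Mirrors the duplicate-name check in the record above.
    if name in known_scenarios:
        raise ValueError("Attempted to add the same scenario name twice: %s" % name)
    known_scenarios[name] = handler

def load_scenario(name, **kwargs):
    handler = known_scenarios.get(name)
    if handler is None:
        raise ValueError("Unknown scenario %s" % name)
    handler(**kwargs)

def loaded_storage(fill_percent=100):
    print("filling storage to %d%%" % fill_percent)

register_scenario('loaded_storage', loaded_storage)
load_scenario('loaded_storage', fill_percent=75)  # prints: filling storage to 75%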
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/hpcc.py
|
generate
|
def generate(env):
"""Add Builders and construction variables for aCC & cc to an Environment."""
cc.generate(env)
env['CXX'] = 'aCC'
env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS +Z')
|
python
|
def generate(env):
"""Add Builders and construction variables for aCC & cc to an Environment."""
cc.generate(env)
env['CXX'] = 'aCC'
env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS +Z')
|
[
"def",
"generate",
"(",
"env",
")",
":",
"cc",
".",
"generate",
"(",
"env",
")",
"env",
"[",
"'CXX'",
"]",
"=",
"'aCC'",
"env",
"[",
"'SHCCFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"'$CCFLAGS +Z'",
")"
] |
Add Builders and construction variables for aCC & cc to an Environment.
|
[
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"aCC",
"&",
"cc",
"to",
"an",
"Environment",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/hpcc.py#L39-L44
|
train
|
iotile/coretools
|
iotilesensorgraph/iotile/sg/optimizer/optimizer.py
|
SensorGraphOptimizer.add_pass
|
def add_pass(self, name, opt_pass, before=None, after=None):
"""Add an optimization pass to the optimizer.
Optimization passes have a name that allows them
to be enabled or disabled by name. By default all
optimization passes are enabled and unordered. You can
explicitly specify passes by name that this pass must run
before or after so that they can be properly
ordered.
Args:
name (str): The name of the optimization pass to allow for
enabling/disabling it by name
opt_pass (OptimizationPass): The optimization pass class itself
before (list(str)): A list of the passes that this pass should
run before.
after (list(str)): A list of the passes that this pass should
run after.
"""
if before is None:
before = []
if after is None:
after = []
self._known_passes[name] = (opt_pass, before, after)
|
python
|
def add_pass(self, name, opt_pass, before=None, after=None):
"""Add an optimization pass to the optimizer.
Optimization passes have a name that allows them
to be enabled or disabled by name. By default all
optimization passes are enabled and unordered. You can
explicitly specify passes by name that this pass must run
before or after so that they can be properly
ordered.
Args:
name (str): The name of the optimization pass to allow for
enabling/disabling it by name
opt_pass (OptimizationPass): The optimization pass class itself
before (list(str)): A list of the passes that this pass should
run before.
after (list(str)): A list of the passes that this pass should
run after.
"""
if before is None:
before = []
if after is None:
after = []
self._known_passes[name] = (opt_pass, before, after)
|
[
"def",
"add_pass",
"(",
"self",
",",
"name",
",",
"opt_pass",
",",
"before",
"=",
"None",
",",
"after",
"=",
"None",
")",
":",
"if",
"before",
"is",
"None",
":",
"before",
"=",
"[",
"]",
"if",
"after",
"is",
"None",
":",
"after",
"=",
"[",
"]",
"self",
".",
"_known_passes",
"[",
"name",
"]",
"=",
"(",
"opt_pass",
",",
"before",
",",
"after",
")"
] |
Add an optimization pass to the optimizer.
Optimization passes have a name that allows them
to be enabled or disabled by name. By default all
optimization passes are enabled and unordered. You can
explicitly specify passes by name that this pass must run
before or after so that they can be properly
ordered.
Args:
name (str): The name of the optimization pass to allow for
enabling/disabling it by name
opt_pass (OptimizationPass): The optimization pass class itself
before (list(str)): A list of the passes that this pass should
run before.
after (list(str)): A list of the passes that this pass should
run after.
|
[
"Add",
"an",
"optimization",
"pass",
"to",
"the",
"optimizer",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/optimizer/optimizer.py#L24-L49
|
train
|
iotile/coretools
|
iotilesensorgraph/iotile/sg/optimizer/optimizer.py
|
SensorGraphOptimizer._order_pases
|
def _order_pases(self, passes):
"""Topologically sort optimization passes.
This ensures that the resulting passes are run in order
respecting before/after constraints.
Args:
passes (iterable): An iterable of pass names that should
be included in the optimization passes run.
"""
passes = set(passes)
pass_deps = {}
for opt in passes:
_, before, after = self._known_passes[opt]
if opt not in pass_deps:
pass_deps[opt] = set()
for after_pass in after:
pass_deps[opt].add(after_pass)
# For passes that we are before, we may need to
# preemptively add them to the list early
for other in before:
if other not in passes:
continue
if other not in pass_deps:
pass_deps[other] = set()
pass_deps[other].add(opt)
return toposort_flatten(pass_deps)
|
python
|
def _order_pases(self, passes):
"""Topologically sort optimization passes.
This ensures that the resulting passes are run in order
respecting before/after constraints.
Args:
passes (iterable): An iterable of pass names that should
be included in the optimization passes run.
"""
passes = set(passes)
pass_deps = {}
for opt in passes:
_, before, after = self._known_passes[opt]
if opt not in pass_deps:
pass_deps[opt] = set()
for after_pass in after:
pass_deps[opt].add(after_pass)
# For passes that we are before, we may need to
# preemptively add them to the list early
for other in before:
if other not in passes:
continue
if other not in pass_deps:
pass_deps[other] = set()
pass_deps[other].add(opt)
return toposort_flatten(pass_deps)
|
[
"def",
"_order_pases",
"(",
"self",
",",
"passes",
")",
":",
"passes",
"=",
"set",
"(",
"passes",
")",
"pass_deps",
"=",
"{",
"}",
"for",
"opt",
"in",
"passes",
":",
"_",
",",
"before",
",",
"after",
"=",
"self",
".",
"_known_passes",
"[",
"opt",
"]",
"if",
"opt",
"not",
"in",
"pass_deps",
":",
"pass_deps",
"[",
"opt",
"]",
"=",
"set",
"(",
")",
"for",
"after_pass",
"in",
"after",
":",
"pass_deps",
"[",
"opt",
"]",
".",
"add",
"(",
"after_pass",
")",
"# For passes that we are before, we may need to",
"# preemptively add them to the list early",
"for",
"other",
"in",
"before",
":",
"if",
"other",
"not",
"in",
"passes",
":",
"continue",
"if",
"other",
"not",
"in",
"pass_deps",
":",
"pass_deps",
"[",
"other",
"]",
"=",
"set",
"(",
")",
"pass_deps",
"[",
"other",
"]",
".",
"add",
"(",
"opt",
")",
"return",
"toposort_flatten",
"(",
"pass_deps",
")"
] |
Topologically sort optimization passes.
This ensures that the resulting passes are run in order
respecting before/after constraints.
Args:
passes (iterable): An iterable of pass names that should
be included in the optimization passes run.
|
[
"Topologically",
"sort",
"optimization",
"passes",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/optimizer/optimizer.py#L51-L86
|
train
|
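To make the before/after bookkeeping in _order_pases concrete, here is a small sketch using the same toposort_flatten function (from the third-party toposort package); the pass names are invented.
from toposort import toposort_flatten

# Dependency map in the shape _order_pases builds: each pass maps to the set
# of passes that must run before it.
pass_deps = {
    'remove-dead-nodes': set(),
    'merge-identical-nodes': {'remove-dead-nodes'},
    'renumber-streams': {'remove-dead-nodes', 'merge-identical-nodes'},
}

print(toposort_flatten(pass_deps))
# ['remove-dead-nodes', 'merge-identical-nodes', 'renumber-streams']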
iotile/coretools
|
iotilesensorgraph/iotile/sg/optimizer/optimizer.py
|
SensorGraphOptimizer.optimize
|
def optimize(self, sensor_graph, model):
"""Optimize a sensor graph by running optimization passes.
The passes are run one at a time and modify the sensor graph
for future passes.
Args:
sensor_graph (SensorGraph): The graph to be optimized
model (DeviceModel): The device that we are optimizing
for, that OptimizationPass objects are free to use
to guide their optimizations.
"""
passes = self._order_pases(self._known_passes.keys())
for opt_name in passes:
rerun = True
pass_instance = self._known_passes[opt_name][0]()
while rerun:
rerun = pass_instance.run(sensor_graph, model=model)
|
python
|
def optimize(self, sensor_graph, model):
"""Optimize a sensor graph by running optimization passes.
The passes are run one at a time and modify the sensor graph
for future passes.
Args:
sensor_graph (SensorGraph): The graph to be optimized
model (DeviceModel): The device that we are optimizing
for, that OptimizationPass objects are free to use
to guide their optimizations.
"""
passes = self._order_pases(self._known_passes.keys())
for opt_name in passes:
rerun = True
pass_instance = self._known_passes[opt_name][0]()
while rerun:
rerun = pass_instance.run(sensor_graph, model=model)
|
[
"def",
"optimize",
"(",
"self",
",",
"sensor_graph",
",",
"model",
")",
":",
"passes",
"=",
"self",
".",
"_order_pases",
"(",
"self",
".",
"_known_passes",
".",
"keys",
"(",
")",
")",
"for",
"opt_name",
"in",
"passes",
":",
"rerun",
"=",
"True",
"pass_instance",
"=",
"self",
".",
"_known_passes",
"[",
"opt_name",
"]",
"[",
"0",
"]",
"(",
")",
"while",
"rerun",
":",
"rerun",
"=",
"pass_instance",
".",
"run",
"(",
"sensor_graph",
",",
"model",
"=",
"model",
")"
] |
Optimize a sensor graph by running optimization passes.
The passes are run one at a time and modify the sensor graph
for future passes.
Args:
sensor_graph (SensorGraph): The graph to be optimized
model (DeviceModel): The device that we are optimizing
for, that OptimizationPass objects are free to use
to guide their optimizations.
|
[
"Optimize",
"a",
"sensor",
"graph",
"by",
"running",
"optimization",
"passes",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/optimizer/optimizer.py#L88-L108
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/SConscript.py
|
get_calling_namespaces
|
def get_calling_namespaces():
"""Return the locals and globals for the function that called
into this module in the current call stack."""
try: 1//0
except ZeroDivisionError:
# Don't start iterating with the current stack-frame to
# prevent creating reference cycles (f_back is safe).
frame = sys.exc_info()[2].tb_frame.f_back
# Find the first frame that *isn't* from this file. This means
# that we expect all of the SCons frames that implement an Export()
# or SConscript() call to be in this file, so that we can identify
# the first non-Script.SConscript frame as the user's local calling
# environment, and the locals and globals dictionaries from that
# frame as the calling namespaces. See the comment below preceding
# the DefaultEnvironmentCall block for even more explanation.
while frame.f_globals.get("__name__") == __name__:
frame = frame.f_back
return frame.f_locals, frame.f_globals
|
python
|
def get_calling_namespaces():
"""Return the locals and globals for the function that called
into this module in the current call stack."""
try: 1//0
except ZeroDivisionError:
# Don't start iterating with the current stack-frame to
# prevent creating reference cycles (f_back is safe).
frame = sys.exc_info()[2].tb_frame.f_back
# Find the first frame that *isn't* from this file. This means
# that we expect all of the SCons frames that implement an Export()
# or SConscript() call to be in this file, so that we can identify
# the first non-Script.SConscript frame as the user's local calling
# environment, and the locals and globals dictionaries from that
# frame as the calling namespaces. See the comment below preceding
# the DefaultEnvironmentCall block for even more explanation.
while frame.f_globals.get("__name__") == __name__:
frame = frame.f_back
return frame.f_locals, frame.f_globals
|
[
"def",
"get_calling_namespaces",
"(",
")",
":",
"try",
":",
"1",
"//",
"0",
"except",
"ZeroDivisionError",
":",
"# Don't start iterating with the current stack-frame to",
"# prevent creating reference cycles (f_back is safe).",
"frame",
"=",
"sys",
".",
"exc_info",
"(",
")",
"[",
"2",
"]",
".",
"tb_frame",
".",
"f_back",
"# Find the first frame that *isn't* from this file. This means",
"# that we expect all of the SCons frames that implement an Export()",
"# or SConscript() call to be in this file, so that we can identify",
"# the first non-Script.SConscript frame as the user's local calling",
"# environment, and the locals and globals dictionaries from that",
"# frame as the calling namespaces. See the comment below preceding",
"# the DefaultEnvironmentCall block for even more explanation.",
"while",
"frame",
".",
"f_globals",
".",
"get",
"(",
"\"__name__\"",
")",
"==",
"__name__",
":",
"frame",
"=",
"frame",
".",
"f_back",
"return",
"frame",
".",
"f_locals",
",",
"frame",
".",
"f_globals"
] |
Return the locals and globals for the function that called
into this module in the current call stack.
|
[
"Return",
"the",
"locals",
"and",
"globals",
"for",
"the",
"function",
"that",
"called",
"into",
"this",
"module",
"in",
"the",
"current",
"call",
"stack",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/SConscript.py#L70-L89
|
train
|
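The raise-and-catch trick in get_calling_namespaces can be seen in a few lines; this standalone sketch only demonstrates the frame walking, not the SCons-specific filtering of frames that belong to the same module.
import sys

def calling_namespaces():
    try:
        1 // 0
    except ZeroDivisionError:
        # The traceback is rooted in this frame; f_back is the caller's frame,
        # and starting from the traceback avoids holding the current frame.
        frame = sys.exc_info()[2].tb_frame.f_back
    return frame.f_locals, frame.f_globals

def caller():
    marker = "only defined in caller()"
    local_names, _ = calling_namespaces()
    return 'marker' in local_names and local_names['marker'] == marker

print(caller())  # True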
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/SConscript.py
|
annotate
|
def annotate(node):
"""Annotate a node with the stack frame describing the
SConscript file and line number that created it."""
tb = sys.exc_info()[2]
while tb and stack_bottom not in tb.tb_frame.f_locals:
tb = tb.tb_next
if not tb:
# We did not find any exec of an SConscript file: what?!
raise SCons.Errors.InternalError("could not find SConscript stack frame")
node.creator = traceback.extract_stack(tb)[0]
|
python
|
def annotate(node):
"""Annotate a node with the stack frame describing the
SConscript file and line number that created it."""
tb = sys.exc_info()[2]
while tb and stack_bottom not in tb.tb_frame.f_locals:
tb = tb.tb_next
if not tb:
# We did not find any exec of an SConscript file: what?!
raise SCons.Errors.InternalError("could not find SConscript stack frame")
node.creator = traceback.extract_stack(tb)[0]
|
[
"def",
"annotate",
"(",
"node",
")",
":",
"tb",
"=",
"sys",
".",
"exc_info",
"(",
")",
"[",
"2",
"]",
"while",
"tb",
"and",
"stack_bottom",
"not",
"in",
"tb",
".",
"tb_frame",
".",
"f_locals",
":",
"tb",
"=",
"tb",
".",
"tb_next",
"if",
"not",
"tb",
":",
"# We did not find any exec of an SConscript file: what?!",
"raise",
"SCons",
".",
"Errors",
".",
"InternalError",
"(",
"\"could not find SConscript stack frame\"",
")",
"node",
".",
"creator",
"=",
"traceback",
".",
"extract_stack",
"(",
"tb",
")",
"[",
"0",
"]"
] |
Annotate a node with the stack frame describing the
SConscript file and line number that created it.
|
[
"Annotate",
"a",
"node",
"with",
"the",
"stack",
"frame",
"describing",
"the",
"SConscript",
"file",
"and",
"line",
"number",
"that",
"created",
"it",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/SConscript.py#L331-L340
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/SConscript.py
|
BuildDefaultGlobals
|
def BuildDefaultGlobals():
"""
Create a dictionary containing all the default globals for
SConstruct and SConscript files.
"""
global GlobalDict
if GlobalDict is None:
GlobalDict = {}
import SCons.Script
d = SCons.Script.__dict__
def not_a_module(m, d=d, mtype=type(SCons.Script)):
return not isinstance(d[m], mtype)
for m in filter(not_a_module, dir(SCons.Script)):
GlobalDict[m] = d[m]
return GlobalDict.copy()
|
python
|
def BuildDefaultGlobals():
"""
Create a dictionary containing all the default globals for
SConstruct and SConscript files.
"""
global GlobalDict
if GlobalDict is None:
GlobalDict = {}
import SCons.Script
d = SCons.Script.__dict__
def not_a_module(m, d=d, mtype=type(SCons.Script)):
return not isinstance(d[m], mtype)
for m in filter(not_a_module, dir(SCons.Script)):
GlobalDict[m] = d[m]
return GlobalDict.copy()
|
[
"def",
"BuildDefaultGlobals",
"(",
")",
":",
"global",
"GlobalDict",
"if",
"GlobalDict",
"is",
"None",
":",
"GlobalDict",
"=",
"{",
"}",
"import",
"SCons",
".",
"Script",
"d",
"=",
"SCons",
".",
"Script",
".",
"__dict__",
"def",
"not_a_module",
"(",
"m",
",",
"d",
"=",
"d",
",",
"mtype",
"=",
"type",
"(",
"SCons",
".",
"Script",
")",
")",
":",
"return",
"not",
"isinstance",
"(",
"d",
"[",
"m",
"]",
",",
"mtype",
")",
"for",
"m",
"in",
"filter",
"(",
"not_a_module",
",",
"dir",
"(",
"SCons",
".",
"Script",
")",
")",
":",
"GlobalDict",
"[",
"m",
"]",
"=",
"d",
"[",
"m",
"]",
"return",
"GlobalDict",
".",
"copy",
"(",
")"
] |
Create a dictionary containing all the default globals for
SConstruct and SConscript files.
|
[
"Create",
"a",
"dictionary",
"containing",
"all",
"the",
"default",
"globals",
"for",
"SConstruct",
"and",
"SConscript",
"files",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/SConscript.py#L617-L634
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/SConscript.py
|
SConsEnvironment._exceeds_version
|
def _exceeds_version(self, major, minor, v_major, v_minor):
"""Return 1 if 'major' and 'minor' are greater than the version
in 'v_major' and 'v_minor', and 0 otherwise."""
return (major > v_major or (major == v_major and minor > v_minor))
|
python
|
def _exceeds_version(self, major, minor, v_major, v_minor):
"""Return 1 if 'major' and 'minor' are greater than the version
in 'v_major' and 'v_minor', and 0 otherwise."""
return (major > v_major or (major == v_major and minor > v_minor))
|
[
"def",
"_exceeds_version",
"(",
"self",
",",
"major",
",",
"minor",
",",
"v_major",
",",
"v_minor",
")",
":",
"return",
"(",
"major",
">",
"v_major",
"or",
"(",
"major",
"==",
"v_major",
"and",
"minor",
">",
"v_minor",
")",
")"
] |
Return 1 if 'major' and 'minor' are greater than the version
in 'v_major' and 'v_minor', and 0 otherwise.
|
[
"Return",
"1",
"if",
"major",
"and",
"minor",
"are",
"greater",
"than",
"the",
"version",
"in",
"v_major",
"and",
"v_minor",
"and",
"0",
"otherwise",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/SConscript.py#L359-L362
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/SConscript.py
|
SConsEnvironment.EnsureSConsVersion
|
def EnsureSConsVersion(self, major, minor, revision=0):
"""Exit abnormally if the SCons version is not late enough."""
# split string to avoid replacement during build process
if SCons.__version__ == '__' + 'VERSION__':
SCons.Warnings.warn(SCons.Warnings.DevelopmentVersionWarning,
"EnsureSConsVersion is ignored for development version")
return
scons_ver = self._get_major_minor_revision(SCons.__version__)
if scons_ver < (major, minor, revision):
if revision:
scons_ver_string = '%d.%d.%d' % (major, minor, revision)
else:
scons_ver_string = '%d.%d' % (major, minor)
print("SCons %s or greater required, but you have SCons %s" % \
(scons_ver_string, SCons.__version__))
sys.exit(2)
|
python
|
def EnsureSConsVersion(self, major, minor, revision=0):
"""Exit abnormally if the SCons version is not late enough."""
# split string to avoid replacement during build process
if SCons.__version__ == '__' + 'VERSION__':
SCons.Warnings.warn(SCons.Warnings.DevelopmentVersionWarning,
"EnsureSConsVersion is ignored for development version")
return
scons_ver = self._get_major_minor_revision(SCons.__version__)
if scons_ver < (major, minor, revision):
if revision:
scons_ver_string = '%d.%d.%d' % (major, minor, revision)
else:
scons_ver_string = '%d.%d' % (major, minor)
print("SCons %s or greater required, but you have SCons %s" % \
(scons_ver_string, SCons.__version__))
sys.exit(2)
|
[
"def",
"EnsureSConsVersion",
"(",
"self",
",",
"major",
",",
"minor",
",",
"revision",
"=",
"0",
")",
":",
"# split string to avoid replacement during build process",
"if",
"SCons",
".",
"__version__",
"==",
"'__'",
"+",
"'VERSION__'",
":",
"SCons",
".",
"Warnings",
".",
"warn",
"(",
"SCons",
".",
"Warnings",
".",
"DevelopmentVersionWarning",
",",
"\"EnsureSConsVersion is ignored for development version\"",
")",
"return",
"scons_ver",
"=",
"self",
".",
"_get_major_minor_revision",
"(",
"SCons",
".",
"__version__",
")",
"if",
"scons_ver",
"<",
"(",
"major",
",",
"minor",
",",
"revision",
")",
":",
"if",
"revision",
":",
"scons_ver_string",
"=",
"'%d.%d.%d'",
"%",
"(",
"major",
",",
"minor",
",",
"revision",
")",
"else",
":",
"scons_ver_string",
"=",
"'%d.%d'",
"%",
"(",
"major",
",",
"minor",
")",
"print",
"(",
"\"SCons %s or greater required, but you have SCons %s\"",
"%",
"(",
"scons_ver_string",
",",
"SCons",
".",
"__version__",
")",
")",
"sys",
".",
"exit",
"(",
"2",
")"
] |
Exit abnormally if the SCons version is not late enough.
|
[
"Exit",
"abnormally",
"if",
"the",
"SCons",
"version",
"is",
"not",
"late",
"enough",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/SConscript.py#L462-L477
|
train
|
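The check in EnsureSConsVersion relies on Python's element-wise tuple ordering; two illustrative comparisons:
# (major, minor, revision) tuples compare element by element, so the default
# revision of 0 behaves as expected when only major.minor is requested.
print((3, 0, 1) < (3, 1, 0))  # True  -> SCons 3.0.1 fails EnsureSConsVersion(3, 1)
print((3, 1, 2) < (3, 1, 0))  # False -> SCons 3.1.2 satisfies the requirement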
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/SConscript.py
|
SConsEnvironment.EnsurePythonVersion
|
def EnsurePythonVersion(self, major, minor):
"""Exit abnormally if the Python version is not late enough."""
if sys.version_info < (major, minor):
v = sys.version.split()[0]
print("Python %d.%d or greater required, but you have Python %s" %(major,minor,v))
sys.exit(2)
|
python
|
def EnsurePythonVersion(self, major, minor):
"""Exit abnormally if the Python version is not late enough."""
if sys.version_info < (major, minor):
v = sys.version.split()[0]
print("Python %d.%d or greater required, but you have Python %s" %(major,minor,v))
sys.exit(2)
|
[
"def",
"EnsurePythonVersion",
"(",
"self",
",",
"major",
",",
"minor",
")",
":",
"if",
"sys",
".",
"version_info",
"<",
"(",
"major",
",",
"minor",
")",
":",
"v",
"=",
"sys",
".",
"version",
".",
"split",
"(",
")",
"[",
"0",
"]",
"print",
"(",
"\"Python %d.%d or greater required, but you have Python %s\"",
"%",
"(",
"major",
",",
"minor",
",",
"v",
")",
")",
"sys",
".",
"exit",
"(",
"2",
")"
] |
Exit abnormally if the Python version is not late enough.
|
[
"Exit",
"abnormally",
"if",
"the",
"Python",
"version",
"is",
"not",
"late",
"enough",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/SConscript.py#L479-L484
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msvc.py
|
validate_vars
|
def validate_vars(env):
"""Validate the PCH and PCHSTOP construction variables."""
if 'PCH' in env and env['PCH']:
if 'PCHSTOP' not in env:
raise SCons.Errors.UserError("The PCHSTOP construction must be defined if PCH is defined.")
if not SCons.Util.is_String(env['PCHSTOP']):
raise SCons.Errors.UserError("The PCHSTOP construction variable must be a string: %r"%env['PCHSTOP'])
|
python
|
def validate_vars(env):
"""Validate the PCH and PCHSTOP construction variables."""
if 'PCH' in env and env['PCH']:
if 'PCHSTOP' not in env:
raise SCons.Errors.UserError("The PCHSTOP construction must be defined if PCH is defined.")
if not SCons.Util.is_String(env['PCHSTOP']):
raise SCons.Errors.UserError("The PCHSTOP construction variable must be a string: %r"%env['PCHSTOP'])
|
[
"def",
"validate_vars",
"(",
"env",
")",
":",
"if",
"'PCH'",
"in",
"env",
"and",
"env",
"[",
"'PCH'",
"]",
":",
"if",
"'PCHSTOP'",
"not",
"in",
"env",
":",
"raise",
"SCons",
".",
"Errors",
".",
"UserError",
"(",
"\"The PCHSTOP construction must be defined if PCH is defined.\"",
")",
"if",
"not",
"SCons",
".",
"Util",
".",
"is_String",
"(",
"env",
"[",
"'PCHSTOP'",
"]",
")",
":",
"raise",
"SCons",
".",
"Errors",
".",
"UserError",
"(",
"\"The PCHSTOP construction variable must be a string: %r\"",
"%",
"env",
"[",
"'PCHSTOP'",
"]",
")"
] |
Validate the PCH and PCHSTOP construction variables.
|
[
"Validate",
"the",
"PCH",
"and",
"PCHSTOP",
"construction",
"variables",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msvc.py#L55-L61
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msvc.py
|
msvc_set_PCHPDBFLAGS
|
def msvc_set_PCHPDBFLAGS(env):
"""
Set appropriate PCHPDBFLAGS for the MSVC version being used.
"""
if env.get('MSVC_VERSION',False):
maj, min = msvc_version_to_maj_min(env['MSVC_VERSION'])
if maj < 8:
env['PCHPDBFLAGS'] = SCons.Util.CLVar(['${(PDB and "/Yd") or ""}'])
else:
env['PCHPDBFLAGS'] = ''
else:
# Default if we can't determine which version of MSVC we're using
env['PCHPDBFLAGS'] = SCons.Util.CLVar(['${(PDB and "/Yd") or ""}'])
|
python
|
def msvc_set_PCHPDBFLAGS(env):
"""
Set appropriate PCHPDBFLAGS for the MSVC version being used.
"""
if env.get('MSVC_VERSION',False):
maj, min = msvc_version_to_maj_min(env['MSVC_VERSION'])
if maj < 8:
env['PCHPDBFLAGS'] = SCons.Util.CLVar(['${(PDB and "/Yd") or ""}'])
else:
env['PCHPDBFLAGS'] = ''
else:
# Default if we can't determine which version of MSVC we're using
env['PCHPDBFLAGS'] = SCons.Util.CLVar(['${(PDB and "/Yd") or ""}'])
|
[
"def",
"msvc_set_PCHPDBFLAGS",
"(",
"env",
")",
":",
"if",
"env",
".",
"get",
"(",
"'MSVC_VERSION'",
",",
"False",
")",
":",
"maj",
",",
"min",
"=",
"msvc_version_to_maj_min",
"(",
"env",
"[",
"'MSVC_VERSION'",
"]",
")",
"if",
"maj",
"<",
"8",
":",
"env",
"[",
"'PCHPDBFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"[",
"'${(PDB and \"/Yd\") or \"\"}'",
"]",
")",
"else",
":",
"env",
"[",
"'PCHPDBFLAGS'",
"]",
"=",
"''",
"else",
":",
"# Default if we can't determine which version of MSVC we're using",
"env",
"[",
"'PCHPDBFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"[",
"'${(PDB and \"/Yd\") or \"\"}'",
"]",
")"
] |
Set appropriate PCHPDBFLAGS for the MSVC version being used.
|
[
"Set",
"appropriate",
"PCHPDBFLAGS",
"for",
"the",
"MSVC",
"version",
"being",
"used",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msvc.py#L63-L75
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msvc.py
|
pch_emitter
|
def pch_emitter(target, source, env):
"""Adds the object file target."""
validate_vars(env)
pch = None
obj = None
for t in target:
if SCons.Util.splitext(str(t))[1] == '.pch':
pch = t
if SCons.Util.splitext(str(t))[1] == '.obj':
obj = t
if not obj:
obj = SCons.Util.splitext(str(pch))[0]+'.obj'
target = [pch, obj] # pch must be first, and obj second for the PCHCOM to work
return (target, source)
|
python
|
def pch_emitter(target, source, env):
"""Adds the object file target."""
validate_vars(env)
pch = None
obj = None
for t in target:
if SCons.Util.splitext(str(t))[1] == '.pch':
pch = t
if SCons.Util.splitext(str(t))[1] == '.obj':
obj = t
if not obj:
obj = SCons.Util.splitext(str(pch))[0]+'.obj'
target = [pch, obj] # pch must be first, and obj second for the PCHCOM to work
return (target, source)
|
[
"def",
"pch_emitter",
"(",
"target",
",",
"source",
",",
"env",
")",
":",
"validate_vars",
"(",
"env",
")",
"pch",
"=",
"None",
"obj",
"=",
"None",
"for",
"t",
"in",
"target",
":",
"if",
"SCons",
".",
"Util",
".",
"splitext",
"(",
"str",
"(",
"t",
")",
")",
"[",
"1",
"]",
"==",
"'.pch'",
":",
"pch",
"=",
"t",
"if",
"SCons",
".",
"Util",
".",
"splitext",
"(",
"str",
"(",
"t",
")",
")",
"[",
"1",
"]",
"==",
"'.obj'",
":",
"obj",
"=",
"t",
"if",
"not",
"obj",
":",
"obj",
"=",
"SCons",
".",
"Util",
".",
"splitext",
"(",
"str",
"(",
"pch",
")",
")",
"[",
"0",
"]",
"+",
"'.obj'",
"target",
"=",
"[",
"pch",
",",
"obj",
"]",
"# pch must be first, and obj second for the PCHCOM to work",
"return",
"(",
"target",
",",
"source",
")"
] |
Adds the object file target.
|
[
"Adds",
"the",
"object",
"file",
"target",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msvc.py#L78-L97
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msvc.py
|
object_emitter
|
def object_emitter(target, source, env, parent_emitter):
"""Sets up the PCH dependencies for an object file."""
validate_vars(env)
parent_emitter(target, source, env)
# Add a dependency, but only if the target (e.g. 'Source1.obj')
# doesn't correspond to the pre-compiled header ('Source1.pch').
# If the basenames match, then this was most likely caused by
# someone adding the source file to both the env.PCH() and the
# env.Program() calls, and adding the explicit dependency would
# cause a cycle on the .pch file itself.
#
# See issue #2505 for a discussion of what to do if it turns
# out this assumption causes trouble in the wild:
# http://scons.tigris.org/issues/show_bug.cgi?id=2505
if 'PCH' in env:
pch = env['PCH']
if str(target[0]) != SCons.Util.splitext(str(pch))[0] + '.obj':
env.Depends(target, pch)
return (target, source)
|
python
|
def object_emitter(target, source, env, parent_emitter):
"""Sets up the PCH dependencies for an object file."""
validate_vars(env)
parent_emitter(target, source, env)
# Add a dependency, but only if the target (e.g. 'Source1.obj')
# doesn't correspond to the pre-compiled header ('Source1.pch').
# If the basenames match, then this was most likely caused by
# someone adding the source file to both the env.PCH() and the
# env.Program() calls, and adding the explicit dependency would
# cause a cycle on the .pch file itself.
#
# See issue #2505 for a discussion of what to do if it turns
# out this assumption causes trouble in the wild:
# http://scons.tigris.org/issues/show_bug.cgi?id=2505
if 'PCH' in env:
pch = env['PCH']
if str(target[0]) != SCons.Util.splitext(str(pch))[0] + '.obj':
env.Depends(target, pch)
return (target, source)
|
[
"def",
"object_emitter",
"(",
"target",
",",
"source",
",",
"env",
",",
"parent_emitter",
")",
":",
"validate_vars",
"(",
"env",
")",
"parent_emitter",
"(",
"target",
",",
"source",
",",
"env",
")",
"# Add a dependency, but only if the target (e.g. 'Source1.obj')",
"# doesn't correspond to the pre-compiled header ('Source1.pch').",
"# If the basenames match, then this was most likely caused by",
"# someone adding the source file to both the env.PCH() and the",
"# env.Program() calls, and adding the explicit dependency would",
"# cause a cycle on the .pch file itself.",
"#",
"# See issue #2505 for a discussion of what to do if it turns",
"# out this assumption causes trouble in the wild:",
"# http://scons.tigris.org/issues/show_bug.cgi?id=2505",
"if",
"'PCH'",
"in",
"env",
":",
"pch",
"=",
"env",
"[",
"'PCH'",
"]",
"if",
"str",
"(",
"target",
"[",
"0",
"]",
")",
"!=",
"SCons",
".",
"Util",
".",
"splitext",
"(",
"str",
"(",
"pch",
")",
")",
"[",
"0",
"]",
"+",
"'.obj'",
":",
"env",
".",
"Depends",
"(",
"target",
",",
"pch",
")",
"return",
"(",
"target",
",",
"source",
")"
] |
Sets up the PCH dependencies for an object file.
|
[
"Sets",
"up",
"the",
"PCH",
"dependencies",
"for",
"an",
"object",
"file",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msvc.py#L99-L121
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msvc.py
|
msvc_batch_key
|
def msvc_batch_key(action, env, target, source):
"""
Returns a key to identify unique batches of sources for compilation.
If batching is enabled (via the $MSVC_BATCH setting), then all
target+source pairs that use the same action, defined by the same
environment, and have the same target and source directories, will
be batched.
Returning None specifies that the specified target+source should not
be batched with other compilations.
"""
# Fixing MSVC_BATCH mode. Previous if did not work when MSVC_BATCH
# was set to False. This new version should work better.
# Note we need to do the env.subst so $MSVC_BATCH can be a reference to
# another construction variable, which is why we test for False and 0
# as strings.
if not 'MSVC_BATCH' in env or env.subst('$MSVC_BATCH') in ('0', 'False', '', None):
# We're not using batching; return no key.
return None
t = target[0]
s = source[0]
if os.path.splitext(t.name)[0] != os.path.splitext(s.name)[0]:
# The base names are different, so this *must* be compiled
# separately; return no key.
return None
return (id(action), id(env), t.dir, s.dir)
|
python
|
def msvc_batch_key(action, env, target, source):
"""
Returns a key to identify unique batches of sources for compilation.
If batching is enabled (via the $MSVC_BATCH setting), then all
target+source pairs that use the same action, defined by the same
environment, and have the same target and source directories, will
be batched.
Returning None specifies that the specified target+source should not
be batched with other compilations.
"""
# Fixing MSVC_BATCH mode. Previous if did not work when MSVC_BATCH
# was set to False. This new version should work better.
# Note we need to do the env.subst so $MSVC_BATCH can be a reference to
# another construction variable, which is why we test for False and 0
# as strings.
if not 'MSVC_BATCH' in env or env.subst('$MSVC_BATCH') in ('0', 'False', '', None):
# We're not using batching; return no key.
return None
t = target[0]
s = source[0]
if os.path.splitext(t.name)[0] != os.path.splitext(s.name)[0]:
# The base names are different, so this *must* be compiled
# separately; return no key.
return None
return (id(action), id(env), t.dir, s.dir)
|
[
"def",
"msvc_batch_key",
"(",
"action",
",",
"env",
",",
"target",
",",
"source",
")",
":",
"# Fixing MSVC_BATCH mode. Previous if did not work when MSVC_BATCH",
"# was set to False. This new version should work better.",
"# Note we need to do the env.subst so $MSVC_BATCH can be a reference to",
"# another construction variable, which is why we test for False and 0",
"# as strings.",
"if",
"not",
"'MSVC_BATCH'",
"in",
"env",
"or",
"env",
".",
"subst",
"(",
"'$MSVC_BATCH'",
")",
"in",
"(",
"'0'",
",",
"'False'",
",",
"''",
",",
"None",
")",
":",
"# We're not using batching; return no key.",
"return",
"None",
"t",
"=",
"target",
"[",
"0",
"]",
"s",
"=",
"source",
"[",
"0",
"]",
"if",
"os",
".",
"path",
".",
"splitext",
"(",
"t",
".",
"name",
")",
"[",
"0",
"]",
"!=",
"os",
".",
"path",
".",
"splitext",
"(",
"s",
".",
"name",
")",
"[",
"0",
"]",
":",
"# The base names are different, so this *must* be compiled",
"# separately; return no key.",
"return",
"None",
"return",
"(",
"id",
"(",
"action",
")",
",",
"id",
"(",
"env",
")",
",",
"t",
".",
"dir",
",",
"s",
".",
"dir",
")"
] |
Returns a key to identify unique batches of sources for compilation.
If batching is enabled (via the $MSVC_BATCH setting), then all
target+source pairs that use the same action, defined by the same
environment, and have the same target and source directories, will
be batched.
Returning None specifies that the specified target+source should not
be batched with other compilations.
|
[
"Returns",
"a",
"key",
"to",
"identify",
"unique",
"batches",
"of",
"sources",
"for",
"compilation",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msvc.py#L146-L173
|
train
|
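A stripped-down restatement of the batching decision in msvc_batch_key, with plain strings standing in for SCons nodes and environments (the file names and directories are invented):
import os

def batch_key(action_id, env_id, target_name, target_dir, source_name, source_dir, msvc_batch):
    # Batching disabled (or the setting substitutes to a false-looking string):
    # return no key so this target+source pair is compiled on its own.
    if str(msvc_batch) in ('0', 'False', '', 'None'):
        return None
    # Mismatched base names must be compiled separately.
    if os.path.splitext(target_name)[0] != os.path.splitext(source_name)[0]:
        return None
    return (action_id, env_id, target_dir, source_dir)

# Two sources from the same directory, building into the same directory with
# the same action and environment, share a key and can be batched together.
key_a = batch_key(1, 2, 'foo.obj', 'build', 'foo.c', 'src', msvc_batch=True)
key_b = batch_key(1, 2, 'bar.obj', 'build', 'bar.c', 'src', msvc_batch=True)
print(key_a == key_b)  # True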
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msvc.py
|
generate
|
def generate(env):
"""Add Builders and construction variables for MSVC++ to an Environment."""
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
# TODO(batch): shouldn't reach in to cmdgen this way; necessary
# for now to bypass the checks in Builder.DictCmdGenerator.__call__()
# and allow .cc and .cpp to be compiled in the same command line.
static_obj.cmdgen.source_ext_match = False
shared_obj.cmdgen.source_ext_match = False
for suffix in CSuffixes:
static_obj.add_action(suffix, CAction)
shared_obj.add_action(suffix, ShCAction)
static_obj.add_emitter(suffix, static_object_emitter)
shared_obj.add_emitter(suffix, shared_object_emitter)
for suffix in CXXSuffixes:
static_obj.add_action(suffix, CXXAction)
shared_obj.add_action(suffix, ShCXXAction)
static_obj.add_emitter(suffix, static_object_emitter)
shared_obj.add_emitter(suffix, shared_object_emitter)
env['CCPDBFLAGS'] = SCons.Util.CLVar(['${(PDB and "/Z7") or ""}'])
env['CCPCHFLAGS'] = SCons.Util.CLVar(['${(PCH and "/Yu%s \\\"/Fp%s\\\""%(PCHSTOP or "",File(PCH))) or ""}'])
env['_MSVC_OUTPUT_FLAG'] = msvc_output_flag
env['_CCCOMCOM'] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS $CCPCHFLAGS $CCPDBFLAGS'
env['CC'] = 'cl'
env['CCFLAGS'] = SCons.Util.CLVar('/nologo')
env['CFLAGS'] = SCons.Util.CLVar('')
env['CCCOM'] = '${TEMPFILE("$CC $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $CFLAGS $CCFLAGS $_CCCOMCOM","$CCCOMSTR")}'
env['SHCC'] = '$CC'
env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')
env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS')
env['SHCCCOM'] = '${TEMPFILE("$SHCC $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $SHCFLAGS $SHCCFLAGS $_CCCOMCOM","$SHCCCOMSTR")}'
env['CXX'] = '$CC'
env['CXXFLAGS'] = SCons.Util.CLVar('$( /TP $)')
env['CXXCOM'] = '${TEMPFILE("$CXX $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $CXXFLAGS $CCFLAGS $_CCCOMCOM","$CXXCOMSTR")}'
env['SHCXX'] = '$CXX'
env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS')
env['SHCXXCOM'] = '${TEMPFILE("$SHCXX $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $SHCXXFLAGS $SHCCFLAGS $_CCCOMCOM","$SHCXXCOMSTR")}'
env['CPPDEFPREFIX'] = '/D'
env['CPPDEFSUFFIX'] = ''
env['INCPREFIX'] = '/I'
env['INCSUFFIX'] = ''
# env.Append(OBJEMITTER = [static_object_emitter])
# env.Append(SHOBJEMITTER = [shared_object_emitter])
env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1
env['RC'] = 'rc'
env['RCFLAGS'] = SCons.Util.CLVar('')
env['RCSUFFIXES']=['.rc','.rc2']
env['RCCOM'] = '$RC $_CPPDEFFLAGS $_CPPINCFLAGS $RCFLAGS /fo$TARGET $SOURCES'
env['BUILDERS']['RES'] = res_builder
env['OBJPREFIX'] = ''
env['OBJSUFFIX'] = '.obj'
env['SHOBJPREFIX'] = '$OBJPREFIX'
env['SHOBJSUFFIX'] = '$OBJSUFFIX'
# Set-up ms tools paths
msvc_setup_env_once(env)
env['CFILESUFFIX'] = '.c'
env['CXXFILESUFFIX'] = '.cc'
msvc_set_PCHPDBFLAGS(env)
env['PCHCOM'] = '$CXX /Fo${TARGETS[1]} $CXXFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Yc$PCHSTOP /Fp${TARGETS[0]} $CCPDBFLAGS $PCHPDBFLAGS'
env['BUILDERS']['PCH'] = pch_builder
if 'ENV' not in env:
env['ENV'] = {}
if 'SystemRoot' not in env['ENV']: # required for dlls in the winsxs folders
env['ENV']['SystemRoot'] = SCons.Platform.win32.get_system_root()
|
python
|
def generate(env):
"""Add Builders and construction variables for MSVC++ to an Environment."""
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
# TODO(batch): shouldn't reach in to cmdgen this way; necessary
# for now to bypass the checks in Builder.DictCmdGenerator.__call__()
# and allow .cc and .cpp to be compiled in the same command line.
static_obj.cmdgen.source_ext_match = False
shared_obj.cmdgen.source_ext_match = False
for suffix in CSuffixes:
static_obj.add_action(suffix, CAction)
shared_obj.add_action(suffix, ShCAction)
static_obj.add_emitter(suffix, static_object_emitter)
shared_obj.add_emitter(suffix, shared_object_emitter)
for suffix in CXXSuffixes:
static_obj.add_action(suffix, CXXAction)
shared_obj.add_action(suffix, ShCXXAction)
static_obj.add_emitter(suffix, static_object_emitter)
shared_obj.add_emitter(suffix, shared_object_emitter)
env['CCPDBFLAGS'] = SCons.Util.CLVar(['${(PDB and "/Z7") or ""}'])
env['CCPCHFLAGS'] = SCons.Util.CLVar(['${(PCH and "/Yu%s \\\"/Fp%s\\\""%(PCHSTOP or "",File(PCH))) or ""}'])
env['_MSVC_OUTPUT_FLAG'] = msvc_output_flag
env['_CCCOMCOM'] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS $CCPCHFLAGS $CCPDBFLAGS'
env['CC'] = 'cl'
env['CCFLAGS'] = SCons.Util.CLVar('/nologo')
env['CFLAGS'] = SCons.Util.CLVar('')
env['CCCOM'] = '${TEMPFILE("$CC $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $CFLAGS $CCFLAGS $_CCCOMCOM","$CCCOMSTR")}'
env['SHCC'] = '$CC'
env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')
env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS')
env['SHCCCOM'] = '${TEMPFILE("$SHCC $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $SHCFLAGS $SHCCFLAGS $_CCCOMCOM","$SHCCCOMSTR")}'
env['CXX'] = '$CC'
env['CXXFLAGS'] = SCons.Util.CLVar('$( /TP $)')
env['CXXCOM'] = '${TEMPFILE("$CXX $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $CXXFLAGS $CCFLAGS $_CCCOMCOM","$CXXCOMSTR")}'
env['SHCXX'] = '$CXX'
env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS')
env['SHCXXCOM'] = '${TEMPFILE("$SHCXX $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $SHCXXFLAGS $SHCCFLAGS $_CCCOMCOM","$SHCXXCOMSTR")}'
env['CPPDEFPREFIX'] = '/D'
env['CPPDEFSUFFIX'] = ''
env['INCPREFIX'] = '/I'
env['INCSUFFIX'] = ''
# env.Append(OBJEMITTER = [static_object_emitter])
# env.Append(SHOBJEMITTER = [shared_object_emitter])
env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1
env['RC'] = 'rc'
env['RCFLAGS'] = SCons.Util.CLVar('')
env['RCSUFFIXES']=['.rc','.rc2']
env['RCCOM'] = '$RC $_CPPDEFFLAGS $_CPPINCFLAGS $RCFLAGS /fo$TARGET $SOURCES'
env['BUILDERS']['RES'] = res_builder
env['OBJPREFIX'] = ''
env['OBJSUFFIX'] = '.obj'
env['SHOBJPREFIX'] = '$OBJPREFIX'
env['SHOBJSUFFIX'] = '$OBJSUFFIX'
# Set-up ms tools paths
msvc_setup_env_once(env)
env['CFILESUFFIX'] = '.c'
env['CXXFILESUFFIX'] = '.cc'
msvc_set_PCHPDBFLAGS(env)
env['PCHCOM'] = '$CXX /Fo${TARGETS[1]} $CXXFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Yc$PCHSTOP /Fp${TARGETS[0]} $CCPDBFLAGS $PCHPDBFLAGS'
env['BUILDERS']['PCH'] = pch_builder
if 'ENV' not in env:
env['ENV'] = {}
if 'SystemRoot' not in env['ENV']: # required for dlls in the winsxs folders
env['ENV']['SystemRoot'] = SCons.Platform.win32.get_system_root()
|
[
"def",
"generate",
"(",
"env",
")",
":",
"static_obj",
",",
"shared_obj",
"=",
"SCons",
".",
"Tool",
".",
"createObjBuilders",
"(",
"env",
")",
"# TODO(batch): shouldn't reach in to cmdgen this way; necessary",
"# for now to bypass the checks in Builder.DictCmdGenerator.__call__()",
"# and allow .cc and .cpp to be compiled in the same command line.",
"static_obj",
".",
"cmdgen",
".",
"source_ext_match",
"=",
"False",
"shared_obj",
".",
"cmdgen",
".",
"source_ext_match",
"=",
"False",
"for",
"suffix",
"in",
"CSuffixes",
":",
"static_obj",
".",
"add_action",
"(",
"suffix",
",",
"CAction",
")",
"shared_obj",
".",
"add_action",
"(",
"suffix",
",",
"ShCAction",
")",
"static_obj",
".",
"add_emitter",
"(",
"suffix",
",",
"static_object_emitter",
")",
"shared_obj",
".",
"add_emitter",
"(",
"suffix",
",",
"shared_object_emitter",
")",
"for",
"suffix",
"in",
"CXXSuffixes",
":",
"static_obj",
".",
"add_action",
"(",
"suffix",
",",
"CXXAction",
")",
"shared_obj",
".",
"add_action",
"(",
"suffix",
",",
"ShCXXAction",
")",
"static_obj",
".",
"add_emitter",
"(",
"suffix",
",",
"static_object_emitter",
")",
"shared_obj",
".",
"add_emitter",
"(",
"suffix",
",",
"shared_object_emitter",
")",
"env",
"[",
"'CCPDBFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"[",
"'${(PDB and \"/Z7\") or \"\"}'",
"]",
")",
"env",
"[",
"'CCPCHFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"[",
"'${(PCH and \"/Yu%s \\\\\\\"/Fp%s\\\\\\\"\"%(PCHSTOP or \"\",File(PCH))) or \"\"}'",
"]",
")",
"env",
"[",
"'_MSVC_OUTPUT_FLAG'",
"]",
"=",
"msvc_output_flag",
"env",
"[",
"'_CCCOMCOM'",
"]",
"=",
"'$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS $CCPCHFLAGS $CCPDBFLAGS'",
"env",
"[",
"'CC'",
"]",
"=",
"'cl'",
"env",
"[",
"'CCFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"'/nologo'",
")",
"env",
"[",
"'CFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"''",
")",
"env",
"[",
"'CCCOM'",
"]",
"=",
"'${TEMPFILE(\"$CC $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $CFLAGS $CCFLAGS $_CCCOMCOM\",\"$CCCOMSTR\")}'",
"env",
"[",
"'SHCC'",
"]",
"=",
"'$CC'",
"env",
"[",
"'SHCCFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"'$CCFLAGS'",
")",
"env",
"[",
"'SHCFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"'$CFLAGS'",
")",
"env",
"[",
"'SHCCCOM'",
"]",
"=",
"'${TEMPFILE(\"$SHCC $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $SHCFLAGS $SHCCFLAGS $_CCCOMCOM\",\"$SHCCCOMSTR\")}'",
"env",
"[",
"'CXX'",
"]",
"=",
"'$CC'",
"env",
"[",
"'CXXFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"'$( /TP $)'",
")",
"env",
"[",
"'CXXCOM'",
"]",
"=",
"'${TEMPFILE(\"$CXX $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $CXXFLAGS $CCFLAGS $_CCCOMCOM\",\"$CXXCOMSTR\")}'",
"env",
"[",
"'SHCXX'",
"]",
"=",
"'$CXX'",
"env",
"[",
"'SHCXXFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"'$CXXFLAGS'",
")",
"env",
"[",
"'SHCXXCOM'",
"]",
"=",
"'${TEMPFILE(\"$SHCXX $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $SHCXXFLAGS $SHCCFLAGS $_CCCOMCOM\",\"$SHCXXCOMSTR\")}'",
"env",
"[",
"'CPPDEFPREFIX'",
"]",
"=",
"'/D'",
"env",
"[",
"'CPPDEFSUFFIX'",
"]",
"=",
"''",
"env",
"[",
"'INCPREFIX'",
"]",
"=",
"'/I'",
"env",
"[",
"'INCSUFFIX'",
"]",
"=",
"''",
"# env.Append(OBJEMITTER = [static_object_emitter])",
"# env.Append(SHOBJEMITTER = [shared_object_emitter])",
"env",
"[",
"'STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'",
"]",
"=",
"1",
"env",
"[",
"'RC'",
"]",
"=",
"'rc'",
"env",
"[",
"'RCFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"''",
")",
"env",
"[",
"'RCSUFFIXES'",
"]",
"=",
"[",
"'.rc'",
",",
"'.rc2'",
"]",
"env",
"[",
"'RCCOM'",
"]",
"=",
"'$RC $_CPPDEFFLAGS $_CPPINCFLAGS $RCFLAGS /fo$TARGET $SOURCES'",
"env",
"[",
"'BUILDERS'",
"]",
"[",
"'RES'",
"]",
"=",
"res_builder",
"env",
"[",
"'OBJPREFIX'",
"]",
"=",
"''",
"env",
"[",
"'OBJSUFFIX'",
"]",
"=",
"'.obj'",
"env",
"[",
"'SHOBJPREFIX'",
"]",
"=",
"'$OBJPREFIX'",
"env",
"[",
"'SHOBJSUFFIX'",
"]",
"=",
"'$OBJSUFFIX'",
"# Set-up ms tools paths",
"msvc_setup_env_once",
"(",
"env",
")",
"env",
"[",
"'CFILESUFFIX'",
"]",
"=",
"'.c'",
"env",
"[",
"'CXXFILESUFFIX'",
"]",
"=",
"'.cc'",
"msvc_set_PCHPDBFLAGS",
"(",
"env",
")",
"env",
"[",
"'PCHCOM'",
"]",
"=",
"'$CXX /Fo${TARGETS[1]} $CXXFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Yc$PCHSTOP /Fp${TARGETS[0]} $CCPDBFLAGS $PCHPDBFLAGS'",
"env",
"[",
"'BUILDERS'",
"]",
"[",
"'PCH'",
"]",
"=",
"pch_builder",
"if",
"'ENV'",
"not",
"in",
"env",
":",
"env",
"[",
"'ENV'",
"]",
"=",
"{",
"}",
"if",
"'SystemRoot'",
"not",
"in",
"env",
"[",
"'ENV'",
"]",
":",
"# required for dlls in the winsxs folders",
"env",
"[",
"'ENV'",
"]",
"[",
"'SystemRoot'",
"]",
"=",
"SCons",
".",
"Platform",
".",
"win32",
".",
"get_system_root",
"(",
")"
] |
Add Builders and construction variables for MSVC++ to an Environment.
|
[
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"MSVC",
"++",
"to",
"an",
"Environment",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msvc.py#L213-L286
|
train
|
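As a rough illustration of the generate(env) convention used above, the sketch below fills a plain dict with a handful of the same construction variables. It is not a real SCons Environment, and the SystemRoot value is a placeholder rather than anything detected from the machine.

def toy_generate(env):
    # A tool's generate() populates construction variables and defaults.
    env['CC'] = 'cl'
    env['CCFLAGS'] = ['/nologo']
    env['CPPDEFPREFIX'] = '/D'
    env['INCPREFIX'] = '/I'
    env['OBJSUFFIX'] = '.obj'
    env.setdefault('ENV', {})
    # SystemRoot is forwarded so DLLs in the winsxs folders resolve at run time.
    env['ENV'].setdefault('SystemRoot', r'C:\Windows')   # placeholder value

env = {}
toy_generate(env)
print(env['CC'], env['CCFLAGS'], env['OBJSUFFIX'], env['ENV']['SystemRoot'])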
iotile/coretools
|
iotileship/iotile/ship/resources/hardware_manager.py
|
HardwareManagerResource.open
|
def open(self):
"""Open and potentially connect to a device."""
self.hwman = HardwareManager(port=self._port)
self.opened = True
if self._connection_string is not None:
try:
self.hwman.connect_direct(self._connection_string)
except HardwareError:
self.hwman.close()
raise
elif self._connect_id is not None:
try:
self.hwman.connect(self._connect_id)
except HardwareError:
self.hwman.close()
raise
|
python
|
def open(self):
"""Open and potentially connect to a device."""
self.hwman = HardwareManager(port=self._port)
self.opened = True
if self._connection_string is not None:
try:
self.hwman.connect_direct(self._connection_string)
except HardwareError:
self.hwman.close()
raise
elif self._connect_id is not None:
try:
self.hwman.connect(self._connect_id)
except HardwareError:
self.hwman.close()
raise
|
[
"def",
"open",
"(",
"self",
")",
":",
"self",
".",
"hwman",
"=",
"HardwareManager",
"(",
"port",
"=",
"self",
".",
"_port",
")",
"self",
".",
"opened",
"=",
"True",
"if",
"self",
".",
"_connection_string",
"is",
"not",
"None",
":",
"try",
":",
"self",
".",
"hwman",
".",
"connect_direct",
"(",
"self",
".",
"_connection_string",
")",
"except",
"HardwareError",
":",
"self",
".",
"hwman",
".",
"close",
"(",
")",
"raise",
"elif",
"self",
".",
"_connect_id",
"is",
"not",
"None",
":",
"try",
":",
"self",
".",
"hwman",
".",
"connect",
"(",
"self",
".",
"_connect_id",
")",
"except",
"HardwareError",
":",
"self",
".",
"hwman",
".",
"close",
"(",
")",
"raise"
] |
Open and potentially connect to a device.
|
[
"Open",
"and",
"potentially",
"connect",
"to",
"a",
"device",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileship/iotile/ship/resources/hardware_manager.py#L46-L64
|
train
|
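The open() method above follows a close-on-failure pattern: once the HardwareManager has been constructed, any connection error closes it again before propagating, so no half-open transport is leaked. A self-contained sketch of that pattern, with a fake manager in place of the real HardwareManager and an invented connection string:

class FakeHardwareError(Exception):
    pass

class FakeManager:
    def __init__(self, port):
        self.port = port
        self.closed = False

    def connect_direct(self, connection_string):
        raise FakeHardwareError("device %s not reachable" % connection_string)

    def close(self):
        self.closed = True

def open_resource(port, connection_string=None):
    hwman = FakeManager(port)
    if connection_string is not None:
        try:
            hwman.connect_direct(connection_string)
        except FakeHardwareError:
            hwman.close()   # release the transport before re-raising
            raise
    return hwman

try:
    open_resource("virtual:simple", "device/1")
except FakeHardwareError as err:
    print("connect failed, manager closed:", err)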
iotile/coretools
|
iotileship/iotile/ship/resources/hardware_manager.py
|
HardwareManagerResource.close
|
def close(self):
"""Close and potentially disconnect from a device."""
if self.hwman.stream.connected:
self.hwman.disconnect()
self.hwman.close()
self.opened = False
|
python
|
def close(self):
"""Close and potentially disconnect from a device."""
if self.hwman.stream.connected:
self.hwman.disconnect()
self.hwman.close()
self.opened = False
|
[
"def",
"close",
"(",
"self",
")",
":",
"if",
"self",
".",
"hwman",
".",
"stream",
".",
"connected",
":",
"self",
".",
"hwman",
".",
"disconnect",
"(",
")",
"self",
".",
"hwman",
".",
"close",
"(",
")",
"self",
".",
"opened",
"=",
"False"
] |
Close and potentially disconnect from a device.
|
[
"Close",
"and",
"potentially",
"disconnect",
"from",
"a",
"device",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileship/iotile/ship/resources/hardware_manager.py#L67-L74
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/site_scons/pythondist.py
|
get_support_package
|
def get_support_package(tile):
"""Returns the support_package product."""
packages = tile.find_products('support_package')
if len(packages) == 0:
return None
elif len(packages) == 1:
return packages[0]
raise BuildError("Tile declared multiple support packages, only one is supported", packages=packages)
|
python
|
def get_support_package(tile):
"""Returns the support_package product."""
packages = tile.find_products('support_package')
if len(packages) == 0:
return None
elif len(packages) == 1:
return packages[0]
raise BuildError("Tile declared multiple support packages, only one is supported", packages=packages)
|
[
"def",
"get_support_package",
"(",
"tile",
")",
":",
"packages",
"=",
"tile",
".",
"find_products",
"(",
"'support_package'",
")",
"if",
"len",
"(",
"packages",
")",
"==",
"0",
":",
"return",
"None",
"elif",
"len",
"(",
"packages",
")",
"==",
"1",
":",
"return",
"packages",
"[",
"0",
"]",
"raise",
"BuildError",
"(",
"\"Tile declared multiple support packages, only one is supported\"",
",",
"packages",
"=",
"packages",
")"
] |
Returns the support_package product.
|
[
"Returns",
"the",
"support_package",
"product",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/site_scons/pythondist.py#L30-L39
|
train
|
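A usage sketch for the zero/one/many cases handled above, using a stub tile object and a plain ValueError standing in for BuildError (both assumptions of the sketch):

class StubTile:
    def __init__(self, packages):
        self._packages = packages

    def find_products(self, product_type):
        return list(self._packages) if product_type == 'support_package' else []

def pick_support_package(tile):
    packages = tile.find_products('support_package')
    if len(packages) == 0:
        return None          # no support package declared
    elif len(packages) == 1:
        return packages[0]   # the usual case
    raise ValueError("multiple support packages declared: %r" % packages)

print(pick_support_package(StubTile([])))                   # None
print(pick_support_package(StubTile(['python/support'])))   # python/support
try:
    pick_support_package(StubTile(['a', 'b']))
except ValueError as err:
    print("error:", err)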
iotile/coretools
|
iotilebuild/iotile/build/config/site_scons/pythondist.py
|
iter_support_files
|
def iter_support_files(tile):
"""Iterate over all files that go in the support wheel.
This method has two possible behaviors. If there is a 'support_package'
product defined, then this recursively enumerates all .py files inside
that folder and adds them all in the same hierarchy to the support wheel.
If there is no support_package product defined, then the old behavior
takes over, where all files containing python entrypoints are iterated
over using iter_python_modules() and they are copied into the support
wheel and then an __init__.py file is added.
The files are yielded as tuples of (copy_name, input_path).
"""
support_package = get_support_package(tile)
if support_package is None:
for module, _, _ in iter_python_modules(tile):
yield os.path.basename(module), module
else:
for dirpath, _dirnames, filenames in os.walk(support_package):
for filename in filenames:
if not filename.endswith('.py'):
continue
input_path = os.path.join(dirpath, filename)
output_path = os.path.relpath(input_path, start=support_package)
if output_path == "__init__.py":
continue
yield output_path, input_path
|
python
|
def iter_support_files(tile):
"""Iterate over all files that go in the support wheel.
This method has two possible behaviors. If there is a 'support_package'
product defined, then this recursively enumerates all .py files inside
that folder and adds them all in the same hierarchy to the support wheel.
If there is no support_package product defined, then the old behavior
takes over, where all files containing python entrypoints are iterated
over using iter_python_modules() and they are copied into the support
wheel and then an __init__.py file is added.
The files are yielded as tuples of (copy_name, input_path).
"""
support_package = get_support_package(tile)
if support_package is None:
for module, _, _ in iter_python_modules(tile):
yield os.path.basename(module), module
else:
for dirpath, _dirnames, filenames in os.walk(support_package):
for filename in filenames:
if not filename.endswith('.py'):
continue
input_path = os.path.join(dirpath, filename)
output_path = os.path.relpath(input_path, start=support_package)
if output_path == "__init__.py":
continue
yield output_path, input_path
|
[
"def",
"iter_support_files",
"(",
"tile",
")",
":",
"support_package",
"=",
"get_support_package",
"(",
"tile",
")",
"if",
"support_package",
"is",
"None",
":",
"for",
"module",
",",
"_",
",",
"_",
"in",
"iter_python_modules",
"(",
"tile",
")",
":",
"yield",
"os",
".",
"path",
".",
"basename",
"(",
"module",
")",
",",
"module",
"else",
":",
"for",
"dirpath",
",",
"_dirnames",
",",
"filenames",
"in",
"os",
".",
"walk",
"(",
"support_package",
")",
":",
"for",
"filename",
"in",
"filenames",
":",
"if",
"not",
"filename",
".",
"endswith",
"(",
"'.py'",
")",
":",
"continue",
"input_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dirpath",
",",
"filename",
")",
"output_path",
"=",
"os",
".",
"path",
".",
"relpath",
"(",
"input_path",
",",
"start",
"=",
"support_package",
")",
"if",
"output_path",
"==",
"\"__init__.py\"",
":",
"continue",
"yield",
"output_path",
",",
"input_path"
] |
Iterate over all files that go in the support wheel.
This method has two possible behaviors. If there is a 'support_package'
product defined, then this recursively enumerates all .py files inside
that folder and adds them all in the same hierarchy to the support wheel.
If there is no support_package product defined, then the old behavior
takes over, where all files containing python entrypoints are iterated
over using iter_python_modules() and they are copied into the support
wheel and then an __init__.py file is added.
The files are yielded as tuples of (copy_name, input_path).
|
[
"Iterate",
"over",
"all",
"files",
"that",
"go",
"in",
"the",
"support",
"wheel",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/site_scons/pythondist.py#L42-L73
|
train
|
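The support_package branch above is essentially an os.walk over the package folder that keeps .py files, reports paths relative to the package root, and skips the package-level __init__.py. A runnable sketch against a throwaway directory tree:

import os
import tempfile

root = tempfile.mkdtemp()
for rel in ("__init__.py", "proxy.py", os.path.join("sub", "helpers.py"), "README.txt"):
    path = os.path.join(root, rel)
    os.makedirs(os.path.dirname(path), exist_ok=True)
    with open(path, "w") as stub:
        stub.write("# stub\n")

for dirpath, _dirnames, filenames in os.walk(root):
    for filename in filenames:
        if not filename.endswith('.py'):
            continue                       # non-python files are not copied
        input_path = os.path.join(dirpath, filename)
        output_path = os.path.relpath(input_path, start=root)
        if output_path == "__init__.py":
            continue                       # the wheel supplies its own top-level __init__.py
        print(output_path, "<-", input_path)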
iotile/coretools
|
iotilebuild/iotile/build/config/site_scons/pythondist.py
|
iter_python_modules
|
def iter_python_modules(tile):
"""Iterate over all python products in the given tile.
This will yield tuples where the first entry is the path to the module
containing the product the second entry is the appropriate
import string to include in an entry point, and the third entry is
the entry point name.
"""
for product_type in tile.PYTHON_PRODUCTS:
for product in tile.find_products(product_type):
entry_point = ENTRY_POINT_MAP.get(product_type)
if entry_point is None:
raise BuildError("Found an unknown python product (%s) whose entrypoint could not be determined (%s)" %
(product_type, product))
if ':' in product:
module, _, obj_name = product.rpartition(':')
else:
module = product
obj_name = None
if not os.path.exists(module):
raise BuildError("Found a python product whose path did not exist: %s" % module)
product_name = os.path.basename(module)
if product_name.endswith(".py"):
product_name = product_name[:-3]
import_string = "{} = {}.{}".format(product_name, tile.support_distribution, product_name)
if obj_name is not None:
import_string += ":{}".format(obj_name)
yield (module, import_string, entry_point)
|
python
|
def iter_python_modules(tile):
"""Iterate over all python products in the given tile.
This will yield tuples where the first entry is the path to the module
containing the product the second entry is the appropriate
import string to include in an entry point, and the third entry is
the entry point name.
"""
for product_type in tile.PYTHON_PRODUCTS:
for product in tile.find_products(product_type):
entry_point = ENTRY_POINT_MAP.get(product_type)
if entry_point is None:
raise BuildError("Found an unknown python product (%s) whose entrypoint could not be determined (%s)" %
(product_type, product))
if ':' in product:
module, _, obj_name = product.rpartition(':')
else:
module = product
obj_name = None
if not os.path.exists(module):
raise BuildError("Found a python product whose path did not exist: %s" % module)
product_name = os.path.basename(module)
if product_name.endswith(".py"):
product_name = product_name[:-3]
import_string = "{} = {}.{}".format(product_name, tile.support_distribution, product_name)
if obj_name is not None:
import_string += ":{}".format(obj_name)
yield (module, import_string, entry_point)
|
[
"def",
"iter_python_modules",
"(",
"tile",
")",
":",
"for",
"product_type",
"in",
"tile",
".",
"PYTHON_PRODUCTS",
":",
"for",
"product",
"in",
"tile",
".",
"find_products",
"(",
"product_type",
")",
":",
"entry_point",
"=",
"ENTRY_POINT_MAP",
".",
"get",
"(",
"product_type",
")",
"if",
"entry_point",
"is",
"None",
":",
"raise",
"BuildError",
"(",
"\"Found an unknown python product (%s) whose entrypoint could not be determined (%s)\"",
"%",
"(",
"product_type",
",",
"product",
")",
")",
"if",
"':'",
"in",
"product",
":",
"module",
",",
"_",
",",
"obj_name",
"=",
"product",
".",
"rpartition",
"(",
"':'",
")",
"else",
":",
"module",
"=",
"product",
"obj_name",
"=",
"None",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"module",
")",
":",
"raise",
"BuildError",
"(",
"\"Found a python product whose path did not exist: %s\"",
"%",
"module",
")",
"product_name",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"module",
")",
"if",
"product_name",
".",
"endswith",
"(",
"\".py\"",
")",
":",
"product_name",
"=",
"product_name",
"[",
":",
"-",
"3",
"]",
"import_string",
"=",
"\"{} = {}.{}\"",
".",
"format",
"(",
"product_name",
",",
"tile",
".",
"support_distribution",
",",
"product_name",
")",
"if",
"obj_name",
"is",
"not",
"None",
":",
"import_string",
"+=",
"\":{}\"",
".",
"format",
"(",
"obj_name",
")",
"yield",
"(",
"module",
",",
"import_string",
",",
"entry_point",
")"
] |
Iterate over all python products in the given tile.
This will yield tuples where the first entry is the path to the module
containing the product the second entry is the appropriate
import string to include in an entry point, and the third entry is
the entry point name.
|
[
"Iterate",
"over",
"all",
"python",
"products",
"in",
"the",
"given",
"tile",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/site_scons/pythondist.py#L76-L110
|
train
|
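The string handling above turns a product path such as 'python/lib_controller.py:ControllerProxy' into an entry-point line rooted at the tile's support distribution. A sketch of just that transformation, with invented module and distribution names and without the existence check on the path:

import os

def entry_point_line(product, support_distribution):
    if ':' in product:
        module, _, obj_name = product.rpartition(':')
    else:
        module, obj_name = product, None
    name = os.path.basename(module)
    if name.endswith('.py'):
        name = name[:-3]
    line = "{} = {}.{}".format(name, support_distribution, name)
    if obj_name is not None:
        line += ":{}".format(obj_name)
    return line

print(entry_point_line("python/lib_controller.py:ControllerProxy",
                       "iotile_support_lib_controller_3"))
# lib_controller = iotile_support_lib_controller_3.lib_controller:ControllerProxy
print(entry_point_line("python/app_module.py", "iotile_support_lib_controller_3"))
# app_module = iotile_support_lib_controller_3.app_module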
iotile/coretools
|
iotilebuild/iotile/build/config/site_scons/pythondist.py
|
generate_setup_py
|
def generate_setup_py(target, source, env):
"""Generate the setup.py file for this distribution."""
tile = env['TILE']
data = {}
entry_points = {}
for _mod, import_string, entry_point in iter_python_modules(tile):
if entry_point not in entry_points:
entry_points[entry_point] = []
entry_points[entry_point].append(import_string)
data['name'] = tile.support_distribution
data['package'] = tile.support_distribution
data['version'] = tile.parsed_version.pep440_string()
data['deps'] = ["{0} {1}".format(x.support_distribution, x.parsed_version.pep440_compatibility_specifier())
for x in _iter_dependencies(tile) if x.has_wheel]
# If there are some python packages needed, we add them to the list of dependencies required
if tile.support_wheel_depends:
data['deps'] += tile.support_wheel_depends
data['entry_points'] = entry_points
outdir = os.path.dirname(str(target[0]))
render_template('setup.py.tpl', data, out_path=str(target[0]))
# Run setuptools to generate a wheel and an sdist
curr = os.getcwd()
os.chdir(outdir)
try:
setuptools.sandbox.run_setup('setup.py', ['-q', 'clean', 'sdist'])
if "python_universal" in tile.settings:
setuptools.sandbox.run_setup('setup.py', ['-q', 'clean', 'bdist_wheel', '--universal'])
else:
setuptools.sandbox.run_setup('setup.py', ['-q', 'clean', 'bdist_wheel'])
finally:
os.chdir(curr)
|
python
|
def generate_setup_py(target, source, env):
"""Generate the setup.py file for this distribution."""
tile = env['TILE']
data = {}
entry_points = {}
for _mod, import_string, entry_point in iter_python_modules(tile):
if entry_point not in entry_points:
entry_points[entry_point] = []
entry_points[entry_point].append(import_string)
data['name'] = tile.support_distribution
data['package'] = tile.support_distribution
data['version'] = tile.parsed_version.pep440_string()
data['deps'] = ["{0} {1}".format(x.support_distribution, x.parsed_version.pep440_compatibility_specifier())
for x in _iter_dependencies(tile) if x.has_wheel]
# If there are some python packages needed, we add them to the list of dependencies required
if tile.support_wheel_depends:
data['deps'] += tile.support_wheel_depends
data['entry_points'] = entry_points
outdir = os.path.dirname(str(target[0]))
render_template('setup.py.tpl', data, out_path=str(target[0]))
# Run setuptools to generate a wheel and an sdist
curr = os.getcwd()
os.chdir(outdir)
try:
setuptools.sandbox.run_setup('setup.py', ['-q', 'clean', 'sdist'])
if "python_universal" in tile.settings:
setuptools.sandbox.run_setup('setup.py', ['-q', 'clean', 'bdist_wheel', '--universal'])
else:
setuptools.sandbox.run_setup('setup.py', ['-q', 'clean', 'bdist_wheel'])
finally:
os.chdir(curr)
|
[
"def",
"generate_setup_py",
"(",
"target",
",",
"source",
",",
"env",
")",
":",
"tile",
"=",
"env",
"[",
"'TILE'",
"]",
"data",
"=",
"{",
"}",
"entry_points",
"=",
"{",
"}",
"for",
"_mod",
",",
"import_string",
",",
"entry_point",
"in",
"iter_python_modules",
"(",
"tile",
")",
":",
"if",
"entry_point",
"not",
"in",
"entry_points",
":",
"entry_points",
"[",
"entry_point",
"]",
"=",
"[",
"]",
"entry_points",
"[",
"entry_point",
"]",
".",
"append",
"(",
"import_string",
")",
"data",
"[",
"'name'",
"]",
"=",
"tile",
".",
"support_distribution",
"data",
"[",
"'package'",
"]",
"=",
"tile",
".",
"support_distribution",
"data",
"[",
"'version'",
"]",
"=",
"tile",
".",
"parsed_version",
".",
"pep440_string",
"(",
")",
"data",
"[",
"'deps'",
"]",
"=",
"[",
"\"{0} {1}\"",
".",
"format",
"(",
"x",
".",
"support_distribution",
",",
"x",
".",
"parsed_version",
".",
"pep440_compatibility_specifier",
"(",
")",
")",
"for",
"x",
"in",
"_iter_dependencies",
"(",
"tile",
")",
"if",
"x",
".",
"has_wheel",
"]",
"# If there are some python packages needed, we add them to the list of dependencies required",
"if",
"tile",
".",
"support_wheel_depends",
":",
"data",
"[",
"'deps'",
"]",
"+=",
"tile",
".",
"support_wheel_depends",
"data",
"[",
"'entry_points'",
"]",
"=",
"entry_points",
"outdir",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"str",
"(",
"target",
"[",
"0",
"]",
")",
")",
"render_template",
"(",
"'setup.py.tpl'",
",",
"data",
",",
"out_path",
"=",
"str",
"(",
"target",
"[",
"0",
"]",
")",
")",
"# Run setuptools to generate a wheel and an sdist",
"curr",
"=",
"os",
".",
"getcwd",
"(",
")",
"os",
".",
"chdir",
"(",
"outdir",
")",
"try",
":",
"setuptools",
".",
"sandbox",
".",
"run_setup",
"(",
"'setup.py'",
",",
"[",
"'-q'",
",",
"'clean'",
",",
"'sdist'",
"]",
")",
"if",
"\"python_universal\"",
"in",
"tile",
".",
"settings",
":",
"setuptools",
".",
"sandbox",
".",
"run_setup",
"(",
"'setup.py'",
",",
"[",
"'-q'",
",",
"'clean'",
",",
"'bdist_wheel'",
",",
"'--universal'",
"]",
")",
"else",
":",
"setuptools",
".",
"sandbox",
".",
"run_setup",
"(",
"'setup.py'",
",",
"[",
"'-q'",
",",
"'clean'",
",",
"'bdist_wheel'",
"]",
")",
"finally",
":",
"os",
".",
"chdir",
"(",
"curr",
")"
] |
Generate the setup.py file for this distribution.
|
[
"Generate",
"the",
"setup",
".",
"py",
"file",
"for",
"this",
"distribution",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/site_scons/pythondist.py#L197-L237
|
train
|
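Before the template is rendered above, the builder gathers entry-point lines per group and dependency specifiers into a single data dict. A sketch of the resulting shape, with made-up distribution, module, and group contents:

entry_points = {}
for import_string, group in [
        ("lib_controller = iotile_support_lib_controller_3.lib_controller:ControllerProxy",
         "iotile.proxy"),
        ("ship_steps = iotile_support_lib_controller_3.ship_steps", "iotile.recipe_action"),
]:
    entry_points.setdefault(group, []).append(import_string)

data = {
    "name": "iotile_support_lib_controller_3",
    "package": "iotile_support_lib_controller_3",
    "version": "3.1.0",
    "deps": ["iotile_support_shared_1 (~=1.0)"],   # one pinned wheel dependency
    "entry_points": entry_points,
}
print(data["entry_points"])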
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/rpmutils.py
|
defaultMachine
|
def defaultMachine(use_rpm_default=True):
""" Return the canonicalized machine name. """
if use_rpm_default:
try:
# This should be the most reliable way to get the default arch
rmachine = subprocess.check_output(['rpm', '--eval=%_target_cpu'], shell=False).rstrip()
rmachine = SCons.Util.to_str(rmachine)
except Exception as e:
# Something went wrong, try again by looking up platform.machine()
return defaultMachine(False)
else:
rmachine = platform.machine()
# Try to lookup the string in the canon table
if rmachine in arch_canon:
rmachine = arch_canon[rmachine][0]
return rmachine
|
python
|
def defaultMachine(use_rpm_default=True):
""" Return the canonicalized machine name. """
if use_rpm_default:
try:
# This should be the most reliable way to get the default arch
rmachine = subprocess.check_output(['rpm', '--eval=%_target_cpu'], shell=False).rstrip()
rmachine = SCons.Util.to_str(rmachine)
except Exception as e:
# Something went wrong, try again by looking up platform.machine()
return defaultMachine(False)
else:
rmachine = platform.machine()
# Try to lookup the string in the canon table
if rmachine in arch_canon:
rmachine = arch_canon[rmachine][0]
return rmachine
|
[
"def",
"defaultMachine",
"(",
"use_rpm_default",
"=",
"True",
")",
":",
"if",
"use_rpm_default",
":",
"try",
":",
"# This should be the most reliable way to get the default arch",
"rmachine",
"=",
"subprocess",
".",
"check_output",
"(",
"[",
"'rpm'",
",",
"'--eval=%_target_cpu'",
"]",
",",
"shell",
"=",
"False",
")",
".",
"rstrip",
"(",
")",
"rmachine",
"=",
"SCons",
".",
"Util",
".",
"to_str",
"(",
"rmachine",
")",
"except",
"Exception",
"as",
"e",
":",
"# Something went wrong, try again by looking up platform.machine()",
"return",
"defaultMachine",
"(",
"False",
")",
"else",
":",
"rmachine",
"=",
"platform",
".",
"machine",
"(",
")",
"# Try to lookup the string in the canon table",
"if",
"rmachine",
"in",
"arch_canon",
":",
"rmachine",
"=",
"arch_canon",
"[",
"rmachine",
"]",
"[",
"0",
"]",
"return",
"rmachine"
] |
Return the canonicalized machine name.
|
[
"Return",
"the",
"canonicalized",
"machine",
"name",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/rpmutils.py#L441-L459
|
train
|
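The arch detection above can be exercised directly: ask rpm for %_target_cpu and fall back to platform.machine() when the rpm binary is missing or errors out. The sketch below keeps the broad except from the original, adds a bytes decode for Python 3, and omits the arch_canon lookup:

import platform
import subprocess

def default_machine(use_rpm_default=True):
    if use_rpm_default:
        try:
            raw = subprocess.check_output(['rpm', '--eval=%_target_cpu'], shell=False).rstrip()
            return raw.decode('utf-8') if isinstance(raw, bytes) else raw
        except Exception:
            # rpm missing or failed; fall back to the platform module
            return default_machine(False)
    return platform.machine()

print(default_machine())   # e.g. x86_64 on a typical Linux box, or the platform value elsewhere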
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/rpmutils.py
|
defaultSystem
|
def defaultSystem():
""" Return the canonicalized system name. """
rsystem = platform.system()
# Try to lookup the string in the canon tables
if rsystem in os_canon:
rsystem = os_canon[rsystem][0]
return rsystem
|
python
|
def defaultSystem():
""" Return the canonicalized system name. """
rsystem = platform.system()
# Try to lookup the string in the canon tables
if rsystem in os_canon:
rsystem = os_canon[rsystem][0]
return rsystem
|
[
"def",
"defaultSystem",
"(",
")",
":",
"rsystem",
"=",
"platform",
".",
"system",
"(",
")",
"# Try to lookup the string in the canon tables",
"if",
"rsystem",
"in",
"os_canon",
":",
"rsystem",
"=",
"os_canon",
"[",
"rsystem",
"]",
"[",
"0",
"]",
"return",
"rsystem"
] |
Return the canonicalized system name.
|
[
"Return",
"the",
"canonicalized",
"system",
"name",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/rpmutils.py#L461-L469
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py
|
Task.prepare
|
def prepare(self):
"""
Called just before the task is executed.
This is mainly intended to give the target Nodes a chance to
unlink underlying files and make all necessary directories before
the Action is actually called to build the targets.
"""
global print_prepare
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.prepare()', self.node))
# Now that it's the appropriate time, give the TaskMaster a
# chance to raise any exceptions it encountered while preparing
# this task.
self.exception_raise()
if self.tm.message:
self.display(self.tm.message)
self.tm.message = None
# Let the targets take care of any necessary preparations.
# This includes verifying that all of the necessary sources
# and dependencies exist, removing the target file(s), etc.
#
# As of April 2008, the get_executor().prepare() method makes
# sure that all of the aggregate sources necessary to build this
# Task's target(s) exist in one up-front check. The individual
# target t.prepare() methods check that each target's explicit
# or implicit dependencies exists, and also initialize the
# .sconsign info.
executor = self.targets[0].get_executor()
if executor is None:
return
executor.prepare()
for t in executor.get_action_targets():
if print_prepare:
print("Preparing target %s..."%t)
for s in t.side_effects:
print("...with side-effect %s..."%s)
t.prepare()
for s in t.side_effects:
if print_prepare:
print("...Preparing side-effect %s..."%s)
s.prepare()
|
python
|
def prepare(self):
"""
Called just before the task is executed.
This is mainly intended to give the target Nodes a chance to
unlink underlying files and make all necessary directories before
the Action is actually called to build the targets.
"""
global print_prepare
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.prepare()', self.node))
# Now that it's the appropriate time, give the TaskMaster a
# chance to raise any exceptions it encountered while preparing
# this task.
self.exception_raise()
if self.tm.message:
self.display(self.tm.message)
self.tm.message = None
# Let the targets take care of any necessary preparations.
# This includes verifying that all of the necessary sources
# and dependencies exist, removing the target file(s), etc.
#
# As of April 2008, the get_executor().prepare() method makes
# sure that all of the aggregate sources necessary to build this
# Task's target(s) exist in one up-front check. The individual
# target t.prepare() methods check that each target's explicit
# or implicit dependencies exists, and also initialize the
# .sconsign info.
executor = self.targets[0].get_executor()
if executor is None:
return
executor.prepare()
for t in executor.get_action_targets():
if print_prepare:
print("Preparing target %s..."%t)
for s in t.side_effects:
print("...with side-effect %s..."%s)
t.prepare()
for s in t.side_effects:
if print_prepare:
print("...Preparing side-effect %s..."%s)
s.prepare()
|
[
"def",
"prepare",
"(",
"self",
")",
":",
"global",
"print_prepare",
"T",
"=",
"self",
".",
"tm",
".",
"trace",
"if",
"T",
":",
"T",
".",
"write",
"(",
"self",
".",
"trace_message",
"(",
"u'Task.prepare()'",
",",
"self",
".",
"node",
")",
")",
"# Now that it's the appropriate time, give the TaskMaster a",
"# chance to raise any exceptions it encountered while preparing",
"# this task.",
"self",
".",
"exception_raise",
"(",
")",
"if",
"self",
".",
"tm",
".",
"message",
":",
"self",
".",
"display",
"(",
"self",
".",
"tm",
".",
"message",
")",
"self",
".",
"tm",
".",
"message",
"=",
"None",
"# Let the targets take care of any necessary preparations.",
"# This includes verifying that all of the necessary sources",
"# and dependencies exist, removing the target file(s), etc.",
"#",
"# As of April 2008, the get_executor().prepare() method makes",
"# sure that all of the aggregate sources necessary to build this",
"# Task's target(s) exist in one up-front check. The individual",
"# target t.prepare() methods check that each target's explicit",
"# or implicit dependencies exists, and also initialize the",
"# .sconsign info.",
"executor",
"=",
"self",
".",
"targets",
"[",
"0",
"]",
".",
"get_executor",
"(",
")",
"if",
"executor",
"is",
"None",
":",
"return",
"executor",
".",
"prepare",
"(",
")",
"for",
"t",
"in",
"executor",
".",
"get_action_targets",
"(",
")",
":",
"if",
"print_prepare",
":",
"print",
"(",
"\"Preparing target %s...\"",
"%",
"t",
")",
"for",
"s",
"in",
"t",
".",
"side_effects",
":",
"print",
"(",
"\"...with side-effect %s...\"",
"%",
"s",
")",
"t",
".",
"prepare",
"(",
")",
"for",
"s",
"in",
"t",
".",
"side_effects",
":",
"if",
"print_prepare",
":",
"print",
"(",
"\"...Preparing side-effect %s...\"",
"%",
"s",
")",
"s",
".",
"prepare",
"(",
")"
] |
Called just before the task is executed.
This is mainly intended to give the target Nodes a chance to
unlink underlying files and make all necessary directories before
the Action is actually called to build the targets.
|
[
"Called",
"just",
"before",
"the",
"task",
"is",
"executed",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L163-L207
|
train
|
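The preparation order described above (executor once, then each action target, then that target's side effects) can be shown with dummy classes standing in for SCons executors and nodes; only the call order is meant to match.

class Node:
    def __init__(self, name, side_effects=()):
        self.name = name
        self.side_effects = list(side_effects)

    def prepare(self):
        print("prepare", self.name)

class Executor:
    def __init__(self, targets):
        self._targets = targets

    def prepare(self):
        print("prepare executor (aggregate source check happens up front)")

    def get_action_targets(self):
        return self._targets

lockfile = Node("build/.lock")
targets = [Node("build/a.obj", side_effects=[lockfile]),
           Node("build/b.obj", side_effects=[lockfile])]
executor = Executor(targets)

executor.prepare()
for t in executor.get_action_targets():
    t.prepare()
    for s in t.side_effects:
        s.prepare()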
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py
|
Task.execute
|
def execute(self):
"""
Called to execute the task.
This method is called from multiple threads in a parallel build,
so only do thread safe stuff here. Do thread unsafe stuff in
prepare(), executed() or failed().
"""
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.execute()', self.node))
try:
cached_targets = []
for t in self.targets:
if not t.retrieve_from_cache():
break
cached_targets.append(t)
if len(cached_targets) < len(self.targets):
# Remove targets before building. It's possible that we
# partially retrieved targets from the cache, leaving
# them in read-only mode. That might cause the command
# to fail.
#
for t in cached_targets:
try:
t.fs.unlink(t.get_internal_path())
except (IOError, OSError):
pass
self.targets[0].build()
else:
for t in cached_targets:
t.cached = 1
except SystemExit:
exc_value = sys.exc_info()[1]
raise SCons.Errors.ExplicitExit(self.targets[0], exc_value.code)
except SCons.Errors.UserError:
raise
except SCons.Errors.BuildError:
raise
except Exception as e:
buildError = SCons.Errors.convert_to_BuildError(e)
buildError.node = self.targets[0]
buildError.exc_info = sys.exc_info()
raise buildError
|
python
|
def execute(self):
"""
Called to execute the task.
This method is called from multiple threads in a parallel build,
so only do thread safe stuff here. Do thread unsafe stuff in
prepare(), executed() or failed().
"""
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.execute()', self.node))
try:
cached_targets = []
for t in self.targets:
if not t.retrieve_from_cache():
break
cached_targets.append(t)
if len(cached_targets) < len(self.targets):
# Remove targets before building. It's possible that we
# partially retrieved targets from the cache, leaving
# them in read-only mode. That might cause the command
# to fail.
#
for t in cached_targets:
try:
t.fs.unlink(t.get_internal_path())
except (IOError, OSError):
pass
self.targets[0].build()
else:
for t in cached_targets:
t.cached = 1
except SystemExit:
exc_value = sys.exc_info()[1]
raise SCons.Errors.ExplicitExit(self.targets[0], exc_value.code)
except SCons.Errors.UserError:
raise
except SCons.Errors.BuildError:
raise
except Exception as e:
buildError = SCons.Errors.convert_to_BuildError(e)
buildError.node = self.targets[0]
buildError.exc_info = sys.exc_info()
raise buildError
|
[
"def",
"execute",
"(",
"self",
")",
":",
"T",
"=",
"self",
".",
"tm",
".",
"trace",
"if",
"T",
":",
"T",
".",
"write",
"(",
"self",
".",
"trace_message",
"(",
"u'Task.execute()'",
",",
"self",
".",
"node",
")",
")",
"try",
":",
"cached_targets",
"=",
"[",
"]",
"for",
"t",
"in",
"self",
".",
"targets",
":",
"if",
"not",
"t",
".",
"retrieve_from_cache",
"(",
")",
":",
"break",
"cached_targets",
".",
"append",
"(",
"t",
")",
"if",
"len",
"(",
"cached_targets",
")",
"<",
"len",
"(",
"self",
".",
"targets",
")",
":",
"# Remove targets before building. It's possible that we",
"# partially retrieved targets from the cache, leaving",
"# them in read-only mode. That might cause the command",
"# to fail.",
"#",
"for",
"t",
"in",
"cached_targets",
":",
"try",
":",
"t",
".",
"fs",
".",
"unlink",
"(",
"t",
".",
"get_internal_path",
"(",
")",
")",
"except",
"(",
"IOError",
",",
"OSError",
")",
":",
"pass",
"self",
".",
"targets",
"[",
"0",
"]",
".",
"build",
"(",
")",
"else",
":",
"for",
"t",
"in",
"cached_targets",
":",
"t",
".",
"cached",
"=",
"1",
"except",
"SystemExit",
":",
"exc_value",
"=",
"sys",
".",
"exc_info",
"(",
")",
"[",
"1",
"]",
"raise",
"SCons",
".",
"Errors",
".",
"ExplicitExit",
"(",
"self",
".",
"targets",
"[",
"0",
"]",
",",
"exc_value",
".",
"code",
")",
"except",
"SCons",
".",
"Errors",
".",
"UserError",
":",
"raise",
"except",
"SCons",
".",
"Errors",
".",
"BuildError",
":",
"raise",
"except",
"Exception",
"as",
"e",
":",
"buildError",
"=",
"SCons",
".",
"Errors",
".",
"convert_to_BuildError",
"(",
"e",
")",
"buildError",
".",
"node",
"=",
"self",
".",
"targets",
"[",
"0",
"]",
"buildError",
".",
"exc_info",
"=",
"sys",
".",
"exc_info",
"(",
")",
"raise",
"buildError"
] |
Called to execute the task.
This method is called from multiple threads in a parallel build,
so only do thread safe stuff here. Do thread unsafe stuff in
prepare(), executed() or failed().
|
[
"Called",
"to",
"execute",
"the",
"task",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L226-L269
|
train
|
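The cache logic above is all-or-nothing: only if every target is retrieved from the cache are they marked cached; otherwise the partial retrievals are discarded and the first target is built normally. A standalone sketch with plain dicts in place of SCons nodes:

def execute(targets):
    cached_targets = []
    for t in targets:
        if not t["in_cache"]:
            break
        cached_targets.append(t)
    if len(cached_targets) < len(targets):
        for t in cached_targets:
            t["retrieved"] = False      # stands in for unlinking the partial copy
        print("building:", [t["name"] for t in targets])
    else:
        for t in cached_targets:
            t["cached"] = True
        print("served from cache:", [t["name"] for t in targets])

execute([{"name": "a.obj", "in_cache": True},
         {"name": "b.obj", "in_cache": False}])
execute([{"name": "c.obj", "in_cache": True}])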
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py
|
Task.executed_without_callbacks
|
def executed_without_callbacks(self):
"""
Called when the task has been successfully executed
and the Taskmaster instance doesn't want to call
the Node's callback methods.
"""
T = self.tm.trace
if T: T.write(self.trace_message('Task.executed_without_callbacks()',
self.node))
for t in self.targets:
if t.get_state() == NODE_EXECUTING:
for side_effect in t.side_effects:
side_effect.set_state(NODE_NO_STATE)
t.set_state(NODE_EXECUTED)
|
python
|
def executed_without_callbacks(self):
"""
Called when the task has been successfully executed
and the Taskmaster instance doesn't want to call
the Node's callback methods.
"""
T = self.tm.trace
if T: T.write(self.trace_message('Task.executed_without_callbacks()',
self.node))
for t in self.targets:
if t.get_state() == NODE_EXECUTING:
for side_effect in t.side_effects:
side_effect.set_state(NODE_NO_STATE)
t.set_state(NODE_EXECUTED)
|
[
"def",
"executed_without_callbacks",
"(",
"self",
")",
":",
"T",
"=",
"self",
".",
"tm",
".",
"trace",
"if",
"T",
":",
"T",
".",
"write",
"(",
"self",
".",
"trace_message",
"(",
"'Task.executed_without_callbacks()'",
",",
"self",
".",
"node",
")",
")",
"for",
"t",
"in",
"self",
".",
"targets",
":",
"if",
"t",
".",
"get_state",
"(",
")",
"==",
"NODE_EXECUTING",
":",
"for",
"side_effect",
"in",
"t",
".",
"side_effects",
":",
"side_effect",
".",
"set_state",
"(",
"NODE_NO_STATE",
")",
"t",
".",
"set_state",
"(",
"NODE_EXECUTED",
")"
] |
Called when the task has been successfully executed
and the Taskmaster instance doesn't want to call
the Node's callback methods.
|
[
"Called",
"when",
"the",
"task",
"has",
"been",
"successfully",
"executed",
"and",
"the",
"Taskmaster",
"instance",
"doesn",
"t",
"want",
"to",
"call",
"the",
"Node",
"s",
"callback",
"methods",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L271-L285
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py
|
Task.executed_with_callbacks
|
def executed_with_callbacks(self):
"""
Called when the task has been successfully executed and
the Taskmaster instance wants to call the Node's callback
methods.
This may have been a do-nothing operation (to preserve build
order), so we must check the node's state before deciding whether
it was "built", in which case we call the appropriate Node method.
In any event, we always call "visited()", which will handle any
post-visit actions that must take place regardless of whether
or not the target was an actual built target or a source Node.
"""
global print_prepare
T = self.tm.trace
if T: T.write(self.trace_message('Task.executed_with_callbacks()',
self.node))
for t in self.targets:
if t.get_state() == NODE_EXECUTING:
for side_effect in t.side_effects:
side_effect.set_state(NODE_NO_STATE)
t.set_state(NODE_EXECUTED)
if not t.cached:
t.push_to_cache()
t.built()
t.visited()
if (not print_prepare and
(not hasattr(self, 'options') or not self.options.debug_includes)):
t.release_target_info()
else:
t.visited()
|
python
|
def executed_with_callbacks(self):
"""
Called when the task has been successfully executed and
the Taskmaster instance wants to call the Node's callback
methods.
This may have been a do-nothing operation (to preserve build
order), so we must check the node's state before deciding whether
it was "built", in which case we call the appropriate Node method.
In any event, we always call "visited()", which will handle any
post-visit actions that must take place regardless of whether
or not the target was an actual built target or a source Node.
"""
global print_prepare
T = self.tm.trace
if T: T.write(self.trace_message('Task.executed_with_callbacks()',
self.node))
for t in self.targets:
if t.get_state() == NODE_EXECUTING:
for side_effect in t.side_effects:
side_effect.set_state(NODE_NO_STATE)
t.set_state(NODE_EXECUTED)
if not t.cached:
t.push_to_cache()
t.built()
t.visited()
if (not print_prepare and
(not hasattr(self, 'options') or not self.options.debug_includes)):
t.release_target_info()
else:
t.visited()
|
[
"def",
"executed_with_callbacks",
"(",
"self",
")",
":",
"global",
"print_prepare",
"T",
"=",
"self",
".",
"tm",
".",
"trace",
"if",
"T",
":",
"T",
".",
"write",
"(",
"self",
".",
"trace_message",
"(",
"'Task.executed_with_callbacks()'",
",",
"self",
".",
"node",
")",
")",
"for",
"t",
"in",
"self",
".",
"targets",
":",
"if",
"t",
".",
"get_state",
"(",
")",
"==",
"NODE_EXECUTING",
":",
"for",
"side_effect",
"in",
"t",
".",
"side_effects",
":",
"side_effect",
".",
"set_state",
"(",
"NODE_NO_STATE",
")",
"t",
".",
"set_state",
"(",
"NODE_EXECUTED",
")",
"if",
"not",
"t",
".",
"cached",
":",
"t",
".",
"push_to_cache",
"(",
")",
"t",
".",
"built",
"(",
")",
"t",
".",
"visited",
"(",
")",
"if",
"(",
"not",
"print_prepare",
"and",
"(",
"not",
"hasattr",
"(",
"self",
",",
"'options'",
")",
"or",
"not",
"self",
".",
"options",
".",
"debug_includes",
")",
")",
":",
"t",
".",
"release_target_info",
"(",
")",
"else",
":",
"t",
".",
"visited",
"(",
")"
] |
Called when the task has been successfully executed and
the Taskmaster instance wants to call the Node's callback
methods.
This may have been a do-nothing operation (to preserve build
order), so we must check the node's state before deciding whether
it was "built", in which case we call the appropriate Node method.
In any event, we always call "visited()", which will handle any
post-visit actions that must take place regardless of whether
or not the target was an actual built target or a source Node.
|
[
"Called",
"when",
"the",
"task",
"has",
"been",
"successfully",
"executed",
"and",
"the",
"Taskmaster",
"instance",
"wants",
"to",
"call",
"the",
"Node",
"s",
"callback",
"methods",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L287-L318
|
train
|
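The bookkeeping above distinguishes targets that actually executed from those that turned out to be do-nothing operations: only nodes still in the EXECUTING state are pushed to the cache and marked built, but every node is visited. A toy model with string states in place of SCons node states:

NODE_EXECUTING, NODE_EXECUTED, NODE_UP_TO_DATE = "executing", "executed", "up_to_date"

class FakeTarget:
    def __init__(self, name, state, cached=False):
        self.name, self.state, self.cached = name, state, cached
        self.log = []

    def executed_with_callbacks(self):
        if self.state == NODE_EXECUTING:
            self.state = NODE_EXECUTED
            if not self.cached:
                self.log.append("push_to_cache")
            self.log.append("built")
        self.log.append("visited")

built = FakeTarget("a.obj", NODE_EXECUTING)
up_to_date = FakeTarget("b.obj", NODE_UP_TO_DATE)
for t in (built, up_to_date):
    t.executed_with_callbacks()
print(built.log)       # ['push_to_cache', 'built', 'visited']
print(up_to_date.log)  # ['visited']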
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py
|
Task.fail_stop
|
def fail_stop(self):
"""
Explicit stop-the-build failure.
This sets failure status on the target nodes and all of
their dependent parent nodes.
Note: Although this function is normally invoked on nodes in
the executing state, it might also be invoked on up-to-date
nodes when using Configure().
"""
T = self.tm.trace
if T: T.write(self.trace_message('Task.failed_stop()', self.node))
# Invoke will_not_build() to clean-up the pending children
# list.
self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED))
# Tell the taskmaster to not start any new tasks
self.tm.stop()
# We're stopping because of a build failure, but give the
# calling Task class a chance to postprocess() the top-level
# target under which the build failure occurred.
self.targets = [self.tm.current_top]
self.top = 1
|
python
|
def fail_stop(self):
"""
Explicit stop-the-build failure.
This sets failure status on the target nodes and all of
their dependent parent nodes.
Note: Although this function is normally invoked on nodes in
the executing state, it might also be invoked on up-to-date
nodes when using Configure().
"""
T = self.tm.trace
if T: T.write(self.trace_message('Task.failed_stop()', self.node))
# Invoke will_not_build() to clean-up the pending children
# list.
self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED))
# Tell the taskmaster to not start any new tasks
self.tm.stop()
# We're stopping because of a build failure, but give the
# calling Task class a chance to postprocess() the top-level
# target under which the build failure occurred.
self.targets = [self.tm.current_top]
self.top = 1
|
[
"def",
"fail_stop",
"(",
"self",
")",
":",
"T",
"=",
"self",
".",
"tm",
".",
"trace",
"if",
"T",
":",
"T",
".",
"write",
"(",
"self",
".",
"trace_message",
"(",
"'Task.failed_stop()'",
",",
"self",
".",
"node",
")",
")",
"# Invoke will_not_build() to clean-up the pending children",
"# list.",
"self",
".",
"tm",
".",
"will_not_build",
"(",
"self",
".",
"targets",
",",
"lambda",
"n",
":",
"n",
".",
"set_state",
"(",
"NODE_FAILED",
")",
")",
"# Tell the taskmaster to not start any new tasks",
"self",
".",
"tm",
".",
"stop",
"(",
")",
"# We're stopping because of a build failure, but give the",
"# calling Task class a chance to postprocess() the top-level",
"# target under which the build failure occurred.",
"self",
".",
"targets",
"=",
"[",
"self",
".",
"tm",
".",
"current_top",
"]",
"self",
".",
"top",
"=",
"1"
] |
Explicit stop-the-build failure.
This sets failure status on the target nodes and all of
their dependent parent nodes.
Note: Although this function is normally invoked on nodes in
the executing state, it might also be invoked on up-to-date
nodes when using Configure().
|
[
"Explicit",
"stop",
"-",
"the",
"-",
"build",
"failure",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L332-L357
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py
|
Task.fail_continue
|
def fail_continue(self):
"""
Explicit continue-the-build failure.
This sets failure status on the target nodes and all of
their dependent parent nodes.
Note: Although this function is normally invoked on nodes in
the executing state, it might also be invoked on up-to-date
nodes when using Configure().
"""
T = self.tm.trace
if T: T.write(self.trace_message('Task.failed_continue()', self.node))
self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED))
|
python
|
def fail_continue(self):
"""
Explicit continue-the-build failure.
This sets failure status on the target nodes and all of
their dependent parent nodes.
Note: Although this function is normally invoked on nodes in
the executing state, it might also be invoked on up-to-date
nodes when using Configure().
"""
T = self.tm.trace
if T: T.write(self.trace_message('Task.failed_continue()', self.node))
self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED))
|
[
"def",
"fail_continue",
"(",
"self",
")",
":",
"T",
"=",
"self",
".",
"tm",
".",
"trace",
"if",
"T",
":",
"T",
".",
"write",
"(",
"self",
".",
"trace_message",
"(",
"'Task.failed_continue()'",
",",
"self",
".",
"node",
")",
")",
"self",
".",
"tm",
".",
"will_not_build",
"(",
"self",
".",
"targets",
",",
"lambda",
"n",
":",
"n",
".",
"set_state",
"(",
"NODE_FAILED",
")",
")"
] |
Explicit continue-the-build failure.
This sets failure status on the target nodes and all of
their dependent parent nodes.
Note: Although this function is normally invoked on nodes in
the executing state, it might also be invoked on up-to-date
nodes when using Configure().
|
[
"Explicit",
"continue",
"-",
"the",
"-",
"build",
"failure",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L359-L373
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py
|
Task.make_ready_all
|
def make_ready_all(self):
"""
Marks all targets in a task ready for execution.
This is used when the interface needs every target Node to be
visited--the canonical example being the "scons -c" option.
"""
T = self.tm.trace
if T: T.write(self.trace_message('Task.make_ready_all()', self.node))
self.out_of_date = self.targets[:]
for t in self.targets:
t.disambiguate().set_state(NODE_EXECUTING)
for s in t.side_effects:
# add disambiguate here to mirror the call on targets above
s.disambiguate().set_state(NODE_EXECUTING)
|
python
|
def make_ready_all(self):
"""
Marks all targets in a task ready for execution.
This is used when the interface needs every target Node to be
visited--the canonical example being the "scons -c" option.
"""
T = self.tm.trace
if T: T.write(self.trace_message('Task.make_ready_all()', self.node))
self.out_of_date = self.targets[:]
for t in self.targets:
t.disambiguate().set_state(NODE_EXECUTING)
for s in t.side_effects:
# add disambiguate here to mirror the call on targets above
s.disambiguate().set_state(NODE_EXECUTING)
|
[
"def",
"make_ready_all",
"(",
"self",
")",
":",
"T",
"=",
"self",
".",
"tm",
".",
"trace",
"if",
"T",
":",
"T",
".",
"write",
"(",
"self",
".",
"trace_message",
"(",
"'Task.make_ready_all()'",
",",
"self",
".",
"node",
")",
")",
"self",
".",
"out_of_date",
"=",
"self",
".",
"targets",
"[",
":",
"]",
"for",
"t",
"in",
"self",
".",
"targets",
":",
"t",
".",
"disambiguate",
"(",
")",
".",
"set_state",
"(",
"NODE_EXECUTING",
")",
"for",
"s",
"in",
"t",
".",
"side_effects",
":",
"# add disambiguate here to mirror the call on targets above",
"s",
".",
"disambiguate",
"(",
")",
".",
"set_state",
"(",
"NODE_EXECUTING",
")"
] |
Marks all targets in a task ready for execution.
This is used when the interface needs every target Node to be
visited--the canonical example being the "scons -c" option.
|
[
"Marks",
"all",
"targets",
"in",
"a",
"task",
"ready",
"for",
"execution",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L375-L390
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py
|
Task.make_ready_current
|
def make_ready_current(self):
"""
Marks all targets in a task ready for execution if any target
is not current.
This is the default behavior for building only what's necessary.
"""
global print_prepare
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.make_ready_current()',
self.node))
self.out_of_date = []
needs_executing = False
for t in self.targets:
try:
t.disambiguate().make_ready()
is_up_to_date = not t.has_builder() or \
(not t.always_build and t.is_up_to_date())
except EnvironmentError as e:
raise SCons.Errors.BuildError(node=t, errstr=e.strerror, filename=e.filename)
if not is_up_to_date:
self.out_of_date.append(t)
needs_executing = True
if needs_executing:
for t in self.targets:
t.set_state(NODE_EXECUTING)
for s in t.side_effects:
# add disambiguate here to mirror the call on targets in first loop above
s.disambiguate().set_state(NODE_EXECUTING)
else:
for t in self.targets:
# We must invoke visited() to ensure that the node
# information has been computed before allowing the
# parent nodes to execute. (That could occur in a
# parallel build...)
t.visited()
t.set_state(NODE_UP_TO_DATE)
if (not print_prepare and
(not hasattr(self, 'options') or not self.options.debug_includes)):
t.release_target_info()
|
python
|
def make_ready_current(self):
"""
Marks all targets in a task ready for execution if any target
is not current.
This is the default behavior for building only what's necessary.
"""
global print_prepare
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.make_ready_current()',
self.node))
self.out_of_date = []
needs_executing = False
for t in self.targets:
try:
t.disambiguate().make_ready()
is_up_to_date = not t.has_builder() or \
(not t.always_build and t.is_up_to_date())
except EnvironmentError as e:
raise SCons.Errors.BuildError(node=t, errstr=e.strerror, filename=e.filename)
if not is_up_to_date:
self.out_of_date.append(t)
needs_executing = True
if needs_executing:
for t in self.targets:
t.set_state(NODE_EXECUTING)
for s in t.side_effects:
# add disambiguate here to mirror the call on targets in first loop above
s.disambiguate().set_state(NODE_EXECUTING)
else:
for t in self.targets:
# We must invoke visited() to ensure that the node
# information has been computed before allowing the
# parent nodes to execute. (That could occur in a
# parallel build...)
t.visited()
t.set_state(NODE_UP_TO_DATE)
if (not print_prepare and
(not hasattr(self, 'options') or not self.options.debug_includes)):
t.release_target_info()
|
[
"def",
"make_ready_current",
"(",
"self",
")",
":",
"global",
"print_prepare",
"T",
"=",
"self",
".",
"tm",
".",
"trace",
"if",
"T",
":",
"T",
".",
"write",
"(",
"self",
".",
"trace_message",
"(",
"u'Task.make_ready_current()'",
",",
"self",
".",
"node",
")",
")",
"self",
".",
"out_of_date",
"=",
"[",
"]",
"needs_executing",
"=",
"False",
"for",
"t",
"in",
"self",
".",
"targets",
":",
"try",
":",
"t",
".",
"disambiguate",
"(",
")",
".",
"make_ready",
"(",
")",
"is_up_to_date",
"=",
"not",
"t",
".",
"has_builder",
"(",
")",
"or",
"(",
"not",
"t",
".",
"always_build",
"and",
"t",
".",
"is_up_to_date",
"(",
")",
")",
"except",
"EnvironmentError",
"as",
"e",
":",
"raise",
"SCons",
".",
"Errors",
".",
"BuildError",
"(",
"node",
"=",
"t",
",",
"errstr",
"=",
"e",
".",
"strerror",
",",
"filename",
"=",
"e",
".",
"filename",
")",
"if",
"not",
"is_up_to_date",
":",
"self",
".",
"out_of_date",
".",
"append",
"(",
"t",
")",
"needs_executing",
"=",
"True",
"if",
"needs_executing",
":",
"for",
"t",
"in",
"self",
".",
"targets",
":",
"t",
".",
"set_state",
"(",
"NODE_EXECUTING",
")",
"for",
"s",
"in",
"t",
".",
"side_effects",
":",
"# add disambiguate here to mirror the call on targets in first loop above",
"s",
".",
"disambiguate",
"(",
")",
".",
"set_state",
"(",
"NODE_EXECUTING",
")",
"else",
":",
"for",
"t",
"in",
"self",
".",
"targets",
":",
"# We must invoke visited() to ensure that the node",
"# information has been computed before allowing the",
"# parent nodes to execute. (That could occur in a",
"# parallel build...)",
"t",
".",
"visited",
"(",
")",
"t",
".",
"set_state",
"(",
"NODE_UP_TO_DATE",
")",
"if",
"(",
"not",
"print_prepare",
"and",
"(",
"not",
"hasattr",
"(",
"self",
",",
"'options'",
")",
"or",
"not",
"self",
".",
"options",
".",
"debug_includes",
")",
")",
":",
"t",
".",
"release_target_info",
"(",
")"
] |
Marks all targets in a task ready for execution if any target
is not current.
This is the default behavior for building only what's necessary.
|
[
"Marks",
"all",
"targets",
"in",
"a",
"task",
"ready",
"for",
"execution",
"if",
"any",
"target",
"is",
"not",
"current",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L392-L434
|
train
|
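make_ready_current() only schedules work when some target fails the currency test. The helper below restates that per-target test on its own, assuming node behaves like an SCons Node (disambiguate(), make_ready(), has_builder(), always_build, is_up_to_date()); it is a sketch, not SCons API.

    def target_is_current(node):
        # Same decision make_ready_current() applies to each target: a node is
        # current if it has no builder, or if it is not always_build and
        # reports itself up to date.
        node = node.disambiguate()
        node.make_ready()
        return (not node.has_builder()) or \
               (not node.always_build and node.is_up_to_date())

If any target fails this test, the method records it in self.out_of_date and moves every target (and each target's side effects) to NODE_EXECUTING; otherwise all targets are visited and marked NODE_UP_TO_DATE.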
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py
|
Task.postprocess
|
def postprocess(self):
"""
Post-processes a task after it's been executed.
This examines all the targets just built (or not, we don't care
if the build was successful, or even if there was no build
because everything was up-to-date) to see if they have any
waiting parent Nodes, or Nodes waiting on a common side effect,
that can be put back on the candidates list.
"""
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.postprocess()', self.node))
# We may have built multiple targets, some of which may have
# common parents waiting for this build. Count up how many
# targets each parent was waiting for so we can subtract the
# values later, and so we *don't* put waiting side-effect Nodes
# back on the candidates list if the Node is also a waiting
# parent.
targets = set(self.targets)
pending_children = self.tm.pending_children
parents = {}
for t in targets:
# A node can only be in the pending_children set if it has
# some waiting_parents.
if t.waiting_parents:
if T: T.write(self.trace_message(u'Task.postprocess()',
t,
'removing'))
pending_children.discard(t)
for p in t.waiting_parents:
parents[p] = parents.get(p, 0) + 1
for t in targets:
if t.side_effects is not None:
for s in t.side_effects:
if s.get_state() == NODE_EXECUTING:
s.set_state(NODE_NO_STATE)
for p in s.waiting_parents:
parents[p] = parents.get(p, 0) + 1
for p in s.waiting_s_e:
if p.ref_count == 0:
self.tm.candidates.append(p)
for p, subtract in parents.items():
p.ref_count = p.ref_count - subtract
if T: T.write(self.trace_message(u'Task.postprocess()',
p,
'adjusted parent ref count'))
if p.ref_count == 0:
self.tm.candidates.append(p)
for t in targets:
t.postprocess()
|
python
|
def postprocess(self):
"""
Post-processes a task after it's been executed.
This examines all the targets just built (or not, we don't care
if the build was successful, or even if there was no build
because everything was up-to-date) to see if they have any
waiting parent Nodes, or Nodes waiting on a common side effect,
that can be put back on the candidates list.
"""
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.postprocess()', self.node))
# We may have built multiple targets, some of which may have
# common parents waiting for this build. Count up how many
# targets each parent was waiting for so we can subtract the
# values later, and so we *don't* put waiting side-effect Nodes
# back on the candidates list if the Node is also a waiting
# parent.
targets = set(self.targets)
pending_children = self.tm.pending_children
parents = {}
for t in targets:
# A node can only be in the pending_children set if it has
# some waiting_parents.
if t.waiting_parents:
if T: T.write(self.trace_message(u'Task.postprocess()',
t,
'removing'))
pending_children.discard(t)
for p in t.waiting_parents:
parents[p] = parents.get(p, 0) + 1
for t in targets:
if t.side_effects is not None:
for s in t.side_effects:
if s.get_state() == NODE_EXECUTING:
s.set_state(NODE_NO_STATE)
for p in s.waiting_parents:
parents[p] = parents.get(p, 0) + 1
for p in s.waiting_s_e:
if p.ref_count == 0:
self.tm.candidates.append(p)
for p, subtract in parents.items():
p.ref_count = p.ref_count - subtract
if T: T.write(self.trace_message(u'Task.postprocess()',
p,
'adjusted parent ref count'))
if p.ref_count == 0:
self.tm.candidates.append(p)
for t in targets:
t.postprocess()
|
[
"def",
"postprocess",
"(",
"self",
")",
":",
"T",
"=",
"self",
".",
"tm",
".",
"trace",
"if",
"T",
":",
"T",
".",
"write",
"(",
"self",
".",
"trace_message",
"(",
"u'Task.postprocess()'",
",",
"self",
".",
"node",
")",
")",
"# We may have built multiple targets, some of which may have",
"# common parents waiting for this build. Count up how many",
"# targets each parent was waiting for so we can subtract the",
"# values later, and so we *don't* put waiting side-effect Nodes",
"# back on the candidates list if the Node is also a waiting",
"# parent.",
"targets",
"=",
"set",
"(",
"self",
".",
"targets",
")",
"pending_children",
"=",
"self",
".",
"tm",
".",
"pending_children",
"parents",
"=",
"{",
"}",
"for",
"t",
"in",
"targets",
":",
"# A node can only be in the pending_children set if it has",
"# some waiting_parents.",
"if",
"t",
".",
"waiting_parents",
":",
"if",
"T",
":",
"T",
".",
"write",
"(",
"self",
".",
"trace_message",
"(",
"u'Task.postprocess()'",
",",
"t",
",",
"'removing'",
")",
")",
"pending_children",
".",
"discard",
"(",
"t",
")",
"for",
"p",
"in",
"t",
".",
"waiting_parents",
":",
"parents",
"[",
"p",
"]",
"=",
"parents",
".",
"get",
"(",
"p",
",",
"0",
")",
"+",
"1",
"for",
"t",
"in",
"targets",
":",
"if",
"t",
".",
"side_effects",
"is",
"not",
"None",
":",
"for",
"s",
"in",
"t",
".",
"side_effects",
":",
"if",
"s",
".",
"get_state",
"(",
")",
"==",
"NODE_EXECUTING",
":",
"s",
".",
"set_state",
"(",
"NODE_NO_STATE",
")",
"for",
"p",
"in",
"s",
".",
"waiting_parents",
":",
"parents",
"[",
"p",
"]",
"=",
"parents",
".",
"get",
"(",
"p",
",",
"0",
")",
"+",
"1",
"for",
"p",
"in",
"s",
".",
"waiting_s_e",
":",
"if",
"p",
".",
"ref_count",
"==",
"0",
":",
"self",
".",
"tm",
".",
"candidates",
".",
"append",
"(",
"p",
")",
"for",
"p",
",",
"subtract",
"in",
"parents",
".",
"items",
"(",
")",
":",
"p",
".",
"ref_count",
"=",
"p",
".",
"ref_count",
"-",
"subtract",
"if",
"T",
":",
"T",
".",
"write",
"(",
"self",
".",
"trace_message",
"(",
"u'Task.postprocess()'",
",",
"p",
",",
"'adjusted parent ref count'",
")",
")",
"if",
"p",
".",
"ref_count",
"==",
"0",
":",
"self",
".",
"tm",
".",
"candidates",
".",
"append",
"(",
"p",
")",
"for",
"t",
"in",
"targets",
":",
"t",
".",
"postprocess",
"(",
")"
] |
Post-processes a task after it's been executed.
This examines all the targets just built (or not, we don't care
if the build was successful, or even if there was no build
because everything was up-to-date) to see if they have any
waiting parent Nodes, or Nodes waiting on a common side effect,
that can be put back on the candidates list.
|
[
"Post",
"-",
"processes",
"a",
"task",
"after",
"it",
"s",
"been",
"executed",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L438-L493
|
train
|
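The core of postprocess() is reference-count bookkeeping on waiting parents. Here is a small self-contained model of just that bookkeeping, using a stand-in _Node class that is purely illustrative and not an SCons type.

    class _Node(object):
        # Stand-in for an SCons Node; only the fields postprocess() touches
        # for parent bookkeeping are modeled.
        def __init__(self, name, ref_count=0):
            self.name = name
            self.ref_count = ref_count
            self.waiting_parents = set()

    def _release_waiting_parents(finished_targets, candidates):
        # Count how many finished targets each parent was waiting on, subtract
        # that from the parent's ref_count, and requeue parents that reach
        # zero, which is the same bookkeeping postprocess() performs.
        parents = {}
        for t in finished_targets:
            for p in t.waiting_parents:
                parents[p] = parents.get(p, 0) + 1
        for p, subtract in parents.items():
            p.ref_count -= subtract
            if p.ref_count == 0:
                candidates.append(p)

    # A parent waiting on two targets is requeued only after both finish.
    prog = _Node("prog", ref_count=2)
    a, b = _Node("a.o"), _Node("b.o")
    a.waiting_parents.add(prog)
    b.waiting_parents.add(prog)
    candidates = []
    _release_waiting_parents([a, b], candidates)
    assert candidates == [prog]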
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py
|
Task.exception_set
|
def exception_set(self, exception=None):
"""
Records an exception to be raised at the appropriate time.
This also changes the "exception_raise" attribute to point
to the method that will, in fact, raise the exception.
"""
if not exception:
exception = sys.exc_info()
self.exception = exception
self.exception_raise = self._exception_raise
|
python
|
def exception_set(self, exception=None):
"""
Records an exception to be raised at the appropriate time.
This also changes the "exception_raise" attribute to point
to the method that will, in fact, raise the exception.
"""
if not exception:
exception = sys.exc_info()
self.exception = exception
self.exception_raise = self._exception_raise
|
[
"def",
"exception_set",
"(",
"self",
",",
"exception",
"=",
"None",
")",
":",
"if",
"not",
"exception",
":",
"exception",
"=",
"sys",
".",
"exc_info",
"(",
")",
"self",
".",
"exception",
"=",
"exception",
"self",
".",
"exception_raise",
"=",
"self",
".",
"_exception_raise"
] |
Records an exception to be raised at the appropriate time.
This also changes the "exception_raise" attribute to point
to the method that will, in fact, raise the exception.
|
[
"Records",
"an",
"exception",
"to",
"be",
"raised",
"at",
"the",
"appropriate",
"time",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L520-L530
|
train
|
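A sketch of the record-now, raise-later pattern that exception_set() supports; the run_and_record() shape is an assumption, loosely modeled on how a job loop might treat a failing task rather than a copy of SCons' own code.

    def run_and_record(task):
        # A failure inside execute() is captured on the task via
        # exception_set(), which stores the current sys.exc_info(), instead of
        # propagating immediately.
        try:
            task.execute()
        except Exception:
            task.exception_set()
            task.failed()
        else:
            task.executed()

    # Later, whoever owns the task can surface the stored exception:
    #   task.exception_raise()   # re-raises whatever exception_set() recorded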