Each record pairs a Python function with its docstring and source metadata. Columns:

| column | type | notes |
|---|---|---|
| repo | string (7 to 55 chars) | GitHub repository |
| path | string (4 to 127 chars) | file path within the repository |
| func_name | string (1 to 88 chars) | function or method name |
| original_string | string (75 to 19.8k chars) | full source of the function |
| language | string (1 class) | always `python` in this slice |
| code | string (75 to 19.8k chars) | identical to `original_string`; shown once per record below |
| code_tokens | list | tokenization of `code`; omitted in the excerpt below |
| docstring | string (3 to 17.3k chars) | the function's docstring |
| docstring_tokens | list | tokenization of `docstring`; omitted in the excerpt below |
| sha | string (40 chars) | commit hash the record was extracted at |
| url | string (87 to 242 chars) | GitHub permalink with line range |
| partition | string (1 class) | always `train` in this slice |
---

**repo:** amelchio/pysonos | **path:** pysonos/discovery.py | **func_name:** discover_thread | **language:** python

```python
def discover_thread(callback,
                    timeout=5,
                    include_invisible=False,
                    interface_addr=None):
    """ Return a started thread with a discovery callback. """
    thread = StoppableThread(
        target=_discover_thread,
        args=(callback, timeout, include_invisible, interface_addr))
    thread.start()
    return thread
```
**docstring:** Return a started thread with a discovery callback.

**sha:** 23527c445a00e198fbb94d44b92f7f99d139e325 | **url:** https://github.com/amelchio/pysonos/blob/23527c445a00e198fbb94d44b92f7f99d139e325/pysonos/discovery.py#L162-L171 | **partition:** train
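A minimal usage sketch (the callback and its contract are hypothetical; based only on the signature above, and assuming the returned `StoppableThread` behaves like a standard `threading.Thread`):

```python
def on_device(device):
    # assumed callback contract: called once per discovered zone player
    print(device)

thread = discover_thread(on_device, timeout=10, include_invisible=True)
thread.join()  # join() assumed inherited from threading.Thread
```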
---

**repo:** amelchio/pysonos | **path:** pysonos/discovery.py | **func_name:** by_name | **language:** python

```python
def by_name(name):
    """Return a device by name.
    Args:
        name (str): The name of the device to return.
    Returns:
        :class:`~.SoCo`: The first device encountered among all zone with the
            given player name. If none are found `None` is returned.
    """
    devices = discover(all_households=True)
    for device in (devices or []):
        if device.player_name == name:
            return device
    return None
```
**docstring:**

    Return a device by name.
    Args:
        name (str): The name of the device to return.
    Returns:
        :class:`~.SoCo`: The first device encountered among all zone with the
            given player name. If none are found `None` is returned.

**sha:** 23527c445a00e198fbb94d44b92f7f99d139e325 | **url:** https://github.com/amelchio/pysonos/blob/23527c445a00e198fbb94d44b92f7f99d139e325/pysonos/discovery.py#L262-L276 | **partition:** train
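A usage sketch with a hypothetical player name; `player_name` is the attribute the function itself compares against:

```python
device = by_name('Living Room')  # hypothetical zone name
if device is not None:
    print(device.player_name)    # matches the name searched for
```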
---

**repo:** vsoch/pokemon | **path:** pokemon/master.py | **func_name:** get_trainer | **language:** python

```python
def get_trainer(name):
    '''return the unique id for a trainer, determined by the md5 sum
    '''
    name = name.lower()
    return int(hashlib.md5(name.encode('utf-8')).hexdigest(), 16) % 10**8
```
**docstring:** return the unique id for a trainer, determined by the md5 sum

**sha:** c9cd8c5d64897617867d38d45183476ea64a0620 | **url:** https://github.com/vsoch/pokemon/blob/c9cd8c5d64897617867d38d45183476ea64a0620/pokemon/master.py#L64-L68 | **partition:** train
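Since the id is an md5 digest reduced modulo 10**8, it is deterministic and case-insensitive, which a quick sketch can illustrate:

```python
trainer_id = get_trainer('Ash')
assert trainer_id == get_trainer('ash')  # the name is lower-cased first
assert 0 <= trainer_id < 10**8           # bounded by the final modulo
```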
---

**repo:** vsoch/pokemon | **path:** pokemon/convert.py | **func_name:** scale_image | **language:** python

```python
def scale_image(image, new_width):
    """Resizes an image preserving the aspect ratio.
    """
    (original_width, original_height) = image.size
    aspect_ratio = original_height/float(original_width)
    new_height = int(aspect_ratio * new_width)
    # This scales it wider than tall, since characters are biased
    new_image = image.resize((new_width*2, new_height))
    return new_image
```
**docstring:** Resizes an image preserving the aspect ratio.

**sha:** c9cd8c5d64897617867d38d45183476ea64a0620 | **url:** https://github.com/vsoch/pokemon/blob/c9cd8c5d64897617867d38d45183476ea64a0620/pokemon/convert.py#L34-L43 | **partition:** train
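A minimal sketch with a hypothetical input file; the function expects a Pillow image, and per the comment in the source it doubles the width to compensate for tall terminal glyphs:

```python
from PIL import Image  # Pillow

img = Image.open('sprite.png')          # hypothetical file
scaled = scale_image(img, new_width=40)
print(scaled.size)                      # (80, int(aspect_ratio * 40))
```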
---

**repo:** vsoch/pokemon | **path:** pokemon/convert.py | **func_name:** map_pixels_to_ascii_chars | **language:** python

```python
def map_pixels_to_ascii_chars(image, range_width=25):
    """Maps each pixel to an ascii char based on the range
    in which it lies.
    0-255 is divided into 11 ranges of 25 pixels each.
    """
    pixels_in_image = list(image.getdata())
    pixels_to_chars = [ASCII_CHARS[pixel_value/range_width] for pixel_value in
                       pixels_in_image]
    return "".join(pixels_to_chars)
```
**docstring:**

    Maps each pixel to an ascii char based on the range
    in which it lies.
    0-255 is divided into 11 ranges of 25 pixels each.

**sha:** c9cd8c5d64897617867d38d45183476ea64a0620 | **url:** https://github.com/vsoch/pokemon/blob/c9cd8c5d64897617867d38d45183476ea64a0620/pokemon/convert.py#L48-L59 | **partition:** train
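Note that `ASCII_CHARS[pixel_value/range_width]` relies on Python 2 integer division; under Python 3 the index would be a float and raise `TypeError`, so a port would use `//`. A sketch of the equivalent Python 3 call, assuming a grayscale image and an 11-glyph `ASCII_CHARS` palette (the actual palette is defined elsewhere in the repo):

```python
from PIL import Image

ASCII_CHARS = list('#S%?*+;:,.@')            # assumed: 11 glyphs, one per 25-value band
img = Image.open('sprite.png').convert('L')  # grayscale, so getdata() yields ints 0-255
ascii_str = ''.join(ASCII_CHARS[value // 25] for value in img.getdata())
```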
---

**repo:** NLeSC/scriptcwl | **path:** scriptcwl/library.py | **func_name:** load_steps | **language:** python

```python
def load_steps(working_dir=None, steps_dir=None, step_file=None,
               step_list=None):
    """Return a dictionary containing Steps read from file.
    Args:
        steps_dir (str, optional): path to directory containing CWL files.
        step_file (str, optional): path or http(s) url to a single CWL file.
        step_list (list, optional): a list of directories, urls or local file
            paths to CWL files or directories containing CWL files.
    Return:
        dict containing (name, Step) entries.
    """
    if steps_dir is not None:
        step_files = glob.glob(os.path.join(steps_dir, '*.cwl'))
    elif step_file is not None:
        step_files = [step_file]
    elif step_list is not None:
        step_files = []
        for path in step_list:
            if os.path.isdir(path):
                step_files += glob.glob(os.path.join(path, '*.cwl'))
            else:
                step_files.append(path)
    else:
        step_files = []
    if working_dir is not None:
        step_files = sort_loading_order(step_files)
    steps = {}
    for f in step_files:
        if working_dir is not None:
            # Copy file to working_dir
            if not working_dir == os.path.dirname(f) and not is_url(f):
                copied_file = os.path.join(working_dir, os.path.basename(f))
                shutil.copy2(f, copied_file)
                f = copied_file
        # Create steps
        try:
            s = Step(f)
            steps[s.name] = s
        except (NotImplementedError, ValidationException,
                PackedWorkflowException) as e:
            logger.warning(e)
    return steps
```
**docstring:**

    Return a dictionary containing Steps read from file.
    Args:
        steps_dir (str, optional): path to directory containing CWL files.
        step_file (str, optional): path or http(s) url to a single CWL file.
        step_list (list, optional): a list of directories, urls or local file
            paths to CWL files or directories containing CWL files.
    Return:
        dict containing (name, Step) entries.

**sha:** 33bb847a875379da3a5702c7a98dfa585306b960 | **url:** https://github.com/NLeSC/scriptcwl/blob/33bb847a875379da3a5702c7a98dfa585306b960/scriptcwl/library.py#L83-L131 | **partition:** train
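A usage sketch with illustrative paths; the three mutually exclusive sources (`steps_dir`, `step_file`, `step_list`) follow the docstring above:

```python
# Load every *.cwl file from one directory:
steps = load_steps(steps_dir='cwl-steps/')  # hypothetical directory
for name, step in steps.items():
    print(name)

# Or mix directories, local files and URLs:
steps = load_steps(step_list=['cwl-steps/', 'extra/echo.cwl'])
```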
---

**repo:** NLeSC/scriptcwl | **path:** scriptcwl/library.py | **func_name:** load_yaml | **language:** python

```python
def load_yaml(filename):
    """Return object in yaml file."""
    with open(filename) as myfile:
        content = myfile.read()
        if "win" in sys.platform:
            content = content.replace("\\", "/")
        return yaml.safe_load(content)
```
**docstring:** Return object in yaml file.

**sha:** 33bb847a875379da3a5702c7a98dfa585306b960 | **url:** https://github.com/NLeSC/scriptcwl/blob/33bb847a875379da3a5702c7a98dfa585306b960/scriptcwl/library.py#L134-L140 | **partition:** train
---

**repo:** NLeSC/scriptcwl | **path:** scriptcwl/library.py | **func_name:** sort_loading_order | **language:** python

```python
def sort_loading_order(step_files):
    """Sort step files into correct loading order.
    The correct loading order is first tools, then workflows without
    subworkflows, and then workflows with subworkflows. This order is
    required to avoid error messages when a working directory is used.
    """
    tools = []
    workflows = []
    workflows_with_subworkflows = []
    for f in step_files:
        # assume that urls are tools
        if f.startswith('http://') or f.startswith('https://'):
            tools.append(f)
        else:
            obj = load_yaml(f)
            if obj.get('class', '') == 'Workflow':
                if 'requirements' in obj.keys():
                    subw = {'class': 'SubworkflowFeatureRequirement'}
                    if subw in obj['requirements']:
                        workflows_with_subworkflows.append(f)
                    else:
                        workflows.append(f)
                else:
                    workflows.append(f)
            else:
                tools.append(f)
    return tools + workflows + workflows_with_subworkflows
```
**docstring:**

    Sort step files into correct loading order.
    The correct loading order is first tools, then workflows without
    subworkflows, and then workflows with subworkflows. This order is
    required to avoid error messages when a working directory is used.

**sha:** 33bb847a875379da3a5702c7a98dfa585306b960 | **url:** https://github.com/NLeSC/scriptcwl/blob/33bb847a875379da3a5702c7a98dfa585306b960/scriptcwl/library.py#L143-L171 | **partition:** train
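An illustrative call; URLs are classified as tools without being fetched, while local paths are opened with `load_yaml` to inspect their `class` field, so in a real run these files must exist on disk:

```python
files = ['wf.cwl', 'tool.cwl', 'https://example.org/echo.cwl']  # hypothetical
ordered = sort_loading_order(files)
# URLs come first (assumed tools), then plain workflows, then workflows
# that declare SubworkflowFeatureRequirement.
```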
---

**repo:** NLeSC/scriptcwl | **path:** scriptcwl/scriptcwl.py | **func_name:** load_cwl | **language:** python

```python
def load_cwl(fname):
    """Load and validate CWL file using cwltool
    """
    logger.debug('Loading CWL file "{}"'.format(fname))
    # Fetching, preprocessing and validating cwl
    # Older versions of cwltool
    if legacy_cwltool:
        try:
            (document_loader, workflowobj, uri) = fetch_document(fname)
            (document_loader, _, processobj, metadata, uri) = \
                validate_document(document_loader, workflowobj, uri)
        except TypeError:
            from cwltool.context import LoadingContext, getdefault
            from cwltool import workflow
            from cwltool.resolver import tool_resolver
            from cwltool.load_tool import resolve_tool_uri
            loadingContext = LoadingContext()
            loadingContext.construct_tool_object = getdefault(
                loadingContext.construct_tool_object,
                workflow.default_make_tool)
            loadingContext.resolver = getdefault(loadingContext.resolver,
                                                 tool_resolver)
            uri, tool_file_uri = resolve_tool_uri(
                fname, resolver=loadingContext.resolver,
                fetcher_constructor=loadingContext.fetcher_constructor)
            document_loader, workflowobj, uri = fetch_document(
                uri, resolver=loadingContext.resolver,
                fetcher_constructor=loadingContext.fetcher_constructor)
            document_loader, avsc_names, processobj, metadata, uri = \
                validate_document(
                    document_loader, workflowobj, uri,
                    loadingContext.overrides_list, {},
                    enable_dev=loadingContext.enable_dev,
                    strict=loadingContext.strict,
                    preprocess_only=False,
                    fetcher_constructor=loadingContext.fetcher_constructor,
                    skip_schemas=False,
                    do_validate=loadingContext.do_validate)
    # Recent versions of cwltool
    else:
        (loading_context, workflowobj, uri) = fetch_document(fname)
        loading_context, uri = resolve_and_validate_document(loading_context,
                                                             workflowobj, uri)
        document_loader = loading_context.loader
        processobj = workflowobj
        metadata = loading_context.metadata
    return document_loader, processobj, metadata, uri
```
**docstring:** Load and validate CWL file using cwltool

**sha:** 33bb847a875379da3a5702c7a98dfa585306b960 | **url:** https://github.com/NLeSC/scriptcwl/blob/33bb847a875379da3a5702c7a98dfa585306b960/scriptcwl/scriptcwl.py#L42-L93 | **partition:** train
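A minimal sketch mirroring the function's return signature (hypothetical path):

```python
document_loader, processobj, metadata, uri = load_cwl('workflow.cwl')
print(uri)  # resolved URI of the validated document
```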
---

**repo:** NLeSC/scriptcwl | **path:** scriptcwl/step.py | **func_name:** Step.set_input | **language:** python

```python
def set_input(self, p_name, value):
    """Set a Step's input variable to a certain value.
    The value comes either from a workflow input or output of a previous
    step.
    Args:
        name (str): the name of the Step input
        value (str): the name of the output variable that provides the
            value for this input.
    Raises:
        ValueError: The name provided is not a valid input name for this
            Step.
    """
    name = self.python_names.get(p_name)
    if p_name is None or name not in self.get_input_names():
        raise ValueError('Invalid input "{}"'.format(p_name))
    self.step_inputs[name] = value
```
**docstring:**

    Set a Step's input variable to a certain value.
    The value comes either from a workflow input or output of a previous
    step.
    Args:
        name (str): the name of the Step input
        value (str): the name of the output variable that provides the
            value for this input.
    Raises:
        ValueError: The name provided is not a valid input name for this
            Step.

**sha:** 33bb847a875379da3a5702c7a98dfa585306b960 | **url:** https://github.com/NLeSC/scriptcwl/blob/33bb847a875379da3a5702c7a98dfa585306b960/scriptcwl/step.py#L90-L108 | **partition:** train
---

**repo:** NLeSC/scriptcwl | **path:** scriptcwl/step.py | **func_name:** Step.output_reference | **language:** python

```python
def output_reference(self, name):
    """Return a reference to the given output for use in an input
    of a next Step.
    For a Step named `echo` that has an output called `echoed`, the
    reference `echo/echoed` is returned.
    Args:
        name (str): the name of the Step output
    Raises:
        ValueError: The name provided is not a valid output name for this
            Step.
    """
    if name not in self.output_names:
        raise ValueError('Invalid output "{}"'.format(name))
    return Reference(step_name=self.name_in_workflow, output_name=name)
```
**docstring:**

    Return a reference to the given output for use in an input
    of a next Step.
    For a Step named `echo` that has an output called `echoed`, the
    reference `echo/echoed` is returned.
    Args:
        name (str): the name of the Step output
    Raises:
        ValueError: The name provided is not a valid output name for this
            Step.

**sha:** 33bb847a875379da3a5702c7a98dfa585306b960 | **url:** https://github.com/NLeSC/scriptcwl/blob/33bb847a875379da3a5702c7a98dfa585306b960/scriptcwl/step.py#L113-L128 | **partition:** train
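Taken together with `Step.set_input` above, `output_reference` is how step outputs get wired to downstream inputs. A hypothetical wiring sketch with two already-added steps:

```python
# echo_step and wc_step are hypothetical Step instances already in a workflow
ref = echo_step.output_reference('echoed')  # -> Reference rendering as 'echo/echoed'
wc_step.set_input('in_file', ref)           # feeds it into the next step's input
```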
---

**repo:** NLeSC/scriptcwl | **path:** scriptcwl/step.py | **func_name:** Step._input_optional | **language:** python

```python
def _input_optional(inp):
    """Returns True if a step input parameter is optional.
    Args:
        inp (dict): a dictionary representation of an input.
    Raises:
        ValueError: The inp provided is not valid.
    """
    if 'default' in inp.keys():
        return True
    typ = inp.get('type')
    if isinstance(typ, six.string_types):
        return typ.endswith('?')
    elif isinstance(typ, dict):
        # TODO: handle case where iput type is dict
        return False
    elif isinstance(typ, list):
        # The cwltool validation expands optional arguments to
        # [u'null', <type>]
        return bool(u'null' in typ)
    else:
        raise ValueError('Invalid input "{}"'.format(inp.get['id']))
```
**docstring:**

    Returns True if a step input parameter is optional.
    Args:
        inp (dict): a dictionary representation of an input.
    Raises:
        ValueError: The inp provided is not valid.

**sha:** 33bb847a875379da3a5702c7a98dfa585306b960 | **url:** https://github.com/NLeSC/scriptcwl/blob/33bb847a875379da3a5702c7a98dfa585306b960/scriptcwl/step.py#L131-L154 | **partition:** train
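The shapes of `type` the helper distinguishes can be exercised directly; these assertions follow from the branches above (CWL writes optional types either as a trailing `?` or as a `['null', <type>]` union):

```python
assert _input_optional({'type': 'string?'})                 # trailing '?' marks optional
assert _input_optional({'type': ['null', 'string']})        # expanded optional union
assert _input_optional({'type': 'string', 'default': 'x'})  # a default implies optional
assert not _input_optional({'type': 'string'})              # plain type is required
```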
---

**repo:** NLeSC/scriptcwl | **path:** scriptcwl/step.py | **func_name:** Step.to_obj | **language:** python

```python
def to_obj(self, wd=False, pack=False, relpath=None):
    """Return the step as an dict that can be written to a yaml file.
    Returns:
        dict: yaml representation of the step.
    """
    obj = CommentedMap()
    if pack:
        obj['run'] = self.orig
    elif relpath is not None:
        if self.from_url:
            obj['run'] = self.run
        else:
            obj['run'] = os.path.relpath(self.run, relpath)
    elif wd:
        if self.from_url:
            obj['run'] = self.run
        else:
            obj['run'] = os.path.basename(self.run)
    else:
        obj['run'] = self.run
    obj['in'] = self.step_inputs
    obj['out'] = self.output_names
    if self.is_scattered:
        obj['scatter'] = self.scattered_inputs
        # scatter_method is optional when scattering over a single variable
        if self.scatter_method is not None:
            obj['scatterMethod'] = self.scatter_method
    return obj
```
**docstring:**

    Return the step as an dict that can be written to a yaml file.
    Returns:
        dict: yaml representation of the step.

**sha:** 33bb847a875379da3a5702c7a98dfa585306b960 | **url:** https://github.com/NLeSC/scriptcwl/blob/33bb847a875379da3a5702c7a98dfa585306b960/scriptcwl/step.py#L205-L234 | **partition:** train
---

**repo:** NLeSC/scriptcwl | **path:** scriptcwl/step.py | **func_name:** Step.list_inputs | **language:** python

```python
def list_inputs(self):
    """Return a string listing all the Step's input names and their types.
    The types are returned in a copy/pastable format, so if the type is
    `string`, `'string'` (with single quotes) is returned.
    Returns:
        str containing all input names and types.
    """
    doc = []
    for inp, typ in self.input_types.items():
        if isinstance(typ, six.string_types):
            typ = "'{}'".format(typ)
        doc.append('{}: {}'.format(inp, typ))
    return '\n'.join(doc)
```
**docstring:**

    Return a string listing all the Step's input names and their types.
    The types are returned in a copy/pastable format, so if the type is
    `string`, `'string'` (with single quotes) is returned.
    Returns:
        str containing all input names and types.

**sha:** 33bb847a875379da3a5702c7a98dfa585306b960 | **url:** https://github.com/NLeSC/scriptcwl/blob/33bb847a875379da3a5702c7a98dfa585306b960/scriptcwl/step.py#L251-L265 | **partition:** train
---

**repo:** NLeSC/scriptcwl | **path:** scriptcwl/workflow.py | **func_name:** WorkflowGenerator.load | **language:** python

```python
def load(self, steps_dir=None, step_file=None, step_list=None):
    """Load CWL steps into the WorkflowGenerator's steps library.
    Adds steps (command line tools and workflows) to the
    ``WorkflowGenerator``'s steps library. These steps can be used to
    create workflows.
    Args:
        steps_dir (str): path to directory containing CWL files. All CWL in
            the directory are loaded.
        step_file (str): path to a file containing a CWL step that will be
            added to the steps library.
    """
    self._closed()
    self.steps_library.load(steps_dir=steps_dir, step_file=step_file,
                            step_list=step_list)
```
**docstring:**

    Load CWL steps into the WorkflowGenerator's steps library.
    Adds steps (command line tools and workflows) to the
    ``WorkflowGenerator``'s steps library. These steps can be used to
    create workflows.
    Args:
        steps_dir (str): path to directory containing CWL files. All CWL in
            the directory are loaded.
        step_file (str): path to a file containing a CWL step that will be
            added to the steps library.

**sha:** 33bb847a875379da3a5702c7a98dfa585306b960 | **url:** https://github.com/NLeSC/scriptcwl/blob/33bb847a875379da3a5702c7a98dfa585306b960/scriptcwl/workflow.py#L160-L176 | **partition:** train
---

**repo:** NLeSC/scriptcwl | **path:** scriptcwl/workflow.py | **func_name:** WorkflowGenerator._has_requirements | **language:** python

```python
def _has_requirements(self):
    """Returns True if the workflow needs a requirements section.
    Returns:
        bool: True if the workflow needs a requirements section, False
            otherwise.
    """
    self._closed()
    return any([self.has_workflow_step, self.has_scatter_requirement,
                self.has_multiple_inputs])
```
**docstring:**

    Returns True if the workflow needs a requirements section.
    Returns:
        bool: True if the workflow needs a requirements section, False
            otherwise.

**sha:** 33bb847a875379da3a5702c7a98dfa585306b960 | **url:** https://github.com/NLeSC/scriptcwl/blob/33bb847a875379da3a5702c7a98dfa585306b960/scriptcwl/workflow.py#L185-L195 | **partition:** train
---

**repo:** NLeSC/scriptcwl | **path:** scriptcwl/workflow.py | **func_name:** WorkflowGenerator.inputs | **language:** python

```python
def inputs(self, name):
    """List input names and types of a step in the steps library.
    Args:
        name (str): name of a step in the steps library.
    """
    self._closed()
    step = self._get_step(name, make_copy=False)
    return step.list_inputs()
```
**docstring:**

    List input names and types of a step in the steps library.
    Args:
        name (str): name of a step in the steps library.

**sha:** 33bb847a875379da3a5702c7a98dfa585306b960 | **url:** https://github.com/NLeSC/scriptcwl/blob/33bb847a875379da3a5702c7a98dfa585306b960/scriptcwl/workflow.py#L197-L206 | **partition:** train
---

**repo:** NLeSC/scriptcwl | **path:** scriptcwl/workflow.py | **func_name:** WorkflowGenerator._add_step | **language:** python

```python
def _add_step(self, step):
    """Add a step to the workflow.
    Args:
        step (Step): a step from the steps library.
    """
    self._closed()
    self.has_workflow_step = self.has_workflow_step or step.is_workflow
    self.wf_steps[step.name_in_workflow] = step
```
**docstring:**

    Add a step to the workflow.
    Args:
        step (Step): a step from the steps library.

**sha:** 33bb847a875379da3a5702c7a98dfa585306b960 | **url:** https://github.com/NLeSC/scriptcwl/blob/33bb847a875379da3a5702c7a98dfa585306b960/scriptcwl/workflow.py#L208-L217 | **partition:** train
---

**repo:** NLeSC/scriptcwl | **path:** scriptcwl/workflow.py | **func_name:** WorkflowGenerator.add_input | **language:** python

```python
def add_input(self, **kwargs):
    """Add workflow input.
    Args:
        kwargs (dict): A dict with a `name: type` item
            and optionally a `default: value` item, where name is the
            name (id) of the workflow input (e.g., `dir_in`) and type is
            the type of the input (e.g., `'Directory'`).
            The type of input parameter can be learned from
            `step.inputs(step_name=input_name)`.
    Returns:
        inputname
    Raises:
        ValueError: No or multiple parameter(s) have been specified.
    """
    self._closed()

    def _get_item(args):
        """Get a single item from args."""
        if not args:
            raise ValueError("No parameter specified.")
        item = args.popitem()
        if args:
            raise ValueError("Too many parameters, not clear what to do "
                             "with {}".format(kwargs))
        return item

    symbols = None
    input_dict = CommentedMap()
    if 'default' in kwargs:
        input_dict['default'] = kwargs.pop('default')
    if 'label' in kwargs:
        input_dict['label'] = kwargs.pop('label')
    if 'symbols' in kwargs:
        symbols = kwargs.pop('symbols')
    name, input_type = _get_item(kwargs)
    if input_type == 'enum':
        typ = CommentedMap()
        typ['type'] = 'enum'
        # make sure symbols is set
        if symbols is None:
            raise ValueError("Please specify the enum's symbols.")
        # make sure symbols is not empty
        if symbols == []:
            raise ValueError("The enum's symbols cannot be empty.")
        # make sure the symbols are a list
        if type(symbols) != list:
            raise ValueError('Symbols should be a list.')
        # make sure symbols is a list of strings
        symbols = [str(s) for s in symbols]
        typ['symbols'] = symbols
        input_dict['type'] = typ
    else:
        # Set the 'type' if we can't use simple notation (because there is
        # a default value or a label)
        if bool(input_dict):
            input_dict['type'] = input_type
    msg = '"{}" is already used as a workflow input. Please use a ' +\
          'different name.'
    if name in self.wf_inputs:
        raise ValueError(msg.format(name))
    # Add 'type' for complex input types, so the user doesn't have to do it
    if isinstance(input_type, dict):
        input_dict['type'] = input_type
    # Make sure we can use the notation without 'type' if the input allows
    # it.
    if bool(input_dict):
        self.wf_inputs[name] = input_dict
    else:
        self.wf_inputs[name] = input_type
    return Reference(input_name=name)
```
**docstring:**

    Add workflow input.
    Args:
        kwargs (dict): A dict with a `name: type` item
            and optionally a `default: value` item, where name is the
            name (id) of the workflow input (e.g., `dir_in`) and type is
            the type of the input (e.g., `'Directory'`).
            The type of input parameter can be learned from
            `step.inputs(step_name=input_name)`.
    Returns:
        inputname
    Raises:
        ValueError: No or multiple parameter(s) have been specified.

**sha:** 33bb847a875379da3a5702c7a98dfa585306b960 | **url:** https://github.com/NLeSC/scriptcwl/blob/33bb847a875379da3a5702c7a98dfa585306b960/scriptcwl/workflow.py#L219-L299 | **partition:** train
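A sketch following the docstring's own conventions (`wf` is a hypothetical `WorkflowGenerator`); the enum branch requires `symbols`, and a `default` forces the non-simple dict notation:

```python
dir_in = wf.add_input(dir_in='Directory')                   # simple notation
threshold = wf.add_input(threshold='int', default=3)        # stored as {'type': 'int', 'default': 3}
mode = wf.add_input(mode='enum', symbols=['fast', 'slow'])  # enum needs its symbols
```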
---

**repo:** NLeSC/scriptcwl | **path:** scriptcwl/workflow.py | **func_name:** WorkflowGenerator.add_outputs | **language:** python

```python
def add_outputs(self, **kwargs):
    """Add workflow outputs.
    The output type is added automatically, based on the steps in the steps
    library.
    Args:
        kwargs (dict): A dict containing ``name=source name`` pairs.
            ``name`` is the name of the workflow output (e.g.,
            ``txt_files``) and source name is the name of the step that
            produced this output plus the output name (e.g.,
            ``saf-to-txt/out_files``).
    """
    self._closed()
    for name, source_name in kwargs.items():
        obj = {}
        obj['outputSource'] = source_name
        obj['type'] = self.step_output_types[source_name]
        self.wf_outputs[name] = obj
```
**docstring:**

    Add workflow outputs.
    The output type is added automatically, based on the steps in the steps
    library.
    Args:
        kwargs (dict): A dict containing ``name=source name`` pairs.
            ``name`` is the name of the workflow output (e.g.,
            ``txt_files``) and source name is the name of the step that
            produced this output plus the output name (e.g.,
            ``saf-to-txt/out_files``).

**sha:** 33bb847a875379da3a5702c7a98dfa585306b960 | **url:** https://github.com/NLeSC/scriptcwl/blob/33bb847a875379da3a5702c7a98dfa585306b960/scriptcwl/workflow.py#L301-L320 | **partition:** train
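A sketch using the docstring's own example names (`wf` again a hypothetical `WorkflowGenerator`):

```python
wf.add_outputs(txt_files='saf-to-txt/out_files')  # workflow output fed by a step output
```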
---

**repo:** NLeSC/scriptcwl | **path:** scriptcwl/workflow.py | **func_name:** WorkflowGenerator._get_step | **language:** python

```python
def _get_step(self, name, make_copy=True):
    """Return step from steps library.
    Optionally, the step returned is a deep copy from the step in the steps
    library, so additional information (e.g., about whether the step was
    scattered) can be stored in the copy.
    Args:
        name (str): name of the step in the steps library.
        make_copy (bool): whether a deep copy of the step should be
            returned or not (default: True).
    Returns:
        Step from steps library.
    Raises:
        ValueError: The requested step cannot be found in the steps
            library.
    """
    self._closed()
    s = self.steps_library.get_step(name)
    if s is None:
        msg = '"{}" not found in steps library. Please check your ' \
              'spelling or load additional steps'
        raise ValueError(msg.format(name))
    if make_copy:
        s = copy.deepcopy(s)
    return s
```
**docstring:**

    Return step from steps library.
    Optionally, the step returned is a deep copy from the step in the steps
    library, so additional information (e.g., about whether the step was
    scattered) can be stored in the copy.
    Args:
        name (str): name of the step in the steps library.
        make_copy (bool): whether a deep copy of the step should be
            returned or not (default: True).
    Returns:
        Step from steps library.
    Raises:
        ValueError: The requested step cannot be found in the steps
            library.

**sha:** 33bb847a875379da3a5702c7a98dfa585306b960 | **url:** https://github.com/NLeSC/scriptcwl/blob/33bb847a875379da3a5702c7a98dfa585306b960/scriptcwl/workflow.py#L342-L370 | **partition:** train
---

**repo:** NLeSC/scriptcwl | **path:** scriptcwl/workflow.py | **func_name:** WorkflowGenerator.to_obj | **language:** python

```python
def to_obj(self, wd=False, pack=False, relpath=None):
    """Return the created workflow as a dict.
    The dict can be written to a yaml file.
    Returns:
        A yaml-compatible dict representing the workflow.
    """
    self._closed()
    obj = CommentedMap()
    obj['cwlVersion'] = 'v1.0'
    obj['class'] = 'Workflow'
    try:
        obj['doc'] = self.documentation
    except (AttributeError, ValueError):
        pass
    try:
        obj['label'] = self.label
    except (AttributeError, ValueError):
        pass
    if self._has_requirements():
        obj['requirements'] = []
    if self.has_workflow_step:
        obj['requirements'].append(
            {'class': 'SubworkflowFeatureRequirement'})
    if self.has_scatter_requirement:
        obj['requirements'].append({'class': 'ScatterFeatureRequirement'})
    if self.has_multiple_inputs:
        obj['requirements'].append(
            {'class': 'MultipleInputFeatureRequirement'})
    obj['inputs'] = self.wf_inputs
    obj['outputs'] = self.wf_outputs
    steps_obj = CommentedMap()
    for key in self.wf_steps:
        steps_obj[key] = self.wf_steps[key].to_obj(relpath=relpath,
                                                   pack=pack,
                                                   wd=wd)
    obj['steps'] = steps_obj
    return obj
```
Return the created workflow as a dict.
The dict can be written to a yaml file.
Returns:
A yaml-compatible dict representing the workflow.
|
[
"Return",
"the",
"created",
"workflow",
"as",
"a",
"dict",
"."
] |
33bb847a875379da3a5702c7a98dfa585306b960
|
https://github.com/NLeSC/scriptcwl/blob/33bb847a875379da3a5702c7a98dfa585306b960/scriptcwl/workflow.py#L382-L423
|
train
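A minimal serialization sketch for the to_obj record above, assuming a finished WorkflowGenerator instance named wf and ruamel.yaml installed (the library that provides CommentedMap):

from ruamel.yaml import YAML

obj = wf.to_obj()              # CommentedMap: cwlVersion, class, inputs, ...
with open('workflow.cwl', 'w') as f:
    YAML().dump(obj, f)        # CommentedMap keeps the insertion order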
|
NLeSC/scriptcwl
|
scriptcwl/workflow.py
|
WorkflowGenerator.to_script
|
def to_script(self, wf_name='wf'):
"""Generated and print the scriptcwl script for the currunt workflow.
Args:
wf_name (str): string used for the WorkflowGenerator object in the
generated script (default: ``wf``).
"""
self._closed()
script = []
# Workflow documentation
# if self.documentation:
# if is_multiline(self.documentation):
# print('doc = """')
# print(self.documentation)
# print('"""')
# print('{}.set_documentation(doc)'.format(wf_name))
# else:
# print('{}.set_documentation(\'{}\')'.format(wf_name,
# self.documentation))
# Workflow inputs
params = []
returns = []
for name, typ in self.wf_inputs.items():
params.append('{}=\'{}\''.format(name, typ))
returns.append(name)
script.append('{} = {}.add_inputs({})'.format(
', '.join(returns), wf_name, ', '.join(params)))
# Workflow steps
returns = []
for name, step in self.wf_steps.items():
pyname = step.python_name
returns = ['{}_{}'.format(pyname, o) for o in step['out']]
params = ['{}={}'.format(name, python_name(param))
for name, param in step['in'].items()]
script.append('{} = {}.{}({})'.format(
', '.join(returns), wf_name, pyname, ', '.join(params)))
# Workflow outputs
params = []
for name, details in self.wf_outputs.items():
params.append('{}={}'.format(
name, python_name(details['outputSource'])))
script.append('{}.add_outputs({})'.format(wf_name, ', '.join(params)))
return '\n'.join(script)
|
python
|
def to_script(self, wf_name='wf'):
"""Generated and print the scriptcwl script for the currunt workflow.
Args:
wf_name (str): string used for the WorkflowGenerator object in the
generated script (default: ``wf``).
"""
self._closed()
script = []
# Workflow documentation
# if self.documentation:
# if is_multiline(self.documentation):
# print('doc = """')
# print(self.documentation)
# print('"""')
# print('{}.set_documentation(doc)'.format(wf_name))
# else:
# print('{}.set_documentation(\'{}\')'.format(wf_name,
# self.documentation))
# Workflow inputs
params = []
returns = []
for name, typ in self.wf_inputs.items():
params.append('{}=\'{}\''.format(name, typ))
returns.append(name)
script.append('{} = {}.add_inputs({})'.format(
', '.join(returns), wf_name, ', '.join(params)))
# Workflow steps
returns = []
for name, step in self.wf_steps.items():
pyname = step.python_name
returns = ['{}_{}'.format(pyname, o) for o in step['out']]
params = ['{}={}'.format(name, python_name(param))
for name, param in step['in'].items()]
script.append('{} = {}.{}({})'.format(
', '.join(returns), wf_name, pyname, ', '.join(params)))
# Workflow outputs
params = []
for name, details in self.wf_outputs.items():
params.append('{}={}'.format(
name, python_name(details['outputSource'])))
script.append('{}.add_outputs({})'.format(wf_name, ', '.join(params)))
return '\n'.join(script)
|
[
"def",
"to_script",
"(",
"self",
",",
"wf_name",
"=",
"'wf'",
")",
":",
"self",
".",
"_closed",
"(",
")",
"script",
"=",
"[",
"]",
"# Workflow documentation",
"# if self.documentation:",
"# if is_multiline(self.documentation):",
"# print('doc = \"\"\"')",
"# print(self.documentation)",
"# print('\"\"\"')",
"# print('{}.set_documentation(doc)'.format(wf_name))",
"# else:",
"# print('{}.set_documentation(\\'{}\\')'.format(wf_name,",
"# self.documentation))",
"# Workflow inputs",
"params",
"=",
"[",
"]",
"returns",
"=",
"[",
"]",
"for",
"name",
",",
"typ",
"in",
"self",
".",
"wf_inputs",
".",
"items",
"(",
")",
":",
"params",
".",
"append",
"(",
"'{}=\\'{}\\''",
".",
"format",
"(",
"name",
",",
"typ",
")",
")",
"returns",
".",
"append",
"(",
"name",
")",
"script",
".",
"append",
"(",
"'{} = {}.add_inputs({})'",
".",
"format",
"(",
"', '",
".",
"join",
"(",
"returns",
")",
",",
"wf_name",
",",
"', '",
".",
"join",
"(",
"params",
")",
")",
")",
"# Workflow steps",
"returns",
"=",
"[",
"]",
"for",
"name",
",",
"step",
"in",
"self",
".",
"wf_steps",
".",
"items",
"(",
")",
":",
"pyname",
"=",
"step",
".",
"python_name",
"returns",
"=",
"[",
"'{}_{}'",
".",
"format",
"(",
"pyname",
",",
"o",
")",
"for",
"o",
"in",
"step",
"[",
"'out'",
"]",
"]",
"params",
"=",
"[",
"'{}={}'",
".",
"format",
"(",
"name",
",",
"python_name",
"(",
"param",
")",
")",
"for",
"name",
",",
"param",
"in",
"step",
"[",
"'in'",
"]",
".",
"items",
"(",
")",
"]",
"script",
".",
"append",
"(",
"'{} = {}.{}({})'",
".",
"format",
"(",
"', '",
".",
"join",
"(",
"returns",
")",
",",
"wf_name",
",",
"pyname",
",",
"', '",
".",
"join",
"(",
"params",
")",
")",
")",
"# Workflow outputs",
"params",
"=",
"[",
"]",
"for",
"name",
",",
"details",
"in",
"self",
".",
"wf_outputs",
".",
"items",
"(",
")",
":",
"params",
".",
"append",
"(",
"'{}={}'",
".",
"format",
"(",
"name",
",",
"python_name",
"(",
"details",
"[",
"'outputSource'",
"]",
")",
")",
")",
"script",
".",
"append",
"(",
"'{}.add_outputs({})'",
".",
"format",
"(",
"wf_name",
",",
"', '",
".",
"join",
"(",
"params",
")",
")",
")",
"return",
"'\\n'",
".",
"join",
"(",
"script",
")"
] |
Generate and print the scriptcwl script for the current workflow.
Args:
wf_name (str): string used for the WorkflowGenerator object in the
generated script (default: ``wf``).
|
[
"Generated",
"and",
"print",
"the",
"scriptcwl",
"script",
"for",
"the",
"currunt",
"workflow",
"."
] |
33bb847a875379da3a5702c7a98dfa585306b960
|
https://github.com/NLeSC/scriptcwl/blob/33bb847a875379da3a5702c7a98dfa585306b960/scriptcwl/workflow.py#L425-L473
|
train
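A usage sketch for to_script above; the printed text is illustrative only (step and input names are hypothetical), showing the shape of the generated script:

script = wf.to_script(wf_name='wf')
print(script)
# Illustrative output (hypothetical names):
#   wfmessage = wf.add_inputs(wfmessage='string')
#   echo_out = wf.echo(message=wfmessage)
#   wf.add_outputs(out=echo_out)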
|
NLeSC/scriptcwl
|
scriptcwl/workflow.py
|
WorkflowGenerator._types_match
|
def _types_match(type1, type2):
"""Returns False only if it can show that no value of type1
can possibly match type2.
Supports only a limited selection of types.
"""
if isinstance(type1, six.string_types) and \
isinstance(type2, six.string_types):
type1 = type1.rstrip('?')
type2 = type2.rstrip('?')
if type1 != type2:
return False
return True
|
python
|
def _types_match(type1, type2):
"""Returns False only if it can show that no value of type1
can possibly match type2.
Supports only a limited selection of types.
"""
if isinstance(type1, six.string_types) and \
isinstance(type2, six.string_types):
type1 = type1.rstrip('?')
type2 = type2.rstrip('?')
if type1 != type2:
return False
return True
|
[
"def",
"_types_match",
"(",
"type1",
",",
"type2",
")",
":",
"if",
"isinstance",
"(",
"type1",
",",
"six",
".",
"string_types",
")",
"and",
"isinstance",
"(",
"type2",
",",
"six",
".",
"string_types",
")",
":",
"type1",
"=",
"type1",
".",
"rstrip",
"(",
"'?'",
")",
"type2",
"=",
"type2",
".",
"rstrip",
"(",
"'?'",
")",
"if",
"type1",
"!=",
"type2",
":",
"return",
"False",
"return",
"True"
] |
Returns False only if it can show that no value of type1
can possibly match type2.
Supports only a limited selection of types.
|
[
"Returns",
"False",
"only",
"if",
"it",
"can",
"show",
"that",
"no",
"value",
"of",
"type1",
"can",
"possibly",
"match",
"type2",
"."
] |
33bb847a875379da3a5702c7a98dfa585306b960
|
https://github.com/NLeSC/scriptcwl/blob/33bb847a875379da3a5702c7a98dfa585306b960/scriptcwl/workflow.py#L506-L519
|
train
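A quick check of the optional-marker handling in _types_match above (assuming it is exposed as a static method, as its self-less signature suggests): stripping a trailing '?' makes an optional type match its plain counterpart, and anything that is not a pair of strings passes conservatively, since the method cannot show a mismatch:

assert WorkflowGenerator._types_match('string?', 'string')   # optional vs plain
assert not WorkflowGenerator._types_match('string', 'int')   # provable mismatch
assert WorkflowGenerator._types_match(['string'], 'string')  # non-strings: True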
|
NLeSC/scriptcwl
|
scriptcwl/workflow.py
|
WorkflowGenerator.validate
|
def validate(self):
"""Validate workflow object.
This method currently validates the workflow object with the use of
cwltool. It writes the workflow to a tmp CWL file, reads it, validates
it and removes the tmp file again. By default, the workflow is written
to file using absolute paths to the steps.
"""
# define tmpfile
(fd, tmpfile) = tempfile.mkstemp()
os.close(fd)
try:
# save workflow object to tmpfile,
# do not recursively call validate function
self.save(tmpfile, mode='abs', validate=False)
# load workflow from tmpfile
document_loader, processobj, metadata, uri = load_cwl(tmpfile)
finally:
# cleanup tmpfile
os.remove(tmpfile)
|
python
|
def validate(self):
"""Validate workflow object.
This method currently validates the workflow object with the use of
cwltool. It writes the workflow to a tmp CWL file, reads it, validates
it and removes the tmp file again. By default, the workflow is written
to file using absolute paths to the steps.
"""
# define tmpfile
(fd, tmpfile) = tempfile.mkstemp()
os.close(fd)
try:
# save workflow object to tmpfile,
# do not recursively call validate function
self.save(tmpfile, mode='abs', validate=False)
# load workflow from tmpfile
document_loader, processobj, metadata, uri = load_cwl(tmpfile)
finally:
# cleanup tmpfile
os.remove(tmpfile)
|
[
"def",
"validate",
"(",
"self",
")",
":",
"# define tmpfile",
"(",
"fd",
",",
"tmpfile",
")",
"=",
"tempfile",
".",
"mkstemp",
"(",
")",
"os",
".",
"close",
"(",
"fd",
")",
"try",
":",
"# save workflow object to tmpfile,",
"# do not recursively call validate function",
"self",
".",
"save",
"(",
"tmpfile",
",",
"mode",
"=",
"'abs'",
",",
"validate",
"=",
"False",
")",
"# load workflow from tmpfile",
"document_loader",
",",
"processobj",
",",
"metadata",
",",
"uri",
"=",
"load_cwl",
"(",
"tmpfile",
")",
"finally",
":",
"# cleanup tmpfile",
"os",
".",
"remove",
"(",
"tmpfile",
")"
] |
Validate workflow object.
This method currently validates the workflow object with the use of
cwltool. It writes the workflow to a tmp CWL file, reads it, validates
it and removes the tmp file again. By default, the workflow is written
to file using absolute paths to the steps.
|
[
"Validate",
"workflow",
"object",
"."
] |
33bb847a875379da3a5702c7a98dfa585306b960
|
https://github.com/NLeSC/scriptcwl/blob/33bb847a875379da3a5702c7a98dfa585306b960/scriptcwl/workflow.py#L652-L671
|
train
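The write/validate/clean-up pattern from validate above in isolation (a standalone sketch, not scriptcwl API): mkstemp returns an open OS-level descriptor that must be closed before the file is reopened by name, and the finally block guarantees removal even when loading fails:

import os
import tempfile

fd, tmpfile = tempfile.mkstemp(suffix='.cwl')
os.close(fd)                       # release the low-level handle immediately
try:
    with open(tmpfile, 'w') as f:
        f.write('cwlVersion: v1.0\n')
    # ... load and validate tmpfile here ...
finally:
    os.remove(tmpfile)             # runs on success and on failure alike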
|
NLeSC/scriptcwl
|
scriptcwl/workflow.py
|
WorkflowGenerator.save
|
def save(self, fname, mode=None, validate=True, encoding='utf-8',
wd=False, inline=False, relative=False, pack=False):
"""Save the workflow to file.
Save the workflow to a CWL file that can be run with a CWL runner.
Args:
fname (str): file to save the workflow to.
mode (str): one of (rel, abs, wd, inline, pack)
encoding (str): file encoding to use (default: ``utf-8``).
"""
self._closed()
if mode is None:
mode = 'abs'
if pack:
mode = 'pack'
elif wd:
mode = 'wd'
elif relative:
mode = 'rel'
msg = 'Using deprecated save method. Please save the workflow ' \
'with: wf.save(\'{}\', mode=\'{}\'). Redirecting to new ' \
'save method.'.format(fname, mode)
warnings.warn(msg, DeprecationWarning)
modes = ('rel', 'abs', 'wd', 'inline', 'pack')
if mode not in modes:
msg = 'Illegal mode "{}". Choose one of ({}).'\
.format(mode, ','.join(modes))
raise ValueError(msg)
if validate:
self.validate()
dirname = os.path.dirname(os.path.abspath(fname))
if not os.path.exists(dirname):
os.makedirs(dirname)
if mode == 'inline':
msg = ('Inline saving is deprecated. Please save the workflow '
'using mode=\'pack\'. Setting mode to pack.')
warnings.warn(msg, DeprecationWarning)
mode = 'pack'
if mode == 'rel':
relpath = dirname
save_yaml(fname=fname, wf=self, pack=False, relpath=relpath,
wd=False)
if mode == 'abs':
save_yaml(fname=fname, wf=self, pack=False, relpath=None,
wd=False)
if mode == 'pack':
self._pack(fname, encoding)
if mode == 'wd':
if self.get_working_dir() is None:
raise ValueError('Working directory not set.')
else:
# save in working_dir
bn = os.path.basename(fname)
wd_file = os.path.join(self.working_dir, bn)
save_yaml(fname=wd_file, wf=self, pack=False, relpath=None,
wd=True)
# and copy workflow file to other location (as though all steps
# are in the same directory as the workflow)
try:
shutil.copy2(wd_file, fname)
except shutil.Error:
pass
|
python
|
def save(self, fname, mode=None, validate=True, encoding='utf-8',
wd=False, inline=False, relative=False, pack=False):
"""Save the workflow to file.
Save the workflow to a CWL file that can be run with a CWL runner.
Args:
fname (str): file to save the workflow to.
mode (str): one of (rel, abs, wd, inline, pack)
encoding (str): file encoding to use (default: ``utf-8``).
"""
self._closed()
if mode is None:
mode = 'abs'
if pack:
mode = 'pack'
elif wd:
mode = 'wd'
elif relative:
mode = 'rel'
msg = 'Using deprecated save method. Please save the workflow ' \
'with: wf.save(\'{}\', mode=\'{}\'). Redirecting to new ' \
'save method.'.format(fname, mode)
warnings.warn(msg, DeprecationWarning)
modes = ('rel', 'abs', 'wd', 'inline', 'pack')
if mode not in modes:
msg = 'Illegal mode "{}". Choose one of ({}).'\
.format(mode, ','.join(modes))
raise ValueError(msg)
if validate:
self.validate()
dirname = os.path.dirname(os.path.abspath(fname))
if not os.path.exists(dirname):
os.makedirs(dirname)
if mode == 'inline':
msg = ('Inline saving is deprecated. Please save the workflow '
'using mode=\'pack\'. Setting mode to pack.')
warnings.warn(msg, DeprecationWarning)
mode = 'pack'
if mode == 'rel':
relpath = dirname
save_yaml(fname=fname, wf=self, pack=False, relpath=relpath,
wd=False)
if mode == 'abs':
save_yaml(fname=fname, wf=self, pack=False, relpath=None,
wd=False)
if mode == 'pack':
self._pack(fname, encoding)
if mode == 'wd':
if self.get_working_dir() is None:
raise ValueError('Working directory not set.')
else:
# save in working_dir
bn = os.path.basename(fname)
wd_file = os.path.join(self.working_dir, bn)
save_yaml(fname=wd_file, wf=self, pack=False, relpath=None,
wd=True)
# and copy workflow file to other location (as though all steps
# are in the same directory as the workflow)
try:
shutil.copy2(wd_file, fname)
except shutil.Error:
pass
|
[
"def",
"save",
"(",
"self",
",",
"fname",
",",
"mode",
"=",
"None",
",",
"validate",
"=",
"True",
",",
"encoding",
"=",
"'utf-8'",
",",
"wd",
"=",
"False",
",",
"inline",
"=",
"False",
",",
"relative",
"=",
"False",
",",
"pack",
"=",
"False",
")",
":",
"self",
".",
"_closed",
"(",
")",
"if",
"mode",
"is",
"None",
":",
"mode",
"=",
"'abs'",
"if",
"pack",
":",
"mode",
"=",
"'pack'",
"elif",
"wd",
":",
"mode",
"=",
"'wd'",
"elif",
"relative",
":",
"mode",
"=",
"'rel'",
"msg",
"=",
"'Using deprecated save method. Please save the workflow '",
"'with: wf.save(\\'{}\\', mode=\\'{}\\'). Redirecting to new '",
"'save method.'",
".",
"format",
"(",
"fname",
",",
"mode",
")",
"warnings",
".",
"warn",
"(",
"msg",
",",
"DeprecationWarning",
")",
"modes",
"=",
"(",
"'rel'",
",",
"'abs'",
",",
"'wd'",
",",
"'inline'",
",",
"'pack'",
")",
"if",
"mode",
"not",
"in",
"modes",
":",
"msg",
"=",
"'Illegal mode \"{}\". Choose one of ({}).'",
".",
"format",
"(",
"mode",
",",
"','",
".",
"join",
"(",
"modes",
")",
")",
"raise",
"ValueError",
"(",
"msg",
")",
"if",
"validate",
":",
"self",
".",
"validate",
"(",
")",
"dirname",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"fname",
")",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"dirname",
")",
":",
"os",
".",
"makedirs",
"(",
"dirname",
")",
"if",
"mode",
"==",
"'inline'",
":",
"msg",
"=",
"(",
"'Inline saving is deprecated. Please save the workflow '",
"'using mode=\\'pack\\'. Setting mode to pack.'",
")",
"warnings",
".",
"warn",
"(",
"msg",
",",
"DeprecationWarning",
")",
"mode",
"=",
"'pack'",
"if",
"mode",
"==",
"'rel'",
":",
"relpath",
"=",
"dirname",
"save_yaml",
"(",
"fname",
"=",
"fname",
",",
"wf",
"=",
"self",
",",
"pack",
"=",
"False",
",",
"relpath",
"=",
"relpath",
",",
"wd",
"=",
"False",
")",
"if",
"mode",
"==",
"'abs'",
":",
"save_yaml",
"(",
"fname",
"=",
"fname",
",",
"wf",
"=",
"self",
",",
"pack",
"=",
"False",
",",
"relpath",
"=",
"None",
",",
"wd",
"=",
"False",
")",
"if",
"mode",
"==",
"'pack'",
":",
"self",
".",
"_pack",
"(",
"fname",
",",
"encoding",
")",
"if",
"mode",
"==",
"'wd'",
":",
"if",
"self",
".",
"get_working_dir",
"(",
")",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'Working directory not set.'",
")",
"else",
":",
"# save in working_dir",
"bn",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"fname",
")",
"wd_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"working_dir",
",",
"bn",
")",
"save_yaml",
"(",
"fname",
"=",
"wd_file",
",",
"wf",
"=",
"self",
",",
"pack",
"=",
"False",
",",
"relpath",
"=",
"None",
",",
"wd",
"=",
"True",
")",
"# and copy workflow file to other location (as though all steps",
"# are in the same directory as the workflow)",
"try",
":",
"shutil",
".",
"copy2",
"(",
"wd_file",
",",
"fname",
")",
"except",
"shutil",
".",
"Error",
":",
"pass"
] |
Save the workflow to file.
Save the workflow to a CWL file that can be run with a CWL runner.
Args:
fname (str): file to save the workflow to.
mode (str): one of (rel, abs, wd, inline, pack)
encoding (str): file encoding to use (default: ``utf-8``).
|
[
"Save",
"the",
"workflow",
"to",
"file",
"."
] |
33bb847a875379da3a5702c7a98dfa585306b960
|
https://github.com/NLeSC/scriptcwl/blob/33bb847a875379da3a5702c7a98dfa585306b960/scriptcwl/workflow.py#L692-L764
|
train
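A usage sketch for save above, assuming a finished WorkflowGenerator wf; the mode argument controls how step references end up in the written CWL file:

wf.save('out/workflow.cwl')               # mode defaults to 'abs'
wf.save('out/workflow.cwl', mode='rel')   # step paths relative to the file
wf.save('out/packed.cwl', mode='pack')    # single self-contained document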
|
NLeSC/scriptcwl
|
scriptcwl/yamlutils.py
|
str_presenter
|
def str_presenter(dmpr, data):
"""Return correct str_presenter to write multiple lines to a yaml field.
Source: http://stackoverflow.com/a/33300001
"""
if is_multiline(data):
return dmpr.represent_scalar('tag:yaml.org,2002:str', data, style='|')
return dmpr.represent_scalar('tag:yaml.org,2002:str', data)
|
python
|
def str_presenter(dmpr, data):
"""Return correct str_presenter to write multiple lines to a yaml field.
Source: http://stackoverflow.com/a/33300001
"""
if is_multiline(data):
return dmpr.represent_scalar('tag:yaml.org,2002:str', data, style='|')
return dmpr.represent_scalar('tag:yaml.org,2002:str', data)
|
[
"def",
"str_presenter",
"(",
"dmpr",
",",
"data",
")",
":",
"if",
"is_multiline",
"(",
"data",
")",
":",
"return",
"dmpr",
".",
"represent_scalar",
"(",
"'tag:yaml.org,2002:str'",
",",
"data",
",",
"style",
"=",
"'|'",
")",
"return",
"dmpr",
".",
"represent_scalar",
"(",
"'tag:yaml.org,2002:str'",
",",
"data",
")"
] |
Return correct str_presenter to write multiple lines to a yaml field.
Source: http://stackoverflow.com/a/33300001
|
[
"Return",
"correct",
"str_presenter",
"to",
"write",
"multiple",
"lines",
"to",
"a",
"yaml",
"field",
"."
] |
33bb847a875379da3a5702c7a98dfa585306b960
|
https://github.com/NLeSC/scriptcwl/blob/33bb847a875379da3a5702c7a98dfa585306b960/scriptcwl/yamlutils.py#L22-L30
|
train
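A sketch of how a presenter like str_presenter above is typically registered with PyYAML (assuming str_presenter and its is_multiline helper are importable); multiline strings then serialize in literal block style:

import yaml

yaml.add_representer(str, str_presenter)
print(yaml.dump({'doc': 'line one\nline two\n'}))
# doc: |
#   line one
#   line two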
|
nschloe/colorio
|
experiments/new-cs.py
|
build_grad_matrices
|
def build_grad_matrices(V, points):
"""Build the sparse m-by-n matrices that map a coefficient set for a function in V
to the values of dx and dy at a number m of points.
"""
# See <https://www.allanswered.com/post/lkbkm/#zxqgk>
mesh = V.mesh()
bbt = BoundingBoxTree()
bbt.build(mesh)
dofmap = V.dofmap()
el = V.element()
rows = []
cols = []
datax = []
datay = []
for i, xy in enumerate(points):
cell_id = bbt.compute_first_entity_collision(Point(*xy))
cell = Cell(mesh, cell_id)
coordinate_dofs = cell.get_vertex_coordinates()
rows.append([i, i, i])
cols.append(dofmap.cell_dofs(cell_id))
v = el.evaluate_basis_derivatives_all(1, xy, coordinate_dofs, cell_id)
v = v.reshape(3, 2)
datax.append(v[:, 0])
datay.append(v[:, 1])
rows = numpy.concatenate(rows)
cols = numpy.concatenate(cols)
datax = numpy.concatenate(datax)
datay = numpy.concatenate(datay)
m = len(points)
n = V.dim()
dx_matrix = sparse.csr_matrix((datax, (rows, cols)), shape=(m, n))
dy_matrix = sparse.csr_matrix((datay, (rows, cols)), shape=(m, n))
return dx_matrix, dy_matrix
|
python
|
def build_grad_matrices(V, points):
"""Build the sparse m-by-n matrices that map a coefficient set for a function in V
to the values of dx and dy at a number m of points.
"""
# See <https://www.allanswered.com/post/lkbkm/#zxqgk>
mesh = V.mesh()
bbt = BoundingBoxTree()
bbt.build(mesh)
dofmap = V.dofmap()
el = V.element()
rows = []
cols = []
datax = []
datay = []
for i, xy in enumerate(points):
cell_id = bbt.compute_first_entity_collision(Point(*xy))
cell = Cell(mesh, cell_id)
coordinate_dofs = cell.get_vertex_coordinates()
rows.append([i, i, i])
cols.append(dofmap.cell_dofs(cell_id))
v = el.evaluate_basis_derivatives_all(1, xy, coordinate_dofs, cell_id)
v = v.reshape(3, 2)
datax.append(v[:, 0])
datay.append(v[:, 1])
rows = numpy.concatenate(rows)
cols = numpy.concatenate(cols)
datax = numpy.concatenate(datax)
datay = numpy.concatenate(datay)
m = len(points)
n = V.dim()
dx_matrix = sparse.csr_matrix((datax, (rows, cols)), shape=(m, n))
dy_matrix = sparse.csr_matrix((datay, (rows, cols)), shape=(m, n))
return dx_matrix, dy_matrix
|
[
"def",
"build_grad_matrices",
"(",
"V",
",",
"points",
")",
":",
"# See <https://www.allanswered.com/post/lkbkm/#zxqgk>",
"mesh",
"=",
"V",
".",
"mesh",
"(",
")",
"bbt",
"=",
"BoundingBoxTree",
"(",
")",
"bbt",
".",
"build",
"(",
"mesh",
")",
"dofmap",
"=",
"V",
".",
"dofmap",
"(",
")",
"el",
"=",
"V",
".",
"element",
"(",
")",
"rows",
"=",
"[",
"]",
"cols",
"=",
"[",
"]",
"datax",
"=",
"[",
"]",
"datay",
"=",
"[",
"]",
"for",
"i",
",",
"xy",
"in",
"enumerate",
"(",
"points",
")",
":",
"cell_id",
"=",
"bbt",
".",
"compute_first_entity_collision",
"(",
"Point",
"(",
"*",
"xy",
")",
")",
"cell",
"=",
"Cell",
"(",
"mesh",
",",
"cell_id",
")",
"coordinate_dofs",
"=",
"cell",
".",
"get_vertex_coordinates",
"(",
")",
"rows",
".",
"append",
"(",
"[",
"i",
",",
"i",
",",
"i",
"]",
")",
"cols",
".",
"append",
"(",
"dofmap",
".",
"cell_dofs",
"(",
"cell_id",
")",
")",
"v",
"=",
"el",
".",
"evaluate_basis_derivatives_all",
"(",
"1",
",",
"xy",
",",
"coordinate_dofs",
",",
"cell_id",
")",
"v",
"=",
"v",
".",
"reshape",
"(",
"3",
",",
"2",
")",
"datax",
".",
"append",
"(",
"v",
"[",
":",
",",
"0",
"]",
")",
"datay",
".",
"append",
"(",
"v",
"[",
":",
",",
"1",
"]",
")",
"rows",
"=",
"numpy",
".",
"concatenate",
"(",
"rows",
")",
"cols",
"=",
"numpy",
".",
"concatenate",
"(",
"cols",
")",
"datax",
"=",
"numpy",
".",
"concatenate",
"(",
"datax",
")",
"datay",
"=",
"numpy",
".",
"concatenate",
"(",
"datay",
")",
"m",
"=",
"len",
"(",
"points",
")",
"n",
"=",
"V",
".",
"dim",
"(",
")",
"dx_matrix",
"=",
"sparse",
".",
"csr_matrix",
"(",
"(",
"datax",
",",
"(",
"rows",
",",
"cols",
")",
")",
",",
"shape",
"=",
"(",
"m",
",",
"n",
")",
")",
"dy_matrix",
"=",
"sparse",
".",
"csr_matrix",
"(",
"(",
"datay",
",",
"(",
"rows",
",",
"cols",
")",
")",
",",
"shape",
"=",
"(",
"m",
",",
"n",
")",
")",
"return",
"dx_matrix",
",",
"dy_matrix"
] |
Build the sparse m-by-n matrices that map a coefficient set for a function in V
to the values of dx and dy at a number m of points.
|
[
"Build",
"the",
"sparse",
"m",
"-",
"by",
"-",
"n",
"matrices",
"that",
"map",
"a",
"coefficient",
"set",
"for",
"a",
"function",
"in",
"V",
"to",
"the",
"values",
"of",
"dx",
"and",
"dy",
"at",
"a",
"number",
"m",
"of",
"points",
"."
] |
357d6001b3cf30f752e23726bf429dc1d1c60b3a
|
https://github.com/nschloe/colorio/blob/357d6001b3cf30f752e23726bf429dc1d1c60b3a/experiments/new-cs.py#L309-L346
|
train
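The sparse assembly idiom from build_grad_matrices above in isolation: scipy's coordinate-style constructor sums duplicate (row, col) entries, so per-point contributions can simply be concatenated and handed over in one call:

import numpy
from scipy import sparse

rows = numpy.array([0, 0, 1, 1])
cols = numpy.array([0, 2, 1, 2])
data = numpy.array([1.0, 2.0, 3.0, 4.0])
A = sparse.csr_matrix((data, (rows, cols)), shape=(2, 3))
values = A.dot(numpy.ones(3))      # coefficients -> point values, here [3., 7.]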
|
nschloe/colorio
|
experiments/new-cs.py
|
PiecewiseEllipse.apply_M
|
def apply_M(self, ax, ay):
"""Linear operator that converts ax, ay to abcd.
"""
jac = numpy.array(
[[self.dx.dot(ax), self.dy.dot(ax)], [self.dx.dot(ay), self.dy.dot(ay)]]
)
# jacs and J are of shape (2, 2, k). M must be of the same shape and
# contain the result of the k 2x2 dot products. Perhaps there's a
# dot() for this.
M = numpy.einsum("ijl,jkl->ikl", jac, self.J)
# M = numpy.array([
# [
# jac[0][0]*self.J[0][0] + jac[0][1]*self.J[1][0],
# jac[0][0]*self.J[0][1] + jac[0][1]*self.J[1][1],
# ],
# [
# jac[1][0]*self.J[0][0] + jac[1][1]*self.J[1][0],
# jac[1][0]*self.J[0][1] + jac[1][1]*self.J[1][1],
# ],
# ])
# One could use
#
# M = numpy.moveaxis(M, -1, 0)
# _, sigma, _ = numpy.linalg.svd(M)
#
# but computing the singular values explicitly via
# <https://scicomp.stackexchange.com/a/14103/3980> is faster and more
# explicit.
a = (M[0, 0] + M[1, 1]) / 2
b = (M[0, 0] - M[1, 1]) / 2
c = (M[1, 0] + M[0, 1]) / 2
d = (M[1, 0] - M[0, 1]) / 2
return a, b, c, d
|
python
|
def apply_M(self, ax, ay):
"""Linear operator that converts ax, ay to abcd.
"""
jac = numpy.array(
[[self.dx.dot(ax), self.dy.dot(ax)], [self.dx.dot(ay), self.dy.dot(ay)]]
)
# jacs and J are of shape (2, 2, k). M must be of the same shape and
# contain the result of the k 2x2 dot products. Perhaps there's a
# dot() for this.
M = numpy.einsum("ijl,jkl->ikl", jac, self.J)
# M = numpy.array([
# [
# jac[0][0]*self.J[0][0] + jac[0][1]*self.J[1][0],
# jac[0][0]*self.J[0][1] + jac[0][1]*self.J[1][1],
# ],
# [
# jac[1][0]*self.J[0][0] + jac[1][1]*self.J[1][0],
# jac[1][0]*self.J[0][1] + jac[1][1]*self.J[1][1],
# ],
# ])
# One could use
#
# M = numpy.moveaxis(M, -1, 0)
# _, sigma, _ = numpy.linalg.svd(M)
#
# but computing the singular values explicitly via
# <https://scicomp.stackexchange.com/a/14103/3980> is faster and more
# explicit.
a = (M[0, 0] + M[1, 1]) / 2
b = (M[0, 0] - M[1, 1]) / 2
c = (M[1, 0] + M[0, 1]) / 2
d = (M[1, 0] - M[0, 1]) / 2
return a, b, c, d
|
[
"def",
"apply_M",
"(",
"self",
",",
"ax",
",",
"ay",
")",
":",
"jac",
"=",
"numpy",
".",
"array",
"(",
"[",
"[",
"self",
".",
"dx",
".",
"dot",
"(",
"ax",
")",
",",
"self",
".",
"dy",
".",
"dot",
"(",
"ax",
")",
"]",
",",
"[",
"self",
".",
"dx",
".",
"dot",
"(",
"ay",
")",
",",
"self",
".",
"dy",
".",
"dot",
"(",
"ay",
")",
"]",
"]",
")",
"# jacs and J are of shape (2, 2, k). M must be of the same shape and",
"# contain the result of the k 2x2 dot products. Perhaps there's a",
"# dot() for this.",
"M",
"=",
"numpy",
".",
"einsum",
"(",
"\"ijl,jkl->ikl\"",
",",
"jac",
",",
"self",
".",
"J",
")",
"# M = numpy.array([",
"# [",
"# jac[0][0]*self.J[0][0] + jac[0][1]*self.J[1][0],",
"# jac[0][0]*self.J[0][1] + jac[0][1]*self.J[1][1],",
"# ],",
"# [",
"# jac[1][0]*self.J[0][0] + jac[1][1]*self.J[1][0],",
"# jac[1][0]*self.J[0][1] + jac[1][1]*self.J[1][1],",
"# ],",
"# ])",
"# One could use",
"#",
"# M = numpy.moveaxis(M, -1, 0)",
"# _, sigma, _ = numpy.linalg.svd(M)",
"#",
"# but computing the singular values explicitly via",
"# <https://scicomp.stackexchange.com/a/14103/3980> is faster and more",
"# explicit.",
"a",
"=",
"(",
"M",
"[",
"0",
",",
"0",
"]",
"+",
"M",
"[",
"1",
",",
"1",
"]",
")",
"/",
"2",
"b",
"=",
"(",
"M",
"[",
"0",
",",
"0",
"]",
"-",
"M",
"[",
"1",
",",
"1",
"]",
")",
"/",
"2",
"c",
"=",
"(",
"M",
"[",
"1",
",",
"0",
"]",
"+",
"M",
"[",
"0",
",",
"1",
"]",
")",
"/",
"2",
"d",
"=",
"(",
"M",
"[",
"1",
",",
"0",
"]",
"-",
"M",
"[",
"0",
",",
"1",
"]",
")",
"/",
"2",
"return",
"a",
",",
"b",
",",
"c",
",",
"d"
] |
Linear operator that converts ax, ay to abcd.
|
[
"Linear",
"operator",
"that",
"converts",
"ax",
"ay",
"to",
"abcd",
"."
] |
357d6001b3cf30f752e23726bf429dc1d1c60b3a
|
https://github.com/nschloe/colorio/blob/357d6001b3cf30f752e23726bf429dc1d1c60b3a/experiments/new-cs.py#L423-L458
|
train
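A standalone check of the decomposition in apply_M above against numpy's SVD; per the scicomp.stackexchange answer linked in the code, the two singular values of a 2x2 matrix are sqrt(a^2 + d^2) + sqrt(b^2 + c^2) and |sqrt(a^2 + d^2) - sqrt(b^2 + c^2)|:

import numpy

M = numpy.random.rand(2, 2)
a = (M[0, 0] + M[1, 1]) / 2
b = (M[0, 0] - M[1, 1]) / 2
c = (M[1, 0] + M[0, 1]) / 2
d = (M[1, 0] - M[0, 1]) / 2
q, r = numpy.hypot(a, d), numpy.hypot(b, c)
sigma = numpy.linalg.svd(M, compute_uv=False)     # descending order
assert numpy.allclose(sigma, [q + r, abs(q - r)])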
|
nschloe/colorio
|
experiments/new-cs.py
|
PiecewiseEllipse.cost_min2
|
def cost_min2(self, alpha):
"""Residual formulation, Hessian is a low-rank update of the identity.
"""
n = self.V.dim()
ax = alpha[:n]
ay = alpha[n:]
# ml = pyamg.ruge_stuben_solver(self.L)
# # ml = pyamg.smoothed_aggregation_solver(self.L)
# print(ml)
# print()
# print(self.L)
# print()
# x = ml.solve(ax, tol=1e-10)
# print('residual: {}'.format(numpy.linalg.norm(ax - self.L*x)))
# print()
# print(ax)
# print()
# print(x)
# exit(1)
# x = sparse.linalg.spsolve(self.L, ax)
# print('residual: {}'.format(numpy.linalg.norm(ax - self.L*x)))
# exit(1)
q2, r2 = self.get_q2_r2(ax, ay)
Lax = self.L * ax
Lay = self.L * ay
out = [
0.5 * numpy.dot(Lax, Lax),
0.5 * numpy.dot(Lay, Lay),
0.5 * numpy.dot(q2 - 1, q2 - 1),
0.5 * numpy.dot(r2, r2),
]
if self.num_f_eval % 10000 == 0:
print("{:7d} {:e} {:e} {:e} {:e}".format(self.num_f_eval, *out))
self.num_f_eval += 1
return numpy.sum(out)
|
python
|
def cost_min2(self, alpha):
"""Residual formulation, Hessian is a low-rank update of the identity.
"""
n = self.V.dim()
ax = alpha[:n]
ay = alpha[n:]
# ml = pyamg.ruge_stuben_solver(self.L)
# # ml = pyamg.smoothed_aggregation_solver(self.L)
# print(ml)
# print()
# print(self.L)
# print()
# x = ml.solve(ax, tol=1e-10)
# print('residual: {}'.format(numpy.linalg.norm(ax - self.L*x)))
# print()
# print(ax)
# print()
# print(x)
# exit(1)
# x = sparse.linalg.spsolve(self.L, ax)
# print('residual: {}'.format(numpy.linalg.norm(ax - self.L*x)))
# exit(1)
q2, r2 = self.get_q2_r2(ax, ay)
Lax = self.L * ax
Lay = self.L * ay
out = [
0.5 * numpy.dot(Lax, Lax),
0.5 * numpy.dot(Lay, Lay),
0.5 * numpy.dot(q2 - 1, q2 - 1),
0.5 * numpy.dot(r2, r2),
]
if self.num_f_eval % 10000 == 0:
print("{:7d} {:e} {:e} {:e} {:e}".format(self.num_f_eval, *out))
self.num_f_eval += 1
return numpy.sum(out)
|
[
"def",
"cost_min2",
"(",
"self",
",",
"alpha",
")",
":",
"n",
"=",
"self",
".",
"V",
".",
"dim",
"(",
")",
"ax",
"=",
"alpha",
"[",
":",
"n",
"]",
"ay",
"=",
"alpha",
"[",
"n",
":",
"]",
"# ml = pyamg.ruge_stuben_solver(self.L)",
"# # ml = pyamg.smoothed_aggregation_solver(self.L)",
"# print(ml)",
"# print()",
"# print(self.L)",
"# print()",
"# x = ml.solve(ax, tol=1e-10)",
"# print('residual: {}'.format(numpy.linalg.norm(ax - self.L*x)))",
"# print()",
"# print(ax)",
"# print()",
"# print(x)",
"# exit(1)",
"# x = sparse.linalg.spsolve(self.L, ax)",
"# print('residual: {}'.format(numpy.linalg.norm(ax - self.L*x)))",
"# exit(1)",
"q2",
",",
"r2",
"=",
"self",
".",
"get_q2_r2",
"(",
"ax",
",",
"ay",
")",
"Lax",
"=",
"self",
".",
"L",
"*",
"ax",
"Lay",
"=",
"self",
".",
"L",
"*",
"ay",
"out",
"=",
"[",
"0.5",
"*",
"numpy",
".",
"dot",
"(",
"Lax",
",",
"Lax",
")",
",",
"0.5",
"*",
"numpy",
".",
"dot",
"(",
"Lay",
",",
"Lay",
")",
",",
"0.5",
"*",
"numpy",
".",
"dot",
"(",
"q2",
"-",
"1",
",",
"q2",
"-",
"1",
")",
",",
"0.5",
"*",
"numpy",
".",
"dot",
"(",
"r2",
",",
"r2",
")",
",",
"]",
"if",
"self",
".",
"num_f_eval",
"%",
"10000",
"==",
"0",
":",
"print",
"(",
"\"{:7d} {:e} {:e} {:e} {:e}\"",
".",
"format",
"(",
"self",
".",
"num_f_eval",
",",
"*",
"out",
")",
")",
"self",
".",
"num_f_eval",
"+=",
"1",
"return",
"numpy",
".",
"sum",
"(",
"out",
")"
] |
Residual formulation, Hessian is a low-rank update of the identity.
|
[
"Residual",
"formulation",
"Hessian",
"is",
"a",
"low",
"-",
"rank",
"update",
"of",
"the",
"identity",
"."
] |
357d6001b3cf30f752e23726bf429dc1d1c60b3a
|
https://github.com/nschloe/colorio/blob/357d6001b3cf30f752e23726bf429dc1d1c60b3a/experiments/new-cs.py#L757-L798
|
train
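The cost structure of cost_min2 above in miniature (a sketch with stand-in operators, not the original problem): a sum of 0.5 * ||residual||^2 terms, here one linear smoothing block plus one nonlinear data block:

import numpy

L = numpy.eye(3)                        # stand-in for the smoothing operator
ax = numpy.array([0.1, -0.2, 0.3])
residuals = [L.dot(ax), ax**2 - 1.0]    # hypothetical linear + nonlinear blocks
cost = sum(0.5 * numpy.dot(res, res) for res in residuals)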
|
nschloe/colorio
|
colorio/tools.py
|
delta
|
def delta(a, b):
"""Computes the distances between two colors or color sets. The shape of
`a` and `b` must be equal.
"""
diff = a - b
return numpy.einsum("i...,i...->...", diff, diff)
|
python
|
def delta(a, b):
"""Computes the distances between two colors or color sets. The shape of
`a` and `b` must be equal.
"""
diff = a - b
return numpy.einsum("i...,i...->...", diff, diff)
|
[
"def",
"delta",
"(",
"a",
",",
"b",
")",
":",
"diff",
"=",
"a",
"-",
"b",
"return",
"numpy",
".",
"einsum",
"(",
"\"i...,i...->...\"",
",",
"diff",
",",
"diff",
")"
] |
Computes the distances between two colors or color sets. The shape of
`a` and `b` must be equal.
|
[
"Computes",
"the",
"distances",
"between",
"two",
"colors",
"or",
"color",
"sets",
".",
"The",
"shape",
"of",
"a",
"and",
"b",
"must",
"be",
"equal",
"."
] |
357d6001b3cf30f752e23726bf429dc1d1c60b3a
|
https://github.com/nschloe/colorio/blob/357d6001b3cf30f752e23726bf429dc1d1c60b3a/colorio/tools.py#L24-L29
|
train
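A usage sketch for delta above: color sets are stacked along the first axis, so shape (3, k) holds k colors. Note that the einsum yields the squared Euclidean distance; no square root is taken:

import numpy

a = numpy.array([[50.0], [10.0], [10.0]])   # one Lab-like color, shape (3, 1)
b = numpy.array([[50.0], [13.0], [14.0]])
delta(a, b)                                  # array([25.]) == 3**2 + 4**2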
|
nschloe/colorio
|
colorio/tools.py
|
plot_flat_gamut
|
def plot_flat_gamut(
xy_to_2d=lambda xy: xy,
axes_labels=("x", "y"),
plot_rgb_triangle=True,
fill_horseshoe=True,
plot_planckian_locus=True,
):
"""Show a flat color gamut, by default xy. There exists a chroma gamut for
all color models which transform lines in XYZ to lines, and hence have a
natural decomposition into lightness and chroma components. Also, the flat
gamut is the same for every lightness value. Examples for color models with
this property are CIELUV and IPT, examples for color models without are
CIELAB and CIECAM02.
"""
observer = observers.cie_1931_2()
# observer = observers.cie_1964_10()
_plot_monochromatic(observer, xy_to_2d, fill_horseshoe=fill_horseshoe)
# plt.grid()
if plot_rgb_triangle:
_plot_rgb_triangle(xy_to_2d)
if plot_planckian_locus:
_plot_planckian_locus(observer, xy_to_2d)
plt.gca().set_aspect("equal")
# plt.legend()
plt.xlabel(axes_labels[0])
plt.ylabel(axes_labels[1])
return
|
python
|
def plot_flat_gamut(
xy_to_2d=lambda xy: xy,
axes_labels=("x", "y"),
plot_rgb_triangle=True,
fill_horseshoe=True,
plot_planckian_locus=True,
):
"""Show a flat color gamut, by default xy. There exists a chroma gamut for
all color models which transform lines in XYZ to lines, and hence have a
natural decomposition into lightness and chroma components. Also, the flat
gamut is the same for every lightness value. Examples for color models with
this property are CIELUV and IPT, examples for color models without are
CIELAB and CIECAM02.
"""
observer = observers.cie_1931_2()
# observer = observers.cie_1964_10()
_plot_monochromatic(observer, xy_to_2d, fill_horseshoe=fill_horseshoe)
# plt.grid()
if plot_rgb_triangle:
_plot_rgb_triangle(xy_to_2d)
if plot_planckian_locus:
_plot_planckian_locus(observer, xy_to_2d)
plt.gca().set_aspect("equal")
# plt.legend()
plt.xlabel(axes_labels[0])
plt.ylabel(axes_labels[1])
return
|
[
"def",
"plot_flat_gamut",
"(",
"xy_to_2d",
"=",
"lambda",
"xy",
":",
"xy",
",",
"axes_labels",
"=",
"(",
"\"x\"",
",",
"\"y\"",
")",
",",
"plot_rgb_triangle",
"=",
"True",
",",
"fill_horseshoe",
"=",
"True",
",",
"plot_planckian_locus",
"=",
"True",
",",
")",
":",
"observer",
"=",
"observers",
".",
"cie_1931_2",
"(",
")",
"# observer = observers.cie_1964_10()",
"_plot_monochromatic",
"(",
"observer",
",",
"xy_to_2d",
",",
"fill_horseshoe",
"=",
"fill_horseshoe",
")",
"# plt.grid()",
"if",
"plot_rgb_triangle",
":",
"_plot_rgb_triangle",
"(",
"xy_to_2d",
")",
"if",
"plot_planckian_locus",
":",
"_plot_planckian_locus",
"(",
"observer",
",",
"xy_to_2d",
")",
"plt",
".",
"gca",
"(",
")",
".",
"set_aspect",
"(",
"\"equal\"",
")",
"# plt.legend()",
"plt",
".",
"xlabel",
"(",
"axes_labels",
"[",
"0",
"]",
")",
"plt",
".",
"ylabel",
"(",
"axes_labels",
"[",
"1",
"]",
")",
"return"
] |
Show a flat color gamut, by default xy. There exists a chroma gamut for
all color models which transform lines in XYZ to lines, and hence have a
natural decomposition into lightness and chroma components. Also, the flat
gamut is the same for every lightness value. Examples for color models with
this property are CIELUV and IPT, examples for color models without are
CIELAB and CIECAM02.
|
[
"Show",
"a",
"flat",
"color",
"gamut",
"by",
"default",
"xy",
".",
"There",
"exists",
"a",
"chroma",
"gamut",
"for",
"all",
"color",
"models",
"which",
"transform",
"lines",
"in",
"XYZ",
"to",
"lines",
"and",
"hence",
"have",
"a",
"natural",
"decomposition",
"into",
"lightness",
"and",
"chroma",
"components",
".",
"Also",
"the",
"flat",
"gamut",
"is",
"the",
"same",
"for",
"every",
"lightness",
"value",
".",
"Examples",
"for",
"color",
"models",
"with",
"this",
"property",
"are",
"CIELUV",
"and",
"IPT",
"examples",
"for",
"color",
"models",
"without",
"are",
"CIELAB",
"and",
"CIECAM02",
"."
] |
357d6001b3cf30f752e23726bf429dc1d1c60b3a
|
https://github.com/nschloe/colorio/blob/357d6001b3cf30f752e23726bf429dc1d1c60b3a/colorio/tools.py#L199-L228
|
train
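A usage sketch for plot_flat_gamut above (assumes matplotlib is available); the defaults draw the filled xy horseshoe with the RGB triangle and the Planckian locus:

import matplotlib.pyplot as plt

plot_flat_gamut()                 # default xy chromaticity diagram
plt.show()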
|
nschloe/colorio
|
experiments/pade2d.py
|
_get_xy_tree
|
def _get_xy_tree(xy, degree):
"""Evaluates the entire tree of 2d mononomials.
The return value is a list of arrays, where `out[k]` hosts the `2*k+1`
values of the `k`th level of the tree
(0, 0)
(1, 0) (0, 1)
(2, 0) (1, 1) (0, 2)
... ... ...
"""
x, y = xy
tree = [numpy.array([numpy.ones(x.shape, dtype=int)])]
for d in range(degree):
tree.append(numpy.concatenate([tree[-1] * x, [tree[-1][-1] * y]]))
return tree
|
python
|
def _get_xy_tree(xy, degree):
"""Evaluates the entire tree of 2d mononomials.
The return value is a list of arrays, where `out[k]` hosts the `2*k+1`
values of the `k`th level of the tree
(0, 0)
(1, 0) (0, 1)
(2, 0) (1, 1) (0, 2)
... ... ...
"""
x, y = xy
tree = [numpy.array([numpy.ones(x.shape, dtype=int)])]
for d in range(degree):
tree.append(numpy.concatenate([tree[-1] * x, [tree[-1][-1] * y]]))
return tree
|
[
"def",
"_get_xy_tree",
"(",
"xy",
",",
"degree",
")",
":",
"x",
",",
"y",
"=",
"xy",
"tree",
"=",
"[",
"numpy",
".",
"array",
"(",
"[",
"numpy",
".",
"ones",
"(",
"x",
".",
"shape",
",",
"dtype",
"=",
"int",
")",
"]",
")",
"]",
"for",
"d",
"in",
"range",
"(",
"degree",
")",
":",
"tree",
".",
"append",
"(",
"numpy",
".",
"concatenate",
"(",
"[",
"tree",
"[",
"-",
"1",
"]",
"*",
"x",
",",
"[",
"tree",
"[",
"-",
"1",
"]",
"[",
"-",
"1",
"]",
"*",
"y",
"]",
"]",
")",
")",
"return",
"tree"
] |
Evaluates the entire tree of 2d monomials.
The return value is a list of arrays, where `out[k]` hosts the `k+1`
values of the `k`th level of the tree
(0, 0)
(1, 0) (0, 1)
(2, 0) (1, 1) (0, 2)
... ... ...
|
[
"Evaluates",
"the",
"entire",
"tree",
"of",
"2d",
"mononomials",
"."
] |
357d6001b3cf30f752e23726bf429dc1d1c60b3a
|
https://github.com/nschloe/colorio/blob/357d6001b3cf30f752e23726bf429dc1d1c60b3a/experiments/pade2d.py#L17-L32
|
train
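A worked example of the tree layout in _get_xy_tree above at the scalar point (x, y) = (2, 3): level k holds the k+1 monomials x**(k-i) * y**i:

import numpy

tree = _get_xy_tree(numpy.array([2, 3]), degree=2)
# tree[0] -> [1]           constant
# tree[1] -> [2, 3]        x, y
# tree[2] -> [4, 6, 9]     x**2, x*y, y**2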
|
nschloe/colorio
|
colorio/illuminants.py
|
spectrum_to_xyz100
|
def spectrum_to_xyz100(spectrum, observer):
"""Computes the tristimulus values XYZ from a given spectrum for a given
observer via
X_i = int_lambda spectrum_i(lambda) * observer_i(lambda) dlambda.
In section 7, the technical report CIE Standard Illuminants for
Colorimetry, 1999, gives a recommendation on how to perform the
computation.
"""
lambda_o, data_o = observer
lambda_s, data_s = spectrum
# form the union of lambdas
lmbda = numpy.sort(numpy.unique(numpy.concatenate([lambda_o, lambda_s])))
# The technical document prescribes that the integration be performed over
# the wavelength range corresponding to the entire visible spectrum, 360 nm
# to 830 nm.
assert lmbda[0] < 361e-9
assert lmbda[-1] > 829e-9
# interpolate data
idata_o = numpy.array([numpy.interp(lmbda, lambda_o, dt) for dt in data_o])
# The technical report specifies the interpolation techniques, too:
# ```
# Use one of the four following methods to calculate needed but unmeasured
# values of phi(l), R(l) or tau(l) within the range of measurements:
# 1) the third-order polynomial interpolation (Lagrange) from the four
# neighbouring data points around the point to be interpolated, or
# 2) cubic spline interpolation formula, or
# 3) a fifth order polynomial interpolation formula from the six
# neighboring data points around the point to be interpolated, or
# 4) a Sprague interpolation (see Seve, 2003).
# ```
# Well, don't do that but simply use linear interpolation now. We only use
# the midpoint rule for integration anyways.
idata_s = numpy.interp(lmbda, lambda_s, data_s)
# step sizes
delta = numpy.zeros(len(lmbda))
diff = lmbda[1:] - lmbda[:-1]
delta[1:] += diff
delta[:-1] += diff
delta /= 2
values = numpy.dot(idata_o, idata_s * delta)
return values * 100
|
python
|
def spectrum_to_xyz100(spectrum, observer):
"""Computes the tristimulus values XYZ from a given spectrum for a given
observer via
X_i = int_lambda spectrum_i(lambda) * observer_i(lambda) dlambda.
In section 7, the technical report CIE Standard Illuminants for
Colorimetry, 1999, gives a recommendation on how to perform the
computation.
"""
lambda_o, data_o = observer
lambda_s, data_s = spectrum
# form the union of lambdas
lmbda = numpy.sort(numpy.unique(numpy.concatenate([lambda_o, lambda_s])))
# The technical document prescribes that the integration be performed over
# the wavelength range corresponding to the entire visible spectrum, 360 nm
# to 830 nm.
assert lmbda[0] < 361e-9
assert lmbda[-1] > 829e-9
# interpolate data
idata_o = numpy.array([numpy.interp(lmbda, lambda_o, dt) for dt in data_o])
# The technical report specifies the interpolation techniques, too:
# ```
# Use one of the four following methods to calculate needed but unmeasured
# values of phi(l), R(l) or tau(l) within the range of measurements:
# 1) the third-order polynomial interpolation (Lagrange) from the four
# neighbouring data points around the point to be interpolated, or
# 2) cubic spline interpolation formula, or
# 3) a fifth order polynomial interpolation formula from the six
# neighboring data points around the point to be interpolated, or
# 4) a Sprague interpolation (see Seve, 2003).
# ```
# Well, don't do that but simply use linear interpolation now. We only use
# the midpoint rule for integration anyways.
idata_s = numpy.interp(lmbda, lambda_s, data_s)
# step sizes
delta = numpy.zeros(len(lmbda))
diff = lmbda[1:] - lmbda[:-1]
delta[1:] += diff
delta[:-1] += diff
delta /= 2
values = numpy.dot(idata_o, idata_s * delta)
return values * 100
|
[
"def",
"spectrum_to_xyz100",
"(",
"spectrum",
",",
"observer",
")",
":",
"lambda_o",
",",
"data_o",
"=",
"observer",
"lambda_s",
",",
"data_s",
"=",
"spectrum",
"# form the union of lambdas",
"lmbda",
"=",
"numpy",
".",
"sort",
"(",
"numpy",
".",
"unique",
"(",
"numpy",
".",
"concatenate",
"(",
"[",
"lambda_o",
",",
"lambda_s",
"]",
")",
")",
")",
"# The technical document prescribes that the integration be performed over",
"# the wavelength range corresponding to the entire visible spectrum, 360 nm",
"# to 830 nm.",
"assert",
"lmbda",
"[",
"0",
"]",
"<",
"361e-9",
"assert",
"lmbda",
"[",
"-",
"1",
"]",
">",
"829e-9",
"# interpolate data",
"idata_o",
"=",
"numpy",
".",
"array",
"(",
"[",
"numpy",
".",
"interp",
"(",
"lmbda",
",",
"lambda_o",
",",
"dt",
")",
"for",
"dt",
"in",
"data_o",
"]",
")",
"# The technical report specifies the interpolation techniques, too:",
"# ```",
"# Use one of the four following methods to calculate needed but unmeasured",
"# values of phi(l), R(l) or tau(l) within the range of measurements:",
"# 1) the third-order polynomial interpolation (Lagrange) from the four",
"# neighbouring data points around the point to be interpolated, or",
"# 2) cubic spline interpolation formula, or",
"# 3) a fifth order polynomial interpolation formula from the six",
"# neighboring data points around the point to be interpolated, or",
"# 4) a Sprague interpolation (see Seve, 2003).",
"# ```",
"# Well, don't do that but simply use linear interpolation now. We only use",
"# the midpoint rule for integration anyways.",
"idata_s",
"=",
"numpy",
".",
"interp",
"(",
"lmbda",
",",
"lambda_s",
",",
"data_s",
")",
"# step sizes",
"delta",
"=",
"numpy",
".",
"zeros",
"(",
"len",
"(",
"lmbda",
")",
")",
"diff",
"=",
"lmbda",
"[",
"1",
":",
"]",
"-",
"lmbda",
"[",
":",
"-",
"1",
"]",
"delta",
"[",
"1",
":",
"]",
"+=",
"diff",
"delta",
"[",
":",
"-",
"1",
"]",
"+=",
"diff",
"delta",
"/=",
"2",
"values",
"=",
"numpy",
".",
"dot",
"(",
"idata_o",
",",
"idata_s",
"*",
"delta",
")",
"return",
"values",
"*",
"100"
] |
Computes the tristimulus values XYZ from a given spectrum for a given
observer via
X_i = int_lambda spectrum_i(lambda) * observer_i(lambda) dlambda.
In section 7, the technical report CIE Standard Illuminants for
Colorimetry, 1999, gives a recommendation on how to perform the
computation.
|
[
"Computes",
"the",
"tristimulus",
"values",
"XYZ",
"from",
"a",
"given",
"spectrum",
"for",
"a",
"given",
"observer",
"via"
] |
357d6001b3cf30f752e23726bf429dc1d1c60b3a
|
https://github.com/nschloe/colorio/blob/357d6001b3cf30f752e23726bf429dc1d1c60b3a/colorio/illuminants.py#L36-L84
|
train
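The step-size computation from spectrum_to_xyz100 above in isolation: each node receives half the width of its two adjacent intervals, which is exactly the trapezoidal weight vector, so the weights sum to the full integration range:

import numpy

lmbda = numpy.array([0.0, 1.0, 3.0, 4.0])
delta = numpy.zeros(len(lmbda))
diff = numpy.diff(lmbda)
delta[1:] += diff
delta[:-1] += diff
delta /= 2                                   # -> [0.5, 1.5, 1.5, 0.5]
assert numpy.sum(delta) == lmbda[-1] - lmbda[0]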
|
nschloe/colorio
|
colorio/illuminants.py
|
d
|
def d(nominal_temperature):
"""CIE D-series illuminants.
The technical report `Colorimetry, 3rd edition, 2004` gives the data for
D50, D55, and D65 explicitly, but also explains how it's computed for S0,
S1, S2. Values are given at 5nm resolution in the document, but really
every other value is just interpolated. Hence, only provide 10 nm data
here.
"""
# From CIE 15:2004. Colorimetry, 3rd edition, 2004 (page 69, note 5):
#
# The method required to calculate the values for the relative spectral
# power distributions of illuminants D50, D55, D65, and D75, in Table T.1
# is as follows
# 1. Multiply the nominal correlated colour temperature (5000 K, 5500 K,
# 6500 K or 7500 K) by 1,4388/1,4380.
# 2. Calculate XD and YD using the equations given in the text.
# 3. Calculate M1 and M2 using the equations given in the text.
# 4. Round M1 and M2 to three decimal places.
# 5. Calculate S(lambda) every 10 nm by
# S(lambda) = S0(lambda) + M1 S1(lambda) + M2 S2(lambda)
# using values of S0(lambda), S1(lambda) and S2(lambda) from
# Table T.2.
# 6. Interpolate the 10 nm values of S(lambda) linearly to obtain values
# at intermediate wavelengths.
tcp = 1.4388e-2 / 1.4380e-2 * nominal_temperature
if 4000 <= tcp <= 7000:
xd = ((-4.6070e9 / tcp + 2.9678e6) / tcp + 0.09911e3) / tcp + 0.244063
else:
assert 7000 < tcp <= 25000
xd = ((-2.0064e9 / tcp + 1.9018e6) / tcp + 0.24748e3) / tcp + 0.237040
yd = (-3.000 * xd + 2.870) * xd - 0.275
m1 = (-1.3515 - 1.7703 * xd + 5.9114 * yd) / (0.0241 + 0.2562 * xd - 0.7341 * yd)
m2 = (+0.0300 - 31.4424 * xd + 30.0717 * yd) / (0.0241 + 0.2562 * xd - 0.7341 * yd)
m1 = numpy.around(m1, decimals=3)
m2 = numpy.around(m2, decimals=3)
dir_path = os.path.dirname(os.path.realpath(__file__))
with open(os.path.join(dir_path, "data/illuminants/d.yaml")) as f:
data = yaml.safe_load(f)
data = numpy.array(data).T
lmbda = data[0]
s = data[1:]
return lmbda, s[0] + m1 * s[1] + m2 * s[2]
|
python
|
def d(nominal_temperature):
"""CIE D-series illuminants.
The technical report `Colorimetry, 3rd edition, 2004` gives the data for
D50, D55, and D65 explicitly, but also explains how it's computed for S0,
S1, S2. Values are given at 5nm resolution in the document, but really
every other value is just interpolated. Hence, only provide 10 nm data
here.
"""
# From CIE 15:2004. Colorimetry, 3rd edition, 2004 (page 69, note 5):
#
# The method required to calculate the values for the relative spectral
# power distributions of illuminants D50, D55, D65, and D75, in Table T.1
# is as follows
# 1. Multiply the nominal correlated colour temperature (5000 K, 5500 K,
# 6500 K or 7500 K) by 1,4388/1,4380.
# 2. Calculate XD and YD using the equations given in the text.
# 3. Calculate M1 and M2 using the equations given in the text.
# 4. Round M1 and M2 to three decimal places.
# 5. Calculate S(lambda) every 10 nm by
# S(lambda) = S0(lambda) + M1 S1(lambda) + M2 S2(lambda)
# using values of S0(lambda), S1(lambda) and S2(lambda) from
# Table T.2.
# 6. Interpolate the 10 nm values of S(lambda) linearly to obtain values
# at intermediate wavelengths.
tcp = 1.4388e-2 / 1.4380e-2 * nominal_temperature
if 4000 <= tcp <= 7000:
xd = ((-4.6070e9 / tcp + 2.9678e6) / tcp + 0.09911e3) / tcp + 0.244063
else:
assert 7000 < tcp <= 25000
xd = ((-2.0064e9 / tcp + 1.9018e6) / tcp + 0.24748e3) / tcp + 0.237040
yd = (-3.000 * xd + 2.870) * xd - 0.275
m1 = (-1.3515 - 1.7703 * xd + 5.9114 * yd) / (0.0241 + 0.2562 * xd - 0.7341 * yd)
m2 = (+0.0300 - 31.4424 * xd + 30.0717 * yd) / (0.0241 + 0.2562 * xd - 0.7341 * yd)
m1 = numpy.around(m1, decimals=3)
m2 = numpy.around(m2, decimals=3)
dir_path = os.path.dirname(os.path.realpath(__file__))
with open(os.path.join(dir_path, "data/illuminants/d.yaml")) as f:
data = yaml.safe_load(f)
data = numpy.array(data).T
lmbda = data[0]
s = data[1:]
return lmbda, s[0] + m1 * s[1] + m2 * s[2]
|
[
"def",
"d",
"(",
"nominal_temperature",
")",
":",
"# From CIE 15:2004. Colorimetry, 3rd edition, 2004 (page 69, note 5):",
"#",
"# The method required to calculate the values for the relative spectral",
"# power distributions of illuminants D50, D55, D65, and D75, in Table T.1",
"# is as follows",
"# 1. Multiply the nominal correlated colour temperature (5000 K, 5500 K,",
"# 6500 K or 7500 K) by 1,4388/1,4380.",
"# 2. Calculate XD and YD using the equations given in the text.",
"# 3. Calculate M1 and M2 using the equations given in the text.",
"# 4. Round M1 and M2 to three decimal places.",
"# 5. Calculate S(lambda) every 10 nm by",
"# S(lambda) = S0(lambda) + M1 S1(lambda) + M2 S2(lambda)",
"# using values of S0(lambda), S1(lambda) and S2(lambda) from",
"# Table T.2.",
"# 6. Interpolate the 10 nm values of S(lambda) linearly to obtain values",
"# at intermediate wavelengths.",
"tcp",
"=",
"1.4388e-2",
"/",
"1.4380e-2",
"*",
"nominal_temperature",
"if",
"4000",
"<=",
"tcp",
"<=",
"7000",
":",
"xd",
"=",
"(",
"(",
"-",
"4.6070e9",
"/",
"tcp",
"+",
"2.9678e6",
")",
"/",
"tcp",
"+",
"0.09911e3",
")",
"/",
"tcp",
"+",
"0.244063",
"else",
":",
"assert",
"7000",
"<",
"tcp",
"<=",
"25000",
"xd",
"=",
"(",
"(",
"-",
"2.0064e9",
"/",
"tcp",
"+",
"1.9018e6",
")",
"/",
"tcp",
"+",
"0.24748e3",
")",
"/",
"tcp",
"+",
"0.237040",
"yd",
"=",
"(",
"-",
"3.000",
"*",
"xd",
"+",
"2.870",
")",
"*",
"xd",
"-",
"0.275",
"m1",
"=",
"(",
"-",
"1.3515",
"-",
"1.7703",
"*",
"xd",
"+",
"5.9114",
"*",
"yd",
")",
"/",
"(",
"0.0241",
"+",
"0.2562",
"*",
"xd",
"-",
"0.7341",
"*",
"yd",
")",
"m2",
"=",
"(",
"+",
"0.0300",
"-",
"31.4424",
"*",
"xd",
"+",
"30.0717",
"*",
"yd",
")",
"/",
"(",
"0.0241",
"+",
"0.2562",
"*",
"xd",
"-",
"0.7341",
"*",
"yd",
")",
"m1",
"=",
"numpy",
".",
"around",
"(",
"m1",
",",
"decimals",
"=",
"3",
")",
"m2",
"=",
"numpy",
".",
"around",
"(",
"m2",
",",
"decimals",
"=",
"3",
")",
"dir_path",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"os",
".",
"path",
".",
"realpath",
"(",
"__file__",
")",
")",
"with",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"dir_path",
",",
"\"data/illuminants/d.yaml\"",
")",
")",
"as",
"f",
":",
"data",
"=",
"yaml",
".",
"safe_load",
"(",
"f",
")",
"data",
"=",
"numpy",
".",
"array",
"(",
"data",
")",
".",
"T",
"lmbda",
"=",
"data",
"[",
"0",
"]",
"s",
"=",
"data",
"[",
"1",
":",
"]",
"return",
"lmbda",
",",
"s",
"[",
"0",
"]",
"+",
"m1",
"*",
"s",
"[",
"1",
"]",
"+",
"m2",
"*",
"s",
"[",
"2",
"]"
] |
CIE D-series illuminants.
The technical report `Colorimetry, 3rd edition, 2004` gives the data for
D50, D55, and D65 explicitly, but also explains how it's computed for S0,
S1, S2. Values are given at 5nm resolution in the document, but really
every other value is just interpolated. Hence, only provide 10 nm data
here.
|
[
"CIE",
"D",
"-",
"series",
"illuminants",
"."
] |
357d6001b3cf30f752e23726bf429dc1d1c60b3a
|
https://github.com/nschloe/colorio/blob/357d6001b3cf30f752e23726bf429dc1d1c60b3a/colorio/illuminants.py#L137-L186
|
train
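A usage sketch for d above: the nominal temperature selects the D-series illuminant (5000 for D50, 5500 for D55, 6500 for D65, 7500 for D75), and the return value pairs the 10 nm wavelength grid with the relative spectral power distribution S(lambda):

lmbda, s = d(6500)        # CIE D65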
|
nschloe/colorio
|
colorio/illuminants.py
|
e
|
def e():
"""This is a hypothetical reference radiator. All wavelengths in CIE
illuminant E are weighted equally with a relative spectral power of 100.0.
"""
lmbda = 1.0e-9 * numpy.arange(300, 831)
data = numpy.full(lmbda.shape, 100.0)
return lmbda, data
|
python
|
def e():
"""This is a hypothetical reference radiator. All wavelengths in CIE
illuminant E are weighted equally with a relative spectral power of 100.0.
"""
lmbda = 1.0e-9 * numpy.arange(300, 831)
data = numpy.full(lmbda.shape, 100.0)
return lmbda, data
|
[
"def",
"e",
"(",
")",
":",
"lmbda",
"=",
"1.0e-9",
"*",
"numpy",
".",
"arange",
"(",
"300",
",",
"831",
")",
"data",
"=",
"numpy",
".",
"full",
"(",
"lmbda",
".",
"shape",
",",
"100.0",
")",
"return",
"lmbda",
",",
"data"
] |
This is a hypothetical reference radiator. All wavelengths in CIE
illuminant E are weighted equally with a relative spectral power of 100.0.
|
[
"This",
"is",
"a",
"hypothetical",
"reference",
"radiator",
".",
"All",
"wavelengths",
"in",
"CIE",
"illuminant",
"E",
"are",
"weighted",
"equally",
"with",
"a",
"relative",
"spectral",
"power",
"of",
"100",
".",
"0",
"."
] |
357d6001b3cf30f752e23726bf429dc1d1c60b3a
|
https://github.com/nschloe/colorio/blob/357d6001b3cf30f752e23726bf429dc1d1c60b3a/colorio/illuminants.py#L215-L221
|
train
|
nschloe/colorio
|
colorio/linalg.py
|
dot
|
def dot(a, b):
"""Take arrays `a` and `b` and form the dot product between the last axis
of `a` and the first of `b`.
"""
b = numpy.asarray(b)
return numpy.dot(a, b.reshape(b.shape[0], -1)).reshape(a.shape[:-1] + b.shape[1:])
|
python
|
def dot(a, b):
"""Take arrays `a` and `b` and form the dot product between the last axis
of `a` and the first of `b`.
"""
b = numpy.asarray(b)
return numpy.dot(a, b.reshape(b.shape[0], -1)).reshape(a.shape[:-1] + b.shape[1:])
|
[
"def",
"dot",
"(",
"a",
",",
"b",
")",
":",
"b",
"=",
"numpy",
".",
"asarray",
"(",
"b",
")",
"return",
"numpy",
".",
"dot",
"(",
"a",
",",
"b",
".",
"reshape",
"(",
"b",
".",
"shape",
"[",
"0",
"]",
",",
"-",
"1",
")",
")",
".",
"reshape",
"(",
"a",
".",
"shape",
"[",
":",
"-",
"1",
"]",
"+",
"b",
".",
"shape",
"[",
"1",
":",
"]",
")"
] |
Take arrays `a` and `b` and form the dot product between the last axis
of `a` and the first of `b`.
|
[
"Take",
"arrays",
"a",
"and",
"b",
"and",
"form",
"the",
"dot",
"product",
"between",
"the",
"last",
"axis",
"of",
"a",
"and",
"the",
"first",
"of",
"b",
"."
] |
357d6001b3cf30f752e23726bf429dc1d1c60b3a
|
https://github.com/nschloe/colorio/blob/357d6001b3cf30f752e23726bf429dc1d1c60b3a/colorio/linalg.py#L6-L11
|
train
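A shape check for dot above: the last axis of `a` (length 3) is contracted with the first axis of `b`, and all remaining axes survive in order:

import numpy

a = numpy.random.rand(4, 5, 3)
b = numpy.random.rand(3, 2, 7)
assert dot(a, b).shape == (4, 5, 2, 7)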
|
dshean/demcoreg
|
demcoreg/dem_mask.py
|
get_nlcd_mask
|
def get_nlcd_mask(nlcd_ds, filter='not_forest', out_fn=None):
"""Generate raster mask for specified NLCD LULC filter
"""
print("Loading NLCD LULC")
b = nlcd_ds.GetRasterBand(1)
l = b.ReadAsArray()
print("Filtering NLCD LULC with: %s" % filter)
#Original nlcd products have nan as ndv
#12 - ice
#31 - rock
#11 - open water, includes rivers
#52 - shrub, <5 m tall, >20%
#42 - evergreen forest
#Should use data dictionary here for general masking
#Using 'rock+ice+water' preserves the most pixels, although could be problematic over areas with lakes
if filter == 'rock':
mask = (l==31)
elif filter == 'rock+ice':
mask = np.logical_or((l==31),(l==12))
elif filter == 'rock+ice+water':
mask = np.logical_or(np.logical_or((l==31),(l==12)),(l==11))
elif filter == 'not_forest':
mask = ~(np.logical_or(np.logical_or((l==41),(l==42)),(l==43)))
elif filter == 'not_forest+not_water':
mask = ~(np.logical_or(np.logical_or(np.logical_or((l==41),(l==42)),(l==43)),(l==11)))
else:
print("Invalid mask type")
mask = None
#Write out original data
if out_fn is not None:
print("Writing out %s" % out_fn)
iolib.writeGTiff(l, out_fn, nlcd_ds)
l = None
return mask
|
python
|
def get_nlcd_mask(nlcd_ds, filter='not_forest', out_fn=None):
"""Generate raster mask for specified NLCD LULC filter
"""
print("Loading NLCD LULC")
b = nlcd_ds.GetRasterBand(1)
l = b.ReadAsArray()
print("Filtering NLCD LULC with: %s" % filter)
#Original nlcd products have nan as ndv
#12 - ice
#31 - rock
#11 - open water, includes rivers
#52 - shrub, <5 m tall, >20%
#42 - evergreen forest
#Should use data dictionary here for general masking
#Using 'rock+ice+water' preserves the most pixels, although could be problematic over areas with lakes
if filter == 'rock':
mask = (l==31)
elif filter == 'rock+ice':
mask = np.logical_or((l==31),(l==12))
elif filter == 'rock+ice+water':
mask = np.logical_or(np.logical_or((l==31),(l==12)),(l==11))
elif filter == 'not_forest':
mask = ~(np.logical_or(np.logical_or((l==41),(l==42)),(l==43)))
elif filter == 'not_forest+not_water':
mask = ~(np.logical_or(np.logical_or(np.logical_or((l==41),(l==42)),(l==43)),(l==11)))
else:
print("Invalid mask type")
mask = None
#Write out original data
if out_fn is not None:
print("Writing out %s" % out_fn)
iolib.writeGTiff(l, out_fn, nlcd_ds)
l = None
return mask
|
[
"def",
"get_nlcd_mask",
"(",
"nlcd_ds",
",",
"filter",
"=",
"'not_forest'",
",",
"out_fn",
"=",
"None",
")",
":",
"print",
"(",
"\"Loading NLCD LULC\"",
")",
"b",
"=",
"nlcd_ds",
".",
"GetRasterBand",
"(",
"1",
")",
"l",
"=",
"b",
".",
"ReadAsArray",
"(",
")",
"print",
"(",
"\"Filtering NLCD LULC with: %s\"",
"%",
"filter",
")",
"#Original nlcd products have nan as ndv",
"#12 - ice",
"#31 - rock",
"#11 - open water, includes rivers",
"#52 - shrub, <5 m tall, >20%",
"#42 - evergreeen forest",
"#Should use data dictionary here for general masking",
"#Using 'rock+ice+water' preserves the most pixels, although could be problematic over areas with lakes",
"if",
"filter",
"==",
"'rock'",
":",
"mask",
"=",
"(",
"l",
"==",
"31",
")",
"elif",
"filter",
"==",
"'rock+ice'",
":",
"mask",
"=",
"np",
".",
"logical_or",
"(",
"(",
"l",
"==",
"31",
")",
",",
"(",
"l",
"==",
"12",
")",
")",
"elif",
"filter",
"==",
"'rock+ice+water'",
":",
"mask",
"=",
"np",
".",
"logical_or",
"(",
"np",
".",
"logical_or",
"(",
"(",
"l",
"==",
"31",
")",
",",
"(",
"l",
"==",
"12",
")",
")",
",",
"(",
"l",
"==",
"11",
")",
")",
"elif",
"filter",
"==",
"'not_forest'",
":",
"mask",
"=",
"~",
"(",
"np",
".",
"logical_or",
"(",
"np",
".",
"logical_or",
"(",
"(",
"l",
"==",
"41",
")",
",",
"(",
"l",
"==",
"42",
")",
")",
",",
"(",
"l",
"==",
"43",
")",
")",
")",
"elif",
"filter",
"==",
"'not_forest+not_water'",
":",
"mask",
"=",
"~",
"(",
"np",
".",
"logical_or",
"(",
"np",
".",
"logical_or",
"(",
"np",
".",
"logical_or",
"(",
"(",
"l",
"==",
"41",
")",
",",
"(",
"l",
"==",
"42",
")",
")",
",",
"(",
"l",
"==",
"43",
")",
")",
",",
"(",
"l",
"==",
"11",
")",
")",
")",
"else",
":",
"print",
"(",
"\"Invalid mask type\"",
")",
"mask",
"=",
"None",
"#Write out original data",
"if",
"out_fn",
"is",
"not",
"None",
":",
"print",
"(",
"\"Writing out %s\"",
"%",
"out_fn",
")",
"iolib",
".",
"writeGTiff",
"(",
"l",
",",
"out_fn",
",",
"nlcd_ds",
")",
"l",
"=",
"None",
"return",
"mask"
] |
Generate raster mask for specified NLCD LULC filter
|
[
"Generate",
"raster",
"mask",
"for",
"specified",
"NLCD",
"LULC",
"filter"
] |
abd6be75d326b35f52826ee30dff01f9e86b4b52
|
https://github.com/dshean/demcoreg/blob/abd6be75d326b35f52826ee30dff01f9e86b4b52/demcoreg/dem_mask.py#L108-L141
|
train
|
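A minimal usage sketch for the get_nlcd_mask record above; the NLCD filename is hypothetical, and GDAL plus demcoreg are assumed importable:

from osgeo import gdal
from demcoreg import dem_mask

nlcd_ds = gdal.Open('nlcd_2016_conus.tif')  # hypothetical local NLCD raster
# Boolean array: True for rock, ice, or open-water pixels
mask = dem_mask.get_nlcd_mask(nlcd_ds, filter='rock+ice+water')
print("%i pixels pass the filter" % mask.sum())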
dshean/demcoreg
|
demcoreg/dem_mask.py
|
get_bareground_mask
|
def get_bareground_mask(bareground_ds, bareground_thresh=60, out_fn=None):
"""Generate raster mask for exposed bare ground from global bareground data
"""
print("Loading bareground")
b = bareground_ds.GetRasterBand(1)
l = b.ReadAsArray()
print("Masking pixels with <%0.1f%% bare ground" % bareground_thresh)
if bareground_thresh < 0.0 or bareground_thresh > 100.0:
sys.exit("Invalid bare ground percentage")
mask = (l>bareground_thresh)
#Write out original data
if out_fn is not None:
print("Writing out %s" % out_fn)
iolib.writeGTiff(l, out_fn, bareground_ds)
l = None
return mask
|
python
|
def get_bareground_mask(bareground_ds, bareground_thresh=60, out_fn=None):
"""Generate raster mask for exposed bare ground from global bareground data
"""
print("Loading bareground")
b = bareground_ds.GetRasterBand(1)
l = b.ReadAsArray()
print("Masking pixels with <%0.1f%% bare ground" % bareground_thresh)
if bareground_thresh < 0.0 or bareground_thresh > 100.0:
sys.exit("Invalid bare ground percentage")
mask = (l>bareground_thresh)
#Write out original data
if out_fn is not None:
print("Writing out %s" % out_fn)
iolib.writeGTiff(l, out_fn, bareground_ds)
l = None
return mask
|
[
"def",
"get_bareground_mask",
"(",
"bareground_ds",
",",
"bareground_thresh",
"=",
"60",
",",
"out_fn",
"=",
"None",
")",
":",
"print",
"(",
"\"Loading bareground\"",
")",
"b",
"=",
"bareground_ds",
".",
"GetRasterBand",
"(",
"1",
")",
"l",
"=",
"b",
".",
"ReadAsArray",
"(",
")",
"print",
"(",
"\"Masking pixels with <%0.1f%% bare ground\"",
"%",
"bareground_thresh",
")",
"if",
"bareground_thresh",
"<",
"0.0",
"or",
"bareground_thresh",
">",
"100.0",
":",
"sys",
".",
"exit",
"(",
"\"Invalid bare ground percentage\"",
")",
"mask",
"=",
"(",
"l",
">",
"bareground_thresh",
")",
"#Write out original data",
"if",
"out_fn",
"is",
"not",
"None",
":",
"print",
"(",
"\"Writing out %s\"",
"%",
"out_fn",
")",
"iolib",
".",
"writeGTiff",
"(",
"l",
",",
"out_fn",
",",
"bareground_ds",
")",
"l",
"=",
"None",
"return",
"mask"
] |
Generate raster mask for exposed bare ground from global bareground data
|
[
"Generate",
"raster",
"mask",
"for",
"exposed",
"bare",
"ground",
"from",
"global",
"bareground",
"data"
] |
abd6be75d326b35f52826ee30dff01f9e86b4b52
|
https://github.com/dshean/demcoreg/blob/abd6be75d326b35f52826ee30dff01f9e86b4b52/demcoreg/dem_mask.py#L143-L158
|
train
|
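A similar sketch for get_bareground_mask; the bareground filename is hypothetical, and the threshold is the percent bare ground below which pixels are masked:

from osgeo import gdal
from demcoreg import dem_mask

bg_ds = gdal.Open('bare2010.tif')  # hypothetical global bareground raster
# True where more than 80% of the pixel is exposed bare ground
mask = dem_mask.get_bareground_mask(bg_ds, bareground_thresh=80)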
dshean/demcoreg
|
demcoreg/dem_mask.py
|
get_snodas_ds
|
def get_snodas_ds(dem_dt, code=1036):
"""Function to fetch and process SNODAS snow depth products for input datetime
http://nsidc.org/data/docs/noaa/g02158_snodas_snow_cover_model/index.html
Product codes:
1036 is snow depth
1034 is SWE
filename format: us_ssmv11036tS__T0001TTNATS2015042205HP001.Hdr
"""
import tarfile
import gzip
snodas_ds = None
snodas_url_str = None
outdir = os.path.join(datadir, 'snodas')
if not os.path.exists(outdir):
os.makedirs(outdir)
#Note: unmasked products (beyond CONUS) are only available from 2010-present
if dem_dt >= datetime(2003,9,30) and dem_dt < datetime(2010,1,1):
snodas_url_str = 'ftp://sidads.colorado.edu/DATASETS/NOAA/G02158/masked/%Y/%m_%b/SNODAS_%Y%m%d.tar'
tar_subfn_str_fmt = 'us_ssmv1%itS__T0001TTNATS%%Y%%m%%d05HP001.%s.gz'
elif dem_dt >= datetime(2010,1,1):
snodas_url_str = 'ftp://sidads.colorado.edu/DATASETS/NOAA/G02158/unmasked/%Y/%m_%b/SNODAS_unmasked_%Y%m%d.tar'
tar_subfn_str_fmt = './zz_ssmv1%itS__T0001TTNATS%%Y%%m%%d05HP001.%s.gz'
else:
print("No SNODAS data available for input date")
if snodas_url_str is not None:
snodas_url = dem_dt.strftime(snodas_url_str)
snodas_tar_fn = iolib.getfile(snodas_url, outdir=outdir)
print("Unpacking")
tar = tarfile.open(snodas_tar_fn)
#gunzip to extract both dat and Hdr files, tar.gz
for ext in ('dat', 'Hdr'):
tar_subfn_str = tar_subfn_str_fmt % (code, ext)
tar_subfn_gz = dem_dt.strftime(tar_subfn_str)
tar_subfn = os.path.splitext(tar_subfn_gz)[0]
print(tar_subfn)
if outdir is not None:
tar_subfn = os.path.join(outdir, tar_subfn)
if not os.path.exists(tar_subfn):
#Should be able to do this without writing intermediate gz to disk
tar.extract(tar_subfn_gz)
with gzip.open(tar_subfn_gz, 'rb') as f:
outf = open(tar_subfn, 'wb')
outf.write(f.read())
outf.close()
os.remove(tar_subfn_gz)
#Need to delete 'Created by module comment' line from Hdr, can contain too many characters
bad_str = 'Created by module comment'
snodas_fn = tar_subfn
f = open(snodas_fn)
output = []
for line in f:
if not bad_str in line:
output.append(line)
f.close()
f = open(snodas_fn, 'w')
f.writelines(output)
f.close()
#Return GDAL dataset for extracted product
snodas_ds = gdal.Open(snodas_fn)
return snodas_ds
|
python
|
def get_snodas_ds(dem_dt, code=1036):
"""Function to fetch and process SNODAS snow depth products for input datetime
http://nsidc.org/data/docs/noaa/g02158_snodas_snow_cover_model/index.html
Product codes:
1036 is snow depth
1034 is SWE
filename format: us_ssmv11036tS__T0001TTNATS2015042205HP001.Hdr
"""
import tarfile
import gzip
snodas_ds = None
snodas_url_str = None
outdir = os.path.join(datadir, 'snodas')
if not os.path.exists(outdir):
os.makedirs(outdir)
#Note: unmasked products (beyond CONUS) are only available from 2010-present
if dem_dt >= datetime(2003,9,30) and dem_dt < datetime(2010,1,1):
snodas_url_str = 'ftp://sidads.colorado.edu/DATASETS/NOAA/G02158/masked/%Y/%m_%b/SNODAS_%Y%m%d.tar'
tar_subfn_str_fmt = 'us_ssmv1%itS__T0001TTNATS%%Y%%m%%d05HP001.%s.gz'
elif dem_dt >= datetime(2010,1,1):
snodas_url_str = 'ftp://sidads.colorado.edu/DATASETS/NOAA/G02158/unmasked/%Y/%m_%b/SNODAS_unmasked_%Y%m%d.tar'
tar_subfn_str_fmt = './zz_ssmv1%itS__T0001TTNATS%%Y%%m%%d05HP001.%s.gz'
else:
print("No SNODAS data available for input date")
if snodas_url_str is not None:
snodas_url = dem_dt.strftime(snodas_url_str)
snodas_tar_fn = iolib.getfile(snodas_url, outdir=outdir)
print("Unpacking")
tar = tarfile.open(snodas_tar_fn)
#gunzip to extract both dat and Hdr files, tar.gz
for ext in ('dat', 'Hdr'):
tar_subfn_str = tar_subfn_str_fmt % (code, ext)
tar_subfn_gz = dem_dt.strftime(tar_subfn_str)
tar_subfn = os.path.splitext(tar_subfn_gz)[0]
print(tar_subfn)
if outdir is not None:
tar_subfn = os.path.join(outdir, tar_subfn)
if not os.path.exists(tar_subfn):
#Should be able to do this without writing intermediate gz to disk
tar.extract(tar_subfn_gz)
with gzip.open(tar_subfn_gz, 'rb') as f:
outf = open(tar_subfn, 'wb')
outf.write(f.read())
outf.close()
os.remove(tar_subfn_gz)
#Need to delete 'Created by module comment' line from Hdr, can contain too many characters
bad_str = 'Created by module comment'
snodas_fn = tar_subfn
f = open(snodas_fn)
output = []
for line in f:
if not bad_str in line:
output.append(line)
f.close()
f = open(snodas_fn, 'w')
f.writelines(output)
f.close()
#Return GDAL dataset for extracted product
snodas_ds = gdal.Open(snodas_fn)
return snodas_ds
|
[
"def",
"get_snodas_ds",
"(",
"dem_dt",
",",
"code",
"=",
"1036",
")",
":",
"import",
"tarfile",
"import",
"gzip",
"snodas_ds",
"=",
"None",
"snodas_url_str",
"=",
"None",
"outdir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"datadir",
",",
"'snodas'",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"outdir",
")",
":",
"os",
".",
"makedirs",
"(",
"outdir",
")",
"#Note: unmasked products (beyond CONUS) are only available from 2010-present",
"if",
"dem_dt",
">=",
"datetime",
"(",
"2003",
",",
"9",
",",
"30",
")",
"and",
"dem_dt",
"<",
"datetime",
"(",
"2010",
",",
"1",
",",
"1",
")",
":",
"snodas_url_str",
"=",
"'ftp://sidads.colorado.edu/DATASETS/NOAA/G02158/masked/%Y/%m_%b/SNODAS_%Y%m%d.tar'",
"tar_subfn_str_fmt",
"=",
"'us_ssmv1%itS__T0001TTNATS%%Y%%m%%d05HP001.%s.gz'",
"elif",
"dem_dt",
">=",
"datetime",
"(",
"2010",
",",
"1",
",",
"1",
")",
":",
"snodas_url_str",
"=",
"'ftp://sidads.colorado.edu/DATASETS/NOAA/G02158/unmasked/%Y/%m_%b/SNODAS_unmasked_%Y%m%d.tar'",
"tar_subfn_str_fmt",
"=",
"'./zz_ssmv1%itS__T0001TTNATS%%Y%%m%%d05HP001.%s.gz'",
"else",
":",
"print",
"(",
"\"No SNODAS data available for input date\"",
")",
"if",
"snodas_url_str",
"is",
"not",
"None",
":",
"snodas_url",
"=",
"dem_dt",
".",
"strftime",
"(",
"snodas_url_str",
")",
"snodas_tar_fn",
"=",
"iolib",
".",
"getfile",
"(",
"snodas_url",
",",
"outdir",
"=",
"outdir",
")",
"print",
"(",
"\"Unpacking\"",
")",
"tar",
"=",
"tarfile",
".",
"open",
"(",
"snodas_tar_fn",
")",
"#gunzip to extract both dat and Hdr files, tar.gz",
"for",
"ext",
"in",
"(",
"'dat'",
",",
"'Hdr'",
")",
":",
"tar_subfn_str",
"=",
"tar_subfn_str_fmt",
"%",
"(",
"code",
",",
"ext",
")",
"tar_subfn_gz",
"=",
"dem_dt",
".",
"strftime",
"(",
"tar_subfn_str",
")",
"tar_subfn",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"tar_subfn_gz",
")",
"[",
"0",
"]",
"print",
"(",
"tar_subfn",
")",
"if",
"outdir",
"is",
"not",
"None",
":",
"tar_subfn",
"=",
"os",
".",
"path",
".",
"join",
"(",
"outdir",
",",
"tar_subfn",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"tar_subfn",
")",
":",
"#Should be able to do this without writing intermediate gz to disk",
"tar",
".",
"extract",
"(",
"tar_subfn_gz",
")",
"with",
"gzip",
".",
"open",
"(",
"tar_subfn_gz",
",",
"'rb'",
")",
"as",
"f",
":",
"outf",
"=",
"open",
"(",
"tar_subfn",
",",
"'wb'",
")",
"outf",
".",
"write",
"(",
"f",
".",
"read",
"(",
")",
")",
"outf",
".",
"close",
"(",
")",
"os",
".",
"remove",
"(",
"tar_subfn_gz",
")",
"#Need to delete 'Created by module comment' line from Hdr, can contain too many characters",
"bad_str",
"=",
"'Created by module comment'",
"snodas_fn",
"=",
"tar_subfn",
"f",
"=",
"open",
"(",
"snodas_fn",
")",
"output",
"=",
"[",
"]",
"for",
"line",
"in",
"f",
":",
"if",
"not",
"bad_str",
"in",
"line",
":",
"output",
".",
"append",
"(",
"line",
")",
"f",
".",
"close",
"(",
")",
"f",
"=",
"open",
"(",
"snodas_fn",
",",
"'w'",
")",
"f",
".",
"writelines",
"(",
"output",
")",
"f",
".",
"close",
"(",
")",
"#Return GDAL dataset for extracted product",
"snodas_ds",
"=",
"gdal",
".",
"Open",
"(",
"snodas_fn",
")",
"return",
"snodas_ds"
] |
Function to fetch and process SNODAS snow depth products for input datetime
http://nsidc.org/data/docs/noaa/g02158_snodas_snow_cover_model/index.html
Product codes:
1036 is snow depth
1034 is SWE
filename format: us_ssmv11036tS__T0001TTNATS2015042205HP001.Hdr
|
[
"Function",
"to",
"fetch",
"and",
"process",
"SNODAS",
"snow",
"depth",
"products",
"for",
"input",
"datetime"
] |
abd6be75d326b35f52826ee30dff01f9e86b4b52
|
https://github.com/dshean/demcoreg/blob/abd6be75d326b35f52826ee30dff01f9e86b4b52/demcoreg/dem_mask.py#L160-L228
|
train
|
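A short sketch for get_snodas_ds; the date is arbitrary, and the function is expected to download and unpack the matching SNODAS tarball on first use:

from datetime import datetime
from demcoreg import dem_mask

dem_dt = datetime(2015, 4, 22)
snodas_ds = dem_mask.get_snodas_ds(dem_dt, code=1036)  # 1036 = snow depth
if snodas_ds is not None:
    print(snodas_ds.RasterXSize, snodas_ds.RasterYSize)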
dshean/demcoreg
|
demcoreg/dem_mask.py
|
get_modis_tile_list
|
def get_modis_tile_list(ds):
"""Helper function to identify MODIS tiles that intersect input geometry
    modis_grid.py contains a dictionary of tile boundaries (tile name and WKT polygon ring from bbox)
See: https://modis-land.gsfc.nasa.gov/MODLAND_grid.html
"""
from demcoreg import modis_grid
modis_dict = {}
for key in modis_grid.modis_dict:
modis_dict[key] = ogr.CreateGeometryFromWkt(modis_grid.modis_dict[key])
geom = geolib.ds_geom(ds)
geom_dup = geolib.geom_dup(geom)
ct = osr.CoordinateTransformation(geom_dup.GetSpatialReference(), geolib.wgs_srs)
geom_dup.Transform(ct)
tile_list = []
for key, val in list(modis_dict.items()):
if geom_dup.Intersects(val):
tile_list.append(key)
return tile_list
|
python
|
def get_modis_tile_list(ds):
"""Helper function to identify MODIS tiles that intersect input geometry
    modis_grid.py contains a dictionary of tile boundaries (tile name and WKT polygon ring from bbox)
See: https://modis-land.gsfc.nasa.gov/MODLAND_grid.html
"""
from demcoreg import modis_grid
modis_dict = {}
for key in modis_grid.modis_dict:
modis_dict[key] = ogr.CreateGeometryFromWkt(modis_grid.modis_dict[key])
geom = geolib.ds_geom(ds)
geom_dup = geolib.geom_dup(geom)
ct = osr.CoordinateTransformation(geom_dup.GetSpatialReference(), geolib.wgs_srs)
geom_dup.Transform(ct)
tile_list = []
for key, val in list(modis_dict.items()):
if geom_dup.Intersects(val):
tile_list.append(key)
return tile_list
|
[
"def",
"get_modis_tile_list",
"(",
"ds",
")",
":",
"from",
"demcoreg",
"import",
"modis_grid",
"modis_dict",
"=",
"{",
"}",
"for",
"key",
"in",
"modis_grid",
".",
"modis_dict",
":",
"modis_dict",
"[",
"key",
"]",
"=",
"ogr",
".",
"CreateGeometryFromWkt",
"(",
"modis_grid",
".",
"modis_dict",
"[",
"key",
"]",
")",
"geom",
"=",
"geolib",
".",
"ds_geom",
"(",
"ds",
")",
"geom_dup",
"=",
"geolib",
".",
"geom_dup",
"(",
"geom",
")",
"ct",
"=",
"osr",
".",
"CoordinateTransformation",
"(",
"geom_dup",
".",
"GetSpatialReference",
"(",
")",
",",
"geolib",
".",
"wgs_srs",
")",
"geom_dup",
".",
"Transform",
"(",
"ct",
")",
"tile_list",
"=",
"[",
"]",
"for",
"key",
",",
"val",
"in",
"list",
"(",
"modis_dict",
".",
"items",
"(",
")",
")",
":",
"if",
"geom_dup",
".",
"Intersects",
"(",
"val",
")",
":",
"tile_list",
".",
"append",
"(",
"key",
")",
"return",
"tile_list"
] |
Helper function to identify MODIS tiles that intersect input geometry
modis_grid.py contains a dictionary of tile boundaries (tile name and WKT polygon ring from bbox)
See: https://modis-land.gsfc.nasa.gov/MODLAND_grid.html
|
[
"Helper",
"function",
"to",
"identify",
"MODIS",
"tiles",
"that",
"intersect",
"input",
"geometry"
] |
abd6be75d326b35f52826ee30dff01f9e86b4b52
|
https://github.com/dshean/demcoreg/blob/abd6be75d326b35f52826ee30dff01f9e86b4b52/demcoreg/dem_mask.py#L230-L249
|
train
|
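A minimal sketch for get_modis_tile_list; the DEM filename is hypothetical and can be any raster with a defined projection:

from osgeo import gdal
from demcoreg import dem_mask

dem_ds = gdal.Open('my_dem.tif')  # hypothetical projected DEM
tile_list = dem_mask.get_modis_tile_list(dem_ds)
print(tile_list)  # e.g. ['h09v04', 'h10v04']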
dshean/demcoreg
|
demcoreg/dem_mask.py
|
get_modscag_fn_list
|
def get_modscag_fn_list(dem_dt, tile_list=('h08v04', 'h09v04', 'h10v04', 'h08v05', 'h09v05'), pad_days=7):
"""Function to fetch and process MODSCAG fractional snow cover products for input datetime
Products are tiled in MODIS sinusoidal projection
example url: https://snow-data.jpl.nasa.gov/modscag-historic/2015/001/MOD09GA.A2015001.h07v03.005.2015006001833.snow_fraction.tif
"""
#Could also use global MODIS 500 m snowcover grids, 8 day
#http://nsidc.org/data/docs/daac/modis_v5/mod10a2_modis_terra_snow_8-day_global_500m_grid.gd.html
#These are HDF4, sinusoidal
#Should be able to load up with warplib without issue
import re
import requests
from bs4 import BeautifulSoup
auth = iolib.get_auth()
pad_days = timedelta(days=pad_days)
dt_list = timelib.dt_range(dem_dt-pad_days, dem_dt+pad_days+timedelta(1), timedelta(1))
outdir = os.path.join(datadir, 'modscag')
if not os.path.exists(outdir):
os.makedirs(outdir)
out_vrt_fn_list = []
for dt in dt_list:
out_vrt_fn = os.path.join(outdir, dt.strftime('%Y%m%d_snow_fraction.vrt'))
#If we already have a vrt and it contains all of the necessary tiles
if os.path.exists(out_vrt_fn):
vrt_ds = gdal.Open(out_vrt_fn)
if np.all([np.any([tile in sub_fn for sub_fn in vrt_ds.GetFileList()]) for tile in tile_list]):
out_vrt_fn_list.append(out_vrt_fn)
continue
#Otherwise, download missing tiles and rebuild
#Try to use historic products
modscag_fn_list = []
#Note: not all tiles are available for same date ranges in historic vs. real-time
#Need to repeat search tile-by-tile
for tile in tile_list:
modscag_url_str = 'https://snow-data.jpl.nasa.gov/modscag-historic/%Y/%j/'
modscag_url_base = dt.strftime(modscag_url_str)
print("Trying: %s" % modscag_url_base)
r = requests.get(modscag_url_base, auth=auth)
modscag_url_fn = []
if r.ok:
parsed_html = BeautifulSoup(r.content, "html.parser")
modscag_url_fn = parsed_html.findAll(text=re.compile('%s.*snow_fraction.tif' % tile))
if not modscag_url_fn:
#Couldn't find historic, try to use real-time products
modscag_url_str = 'https://snow-data.jpl.nasa.gov/modscag/%Y/%j/'
modscag_url_base = dt.strftime(modscag_url_str)
print("Trying: %s" % modscag_url_base)
r = requests.get(modscag_url_base, auth=auth)
if r.ok:
parsed_html = BeautifulSoup(r.content, "html.parser")
modscag_url_fn = parsed_html.findAll(text=re.compile('%s.*snow_fraction.tif' % tile))
if not modscag_url_fn:
print("Unable to fetch MODSCAG for %s" % dt)
else:
                #OK, we got a valid response
#Now extract actual tif filenames to fetch from html
parsed_html = BeautifulSoup(r.content, "html.parser")
#Fetch all tiles
modscag_url_fn = parsed_html.findAll(text=re.compile('%s.*snow_fraction.tif' % tile))
if modscag_url_fn:
modscag_url_fn = modscag_url_fn[0]
modscag_url = os.path.join(modscag_url_base, modscag_url_fn)
print(modscag_url)
modscag_fn = os.path.join(outdir, os.path.split(modscag_url_fn)[-1])
if not os.path.exists(modscag_fn):
iolib.getfile2(modscag_url, auth=auth, outdir=outdir)
modscag_fn_list.append(modscag_fn)
#Mosaic tiles - currently a hack
if modscag_fn_list:
cmd = ['gdalbuildvrt', '-vrtnodata', '255', out_vrt_fn]
cmd.extend(modscag_fn_list)
print(cmd)
subprocess.call(cmd, shell=False)
out_vrt_fn_list.append(out_vrt_fn)
return out_vrt_fn_list
|
python
|
def get_modscag_fn_list(dem_dt, tile_list=('h08v04', 'h09v04', 'h10v04', 'h08v05', 'h09v05'), pad_days=7):
"""Function to fetch and process MODSCAG fractional snow cover products for input datetime
Products are tiled in MODIS sinusoidal projection
example url: https://snow-data.jpl.nasa.gov/modscag-historic/2015/001/MOD09GA.A2015001.h07v03.005.2015006001833.snow_fraction.tif
"""
#Could also use global MODIS 500 m snowcover grids, 8 day
#http://nsidc.org/data/docs/daac/modis_v5/mod10a2_modis_terra_snow_8-day_global_500m_grid.gd.html
#These are HDF4, sinusoidal
#Should be able to load up with warplib without issue
import re
import requests
from bs4 import BeautifulSoup
auth = iolib.get_auth()
pad_days = timedelta(days=pad_days)
dt_list = timelib.dt_range(dem_dt-pad_days, dem_dt+pad_days+timedelta(1), timedelta(1))
outdir = os.path.join(datadir, 'modscag')
if not os.path.exists(outdir):
os.makedirs(outdir)
out_vrt_fn_list = []
for dt in dt_list:
out_vrt_fn = os.path.join(outdir, dt.strftime('%Y%m%d_snow_fraction.vrt'))
#If we already have a vrt and it contains all of the necessary tiles
if os.path.exists(out_vrt_fn):
vrt_ds = gdal.Open(out_vrt_fn)
if np.all([np.any([tile in sub_fn for sub_fn in vrt_ds.GetFileList()]) for tile in tile_list]):
out_vrt_fn_list.append(out_vrt_fn)
continue
#Otherwise, download missing tiles and rebuild
#Try to use historic products
modscag_fn_list = []
#Note: not all tiles are available for same date ranges in historic vs. real-time
#Need to repeat search tile-by-tile
for tile in tile_list:
modscag_url_str = 'https://snow-data.jpl.nasa.gov/modscag-historic/%Y/%j/'
modscag_url_base = dt.strftime(modscag_url_str)
print("Trying: %s" % modscag_url_base)
r = requests.get(modscag_url_base, auth=auth)
modscag_url_fn = []
if r.ok:
parsed_html = BeautifulSoup(r.content, "html.parser")
modscag_url_fn = parsed_html.findAll(text=re.compile('%s.*snow_fraction.tif' % tile))
if not modscag_url_fn:
#Couldn't find historic, try to use real-time products
modscag_url_str = 'https://snow-data.jpl.nasa.gov/modscag/%Y/%j/'
modscag_url_base = dt.strftime(modscag_url_str)
print("Trying: %s" % modscag_url_base)
r = requests.get(modscag_url_base, auth=auth)
if r.ok:
parsed_html = BeautifulSoup(r.content, "html.parser")
modscag_url_fn = parsed_html.findAll(text=re.compile('%s.*snow_fraction.tif' % tile))
if not modscag_url_fn:
print("Unable to fetch MODSCAG for %s" % dt)
else:
                #OK, we got a valid response
#Now extract actual tif filenames to fetch from html
parsed_html = BeautifulSoup(r.content, "html.parser")
#Fetch all tiles
modscag_url_fn = parsed_html.findAll(text=re.compile('%s.*snow_fraction.tif' % tile))
if modscag_url_fn:
modscag_url_fn = modscag_url_fn[0]
modscag_url = os.path.join(modscag_url_base, modscag_url_fn)
print(modscag_url)
modscag_fn = os.path.join(outdir, os.path.split(modscag_url_fn)[-1])
if not os.path.exists(modscag_fn):
iolib.getfile2(modscag_url, auth=auth, outdir=outdir)
modscag_fn_list.append(modscag_fn)
#Mosaic tiles - currently a hack
if modscag_fn_list:
cmd = ['gdalbuildvrt', '-vrtnodata', '255', out_vrt_fn]
cmd.extend(modscag_fn_list)
print(cmd)
subprocess.call(cmd, shell=False)
out_vrt_fn_list.append(out_vrt_fn)
return out_vrt_fn_list
|
[
"def",
"get_modscag_fn_list",
"(",
"dem_dt",
",",
"tile_list",
"=",
"(",
"'h08v04'",
",",
"'h09v04'",
",",
"'h10v04'",
",",
"'h08v05'",
",",
"'h09v05'",
")",
",",
"pad_days",
"=",
"7",
")",
":",
"#Could also use global MODIS 500 m snowcover grids, 8 day",
"#http://nsidc.org/data/docs/daac/modis_v5/mod10a2_modis_terra_snow_8-day_global_500m_grid.gd.html",
"#These are HDF4, sinusoidal",
"#Should be able to load up with warplib without issue",
"import",
"re",
"import",
"requests",
"from",
"bs4",
"import",
"BeautifulSoup",
"auth",
"=",
"iolib",
".",
"get_auth",
"(",
")",
"pad_days",
"=",
"timedelta",
"(",
"days",
"=",
"pad_days",
")",
"dt_list",
"=",
"timelib",
".",
"dt_range",
"(",
"dem_dt",
"-",
"pad_days",
",",
"dem_dt",
"+",
"pad_days",
"+",
"timedelta",
"(",
"1",
")",
",",
"timedelta",
"(",
"1",
")",
")",
"outdir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"datadir",
",",
"'modscag'",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"outdir",
")",
":",
"os",
".",
"makedirs",
"(",
"outdir",
")",
"out_vrt_fn_list",
"=",
"[",
"]",
"for",
"dt",
"in",
"dt_list",
":",
"out_vrt_fn",
"=",
"os",
".",
"path",
".",
"join",
"(",
"outdir",
",",
"dt",
".",
"strftime",
"(",
"'%Y%m%d_snow_fraction.vrt'",
")",
")",
"#If we already have a vrt and it contains all of the necessary tiles",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"out_vrt_fn",
")",
":",
"vrt_ds",
"=",
"gdal",
".",
"Open",
"(",
"out_vrt_fn",
")",
"if",
"np",
".",
"all",
"(",
"[",
"np",
".",
"any",
"(",
"[",
"tile",
"in",
"sub_fn",
"for",
"sub_fn",
"in",
"vrt_ds",
".",
"GetFileList",
"(",
")",
"]",
")",
"for",
"tile",
"in",
"tile_list",
"]",
")",
":",
"out_vrt_fn_list",
".",
"append",
"(",
"out_vrt_fn",
")",
"continue",
"#Otherwise, download missing tiles and rebuild",
"#Try to use historic products",
"modscag_fn_list",
"=",
"[",
"]",
"#Note: not all tiles are available for same date ranges in historic vs. real-time",
"#Need to repeat search tile-by-tile",
"for",
"tile",
"in",
"tile_list",
":",
"modscag_url_str",
"=",
"'https://snow-data.jpl.nasa.gov/modscag-historic/%Y/%j/'",
"modscag_url_base",
"=",
"dt",
".",
"strftime",
"(",
"modscag_url_str",
")",
"print",
"(",
"\"Trying: %s\"",
"%",
"modscag_url_base",
")",
"r",
"=",
"requests",
".",
"get",
"(",
"modscag_url_base",
",",
"auth",
"=",
"auth",
")",
"modscag_url_fn",
"=",
"[",
"]",
"if",
"r",
".",
"ok",
":",
"parsed_html",
"=",
"BeautifulSoup",
"(",
"r",
".",
"content",
",",
"\"html.parser\"",
")",
"modscag_url_fn",
"=",
"parsed_html",
".",
"findAll",
"(",
"text",
"=",
"re",
".",
"compile",
"(",
"'%s.*snow_fraction.tif'",
"%",
"tile",
")",
")",
"if",
"not",
"modscag_url_fn",
":",
"#Couldn't find historic, try to use real-time products",
"modscag_url_str",
"=",
"'https://snow-data.jpl.nasa.gov/modscag/%Y/%j/'",
"modscag_url_base",
"=",
"dt",
".",
"strftime",
"(",
"modscag_url_str",
")",
"print",
"(",
"\"Trying: %s\"",
"%",
"modscag_url_base",
")",
"r",
"=",
"requests",
".",
"get",
"(",
"modscag_url_base",
",",
"auth",
"=",
"auth",
")",
"if",
"r",
".",
"ok",
":",
"parsed_html",
"=",
"BeautifulSoup",
"(",
"r",
".",
"content",
",",
"\"html.parser\"",
")",
"modscag_url_fn",
"=",
"parsed_html",
".",
"findAll",
"(",
"text",
"=",
"re",
".",
"compile",
"(",
"'%s.*snow_fraction.tif'",
"%",
"tile",
")",
")",
"if",
"not",
"modscag_url_fn",
":",
"print",
"(",
"\"Unable to fetch MODSCAG for %s\"",
"%",
"dt",
")",
"else",
":",
"#OK, we got",
"#Now extract actual tif filenames to fetch from html",
"parsed_html",
"=",
"BeautifulSoup",
"(",
"r",
".",
"content",
",",
"\"html.parser\"",
")",
"#Fetch all tiles",
"modscag_url_fn",
"=",
"parsed_html",
".",
"findAll",
"(",
"text",
"=",
"re",
".",
"compile",
"(",
"'%s.*snow_fraction.tif'",
"%",
"tile",
")",
")",
"if",
"modscag_url_fn",
":",
"modscag_url_fn",
"=",
"modscag_url_fn",
"[",
"0",
"]",
"modscag_url",
"=",
"os",
".",
"path",
".",
"join",
"(",
"modscag_url_base",
",",
"modscag_url_fn",
")",
"print",
"(",
"modscag_url",
")",
"modscag_fn",
"=",
"os",
".",
"path",
".",
"join",
"(",
"outdir",
",",
"os",
".",
"path",
".",
"split",
"(",
"modscag_url_fn",
")",
"[",
"-",
"1",
"]",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"modscag_fn",
")",
":",
"iolib",
".",
"getfile2",
"(",
"modscag_url",
",",
"auth",
"=",
"auth",
",",
"outdir",
"=",
"outdir",
")",
"modscag_fn_list",
".",
"append",
"(",
"modscag_fn",
")",
"#Mosaic tiles - currently a hack",
"if",
"modscag_fn_list",
":",
"cmd",
"=",
"[",
"'gdalbuildvrt'",
",",
"'-vrtnodata'",
",",
"'255'",
",",
"out_vrt_fn",
"]",
"cmd",
".",
"extend",
"(",
"modscag_fn_list",
")",
"print",
"(",
"cmd",
")",
"subprocess",
".",
"call",
"(",
"cmd",
",",
"shell",
"=",
"False",
")",
"out_vrt_fn_list",
".",
"append",
"(",
"out_vrt_fn",
")",
"return",
"out_vrt_fn_list"
] |
Function to fetch and process MODSCAG fractional snow cover products for input datetime
Products are tiled in MODIS sinusoidal projection
example url: https://snow-data.jpl.nasa.gov/modscag-historic/2015/001/MOD09GA.A2015001.h07v03.005.2015006001833.snow_fraction.tif
|
[
"Function",
"to",
"fetch",
"and",
"process",
"MODSCAG",
"fractional",
"snow",
"cover",
"products",
"for",
"input",
"datetime"
] |
abd6be75d326b35f52826ee30dff01f9e86b4b52
|
https://github.com/dshean/demcoreg/blob/abd6be75d326b35f52826ee30dff01f9e86b4b52/demcoreg/dem_mask.py#L251-L331
|
train
|
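A sketch for get_modscag_fn_list; it assumes iolib.get_auth() can supply credentials for the JPL snow-data server, and the date is arbitrary:

from datetime import datetime
from demcoreg import dem_mask

dem_dt = datetime(2015, 4, 22)
# One snow_fraction VRT mosaic per day in the +/- 7 day window
vrt_fn_list = dem_mask.get_modscag_fn_list(dem_dt, pad_days=7)
print("%i daily mosaics" % len(vrt_fn_list))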
dshean/demcoreg
|
demcoreg/dem_mask.py
|
proc_modscag
|
def proc_modscag(fn_list, extent=None, t_srs=None):
"""Process the MODSCAG products for full date range, create composites and reproject
"""
    #Use cubic spline here for improved upsampling
ds_list = warplib.memwarp_multi_fn(fn_list, res='min', extent=extent, t_srs=t_srs, r='cubicspline')
stack_fn = os.path.splitext(fn_list[0])[0] + '_' + os.path.splitext(os.path.split(fn_list[-1])[1])[0] + '_stack_%i' % len(fn_list)
#Create stack here - no need for most of mastack machinery, just make 3D array
#Mask values greater than 100% (clouds, bad pixels, etc)
ma_stack = np.ma.array([np.ma.masked_greater(iolib.ds_getma(ds), 100) for ds in np.array(ds_list)], dtype=np.uint8)
stack_count = np.ma.masked_equal(ma_stack.count(axis=0), 0).astype(np.uint8)
stack_count.set_fill_value(0)
stack_min = ma_stack.min(axis=0).astype(np.uint8)
stack_min.set_fill_value(0)
stack_max = ma_stack.max(axis=0).astype(np.uint8)
stack_max.set_fill_value(0)
stack_med = np.ma.median(ma_stack, axis=0).astype(np.uint8)
stack_med.set_fill_value(0)
out_fn = stack_fn + '_count.tif'
iolib.writeGTiff(stack_count, out_fn, ds_list[0])
out_fn = stack_fn + '_max.tif'
iolib.writeGTiff(stack_max, out_fn, ds_list[0])
out_fn = stack_fn + '_min.tif'
iolib.writeGTiff(stack_min, out_fn, ds_list[0])
out_fn = stack_fn + '_med.tif'
iolib.writeGTiff(stack_med, out_fn, ds_list[0])
ds = gdal.Open(out_fn)
return ds
|
python
|
def proc_modscag(fn_list, extent=None, t_srs=None):
"""Process the MODSCAG products for full date range, create composites and reproject
"""
    #Use cubic spline here for improved upsampling
ds_list = warplib.memwarp_multi_fn(fn_list, res='min', extent=extent, t_srs=t_srs, r='cubicspline')
stack_fn = os.path.splitext(fn_list[0])[0] + '_' + os.path.splitext(os.path.split(fn_list[-1])[1])[0] + '_stack_%i' % len(fn_list)
#Create stack here - no need for most of mastack machinery, just make 3D array
#Mask values greater than 100% (clouds, bad pixels, etc)
ma_stack = np.ma.array([np.ma.masked_greater(iolib.ds_getma(ds), 100) for ds in np.array(ds_list)], dtype=np.uint8)
stack_count = np.ma.masked_equal(ma_stack.count(axis=0), 0).astype(np.uint8)
stack_count.set_fill_value(0)
stack_min = ma_stack.min(axis=0).astype(np.uint8)
stack_min.set_fill_value(0)
stack_max = ma_stack.max(axis=0).astype(np.uint8)
stack_max.set_fill_value(0)
stack_med = np.ma.median(ma_stack, axis=0).astype(np.uint8)
stack_med.set_fill_value(0)
out_fn = stack_fn + '_count.tif'
iolib.writeGTiff(stack_count, out_fn, ds_list[0])
out_fn = stack_fn + '_max.tif'
iolib.writeGTiff(stack_max, out_fn, ds_list[0])
out_fn = stack_fn + '_min.tif'
iolib.writeGTiff(stack_min, out_fn, ds_list[0])
out_fn = stack_fn + '_med.tif'
iolib.writeGTiff(stack_med, out_fn, ds_list[0])
ds = gdal.Open(out_fn)
return ds
|
[
"def",
"proc_modscag",
"(",
"fn_list",
",",
"extent",
"=",
"None",
",",
"t_srs",
"=",
"None",
")",
":",
"#Use cubic spline here for improve upsampling ",
"ds_list",
"=",
"warplib",
".",
"memwarp_multi_fn",
"(",
"fn_list",
",",
"res",
"=",
"'min'",
",",
"extent",
"=",
"extent",
",",
"t_srs",
"=",
"t_srs",
",",
"r",
"=",
"'cubicspline'",
")",
"stack_fn",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"fn_list",
"[",
"0",
"]",
")",
"[",
"0",
"]",
"+",
"'_'",
"+",
"os",
".",
"path",
".",
"splitext",
"(",
"os",
".",
"path",
".",
"split",
"(",
"fn_list",
"[",
"-",
"1",
"]",
")",
"[",
"1",
"]",
")",
"[",
"0",
"]",
"+",
"'_stack_%i'",
"%",
"len",
"(",
"fn_list",
")",
"#Create stack here - no need for most of mastack machinery, just make 3D array",
"#Mask values greater than 100% (clouds, bad pixels, etc)",
"ma_stack",
"=",
"np",
".",
"ma",
".",
"array",
"(",
"[",
"np",
".",
"ma",
".",
"masked_greater",
"(",
"iolib",
".",
"ds_getma",
"(",
"ds",
")",
",",
"100",
")",
"for",
"ds",
"in",
"np",
".",
"array",
"(",
"ds_list",
")",
"]",
",",
"dtype",
"=",
"np",
".",
"uint8",
")",
"stack_count",
"=",
"np",
".",
"ma",
".",
"masked_equal",
"(",
"ma_stack",
".",
"count",
"(",
"axis",
"=",
"0",
")",
",",
"0",
")",
".",
"astype",
"(",
"np",
".",
"uint8",
")",
"stack_count",
".",
"set_fill_value",
"(",
"0",
")",
"stack_min",
"=",
"ma_stack",
".",
"min",
"(",
"axis",
"=",
"0",
")",
".",
"astype",
"(",
"np",
".",
"uint8",
")",
"stack_min",
".",
"set_fill_value",
"(",
"0",
")",
"stack_max",
"=",
"ma_stack",
".",
"max",
"(",
"axis",
"=",
"0",
")",
".",
"astype",
"(",
"np",
".",
"uint8",
")",
"stack_max",
".",
"set_fill_value",
"(",
"0",
")",
"stack_med",
"=",
"np",
".",
"ma",
".",
"median",
"(",
"ma_stack",
",",
"axis",
"=",
"0",
")",
".",
"astype",
"(",
"np",
".",
"uint8",
")",
"stack_med",
".",
"set_fill_value",
"(",
"0",
")",
"out_fn",
"=",
"stack_fn",
"+",
"'_count.tif'",
"iolib",
".",
"writeGTiff",
"(",
"stack_count",
",",
"out_fn",
",",
"ds_list",
"[",
"0",
"]",
")",
"out_fn",
"=",
"stack_fn",
"+",
"'_max.tif'",
"iolib",
".",
"writeGTiff",
"(",
"stack_max",
",",
"out_fn",
",",
"ds_list",
"[",
"0",
"]",
")",
"out_fn",
"=",
"stack_fn",
"+",
"'_min.tif'",
"iolib",
".",
"writeGTiff",
"(",
"stack_min",
",",
"out_fn",
",",
"ds_list",
"[",
"0",
"]",
")",
"out_fn",
"=",
"stack_fn",
"+",
"'_med.tif'",
"iolib",
".",
"writeGTiff",
"(",
"stack_med",
",",
"out_fn",
",",
"ds_list",
"[",
"0",
"]",
")",
"ds",
"=",
"gdal",
".",
"Open",
"(",
"out_fn",
")",
"return",
"ds"
] |
Process the MODSCAG products for full date range, create composites and reproject
|
[
"Process",
"the",
"MODSCAG",
"products",
"for",
"full",
"date",
"range",
"create",
"composites",
"and",
"reproject"
] |
abd6be75d326b35f52826ee30dff01f9e86b4b52
|
https://github.com/dshean/demcoreg/blob/abd6be75d326b35f52826ee30dff01f9e86b4b52/demcoreg/dem_mask.py#L333-L362
|
train
|
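A sketch chaining the two MODSCAG helpers; the DEM filename is hypothetical, and the geolib calls (from pygeotools, which dem_mask.py already depends on) are assumed to return the extent and SRS in the form proc_modscag expects:

from datetime import datetime
from osgeo import gdal
from pygeotools.lib import geolib
from demcoreg import dem_mask

dem_ds = gdal.Open('my_dem.tif')  # hypothetical DEM to be masked
fn_list = dem_mask.get_modscag_fn_list(datetime(2015, 4, 22))
if fn_list:
    # Returns a GDAL dataset for the median snow fraction composite
    med_ds = dem_mask.proc_modscag(fn_list, extent=geolib.ds_extent(dem_ds),
                                   t_srs=geolib.get_ds_srs(dem_ds))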
iotile/coretools
|
iotilebuild/iotile/build/tilebus/descriptor.py
|
TBDescriptor._value_length
|
def _value_length(self, value, t):
"""Given an integer or list of them, convert it to an array of bytes."""
if isinstance(value, int):
fmt = '<%s' % (type_codes[t])
output = struct.pack(fmt, value)
return len(output)
elif isinstance(value, str):
return len(value) + 1 # Account for final 0
len_accum = 0
for x in value:
len_accum += self._value_length(x, t)
return len_accum
|
python
|
def _value_length(self, value, t):
"""Given an integer or list of them, convert it to an array of bytes."""
if isinstance(value, int):
fmt = '<%s' % (type_codes[t])
output = struct.pack(fmt, value)
return len(output)
elif isinstance(value, str):
return len(value) + 1 # Account for final 0
len_accum = 0
for x in value:
len_accum += self._value_length(x, t)
return len_accum
|
[
"def",
"_value_length",
"(",
"self",
",",
"value",
",",
"t",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"int",
")",
":",
"fmt",
"=",
"'<%s'",
"%",
"(",
"type_codes",
"[",
"t",
"]",
")",
"output",
"=",
"struct",
".",
"pack",
"(",
"fmt",
",",
"value",
")",
"return",
"len",
"(",
"output",
")",
"elif",
"isinstance",
"(",
"value",
",",
"str",
")",
":",
"return",
"len",
"(",
"value",
")",
"+",
"1",
"# Account for final 0",
"len_accum",
"=",
"0",
"for",
"x",
"in",
"value",
":",
"len_accum",
"+=",
"self",
".",
"_value_length",
"(",
"x",
",",
"t",
")",
"return",
"len_accum"
] |
Given an integer, string, or list of them, return the length in bytes of its packed representation.
|
[
"Given",
"an",
"integer",
"or",
"list",
"of",
"them",
"convert",
"it",
"to",
"an",
"array",
"of",
"bytes",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/tilebus/descriptor.py#L152-L166
|
train
|
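A standalone illustration of the length computation _value_length performs; the type_codes mapping below is a hypothetical stand-in for the real module-level table:

import struct

type_codes = {'uint8_t': 'B', 'uint16_t': 'H', 'uint32_t': 'L'}  # hypothetical subset

def value_length(value, t):
    if isinstance(value, int):
        return len(struct.pack('<%s' % type_codes[t], value))
    elif isinstance(value, str):
        return len(value) + 1  # account for the trailing 0
    return sum(value_length(x, t) for x in value)

assert value_length(7, 'uint16_t') == 2
assert value_length([1, 2, 3], 'uint32_t') == 12
assert value_length('abc', 'uint8_t') == 4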
iotile/coretools
|
iotilebuild/iotile/build/tilebus/descriptor.py
|
TBDescriptor._parse_line
|
def _parse_line(self, line_no, line):
"""Parse a line in a TileBus file
Args:
line_no (int): The line number for printing useful error messages
line (string): The line that we are trying to parse
"""
try:
matched = statement.parseString(line)
except ParseException as exc:
raise DataError("Error parsing line in TileBus file", line_number=line_no, column=exc.col, contents=line)
if 'symbol' in matched:
self._parse_cmd(matched)
elif 'filename' in matched:
self._parse_include(matched)
elif 'variable' in matched:
self._parse_assignment(matched)
elif 'configvar' in matched:
self._parse_configvar(matched)
|
python
|
def _parse_line(self, line_no, line):
"""Parse a line in a TileBus file
Args:
line_no (int): The line number for printing useful error messages
line (string): The line that we are trying to parse
"""
try:
matched = statement.parseString(line)
except ParseException as exc:
raise DataError("Error parsing line in TileBus file", line_number=line_no, column=exc.col, contents=line)
if 'symbol' in matched:
self._parse_cmd(matched)
elif 'filename' in matched:
self._parse_include(matched)
elif 'variable' in matched:
self._parse_assignment(matched)
elif 'configvar' in matched:
self._parse_configvar(matched)
|
[
"def",
"_parse_line",
"(",
"self",
",",
"line_no",
",",
"line",
")",
":",
"try",
":",
"matched",
"=",
"statement",
".",
"parseString",
"(",
"line",
")",
"except",
"ParseException",
"as",
"exc",
":",
"raise",
"DataError",
"(",
"\"Error parsing line in TileBus file\"",
",",
"line_number",
"=",
"line_no",
",",
"column",
"=",
"exc",
".",
"col",
",",
"contents",
"=",
"line",
")",
"if",
"'symbol'",
"in",
"matched",
":",
"self",
".",
"_parse_cmd",
"(",
"matched",
")",
"elif",
"'filename'",
"in",
"matched",
":",
"self",
".",
"_parse_include",
"(",
"matched",
")",
"elif",
"'variable'",
"in",
"matched",
":",
"self",
".",
"_parse_assignment",
"(",
"matched",
")",
"elif",
"'configvar'",
"in",
"matched",
":",
"self",
".",
"_parse_configvar",
"(",
"matched",
")"
] |
Parse a line in a TileBus file
Args:
line_no (int): The line number for printing useful error messages
line (string): The line that we are trying to parse
|
[
"Parse",
"a",
"line",
"in",
"a",
"TileBus",
"file"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/tilebus/descriptor.py#L213-L233
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/tilebus/descriptor.py
|
TBDescriptor._validate_information
|
def _validate_information(self):
"""Validate that all information has been filled in"""
needed_variables = ["ModuleName", "ModuleVersion", "APIVersion"]
for var in needed_variables:
if var not in self.variables:
raise DataError("Needed variable was not defined in mib file.", variable=var)
# Make sure ModuleName is <= 6 characters
if len(self.variables["ModuleName"]) > 6:
raise DataError("ModuleName too long, must be 6 or fewer characters.",
module_name=self.variables["ModuleName"])
if not isinstance(self.variables["ModuleVersion"], str):
raise ValueError("ModuleVersion ('%s') must be a string of the form X.Y.Z" %
str(self.variables['ModuleVersion']))
if not isinstance(self.variables["APIVersion"], str):
raise ValueError("APIVersion ('%s') must be a string of the form X.Y" % str(self.variables['APIVersion']))
self.variables['ModuleVersion'] = self._convert_module_version(self.variables["ModuleVersion"])
self.variables['APIVersion'] = self._convert_api_version(self.variables["APIVersion"])
self.variables["ModuleName"] = self.variables["ModuleName"].ljust(6)
self.valid = True
|
python
|
def _validate_information(self):
"""Validate that all information has been filled in"""
needed_variables = ["ModuleName", "ModuleVersion", "APIVersion"]
for var in needed_variables:
if var not in self.variables:
raise DataError("Needed variable was not defined in mib file.", variable=var)
# Make sure ModuleName is <= 6 characters
if len(self.variables["ModuleName"]) > 6:
raise DataError("ModuleName too long, must be 6 or fewer characters.",
module_name=self.variables["ModuleName"])
if not isinstance(self.variables["ModuleVersion"], str):
raise ValueError("ModuleVersion ('%s') must be a string of the form X.Y.Z" %
str(self.variables['ModuleVersion']))
if not isinstance(self.variables["APIVersion"], str):
raise ValueError("APIVersion ('%s') must be a string of the form X.Y" % str(self.variables['APIVersion']))
self.variables['ModuleVersion'] = self._convert_module_version(self.variables["ModuleVersion"])
self.variables['APIVersion'] = self._convert_api_version(self.variables["APIVersion"])
self.variables["ModuleName"] = self.variables["ModuleName"].ljust(6)
self.valid = True
|
[
"def",
"_validate_information",
"(",
"self",
")",
":",
"needed_variables",
"=",
"[",
"\"ModuleName\"",
",",
"\"ModuleVersion\"",
",",
"\"APIVersion\"",
"]",
"for",
"var",
"in",
"needed_variables",
":",
"if",
"var",
"not",
"in",
"self",
".",
"variables",
":",
"raise",
"DataError",
"(",
"\"Needed variable was not defined in mib file.\"",
",",
"variable",
"=",
"var",
")",
"# Make sure ModuleName is <= 6 characters",
"if",
"len",
"(",
"self",
".",
"variables",
"[",
"\"ModuleName\"",
"]",
")",
">",
"6",
":",
"raise",
"DataError",
"(",
"\"ModuleName too long, must be 6 or fewer characters.\"",
",",
"module_name",
"=",
"self",
".",
"variables",
"[",
"\"ModuleName\"",
"]",
")",
"if",
"not",
"isinstance",
"(",
"self",
".",
"variables",
"[",
"\"ModuleVersion\"",
"]",
",",
"str",
")",
":",
"raise",
"ValueError",
"(",
"\"ModuleVersion ('%s') must be a string of the form X.Y.Z\"",
"%",
"str",
"(",
"self",
".",
"variables",
"[",
"'ModuleVersion'",
"]",
")",
")",
"if",
"not",
"isinstance",
"(",
"self",
".",
"variables",
"[",
"\"APIVersion\"",
"]",
",",
"str",
")",
":",
"raise",
"ValueError",
"(",
"\"APIVersion ('%s') must be a string of the form X.Y\"",
"%",
"str",
"(",
"self",
".",
"variables",
"[",
"'APIVersion'",
"]",
")",
")",
"self",
".",
"variables",
"[",
"'ModuleVersion'",
"]",
"=",
"self",
".",
"_convert_module_version",
"(",
"self",
".",
"variables",
"[",
"\"ModuleVersion\"",
"]",
")",
"self",
".",
"variables",
"[",
"'APIVersion'",
"]",
"=",
"self",
".",
"_convert_api_version",
"(",
"self",
".",
"variables",
"[",
"\"APIVersion\"",
"]",
")",
"self",
".",
"variables",
"[",
"\"ModuleName\"",
"]",
"=",
"self",
".",
"variables",
"[",
"\"ModuleName\"",
"]",
".",
"ljust",
"(",
"6",
")",
"self",
".",
"valid",
"=",
"True"
] |
Validate that all information has been filled in
|
[
"Validate",
"that",
"all",
"information",
"has",
"been",
"filled",
"in"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/tilebus/descriptor.py#L235-L260
|
train
|
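A quick check of the ModuleName rule enforced in _validate_information: names longer than six characters are rejected, and shorter ones are right-padded to exactly six:

name = 'GPIO'
assert len(name) <= 6
padded = name.ljust(6)
assert padded == 'GPIO  ' and len(padded) == 6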
iotile/coretools
|
iotilebuild/iotile/build/tilebus/descriptor.py
|
TBDescriptor.get_block
|
def get_block(self, config_only=False):
"""Create a TileBus Block based on the information in this descriptor"""
mib = TBBlock()
for cid, config in self.configs.items():
mib.add_config(cid, config)
if not config_only:
for key, val in self.commands.items():
mib.add_command(key, val)
if not self.valid:
self._validate_information()
mib.set_api_version(*self.variables["APIVersion"])
mib.set_module_version(*self.variables["ModuleVersion"])
mib.set_name(self.variables["ModuleName"])
return mib
|
python
|
def get_block(self, config_only=False):
"""Create a TileBus Block based on the information in this descriptor"""
mib = TBBlock()
for cid, config in self.configs.items():
mib.add_config(cid, config)
if not config_only:
for key, val in self.commands.items():
mib.add_command(key, val)
if not self.valid:
self._validate_information()
mib.set_api_version(*self.variables["APIVersion"])
mib.set_module_version(*self.variables["ModuleVersion"])
mib.set_name(self.variables["ModuleName"])
return mib
|
[
"def",
"get_block",
"(",
"self",
",",
"config_only",
"=",
"False",
")",
":",
"mib",
"=",
"TBBlock",
"(",
")",
"for",
"cid",
",",
"config",
"in",
"self",
".",
"configs",
".",
"items",
"(",
")",
":",
"mib",
".",
"add_config",
"(",
"cid",
",",
"config",
")",
"if",
"not",
"config_only",
":",
"for",
"key",
",",
"val",
"in",
"self",
".",
"commands",
".",
"items",
"(",
")",
":",
"mib",
".",
"add_command",
"(",
"key",
",",
"val",
")",
"if",
"not",
"self",
".",
"valid",
":",
"self",
".",
"_validate_information",
"(",
")",
"mib",
".",
"set_api_version",
"(",
"*",
"self",
".",
"variables",
"[",
"\"APIVersion\"",
"]",
")",
"mib",
".",
"set_module_version",
"(",
"*",
"self",
".",
"variables",
"[",
"\"ModuleVersion\"",
"]",
")",
"mib",
".",
"set_name",
"(",
"self",
".",
"variables",
"[",
"\"ModuleName\"",
"]",
")",
"return",
"mib"
] |
Create a TileBus Block based on the information in this descriptor
|
[
"Create",
"a",
"TileBus",
"Block",
"based",
"on",
"the",
"information",
"in",
"this",
"descriptor"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/tilebus/descriptor.py#L286-L305
|
train
|
iotile/coretools
|
iotilegateway/iotilegateway/device.py
|
AggregatingDeviceAdapter.add_adapter
|
def add_adapter(self, adapter):
"""Add a device adapter to this aggregating adapter."""
if self._started:
raise InternalError("New adapters cannot be added after start() is called")
if isinstance(adapter, DeviceAdapter):
self._logger.warning("Wrapping legacy device adapter %s in async wrapper", adapter)
adapter = AsynchronousModernWrapper(adapter, loop=self._loop)
self.adapters.append(adapter)
adapter_callback = functools.partial(self.handle_adapter_event,
len(self.adapters) - 1)
events = ['device_seen', 'broadcast', 'report', 'connection',
'disconnection', 'trace', 'progress']
adapter.register_monitor([None], events, adapter_callback)
|
python
|
def add_adapter(self, adapter):
"""Add a device adapter to this aggregating adapter."""
if self._started:
raise InternalError("New adapters cannot be added after start() is called")
if isinstance(adapter, DeviceAdapter):
self._logger.warning("Wrapping legacy device adapter %s in async wrapper", adapter)
adapter = AsynchronousModernWrapper(adapter, loop=self._loop)
self.adapters.append(adapter)
adapter_callback = functools.partial(self.handle_adapter_event,
len(self.adapters) - 1)
events = ['device_seen', 'broadcast', 'report', 'connection',
'disconnection', 'trace', 'progress']
adapter.register_monitor([None], events, adapter_callback)
|
[
"def",
"add_adapter",
"(",
"self",
",",
"adapter",
")",
":",
"if",
"self",
".",
"_started",
":",
"raise",
"InternalError",
"(",
"\"New adapters cannot be added after start() is called\"",
")",
"if",
"isinstance",
"(",
"adapter",
",",
"DeviceAdapter",
")",
":",
"self",
".",
"_logger",
".",
"warning",
"(",
"\"Wrapping legacy device adapter %s in async wrapper\"",
",",
"adapter",
")",
"adapter",
"=",
"AsynchronousModernWrapper",
"(",
"adapter",
",",
"loop",
"=",
"self",
".",
"_loop",
")",
"self",
".",
"adapters",
".",
"append",
"(",
"adapter",
")",
"adapter_callback",
"=",
"functools",
".",
"partial",
"(",
"self",
".",
"handle_adapter_event",
",",
"len",
"(",
"self",
".",
"adapters",
")",
"-",
"1",
")",
"events",
"=",
"[",
"'device_seen'",
",",
"'broadcast'",
",",
"'report'",
",",
"'connection'",
",",
"'disconnection'",
",",
"'trace'",
",",
"'progress'",
"]",
"adapter",
".",
"register_monitor",
"(",
"[",
"None",
"]",
",",
"events",
",",
"adapter_callback",
")"
] |
Add a device adapter to this aggregating adapter.
|
[
"Add",
"a",
"device",
"adapter",
"to",
"this",
"aggregating",
"adapter",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/device.py#L78-L95
|
train
|
iotile/coretools
|
iotilegateway/iotilegateway/device.py
|
AggregatingDeviceAdapter.get_config
|
def get_config(self, name, default=_MISSING):
"""Get a configuration setting from this DeviceAdapter.
See :meth:`AbstractDeviceAdapter.get_config`.
"""
val = self._config.get(name, default)
if val is _MISSING:
raise ArgumentError("DeviceAdapter config {} did not exist and no default".format(name))
return val
|
python
|
def get_config(self, name, default=_MISSING):
"""Get a configuration setting from this DeviceAdapter.
See :meth:`AbstractDeviceAdapter.get_config`.
"""
val = self._config.get(name, default)
if val is _MISSING:
raise ArgumentError("DeviceAdapter config {} did not exist and no default".format(name))
return val
|
[
"def",
"get_config",
"(",
"self",
",",
"name",
",",
"default",
"=",
"_MISSING",
")",
":",
"val",
"=",
"self",
".",
"_config",
".",
"get",
"(",
"name",
",",
"default",
")",
"if",
"val",
"is",
"_MISSING",
":",
"raise",
"ArgumentError",
"(",
"\"DeviceAdapter config {} did not exist and no default\"",
".",
"format",
"(",
"name",
")",
")",
"return",
"val"
] |
Get a configuration setting from this DeviceAdapter.
See :meth:`AbstractDeviceAdapter.get_config`.
|
[
"Get",
"a",
"configuration",
"setting",
"from",
"this",
"DeviceAdapter",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/device.py#L110-L120
|
train
|
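A standalone sketch of the _MISSING sentinel pattern behind get_config: a unique object() default lets an explicitly stored None still be returned as a valid value (the Config class here is illustrative, not the real adapter):

_MISSING = object()

class Config:
    def __init__(self, values):
        self._config = dict(values)

    def get_config(self, name, default=_MISSING):
        val = self._config.get(name, default)
        if val is _MISSING:
            raise KeyError("config %s did not exist and no default" % name)
        return val

cfg = Config({'probe_supported': True, 'timeout': None})
assert cfg.get_config('timeout') is None  # stored None is a real value
assert cfg.get_config('missing', 5) == 5  # explicit default is honored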
iotile/coretools
|
iotilegateway/iotilegateway/device.py
|
AggregatingDeviceAdapter.start
|
async def start(self):
"""Start all adapters managed by this device adapter.
If there is an error starting one or more adapters, this method will
stop any adapters that we successfully started and raise an exception.
"""
successful = 0
try:
for adapter in self.adapters:
await adapter.start()
successful += 1
self._started = True
except:
for adapter in self.adapters[:successful]:
await adapter.stop()
raise
|
python
|
async def start(self):
"""Start all adapters managed by this device adapter.
If there is an error starting one or more adapters, this method will
stop any adapters that we successfully started and raise an exception.
"""
successful = 0
try:
for adapter in self.adapters:
await adapter.start()
successful += 1
self._started = True
except:
for adapter in self.adapters[:successful]:
await adapter.stop()
raise
|
[
"async",
"def",
"start",
"(",
"self",
")",
":",
"successful",
"=",
"0",
"try",
":",
"for",
"adapter",
"in",
"self",
".",
"adapters",
":",
"await",
"adapter",
".",
"start",
"(",
")",
"successful",
"+=",
"1",
"self",
".",
"_started",
"=",
"True",
"except",
":",
"for",
"adapter",
"in",
"self",
".",
"adapters",
"[",
":",
"successful",
"]",
":",
"await",
"adapter",
".",
"stop",
"(",
")",
"raise"
] |
Start all adapters managed by this device adapter.
If there is an error starting one or more adapters, this method will
stop any adapters that we successfully started and raise an exception.
|
[
"Start",
"all",
"adapters",
"managed",
"by",
"this",
"device",
"adapter",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/device.py#L141-L160
|
train
|
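A runnable demo of the rollback behavior in start(): when the third fake adapter fails to start, only the two that already started get stopped (the Fake class is purely illustrative):

import asyncio

class Fake:
    def __init__(self, fail=False):
        self.fail = fail
        self.stopped = False

    async def start(self):
        if self.fail:
            raise RuntimeError("boom")

    async def stop(self):
        self.stopped = True

async def start_all(adapters):
    successful = 0
    try:
        for adapter in adapters:
            await adapter.start()
            successful += 1
    except Exception:
        for adapter in adapters[:successful]:
            await adapter.stop()
        raise

adapters = [Fake(), Fake(), Fake(fail=True)]
try:
    asyncio.run(start_all(adapters))
except RuntimeError:
    pass
assert [a.stopped for a in adapters] == [True, True, False]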
iotile/coretools
|
iotilegateway/iotilegateway/device.py
|
AggregatingDeviceAdapter.visible_devices
|
def visible_devices(self):
"""Unify all visible devices across all connected adapters
Returns:
dict: A dictionary mapping UUIDs to device information dictionaries
"""
devs = {}
for device_id, adapters in self._devices.items():
dev = None
max_signal = None
best_adapter = None
for adapter_id, devinfo in adapters.items():
connstring = "adapter/{0}/{1}".format(adapter_id, devinfo['connection_string'])
if dev is None:
dev = copy.deepcopy(devinfo)
del dev['connection_string']
if 'adapters' not in dev:
dev['adapters'] = []
best_adapter = adapter_id
dev['adapters'].append((adapter_id, devinfo['signal_strength'], connstring))
if max_signal is None:
max_signal = devinfo['signal_strength']
elif devinfo['signal_strength'] > max_signal:
max_signal = devinfo['signal_strength']
best_adapter = adapter_id
# If device has been seen in no adapters, it will get expired
# don't return it
if dev is None:
continue
dev['connection_string'] = "device/%x" % dev['uuid']
dev['adapters'] = sorted(dev['adapters'], key=lambda x: x[1], reverse=True)
dev['best_adapter'] = best_adapter
dev['signal_strength'] = max_signal
devs[device_id] = dev
return devs
|
python
|
def visible_devices(self):
"""Unify all visible devices across all connected adapters
Returns:
dict: A dictionary mapping UUIDs to device information dictionaries
"""
devs = {}
for device_id, adapters in self._devices.items():
dev = None
max_signal = None
best_adapter = None
for adapter_id, devinfo in adapters.items():
connstring = "adapter/{0}/{1}".format(adapter_id, devinfo['connection_string'])
if dev is None:
dev = copy.deepcopy(devinfo)
del dev['connection_string']
if 'adapters' not in dev:
dev['adapters'] = []
best_adapter = adapter_id
dev['adapters'].append((adapter_id, devinfo['signal_strength'], connstring))
if max_signal is None:
max_signal = devinfo['signal_strength']
elif devinfo['signal_strength'] > max_signal:
max_signal = devinfo['signal_strength']
best_adapter = adapter_id
# If device has been seen in no adapters, it will get expired
# don't return it
if dev is None:
continue
dev['connection_string'] = "device/%x" % dev['uuid']
dev['adapters'] = sorted(dev['adapters'], key=lambda x: x[1], reverse=True)
dev['best_adapter'] = best_adapter
dev['signal_strength'] = max_signal
devs[device_id] = dev
return devs
|
[
"def",
"visible_devices",
"(",
"self",
")",
":",
"devs",
"=",
"{",
"}",
"for",
"device_id",
",",
"adapters",
"in",
"self",
".",
"_devices",
".",
"items",
"(",
")",
":",
"dev",
"=",
"None",
"max_signal",
"=",
"None",
"best_adapter",
"=",
"None",
"for",
"adapter_id",
",",
"devinfo",
"in",
"adapters",
".",
"items",
"(",
")",
":",
"connstring",
"=",
"\"adapter/{0}/{1}\"",
".",
"format",
"(",
"adapter_id",
",",
"devinfo",
"[",
"'connection_string'",
"]",
")",
"if",
"dev",
"is",
"None",
":",
"dev",
"=",
"copy",
".",
"deepcopy",
"(",
"devinfo",
")",
"del",
"dev",
"[",
"'connection_string'",
"]",
"if",
"'adapters'",
"not",
"in",
"dev",
":",
"dev",
"[",
"'adapters'",
"]",
"=",
"[",
"]",
"best_adapter",
"=",
"adapter_id",
"dev",
"[",
"'adapters'",
"]",
".",
"append",
"(",
"(",
"adapter_id",
",",
"devinfo",
"[",
"'signal_strength'",
"]",
",",
"connstring",
")",
")",
"if",
"max_signal",
"is",
"None",
":",
"max_signal",
"=",
"devinfo",
"[",
"'signal_strength'",
"]",
"elif",
"devinfo",
"[",
"'signal_strength'",
"]",
">",
"max_signal",
":",
"max_signal",
"=",
"devinfo",
"[",
"'signal_strength'",
"]",
"best_adapter",
"=",
"adapter_id",
"# If device has been seen in no adapters, it will get expired",
"# don't return it",
"if",
"dev",
"is",
"None",
":",
"continue",
"dev",
"[",
"'connection_string'",
"]",
"=",
"\"device/%x\"",
"%",
"dev",
"[",
"'uuid'",
"]",
"dev",
"[",
"'adapters'",
"]",
"=",
"sorted",
"(",
"dev",
"[",
"'adapters'",
"]",
",",
"key",
"=",
"lambda",
"x",
":",
"x",
"[",
"1",
"]",
",",
"reverse",
"=",
"True",
")",
"dev",
"[",
"'best_adapter'",
"]",
"=",
"best_adapter",
"dev",
"[",
"'signal_strength'",
"]",
"=",
"max_signal",
"devs",
"[",
"device_id",
"]",
"=",
"dev",
"return",
"devs"
] |
Unify all visible devices across all connected adapters
Returns:
dict: A dictionary mapping UUIDs to device information dictionaries
|
[
"Unify",
"all",
"visible",
"devices",
"across",
"all",
"connected",
"adapters"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/device.py#L168-L212
|
train
|
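A tiny illustration of the ranking visible_devices applies to each device's adapter list: (adapter_id, signal_strength, conn_string) tuples sorted strongest-first:

entries = [(0, -60, 'adapter/0/aa:bb'), (1, -45, 'adapter/1/aa:bb')]
ranked = sorted(entries, key=lambda x: x[1], reverse=True)
assert ranked[0][0] == 1  # adapter 1 has the strongest signal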
iotile/coretools
|
iotilegateway/iotilegateway/device.py
|
AggregatingDeviceAdapter.probe
|
async def probe(self):
"""Probe for devices.
This method will probe all adapters that can probe and will send a
notification for all devices that we have seen from all adapters.
See :meth:`AbstractDeviceAdapter.probe`.
"""
for adapter in self.adapters:
if adapter.get_config('probe_supported', False):
await adapter.probe()
|
python
|
async def probe(self):
"""Probe for devices.
This method will probe all adapters that can probe and will send a
notification for all devices that we have seen from all adapters.
See :meth:`AbstractDeviceAdapter.probe`.
"""
for adapter in self.adapters:
if adapter.get_config('probe_supported', False):
await adapter.probe()
|
[
"async",
"def",
"probe",
"(",
"self",
")",
":",
"for",
"adapter",
"in",
"self",
".",
"adapters",
":",
"if",
"adapter",
".",
"get_config",
"(",
"'probe_supported'",
",",
"False",
")",
":",
"await",
"adapter",
".",
"probe",
"(",
")"
] |
Probe for devices.
This method will probe all adapters that can probe and will send a
notification for all devices that we have seen from all adapters.
See :meth:`AbstractDeviceAdapter.probe`.
|
[
"Probe",
"for",
"devices",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/device.py#L275-L286
|
train
|
iotile/coretools
|
iotilegateway/iotilegateway/device.py
|
AggregatingDeviceAdapter.send_script
|
async def send_script(self, conn_id, data):
"""Send a script to a device.
See :meth:`AbstractDeviceAdapter.send_script`.
"""
adapter_id = self._get_property(conn_id, 'adapter')
return await self.adapters[adapter_id].send_script(conn_id, data)
|
python
|
async def send_script(self, conn_id, data):
"""Send a script to a device.
See :meth:`AbstractDeviceAdapter.send_script`.
"""
adapter_id = self._get_property(conn_id, 'adapter')
return await self.adapters[adapter_id].send_script(conn_id, data)
|
[
"async",
"def",
"send_script",
"(",
"self",
",",
"conn_id",
",",
"data",
")",
":",
"adapter_id",
"=",
"self",
".",
"_get_property",
"(",
"conn_id",
",",
"'adapter'",
")",
"return",
"await",
"self",
".",
"adapters",
"[",
"adapter_id",
"]",
".",
"send_script",
"(",
"conn_id",
",",
"data",
")"
] |
Send a script to a device.
See :meth:`AbstractDeviceAdapter.send_script`.
|
[
"Send",
"a",
"script",
"to",
"a",
"device",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/device.py#L306-L313
|
train
|
iotile/coretools
|
iotilegateway/iotilegateway/device.py
|
AggregatingDeviceAdapter.handle_adapter_event
|
async def handle_adapter_event(self, adapter_id, conn_string, conn_id, name, event):
"""Handle an event received from an adapter."""
if name == 'device_seen':
self._track_device_seen(adapter_id, conn_string, event)
event = self._translate_device_seen(adapter_id, conn_string, event)
conn_string = self._translate_conn_string(adapter_id, conn_string)
elif conn_id is not None and self._get_property(conn_id, 'translate'):
conn_string = self._translate_conn_string(adapter_id, conn_string)
else:
conn_string = "adapter/%d/%s" % (adapter_id, conn_string)
await self.notify_event(conn_string, name, event)
|
python
|
async def handle_adapter_event(self, adapter_id, conn_string, conn_id, name, event):
"""Handle an event received from an adapter."""
if name == 'device_seen':
self._track_device_seen(adapter_id, conn_string, event)
event = self._translate_device_seen(adapter_id, conn_string, event)
conn_string = self._translate_conn_string(adapter_id, conn_string)
elif conn_id is not None and self._get_property(conn_id, 'translate'):
conn_string = self._translate_conn_string(adapter_id, conn_string)
else:
conn_string = "adapter/%d/%s" % (adapter_id, conn_string)
await self.notify_event(conn_string, name, event)
|
[
"async",
"def",
"handle_adapter_event",
"(",
"self",
",",
"adapter_id",
",",
"conn_string",
",",
"conn_id",
",",
"name",
",",
"event",
")",
":",
"if",
"name",
"==",
"'device_seen'",
":",
"self",
".",
"_track_device_seen",
"(",
"adapter_id",
",",
"conn_string",
",",
"event",
")",
"event",
"=",
"self",
".",
"_translate_device_seen",
"(",
"adapter_id",
",",
"conn_string",
",",
"event",
")",
"conn_string",
"=",
"self",
".",
"_translate_conn_string",
"(",
"adapter_id",
",",
"conn_string",
")",
"elif",
"conn_id",
"is",
"not",
"None",
"and",
"self",
".",
"_get_property",
"(",
"conn_id",
",",
"'translate'",
")",
":",
"conn_string",
"=",
"self",
".",
"_translate_conn_string",
"(",
"adapter_id",
",",
"conn_string",
")",
"else",
":",
"conn_string",
"=",
"\"adapter/%d/%s\"",
"%",
"(",
"adapter_id",
",",
"conn_string",
")",
"await",
"self",
".",
"notify_event",
"(",
"conn_string",
",",
"name",
",",
"event",
")"
] |
Handle an event received from an adapter.
|
[
"Handle",
"an",
"event",
"received",
"from",
"an",
"adapter",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/device.py#L315-L328
|
train
|
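The fallback branch above namespaces a raw connection string with the adapter index so that strings from different adapters cannot collide. A one-line sketch of that convention (the helper name is illustrative):

def namespace_conn_string(adapter_id, conn_string):
    # Same "adapter/<id>/<string>" convention as the else branch above.
    return "adapter/%d/%s" % (adapter_id, conn_string)

assert namespace_conn_string(2, "AA:BB:CC") == "adapter/2/AA:BB:CC"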
iotile/coretools
|
iotilegateway/iotilegateway/device.py
|
AggregatingDeviceAdapter._device_expiry_callback
|
def _device_expiry_callback(self):
"""Periodic callback to remove expired devices from visible_devices."""
expired = 0
for adapters in self._devices.values():
to_remove = []
now = monotonic()
for adapter_id, dev in adapters.items():
if 'expires' not in dev:
continue
if now > dev['expires']:
to_remove.append(adapter_id)
local_conn = "adapter/%d/%s" % (adapter_id, dev['connection_string'])
if local_conn in self._conn_strings:
del self._conn_strings[local_conn]
for entry in to_remove:
del adapters[entry]
expired += 1
if expired > 0:
self._logger.info('Expired %d devices', expired)
|
python
|
def _device_expiry_callback(self):
"""Periodic callback to remove expired devices from visible_devices."""
expired = 0
for adapters in self._devices.values():
to_remove = []
now = monotonic()
for adapter_id, dev in adapters.items():
if 'expires' not in dev:
continue
if now > dev['expires']:
to_remove.append(adapter_id)
local_conn = "adapter/%d/%s" % (adapter_id, dev['connection_string'])
if local_conn in self._conn_strings:
del self._conn_strings[local_conn]
for entry in to_remove:
del adapters[entry]
expired += 1
if expired > 0:
self._logger.info('Expired %d devices', expired)
|
[
"def",
"_device_expiry_callback",
"(",
"self",
")",
":",
"expired",
"=",
"0",
"for",
"adapters",
"in",
"self",
".",
"_devices",
".",
"values",
"(",
")",
":",
"to_remove",
"=",
"[",
"]",
"now",
"=",
"monotonic",
"(",
")",
"for",
"adapter_id",
",",
"dev",
"in",
"adapters",
".",
"items",
"(",
")",
":",
"if",
"'expires'",
"not",
"in",
"dev",
":",
"continue",
"if",
"now",
">",
"dev",
"[",
"'expires'",
"]",
":",
"to_remove",
".",
"append",
"(",
"adapter_id",
")",
"local_conn",
"=",
"\"adapter/%d/%s\"",
"%",
"(",
"adapter_id",
",",
"dev",
"[",
"'connection_string'",
"]",
")",
"if",
"local_conn",
"in",
"self",
".",
"_conn_strings",
":",
"del",
"self",
".",
"_conn_strings",
"[",
"local_conn",
"]",
"for",
"entry",
"in",
"to_remove",
":",
"del",
"adapters",
"[",
"entry",
"]",
"expired",
"+=",
"1",
"if",
"expired",
">",
"0",
":",
"self",
".",
"_logger",
".",
"info",
"(",
"'Expired %d devices'",
",",
"expired",
")"
] |
Periodic callback to remove expired devices from visible_devices.
|
[
"Periodic",
"callback",
"to",
"remove",
"expired",
"devices",
"from",
"visible_devices",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/device.py#L361-L385
|
train
|
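The sweep above is a standard expire-by-deadline pass over a dict. A self-contained sketch of the same pattern (simplified to a single adapter map, without the _conn_strings bookkeeping; the names are illustrative):

from time import monotonic

def sweep_expired(adapters):
    """Drop entries whose optional 'expires' deadline has passed."""
    now = monotonic()
    # Collect first, delete second: mutating a dict while iterating it
    # raises RuntimeError, which is why the original builds to_remove.
    expired = [adapter_id for adapter_id, dev in adapters.items()
               if 'expires' in dev and now > dev['expires']]
    for adapter_id in expired:
        del adapters[adapter_id]
    return len(expired)

adapters = {0: {'expires': monotonic() - 1.0}, 1: {}}
assert sweep_expired(adapters) == 1 and 1 in adapters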
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Variables/PathVariable.py
|
_PathVariableClass.PathIsDir
|
def PathIsDir(self, key, val, env):
"""Validator to check if Path is a directory."""
if not os.path.isdir(val):
if os.path.isfile(val):
m = 'Directory path for option %s is a file: %s'
else:
m = 'Directory path for option %s does not exist: %s'
raise SCons.Errors.UserError(m % (key, val))
|
python
|
def PathIsDir(self, key, val, env):
"""Validator to check if Path is a directory."""
if not os.path.isdir(val):
if os.path.isfile(val):
m = 'Directory path for option %s is a file: %s'
else:
m = 'Directory path for option %s does not exist: %s'
raise SCons.Errors.UserError(m % (key, val))
|
[
"def",
"PathIsDir",
"(",
"self",
",",
"key",
",",
"val",
",",
"env",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"val",
")",
":",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"val",
")",
":",
"m",
"=",
"'Directory path for option %s is a file: %s'",
"else",
":",
"m",
"=",
"'Directory path for option %s does not exist: %s'",
"raise",
"SCons",
".",
"Errors",
".",
"UserError",
"(",
"m",
"%",
"(",
"key",
",",
"val",
")",
")"
] |
Validator to check if Path is a directory.
|
[
"Validator",
"to",
"check",
"if",
"Path",
"is",
"a",
"directory",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Variables/PathVariable.py#L85-L92
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Variables/PathVariable.py
|
_PathVariableClass.PathExists
|
def PathExists(self, key, val, env):
"""Validator to check if Path exists"""
if not os.path.exists(val):
m = 'Path for option %s does not exist: %s'
raise SCons.Errors.UserError(m % (key, val))
|
python
|
def PathExists(self, key, val, env):
"""Validator to check if Path exists"""
if not os.path.exists(val):
m = 'Path for option %s does not exist: %s'
raise SCons.Errors.UserError(m % (key, val))
|
[
"def",
"PathExists",
"(",
"self",
",",
"key",
",",
"val",
",",
"env",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"val",
")",
":",
"m",
"=",
"'Path for option %s does not exist: %s'",
"raise",
"SCons",
".",
"Errors",
".",
"UserError",
"(",
"m",
"%",
"(",
"key",
",",
"val",
")",
")"
] |
Validator to check if Path exists
|
[
"Validator",
"to",
"check",
"if",
"Path",
"exists"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Variables/PathVariable.py#L112-L116
|
train
|
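Both validators above follow the same shape: check the filesystem, pick a message keyed by what went wrong, and raise a user-facing error. A dependency-free sketch of the PathIsDir logic (UserError stands in for SCons.Errors.UserError; the unused env argument is dropped):

import os

class UserError(Exception):
    """Stand-in for SCons.Errors.UserError in this sketch."""

def path_is_dir(key, val):
    # Same decision tree as PathIsDir above: distinguish "is a file"
    # from "does not exist" so the error message is actionable.
    if not os.path.isdir(val):
        if os.path.isfile(val):
            msg = 'Directory path for option %s is a file: %s'
        else:
            msg = 'Directory path for option %s does not exist: %s'
        raise UserError(msg % (key, val))

path_is_dir('BUILD_DIR', os.getcwd())  # an existing directory passes silently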
iotile/coretools
|
iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py
|
SensorGraphSubsystem._reset_vector
|
async def _reset_vector(self):
"""Background task to initialize this system in the event loop."""
self._logger.debug("sensor_graph subsystem task starting")
# If there is a persistent sgf loaded, send reset information.
self.initialized.set()
while True:
stream, reading = await self._inputs.get()
try:
await process_graph_input(self.graph, stream, reading, self._executor)
self.process_streamers()
except: #pylint:disable=bare-except;This is a background task that should not die
self._logger.exception("Unhandled exception processing sensor_graph input (stream=%s), reading=%s", stream, reading)
finally:
self._inputs.task_done()
|
python
|
async def _reset_vector(self):
"""Background task to initialize this system in the event loop."""
self._logger.debug("sensor_graph subsystem task starting")
# If there is a persistent sgf loaded, send reset information.
self.initialized.set()
while True:
stream, reading = await self._inputs.get()
try:
await process_graph_input(self.graph, stream, reading, self._executor)
self.process_streamers()
except: #pylint:disable=bare-except;This is a background task that should not die
self._logger.exception("Unhandled exception processing sensor_graph input (stream=%s), reading=%s", stream, reading)
finally:
self._inputs.task_done()
|
[
"async",
"def",
"_reset_vector",
"(",
"self",
")",
":",
"self",
".",
"_logger",
".",
"debug",
"(",
"\"sensor_graph subsystem task starting\"",
")",
"# If there is a persistent sgf loaded, send reset information.",
"self",
".",
"initialized",
".",
"set",
"(",
")",
"while",
"True",
":",
"stream",
",",
"reading",
"=",
"await",
"self",
".",
"_inputs",
".",
"get",
"(",
")",
"try",
":",
"await",
"process_graph_input",
"(",
"self",
".",
"graph",
",",
"stream",
",",
"reading",
",",
"self",
".",
"_executor",
")",
"self",
".",
"process_streamers",
"(",
")",
"except",
":",
"#pylint:disable=bare-except;This is a background task that should not die",
"self",
".",
"_logger",
".",
"exception",
"(",
"\"Unhandled exception processing sensor_graph input (stream=%s), reading=%s\"",
",",
"stream",
",",
"reading",
")",
"finally",
":",
"self",
".",
"_inputs",
".",
"task_done",
"(",
")"
] |
Background task to initialize this system in the event loop.
|
[
"Background",
"task",
"to",
"initialize",
"this",
"system",
"in",
"the",
"event",
"loop",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py#L140-L158
|
train
|
iotile/coretools
|
iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py
|
SensorGraphSubsystem.process_input
|
def process_input(self, encoded_stream, value):
"""Process or drop a graph input.
This method asynchronously queues an item to be processed by the
sensorgraph worker task in _reset_vector. It must be called from
inside the emulation loop and returns immediately before the input is
processed.
"""
if not self.enabled:
return
if isinstance(encoded_stream, str):
stream = DataStream.FromString(encoded_stream)
encoded_stream = stream.encode()
elif isinstance(encoded_stream, DataStream):
stream = encoded_stream
encoded_stream = stream.encode()
else:
stream = DataStream.FromEncoded(encoded_stream)
reading = IOTileReading(self.get_timestamp(), encoded_stream, value)
self._inputs.put_nowait((stream, reading))
|
python
|
def process_input(self, encoded_stream, value):
"""Process or drop a graph input.
This method asynchronously queues an item to be processed by the
sensorgraph worker task in _reset_vector. It must be called from
inside the emulation loop and returns immediately before the input is
processed.
"""
if not self.enabled:
return
if isinstance(encoded_stream, str):
stream = DataStream.FromString(encoded_stream)
encoded_stream = stream.encode()
elif isinstance(encoded_stream, DataStream):
stream = encoded_stream
encoded_stream = stream.encode()
else:
stream = DataStream.FromEncoded(encoded_stream)
reading = IOTileReading(self.get_timestamp(), encoded_stream, value)
self._inputs.put_nowait((stream, reading))
|
[
"def",
"process_input",
"(",
"self",
",",
"encoded_stream",
",",
"value",
")",
":",
"if",
"not",
"self",
".",
"enabled",
":",
"return",
"if",
"isinstance",
"(",
"encoded_stream",
",",
"str",
")",
":",
"stream",
"=",
"DataStream",
".",
"FromString",
"(",
"encoded_stream",
")",
"encoded_stream",
"=",
"stream",
".",
"encode",
"(",
")",
"elif",
"isinstance",
"(",
"encoded_stream",
",",
"DataStream",
")",
":",
"stream",
"=",
"encoded_stream",
"encoded_stream",
"=",
"stream",
".",
"encode",
"(",
")",
"else",
":",
"stream",
"=",
"DataStream",
".",
"FromEncoded",
"(",
"encoded_stream",
")",
"reading",
"=",
"IOTileReading",
"(",
"self",
".",
"get_timestamp",
"(",
")",
",",
"encoded_stream",
",",
"value",
")",
"self",
".",
"_inputs",
".",
"put_nowait",
"(",
"(",
"stream",
",",
"reading",
")",
")"
] |
Process or drop a graph input.
This method asynchronously queues an item to be processed by the
sensorgraph worker task in _reset_vector. It must be called from
inside the emulation loop and returns immediately before the input is
processed.
|
[
"Process",
"or",
"drop",
"a",
"graph",
"input",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py#L196-L219
|
train
|
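process_input above accepts a stream as a string name, a DataStream object, or an already-encoded integer, and normalizes everything to an (object, encoded) pair. A runnable sketch of that three-way normalization using a hypothetical FakeStream stand-in:

class FakeStream:
    """Stand-in for DataStream with the same three entry points used above."""
    def __init__(self, encoded):
        self._encoded = encoded

    @classmethod
    def FromString(cls, string):
        return cls(hash(string) & 0xFFFF)  # toy encoding for the sketch

    @classmethod
    def FromEncoded(cls, encoded):
        return cls(encoded)

    def encode(self):
        return self._encoded

def normalize(encoded_stream):
    # Same branch order as process_input: str, stream object, then int.
    if isinstance(encoded_stream, str):
        stream = FakeStream.FromString(encoded_stream)
    elif isinstance(encoded_stream, FakeStream):
        stream = encoded_stream
    else:
        stream = FakeStream.FromEncoded(encoded_stream)
    return stream, stream.encode()

stream, encoded = normalize(0x5001)
assert encoded == 0x5001 and normalize(stream) == (stream, 0x5001)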
iotile/coretools
|
iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py
|
SensorGraphSubsystem._seek_streamer
|
def _seek_streamer(self, index, value):
"""Complex logic for actually seeking a streamer to a reading_id.
This routine hides all of the gnarly logic of the various edge cases.
In particular, the behavior depends on whether the reading id is found,
and if it is found, whether it belongs to the indicated streamer or not.
If not, the behavior depends on whether the sought reading is too high
or too low.
"""
highest_id = self._rsl.highest_stored_id()
streamer = self.graph.streamers[index]
if not streamer.walker.buffered:
return _pack_sgerror(SensorLogError.CANNOT_USE_UNBUFFERED_STREAM)
find_type = None
try:
exact = streamer.walker.seek(value, target='id')
if exact:
find_type = 'exact'
else:
find_type = 'other_stream'
except UnresolvedIdentifierError:
if value > highest_id:
find_type = 'too_high'
else:
find_type = 'too_low'
# If we found an exact match, move one beyond it
if find_type == 'exact':
try:
streamer.walker.pop()
except StreamEmptyError:
pass
error = Error.NO_ERROR
elif find_type == 'too_high':
streamer.walker.skip_all()
error = _pack_sgerror(SensorLogError.NO_MORE_READINGS)
elif find_type == 'too_low':
streamer.walker.seek(0, target='offset')
error = _pack_sgerror(SensorLogError.NO_MORE_READINGS)
else:
error = _pack_sgerror(SensorLogError.ID_FOUND_FOR_ANOTHER_STREAM)
return error
|
python
|
def _seek_streamer(self, index, value):
"""Complex logic for actually seeking a streamer to a reading_id.
This routine hides all of the gnarly logic of the various edge cases.
In particular, the behavior depends on whether the reading id is found,
and if it is found, whether it belongs to the indicated streamer or not.
If not, the behavior depends on whether the sought reading is too high
or too low.
"""
highest_id = self._rsl.highest_stored_id()
streamer = self.graph.streamers[index]
if not streamer.walker.buffered:
return _pack_sgerror(SensorLogError.CANNOT_USE_UNBUFFERED_STREAM)
find_type = None
try:
exact = streamer.walker.seek(value, target='id')
if exact:
find_type = 'exact'
else:
find_type = 'other_stream'
except UnresolvedIdentifierError:
if value > highest_id:
find_type = 'too_high'
else:
find_type = 'too_low'
# If we found an exact match, move one beyond it
if find_type == 'exact':
try:
streamer.walker.pop()
except StreamEmptyError:
pass
error = Error.NO_ERROR
elif find_type == 'too_high':
streamer.walker.skip_all()
error = _pack_sgerror(SensorLogError.NO_MORE_READINGS)
elif find_type == 'too_low':
streamer.walker.seek(0, target='offset')
error = _pack_sgerror(SensorLogError.NO_MORE_READINGS)
else:
error = _pack_sgerror(SensorLogError.ID_FOUND_FOR_ANOTHER_STREAM)
return error
|
[
"def",
"_seek_streamer",
"(",
"self",
",",
"index",
",",
"value",
")",
":",
"highest_id",
"=",
"self",
".",
"_rsl",
".",
"highest_stored_id",
"(",
")",
"streamer",
"=",
"self",
".",
"graph",
".",
"streamers",
"[",
"index",
"]",
"if",
"not",
"streamer",
".",
"walker",
".",
"buffered",
":",
"return",
"_pack_sgerror",
"(",
"SensorLogError",
".",
"CANNOT_USE_UNBUFFERED_STREAM",
")",
"find_type",
"=",
"None",
"try",
":",
"exact",
"=",
"streamer",
".",
"walker",
".",
"seek",
"(",
"value",
",",
"target",
"=",
"'id'",
")",
"if",
"exact",
":",
"find_type",
"=",
"'exact'",
"else",
":",
"find_type",
"=",
"'other_stream'",
"except",
"UnresolvedIdentifierError",
":",
"if",
"value",
">",
"highest_id",
":",
"find_type",
"=",
"'too_high'",
"else",
":",
"find_type",
"=",
"'too_low'",
"# If we found an exact match, move one beyond it",
"if",
"find_type",
"==",
"'exact'",
":",
"try",
":",
"streamer",
".",
"walker",
".",
"pop",
"(",
")",
"except",
"StreamEmptyError",
":",
"pass",
"error",
"=",
"Error",
".",
"NO_ERROR",
"elif",
"find_type",
"==",
"'too_high'",
":",
"streamer",
".",
"walker",
".",
"skip_all",
"(",
")",
"error",
"=",
"_pack_sgerror",
"(",
"SensorLogError",
".",
"NO_MORE_READINGS",
")",
"elif",
"find_type",
"==",
"'too_low'",
":",
"streamer",
".",
"walker",
".",
"seek",
"(",
"0",
",",
"target",
"=",
"'offset'",
")",
"error",
"=",
"_pack_sgerror",
"(",
"SensorLogError",
".",
"NO_MORE_READINGS",
")",
"else",
":",
"error",
"=",
"_pack_sgerror",
"(",
"SensorLogError",
".",
"ID_FOUND_FOR_ANOTHER_STREAM",
")",
"return",
"error"
] |
Complex logic for actually seeking a streamer to a reading_id.
This routine hides all of the gnarly logic of the various edge cases.
In particular, the behavior depends on whether the reading id is found,
and if it is found, whether it belongs to the indicated streamer or not.
If not, the behavior depends on whether the sought reading is too high
or too low.
|
[
"Complex",
"logic",
"for",
"actually",
"seeking",
"a",
"streamer",
"to",
"a",
"reading_id",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py#L221-L270
|
train
|
iotile/coretools
|
iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py
|
SensorGraphSubsystem.acknowledge_streamer
|
def acknowledge_streamer(self, index, ack, force):
"""Acknowledge a streamer value as received from the remote side."""
if index >= len(self.graph.streamers):
return _pack_sgerror(SensorGraphError.STREAMER_NOT_ALLOCATED)
old_ack = self.streamer_acks.get(index, 0)
if ack != 0:
if ack <= old_ack and not force:
return _pack_sgerror(SensorGraphError.OLD_ACKNOWLEDGE_UPDATE)
self.streamer_acks[index] = ack
current_ack = self.streamer_acks.get(index, 0)
return self._seek_streamer(index, current_ack)
|
python
|
def acknowledge_streamer(self, index, ack, force):
"""Acknowledge a streamer value as received from the remote side."""
if index >= len(self.graph.streamers):
return _pack_sgerror(SensorGraphError.STREAMER_NOT_ALLOCATED)
old_ack = self.streamer_acks.get(index, 0)
if ack != 0:
if ack <= old_ack and not force:
return _pack_sgerror(SensorGraphError.OLD_ACKNOWLEDGE_UPDATE)
self.streamer_acks[index] = ack
current_ack = self.streamer_acks.get(index, 0)
return self._seek_streamer(index, current_ack)
|
[
"def",
"acknowledge_streamer",
"(",
"self",
",",
"index",
",",
"ack",
",",
"force",
")",
":",
"if",
"index",
">=",
"len",
"(",
"self",
".",
"graph",
".",
"streamers",
")",
":",
"return",
"_pack_sgerror",
"(",
"SensorGraphError",
".",
"STREAMER_NOT_ALLOCATED",
")",
"old_ack",
"=",
"self",
".",
"streamer_acks",
".",
"get",
"(",
"index",
",",
"0",
")",
"if",
"ack",
"!=",
"0",
":",
"if",
"ack",
"<=",
"old_ack",
"and",
"not",
"force",
":",
"return",
"_pack_sgerror",
"(",
"SensorGraphError",
".",
"OLD_ACKNOWLEDGE_UPDATE",
")",
"self",
".",
"streamer_acks",
"[",
"index",
"]",
"=",
"ack",
"current_ack",
"=",
"self",
".",
"streamer_acks",
".",
"get",
"(",
"index",
",",
"0",
")",
"return",
"self",
".",
"_seek_streamer",
"(",
"index",
",",
"current_ack",
")"
] |
Acknowledge a streamer value as received from the remote side.
|
[
"Acknowledge",
"a",
"streamer",
"value",
"as",
"received",
"from",
"the",
"remote",
"side",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py#L272-L287
|
train
|
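The heart of acknowledge_streamer is a monotonic-ack rule: a nonzero ack must be strictly newer than the stored one unless force is set. A distilled, runnable sketch (the seek side effect is omitted):

def update_ack(acks, index, ack, force=False):
    """Return True if the ack was accepted, False if rejected as stale."""
    old_ack = acks.get(index, 0)
    if ack != 0:
        if ack <= old_ack and not force:
            return False  # OLD_ACKNOWLEDGE_UPDATE in the original
        acks[index] = ack
    return True

acks = {}
assert update_ack(acks, 0, 10)             # first ack accepted
assert not update_ack(acks, 0, 5)          # stale ack rejected
assert update_ack(acks, 0, 5, force=True)  # force rewinds the ack
assert acks[0] == 5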
iotile/coretools
|
iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py
|
SensorGraphSubsystem._handle_streamer_finished
|
def _handle_streamer_finished(self, index, succeeded, highest_ack):
"""Callback when a streamer finishes processing."""
self._logger.debug("Rolling back streamer %d after streaming, highest ack from streaming subsystem was %d", index, highest_ack)
self.acknowledge_streamer(index, highest_ack, False)
|
python
|
def _handle_streamer_finished(self, index, succeeded, highest_ack):
"""Callback when a streamer finishes processing."""
self._logger.debug("Rolling back streamer %d after streaming, highest ack from streaming subsystem was %d", index, highest_ack)
self.acknowledge_streamer(index, highest_ack, False)
|
[
"def",
"_handle_streamer_finished",
"(",
"self",
",",
"index",
",",
"succeeded",
",",
"highest_ack",
")",
":",
"self",
".",
"_logger",
".",
"debug",
"(",
"\"Rolling back streamer %d after streaming, highest ack from streaming subsystem was %d\"",
",",
"index",
",",
"highest_ack",
")",
"self",
".",
"acknowledge_streamer",
"(",
"index",
",",
"highest_ack",
",",
"False",
")"
] |
Callback when a streamer finishes processing.
|
[
"Callback",
"when",
"a",
"streamer",
"finishes",
"processing",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py#L289-L293
|
train
|
iotile/coretools
|
iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py
|
SensorGraphSubsystem.process_streamers
|
def process_streamers(self):
"""Check if any streamers should be handed to the stream manager."""
# Check for any triggered streamers and pass them to stream manager
in_progress = self._stream_manager.in_progress()
triggered = self.graph.check_streamers(blacklist=in_progress)
for streamer in triggered:
self._stream_manager.process_streamer(streamer, callback=self._handle_streamer_finished)
|
python
|
def process_streamers(self):
"""Check if any streamers should be handed to the stream manager."""
# Check for any triggered streamers and pass them to stream manager
in_progress = self._stream_manager.in_progress()
triggered = self.graph.check_streamers(blacklist=in_progress)
for streamer in triggered:
self._stream_manager.process_streamer(streamer, callback=self._handle_streamer_finished)
|
[
"def",
"process_streamers",
"(",
"self",
")",
":",
"# Check for any triggered streamers and pass them to stream manager",
"in_progress",
"=",
"self",
".",
"_stream_manager",
".",
"in_progress",
"(",
")",
"triggered",
"=",
"self",
".",
"graph",
".",
"check_streamers",
"(",
"blacklist",
"=",
"in_progress",
")",
"for",
"streamer",
"in",
"triggered",
":",
"self",
".",
"_stream_manager",
".",
"process_streamer",
"(",
"streamer",
",",
"callback",
"=",
"self",
".",
"_handle_streamer_finished",
")"
] |
Check if any streamers should be handed to the stream manager.
|
[
"Check",
"if",
"any",
"streamers",
"should",
"be",
"handed",
"to",
"the",
"stream",
"manager",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py#L295-L303
|
train
|
iotile/coretools
|
iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py
|
SensorGraphSubsystem.trigger_streamer
|
def trigger_streamer(self, index):
"""Pass a streamer to the stream manager if it has data."""
self._logger.debug("trigger_streamer RPC called on streamer %d", index)
if index >= len(self.graph.streamers):
return _pack_sgerror(SensorGraphError.STREAMER_NOT_ALLOCATED)
if index in self._stream_manager.in_progress():
return _pack_sgerror(SensorGraphError.STREAM_ALREADY_IN_PROGRESS)
streamer = self.graph.streamers[index]
if not streamer.triggered(manual=True):
return _pack_sgerror(SensorGraphError.STREAMER_HAS_NO_NEW_DATA)
self._logger.debug("calling mark_streamer on streamer %d from trigger_streamer RPC", index)
self.graph.mark_streamer(index)
self.process_streamers()
return Error.NO_ERROR
|
python
|
def trigger_streamer(self, index):
"""Pass a streamer to the stream manager if it has data."""
self._logger.debug("trigger_streamer RPC called on streamer %d", index)
if index >= len(self.graph.streamers):
return _pack_sgerror(SensorGraphError.STREAMER_NOT_ALLOCATED)
if index in self._stream_manager.in_progress():
return _pack_sgerror(SensorGraphError.STREAM_ALREADY_IN_PROGRESS)
streamer = self.graph.streamers[index]
if not streamer.triggered(manual=True):
return _pack_sgerror(SensorGraphError.STREAMER_HAS_NO_NEW_DATA)
self._logger.debug("calling mark_streamer on streamer %d from trigger_streamer RPC", index)
self.graph.mark_streamer(index)
self.process_streamers()
return Error.NO_ERROR
|
[
"def",
"trigger_streamer",
"(",
"self",
",",
"index",
")",
":",
"self",
".",
"_logger",
".",
"debug",
"(",
"\"trigger_streamer RPC called on streamer %d\"",
",",
"index",
")",
"if",
"index",
">=",
"len",
"(",
"self",
".",
"graph",
".",
"streamers",
")",
":",
"return",
"_pack_sgerror",
"(",
"SensorGraphError",
".",
"STREAMER_NOT_ALLOCATED",
")",
"if",
"index",
"in",
"self",
".",
"_stream_manager",
".",
"in_progress",
"(",
")",
":",
"return",
"_pack_sgerror",
"(",
"SensorGraphError",
".",
"STREAM_ALREADY_IN_PROGRESS",
")",
"streamer",
"=",
"self",
".",
"graph",
".",
"streamers",
"[",
"index",
"]",
"if",
"not",
"streamer",
".",
"triggered",
"(",
"manual",
"=",
"True",
")",
":",
"return",
"_pack_sgerror",
"(",
"SensorGraphError",
".",
"STREAMER_HAS_NO_NEW_DATA",
")",
"self",
".",
"_logger",
".",
"debug",
"(",
"\"calling mark_streamer on streamer %d from trigger_streamer RPC\"",
",",
"index",
")",
"self",
".",
"graph",
".",
"mark_streamer",
"(",
"index",
")",
"self",
".",
"process_streamers",
"(",
")",
"return",
"Error",
".",
"NO_ERROR"
] |
Pass a streamer to the stream manager if it has data.
|
[
"Pass",
"a",
"streamer",
"to",
"the",
"stream",
"manager",
"if",
"it",
"has",
"data",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py#L305-L325
|
train
|
iotile/coretools
|
iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py
|
SensorGraphSubsystem.persist
|
def persist(self):
"""Trigger saving the current sensorgraph to persistent storage."""
self.persisted_nodes = self.graph.dump_nodes()
self.persisted_streamers = self.graph.dump_streamers()
self.persisted_exists = True
self.persisted_constants = self._sensor_log.dump_constants()
|
python
|
def persist(self):
"""Trigger saving the current sensorgraph to persistent storage."""
self.persisted_nodes = self.graph.dump_nodes()
self.persisted_streamers = self.graph.dump_streamers()
self.persisted_exists = True
self.persisted_constants = self._sensor_log.dump_constants()
|
[
"def",
"persist",
"(",
"self",
")",
":",
"self",
".",
"persisted_nodes",
"=",
"self",
".",
"graph",
".",
"dump_nodes",
"(",
")",
"self",
".",
"persisted_streamers",
"=",
"self",
".",
"graph",
".",
"dump_streamers",
"(",
")",
"self",
".",
"persisted_exists",
"=",
"True",
"self",
".",
"persisted_constants",
"=",
"self",
".",
"_sensor_log",
".",
"dump_constants",
"(",
")"
] |
Trigger saving the current sensorgraph to persistent storage.
|
[
"Trigger",
"saving",
"the",
"current",
"sensorgraph",
"to",
"persistent",
"storage",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py#L332-L338
|
train
|
iotile/coretools
|
iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py
|
SensorGraphSubsystem.reset
|
def reset(self):
"""Clear the sensorgraph from RAM and flash."""
self.persisted_exists = False
self.persisted_nodes = []
self.persisted_streamers = []
self.persisted_constants = []
self.graph.clear()
self.streamer_status = {}
|
python
|
def reset(self):
"""Clear the sensorgraph from RAM and flash."""
self.persisted_exists = False
self.persisted_nodes = []
self.persisted_streamers = []
self.persisted_constants = []
self.graph.clear()
self.streamer_status = {}
|
[
"def",
"reset",
"(",
"self",
")",
":",
"self",
".",
"persisted_exists",
"=",
"False",
"self",
".",
"persisted_nodes",
"=",
"[",
"]",
"self",
".",
"persisted_streamers",
"=",
"[",
"]",
"self",
".",
"persisted_constants",
"=",
"[",
"]",
"self",
".",
"graph",
".",
"clear",
"(",
")",
"self",
".",
"streamer_status",
"=",
"{",
"}"
] |
Clear the sensorgraph from RAM and flash.
|
[
"Clear",
"the",
"sensorgraph",
"from",
"RAM",
"and",
"flash",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py#L340-L349
|
train
|
iotile/coretools
|
iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py
|
SensorGraphSubsystem.add_node
|
def add_node(self, binary_descriptor):
"""Add a node to the sensor_graph using a binary node descriptor.
Args:
binary_descriptor (bytes): An encoded binary node descriptor.
Returns:
int: A packed error code.
"""
try:
node_string = parse_binary_descriptor(binary_descriptor)
except:
self._logger.exception("Error parsing binary node descriptor: %s", binary_descriptor)
return _pack_sgerror(SensorGraphError.INVALID_NODE_STREAM) # FIXME: Actually provide the correct error codes here
try:
self.graph.add_node(node_string)
except NodeConnectionError:
return _pack_sgerror(SensorGraphError.STREAM_NOT_IN_USE)
except ProcessingFunctionError:
return _pack_sgerror(SensorGraphError.INVALID_PROCESSING_FUNCTION)
except ResourceUsageError:
return _pack_sgerror(SensorGraphError.NO_NODE_SPACE_AVAILABLE)
return Error.NO_ERROR
|
python
|
def add_node(self, binary_descriptor):
"""Add a node to the sensor_graph using a binary node descriptor.
Args:
binary_descriptor (bytes): An encoded binary node descriptor.
Returns:
int: A packed error code.
"""
try:
node_string = parse_binary_descriptor(binary_descriptor)
except:
self._logger.exception("Error parsing binary node descriptor: %s", binary_descriptor)
return _pack_sgerror(SensorGraphError.INVALID_NODE_STREAM) # FIXME: Actually provide the correct error codes here
try:
self.graph.add_node(node_string)
except NodeConnectionError:
return _pack_sgerror(SensorGraphError.STREAM_NOT_IN_USE)
except ProcessingFunctionError:
return _pack_sgerror(SensorGraphError.INVALID_PROCESSING_FUNCTION)
except ResourceUsageError:
return _pack_sgerror(SensorGraphError.NO_NODE_SPACE_AVAILABLE)
return Error.NO_ERROR
|
[
"def",
"add_node",
"(",
"self",
",",
"binary_descriptor",
")",
":",
"try",
":",
"node_string",
"=",
"parse_binary_descriptor",
"(",
"binary_descriptor",
")",
"except",
":",
"self",
".",
"_logger",
".",
"exception",
"(",
"\"Error parsing binary node descriptor: %s\"",
",",
"binary_descriptor",
")",
"return",
"_pack_sgerror",
"(",
"SensorGraphError",
".",
"INVALID_NODE_STREAM",
")",
"# FIXME: Actually provide the correct error codes here",
"try",
":",
"self",
".",
"graph",
".",
"add_node",
"(",
"node_string",
")",
"except",
"NodeConnectionError",
":",
"return",
"_pack_sgerror",
"(",
"SensorGraphError",
".",
"STREAM_NOT_IN_USE",
")",
"except",
"ProcessingFunctionError",
":",
"return",
"_pack_sgerror",
"(",
"SensorGraphError",
".",
"INVALID_PROCESSING_FUNCTION",
")",
"except",
"ResourceUsageError",
":",
"return",
"_pack_sgerror",
"(",
"SensorGraphError",
".",
"NO_NODE_SPACE_AVAILABLE",
")",
"return",
"Error",
".",
"NO_ERROR"
] |
Add a node to the sensor_graph using a binary node descriptor.
Args:
binary_descriptor (bytes): An encoded binary node descriptor.
Returns:
int: A packed error code.
|
[
"Add",
"a",
"node",
"to",
"the",
"sensor_graph",
"using",
"a",
"binary",
"node",
"descriptor",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py#L351-L376
|
train
|
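The except ladder in add_node maps each parse or insert failure onto a packed error code. The same mapping can be written as a table, which keeps the ladder from growing with every new exception type; a sketch with illustrative exception classes and code values (the real values come from _pack_sgerror):

class NodeConnectionError(Exception): pass
class ProcessingFunctionError(Exception): pass
class ResourceUsageError(Exception): pass

# Illustrative packed codes, not the real SensorGraphError values.
ERROR_CODES = {
    NodeConnectionError: 0x8001,      # STREAM_NOT_IN_USE
    ProcessingFunctionError: 0x8002,  # INVALID_PROCESSING_FUNCTION
    ResourceUsageError: 0x8003,       # NO_NODE_SPACE_AVAILABLE
}

def add_node_safely(add_node, node_string):
    try:
        add_node(node_string)
    except tuple(ERROR_CODES) as exc:
        return ERROR_CODES[type(exc)]
    return 0  # NO_ERROR

def always_full(_):
    raise ResourceUsageError()

assert add_node_safely(always_full, "node") == 0x8003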
iotile/coretools
|
iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py
|
SensorGraphSubsystem.add_streamer
|
def add_streamer(self, binary_descriptor):
"""Add a streamer to the sensor_graph using a binary streamer descriptor.
Args:
binary_descriptor (bytes): An encoded binary streamer descriptor.
Returns:
int: A packed error code
"""
streamer = streamer_descriptor.parse_binary_descriptor(binary_descriptor)
try:
self.graph.add_streamer(streamer)
self.streamer_status[len(self.graph.streamers) - 1] = StreamerStatus()
return Error.NO_ERROR
except ResourceUsageError:
return _pack_sgerror(SensorGraphError.NO_MORE_STREAMER_RESOURCES)
|
python
|
def add_streamer(self, binary_descriptor):
"""Add a streamer to the sensor_graph using a binary streamer descriptor.
Args:
binary_descriptor (bytes): An encoded binary streamer descriptor.
Returns:
int: A packed error code
"""
streamer = streamer_descriptor.parse_binary_descriptor(binary_descriptor)
try:
self.graph.add_streamer(streamer)
self.streamer_status[len(self.graph.streamers) - 1] = StreamerStatus()
return Error.NO_ERROR
except ResourceUsageError:
return _pack_sgerror(SensorGraphError.NO_MORE_STREAMER_RESOURCES)
|
[
"def",
"add_streamer",
"(",
"self",
",",
"binary_descriptor",
")",
":",
"streamer",
"=",
"streamer_descriptor",
".",
"parse_binary_descriptor",
"(",
"binary_descriptor",
")",
"try",
":",
"self",
".",
"graph",
".",
"add_streamer",
"(",
"streamer",
")",
"self",
".",
"streamer_status",
"[",
"len",
"(",
"self",
".",
"graph",
".",
"streamers",
")",
"-",
"1",
"]",
"=",
"StreamerStatus",
"(",
")",
"return",
"Error",
".",
"NO_ERROR",
"except",
"ResourceUsageError",
":",
"return",
"_pack_sgerror",
"(",
"SensorGraphError",
".",
"NO_MORE_STREAMER_RESOURCES",
")"
] |
Add a streamer to the sensor_graph using a binary streamer descriptor.
Args:
binary_descriptor (bytes): An encoded binary streamer descriptor.
Returns:
int: A packed error code
|
[
"Add",
"a",
"streamer",
"to",
"the",
"sensor_graph",
"using",
"a",
"binary",
"streamer",
"descriptor",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py#L378-L396
|
train
|
iotile/coretools
|
iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py
|
SensorGraphSubsystem.inspect_streamer
|
def inspect_streamer(self, index):
"""Inspect the streamer at the given index."""
if index >= len(self.graph.streamers):
return [_pack_sgerror(SensorGraphError.STREAMER_NOT_ALLOCATED), b'\0'*14]
return [Error.NO_ERROR, streamer_descriptor.create_binary_descriptor(self.graph.streamers[index])]
|
python
|
def inspect_streamer(self, index):
"""Inspect the streamer at the given index."""
if index >= len(self.graph.streamers):
return [_pack_sgerror(SensorGraphError.STREAMER_NOT_ALLOCATED), b'\0'*14]
return [Error.NO_ERROR, streamer_descriptor.create_binary_descriptor(self.graph.streamers[index])]
|
[
"def",
"inspect_streamer",
"(",
"self",
",",
"index",
")",
":",
"if",
"index",
">=",
"len",
"(",
"self",
".",
"graph",
".",
"streamers",
")",
":",
"return",
"[",
"_pack_sgerror",
"(",
"SensorGraphError",
".",
"STREAMER_NOT_ALLOCATED",
")",
",",
"b'\\0'",
"*",
"14",
"]",
"return",
"[",
"Error",
".",
"NO_ERROR",
",",
"streamer_descriptor",
".",
"create_binary_descriptor",
"(",
"self",
".",
"graph",
".",
"streamers",
"[",
"index",
"]",
")",
"]"
] |
Inspect the streamer at the given index.
|
[
"Inspect",
"the",
"streamer",
"at",
"the",
"given",
"index",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py#L398-L404
|
train
|
iotile/coretools
|
iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py
|
SensorGraphSubsystem.inspect_node
|
def inspect_node(self, index):
"""Inspect the graph node at the given index."""
if index >= len(self.graph.nodes):
raise RPCErrorCode(6) #FIXME: use actual error code here for UNKNOWN_ERROR status
return create_binary_descriptor(str(self.graph.nodes[index]))
|
python
|
def inspect_node(self, index):
"""Inspect the graph node at the given index."""
if index >= len(self.graph.nodes):
raise RPCErrorCode(6) #FIXME: use actual error code here for UNKNOWN_ERROR status
return create_binary_descriptor(str(self.graph.nodes[index]))
|
[
"def",
"inspect_node",
"(",
"self",
",",
"index",
")",
":",
"if",
"index",
">=",
"len",
"(",
"self",
".",
"graph",
".",
"nodes",
")",
":",
"raise",
"RPCErrorCode",
"(",
"6",
")",
"#FIXME: use actual error code here for UNKNOWN_ERROR status",
"return",
"create_binary_descriptor",
"(",
"str",
"(",
"self",
".",
"graph",
".",
"nodes",
"[",
"index",
"]",
")",
")"
] |
Inspect the graph node at the given index.
|
[
"Inspect",
"the",
"graph",
"node",
"at",
"the",
"given",
"index",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py#L406-L412
|
train
|
iotile/coretools
|
iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py
|
SensorGraphSubsystem.query_streamer
|
def query_streamer(self, index):
"""Query the status of the streamer at the given index."""
if index >= len(self.graph.streamers):
return None
info = self.streamer_status[index]
highest_ack = self.streamer_acks.get(index, 0)
return [info.last_attempt_time, info.last_success_time, info.last_error, highest_ack, info.last_status, info.attempt_number, info.comm_status]
|
python
|
def query_streamer(self, index):
"""Query the status of the streamer at the given index."""
if index >= len(self.graph.streamers):
return None
info = self.streamer_status[index]
highest_ack = self.streamer_acks.get(index, 0)
return [info.last_attempt_time, info.last_success_time, info.last_error, highest_ack, info.last_status, info.attempt_number, info.comm_status]
|
[
"def",
"query_streamer",
"(",
"self",
",",
"index",
")",
":",
"if",
"index",
">=",
"len",
"(",
"self",
".",
"graph",
".",
"streamers",
")",
":",
"return",
"None",
"info",
"=",
"self",
".",
"streamer_status",
"[",
"index",
"]",
"highest_ack",
"=",
"self",
".",
"streamer_acks",
".",
"get",
"(",
"index",
",",
"0",
")",
"return",
"[",
"info",
".",
"last_attempt_time",
",",
"info",
".",
"last_success_time",
",",
"info",
".",
"last_error",
",",
"highest_ack",
",",
"info",
".",
"last_status",
",",
"info",
".",
"attempt_number",
",",
"info",
".",
"comm_status",
"]"
] |
Query the status of the streamer at the given index.
|
[
"Query",
"the",
"status",
"of",
"the",
"streamer",
"at",
"the",
"given",
"index",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py#L414-L423
|
train
|
iotile/coretools
|
iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py
|
SensorGraphMixin.sg_graph_input
|
def sg_graph_input(self, value, stream_id):
""""Present a graph input to the sensor_graph subsystem."""
self.sensor_graph.process_input(stream_id, value)
return [Error.NO_ERROR]
|
python
|
def sg_graph_input(self, value, stream_id):
""""Present a graph input to the sensor_graph subsystem."""
self.sensor_graph.process_input(stream_id, value)
return [Error.NO_ERROR]
|
[
"def",
"sg_graph_input",
"(",
"self",
",",
"value",
",",
"stream_id",
")",
":",
"self",
".",
"sensor_graph",
".",
"process_input",
"(",
"stream_id",
",",
"value",
")",
"return",
"[",
"Error",
".",
"NO_ERROR",
"]"
] |
Present a graph input to the sensor_graph subsystem.
|
[
"Present",
"a",
"graph",
"input",
"to",
"the",
"sensor_graph",
"subsystem",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py#L461-L465
|
train
|
iotile/coretools
|
iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py
|
SensorGraphMixin.sg_add_streamer
|
def sg_add_streamer(self, desc):
"""Add a graph streamer using a binary descriptor."""
if len(desc) == 13:
desc += b'\0'
err = self.sensor_graph.add_streamer(desc)
return [err]
|
python
|
def sg_add_streamer(self, desc):
"""Add a graph streamer using a binary descriptor."""
if len(desc) == 13:
desc += b'\0'
err = self.sensor_graph.add_streamer(desc)
return [err]
|
[
"def",
"sg_add_streamer",
"(",
"self",
",",
"desc",
")",
":",
"if",
"len",
"(",
"desc",
")",
"==",
"13",
":",
"desc",
"+=",
"b'\\0'",
"err",
"=",
"self",
".",
"sensor_graph",
".",
"add_streamer",
"(",
"desc",
")",
"return",
"[",
"err",
"]"
] |
Add a graph streamer using a binary descriptor.
|
[
"Add",
"a",
"graph",
"streamer",
"using",
"a",
"binary",
"descriptor",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py#L488-L495
|
train
|
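The length check in sg_add_streamer quietly pads a 13-byte descriptor to 14 bytes with a trailing NUL before parsing; presumably this tolerates an older, shorter descriptor format (an assumption, the source does not say). The padding itself is just:

desc = b'\x01' * 13   # illustrative 13-byte descriptor
if len(desc) == 13:
    desc += b'\0'     # pad to the 14-byte layout the parser expects
assert len(desc) == 14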
iotile/coretools
|
iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py
|
SensorGraphMixin.sg_seek_streamer
|
def sg_seek_streamer(self, index, force, value):
"""Ackowledge a streamer."""
force = bool(force)
err = self.sensor_graph.acknowledge_streamer(index, value, force)
return [err]
|
python
|
def sg_seek_streamer(self, index, force, value):
"""Ackowledge a streamer."""
force = bool(force)
err = self.sensor_graph.acknowledge_streamer(index, value, force)
return [err]
|
[
"def",
"sg_seek_streamer",
"(",
"self",
",",
"index",
",",
"force",
",",
"value",
")",
":",
"force",
"=",
"bool",
"(",
"force",
")",
"err",
"=",
"self",
".",
"sensor_graph",
".",
"acknowledge_streamer",
"(",
"index",
",",
"value",
",",
"force",
")",
"return",
"[",
"err",
"]"
] |
Acknowledge a streamer.
|
[
"Ackowledge",
"a",
"streamer",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py#L512-L517
|
train
|
iotile/coretools
|
iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py
|
SensorGraphMixin.sg_query_streamer
|
def sg_query_streamer(self, index):
"""Query the current status of a streamer."""
resp = self.sensor_graph.query_streamer(index)
if resp is None:
return [struct.pack("<L", _pack_sgerror(SensorGraphError.STREAMER_NOT_ALLOCATED))]
return [struct.pack("<LLLLBBBx", *resp)]
|
python
|
def sg_query_streamer(self, index):
"""Query the current status of a streamer."""
resp = self.sensor_graph.query_streamer(index)
if resp is None:
return [struct.pack("<L", _pack_sgerror(SensorGraphError.STREAMER_NOT_ALLOCATED))]
return [struct.pack("<LLLLBBBx", *resp)]
|
[
"def",
"sg_query_streamer",
"(",
"self",
",",
"index",
")",
":",
"resp",
"=",
"self",
".",
"sensor_graph",
".",
"query_streamer",
"(",
"index",
")",
"if",
"resp",
"is",
"None",
":",
"return",
"[",
"struct",
".",
"pack",
"(",
"\"<L\"",
",",
"_pack_sgerror",
"(",
"SensorGraphError",
".",
"STREAMER_NOT_ALLOCATED",
")",
")",
"]",
"return",
"[",
"struct",
".",
"pack",
"(",
"\"<LLLLBBBx\"",
",",
"*",
"resp",
")",
"]"
] |
Query the current status of a streamer.
|
[
"Query",
"the",
"current",
"status",
"of",
"a",
"streamer",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/reference/controller_features/sensor_graph.py#L520-L527
|
train
|
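The struct format "<LLLLBBBx" above packs the seven status fields into a fixed 20-byte record: four little-endian uint32s, three uint8s, and one pad byte. A quick round-trip check:

import struct

resp = (100, 95, 0, 42, 1, 3, 0)  # illustrative streamer status tuple
packed = struct.pack("<LLLLBBBx", *resp)
assert len(packed) == 4 * 4 + 3 * 1 + 1  # 20 bytes including the pad
assert struct.unpack("<LLLLBBBx", packed) == resp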
iotile/coretools
|
iotilecore/iotile/core/utilities/workqueue_thread.py
|
WorkQueueThread.dispatch
|
def dispatch(self, value, callback=None):
"""Dispatch an item to the workqueue and optionally wait.
This is the only way to add work to the background work queue. Unless
you also pass a callback object, this method will synchronously wait
for the work to finish and return the result. If the work raises an
exception, the exception will be reraised in this method.
If you pass an optional callback(exc_info, return_value), this method
will not block and instead your callback will be called when the work
finishes. If an exception was raised during processing, exc_info will
be set with the contents of sys.exc_info(). Otherwise, exc_info will
be None and whatever the work_queue handler returned will be passed as
the return_value parameter to the supplied callback.
Args:
value (object): Arbitrary object that will be passed to the work
queue handler.
callback (callable): Optional callback to receive the result of
the work queue when it finishes. If not passed, this method
will be synchronous and return the result from the dispatch()
method itself
Returns:
object: The result of the work_queue handler function or None.
If callback is not None, then this method will return immediately
with a None return value. Otherwise it will block until the work
item is finished (including any work items ahead in the queue) and
return whatever the work item handler returned.
"""
done = None
if callback is None:
done = threading.Event()
shared_data = [None, None]
def _callback(exc_info, return_value):
shared_data[0] = exc_info
shared_data[1] = return_value
done.set()
callback = _callback
workitem = WorkItem(value, callback)
self._work_queue.put(workitem)
if done is None:
return None
done.wait()
exc_info, return_value = shared_data
if exc_info is not None:
self.future_raise(*exc_info)
return return_value
|
python
|
def dispatch(self, value, callback=None):
"""Dispatch an item to the workqueue and optionally wait.
This is the only way to add work to the background work queue. Unless
you also pass a callback object, this method will synchronously wait
for the work to finish and return the result. If the work raises an
exception, the exception will be reraised in this method.
If you pass an optional callback(exc_info, return_value), this method
will not block and instead your callback will be called when the work
finishes. If an exception was raised during processing, exc_info will
be set with the contents of sys.exc_info(). Otherwise, exc_info will
be None and whatever the work_queue handler returned will be passed as
the return_value parameter to the supplied callback.
Args:
value (object): Arbitrary object that will be passed to the work
queue handler.
callback (callable): Optional callback to receive the result of
the work queue when it finishes. If not passed, this method
will be synchronous and return the result from the dispatch()
method itself
Returns:
object: The result of the work_queue handler function or None.
If callback is not None, then this method will return immediately
with a None return value. Otherwise it will block until the work
item is finished (including any work items ahead in the queue) and
return whatever the work item handler returned.
"""
done = None
if callback is None:
done = threading.Event()
shared_data = [None, None]
def _callback(exc_info, return_value):
shared_data[0] = exc_info
shared_data[1] = return_value
done.set()
callback = _callback
workitem = WorkItem(value, callback)
self._work_queue.put(workitem)
if done is None:
return None
done.wait()
exc_info, return_value = shared_data
if exc_info is not None:
self.future_raise(*exc_info)
return return_value
|
[
"def",
"dispatch",
"(",
"self",
",",
"value",
",",
"callback",
"=",
"None",
")",
":",
"done",
"=",
"None",
"if",
"callback",
"is",
"None",
":",
"done",
"=",
"threading",
".",
"Event",
"(",
")",
"shared_data",
"=",
"[",
"None",
",",
"None",
"]",
"def",
"_callback",
"(",
"exc_info",
",",
"return_value",
")",
":",
"shared_data",
"[",
"0",
"]",
"=",
"exc_info",
"shared_data",
"[",
"1",
"]",
"=",
"return_value",
"done",
".",
"set",
"(",
")",
"callback",
"=",
"_callback",
"workitem",
"=",
"WorkItem",
"(",
"value",
",",
"callback",
")",
"self",
".",
"_work_queue",
".",
"put",
"(",
"workitem",
")",
"if",
"done",
"is",
"None",
":",
"return",
"None",
"done",
".",
"wait",
"(",
")",
"exc_info",
",",
"return_value",
"=",
"shared_data",
"if",
"exc_info",
"is",
"not",
"None",
":",
"self",
".",
"future_raise",
"(",
"*",
"exc_info",
")",
"return",
"return_value"
] |
Dispatch an item to the workqueue and optionally wait.
This is the only way to add work to the background work queue. Unless
you also pass a callback object, this method will synchronously wait
for the work to finish and return the result. If the work raises an
exception, the exception will be reraised in this method.
If you pass an optional callback(exc_info, return_value), this method
will not block and instead your callback will be called when the work
finishes. If an exception was raised during processing, exc_info will
be set with the contents of sys.exc_info(). Otherwise, exc_info will
be None and whatever the work_queue handler returned will be passed as
the return_value parameter to the supplied callback.
Args:
value (object): Arbitrary object that will be passed to the work
queue handler.
callback (callable): Optional callback to receive the result of
the work queue when it finishes. If not passed, this method
will be synchronous and return the result from the dispatch()
method itself
Returns:
object: The result of the work_queue handler function or None.
If callback is not None, then this method will return immediately
with a None return value. Otherwise it will block until the work
item is finished (including any work items ahead in the queue) and
return whatever the work item handler returned.
|
[
"Dispatch",
"an",
"item",
"to",
"the",
"workqueue",
"and",
"optionally",
"wait",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/workqueue_thread.py#L85-L142
|
train
|
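When no callback is supplied, dispatch() manufactures one around a threading.Event and blocks on it, turning the asynchronous queue into a synchronous call. A standalone sketch of that rendezvous trick (a bare worker thread stands in for the work queue; error plumbing is omitted):

import threading

def run_with_optional_callback(work, value, callback=None):
    """Sketch of dispatch(): block only when the caller gave no callback."""
    done = None
    shared = [None, None]  # [exc_info, return_value], as in the original
    if callback is None:
        done = threading.Event()
        def callback(exc_info, return_value):
            shared[0], shared[1] = exc_info, return_value
            done.set()
    # A plain thread stands in for the background work queue here.
    threading.Thread(target=lambda: callback(None, work(value))).start()
    if done is None:
        return None  # asynchronous path: the caller's callback fires later
    done.wait()
    return shared[1]

assert run_with_optional_callback(lambda x: x * 2, 21) == 42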
iotile/coretools
|
iotilecore/iotile/core/utilities/workqueue_thread.py
|
WorkQueueThread.future_raise
|
def future_raise(self, tp, value=None, tb=None):
"""raise_ implementation from future.utils"""
if value is not None and isinstance(tp, Exception):
raise TypeError("instance exception may not have a separate value")
if value is not None:
exc = tp(value)
else:
exc = tp
if exc.__traceback__ is not tb:
raise exc.with_traceback(tb)
raise exc
|
python
|
def future_raise(self, tp, value=None, tb=None):
"""raise_ implementation from future.utils"""
if value is not None and isinstance(tp, Exception):
raise TypeError("instance exception may not have a separate value")
if value is not None:
exc = tp(value)
else:
exc = tp
if exc.__traceback__ is not tb:
raise exc.with_traceback(tb)
raise exc
|
[
"def",
"future_raise",
"(",
"self",
",",
"tp",
",",
"value",
"=",
"None",
",",
"tb",
"=",
"None",
")",
":",
"if",
"value",
"is",
"not",
"None",
"and",
"isinstance",
"(",
"tp",
",",
"Exception",
")",
":",
"raise",
"TypeError",
"(",
"\"instance exception may not have a separate value\"",
")",
"if",
"value",
"is",
"not",
"None",
":",
"exc",
"=",
"tp",
"(",
"value",
")",
"else",
":",
"exc",
"=",
"tp",
"if",
"exc",
".",
"__traceback__",
"is",
"not",
"tb",
":",
"raise",
"exc",
".",
"with_traceback",
"(",
"tb",
")",
"raise",
"exc"
] |
raise_ implementation from future.utils
|
[
"raise_",
"implementation",
"from",
"future",
".",
"utils"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/workqueue_thread.py#L144-L154
|
train
|
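future_raise exists so a worker thread can re-raise a captured exception in the caller with its original traceback attached. The core is exc.with_traceback(tb); a small demonstration using sys.exc_info() the same way dispatch() does:

import sys

def capture():
    try:
        raise ValueError("boom")
    except ValueError:
        return sys.exc_info()  # (type, value, traceback)

tp, value, tb = capture()
try:
    # Equivalent to future_raise(value, None, tb): re-raise the captured
    # exception with the traceback from where it originally occurred.
    raise value.with_traceback(tb)
except ValueError as exc:
    assert exc.__traceback__ is not None  # original frames kept in the chain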
iotile/coretools
|
iotilecore/iotile/core/utilities/workqueue_thread.py
|
WorkQueueThread.flush
|
def flush(self):
"""Synchronously wait until this work item is processed.
This has the effect of waiting until all work items queued before this
method was called have finished.
"""
done = threading.Event()
def _callback():
done.set()
self.defer(_callback)
done.wait()
|
python
|
def flush(self):
"""Synchronously wait until this work item is processed.
This has the effect of waiting until all work items queued before this
method was called have finished.
"""
done = threading.Event()
def _callback():
done.set()
self.defer(_callback)
done.wait()
|
[
"def",
"flush",
"(",
"self",
")",
":",
"done",
"=",
"threading",
".",
"Event",
"(",
")",
"def",
"_callback",
"(",
")",
":",
"done",
".",
"set",
"(",
")",
"self",
".",
"defer",
"(",
"_callback",
")",
"done",
".",
"wait",
"(",
")"
] |
Synchronously wait until this work item is processed.
This has the effect of waiting until all work items queued before this
method was called have finished.
|
[
"Synchronously",
"wait",
"until",
"this",
"work",
"item",
"is",
"processed",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/workqueue_thread.py#L156-L169
|
train
|
iotile/coretools
|
iotilecore/iotile/core/utilities/workqueue_thread.py
|
WorkQueueThread.direct_dispatch
|
def direct_dispatch(self, arg, callback):
"""Directly dispatch a work item.
This method MUST only be called from inside of another work item and
will synchronously invoke the work item as if it was passed to
dispatch(). Calling this method from any other thread has undefined
consequences since it will be unsynchronized with respect to items
dispatched from inside the background work queue itself.
"""
try:
self._current_callbacks.appendleft(callback)
exc_info = None
retval = None
retval = self._routine(arg)
except: # pylint:disable=bare-except;We need to capture the exception and feed it back to the caller
exc_info = sys.exc_info()
finally:
self._current_callbacks.popleft()
if callback is not None and retval is not self.STILL_PENDING:
callback(exc_info, retval)
return retval, exc_info
|
python
|
def direct_dispatch(self, arg, callback):
"""Directly dispatch a work item.
This method MUST only be called from inside of another work item and
will synchronously invoke the work item as if it was passed to
dispatch(). Calling this method from any other thread has undefined
consequences since it will be unsynchronized with respect to items
dispatched from inside the background work queue itself.
"""
try:
self._current_callbacks.appendleft(callback)
exc_info = None
retval = None
retval = self._routine(arg)
except: # pylint:disable=bare-except;We need to capture the exception and feed it back to the caller
exc_info = sys.exc_info()
finally:
self._current_callbacks.popleft()
if callback is not None and retval is not self.STILL_PENDING:
callback(exc_info, retval)
return retval, exc_info
|
[
"def",
"direct_dispatch",
"(",
"self",
",",
"arg",
",",
"callback",
")",
":",
"try",
":",
"self",
".",
"_current_callbacks",
".",
"appendleft",
"(",
"callback",
")",
"exc_info",
"=",
"None",
"retval",
"=",
"None",
"retval",
"=",
"self",
".",
"_routine",
"(",
"arg",
")",
"except",
":",
"# pylint:disable=bare-except;We need to capture the exception and feed it back to the caller",
"exc_info",
"=",
"sys",
".",
"exc_info",
"(",
")",
"finally",
":",
"self",
".",
"_current_callbacks",
".",
"popleft",
"(",
")",
"if",
"callback",
"is",
"not",
"None",
"and",
"retval",
"is",
"not",
"self",
".",
"STILL_PENDING",
":",
"callback",
"(",
"exc_info",
",",
"retval",
")",
"return",
"retval",
",",
"exc_info"
] |
Directly dispatch a work item.
This method MUST only be called from inside of another work item and
will synchronously invoke the work item as if it was passed to
dispatch(). Calling this method from any other thread has undefined
consequences since it will be unsynchronized with respect to items
dispatched from inside the background work queue itself.
|
[
"Directly",
"dispatch",
"a",
"work",
"item",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/workqueue_thread.py#L237-L262
|
train
|
iotile/coretools
|
iotilecore/iotile/core/utilities/workqueue_thread.py
|
WorkQueueThread.stop
|
def stop(self, timeout=None, force=False):
"""Stop the worker thread and synchronously wait for it to finish.
Args:
timeout (float): The maximum time to wait for the thread to stop
before raising a TimeoutExpiredError. If force is True, TimeoutExpiredError
is not raised and the thread is just marked as a daemon thread
so that it does not block cleanly exiting the process.
force (bool): If true and the thread does not exit in timeout seconds
no error is raised since the thread is marked as daemon and will
be killed when the process exits.
"""
self.signal_stop()
self.wait_stopped(timeout, force)
|
python
|
def stop(self, timeout=None, force=False):
"""Stop the worker thread and synchronously wait for it to finish.
Args:
timeout (float): The maximum time to wait for the thread to stop
before raising a TimeoutExpiredError. If force is True, TimeoutExpiredError
is not raised and the thread is just marked as a daemon thread
so that it does not block cleanly exiting the process.
force (bool): If true and the thread does not exit in timeout seconds
no error is raised since the thread is marked as daemon and will
be killed when the process exits.
"""
self.signal_stop()
self.wait_stopped(timeout, force)
|
[
"def",
"stop",
"(",
"self",
",",
"timeout",
"=",
"None",
",",
"force",
"=",
"False",
")",
":",
"self",
".",
"signal_stop",
"(",
")",
"self",
".",
"wait_stopped",
"(",
"timeout",
",",
"force",
")"
] |
Stop the worker thread and synchronously wait for it to finish.
Args:
timeout (float): The maximum time to wait for the thread to stop
before raising a TimeoutExpiredError. If force is True, TimeoutExpiredError
is not raised and the thread is just marked as a daemon thread
so that it does not block cleanly exiting the process.
force (bool): If true and the thread does not exit in timeout seconds
no error is raised since the thread is marked as daemon and will
be killed when the process exits.
|
[
"Stop",
"the",
"worker",
"thread",
"and",
"synchronously",
"wait",
"for",
"it",
"to",
"finish",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/workqueue_thread.py#L300-L314
|
train
|
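A usage sketch of the timeout/force semantics of stop(), assuming worker is an already-started WorkQueueThread and that TimeoutExpiredError is importable from the same utilities package (an assumed import path, not a documented one):

try:
    worker.stop(timeout=2.0)              # raises TimeoutExpiredError if the thread is still alive
except TimeoutExpiredError:
    worker.stop(timeout=2.0, force=True)  # no error; thread is left daemonized and dies with the process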
iotile/coretools
|
iotilecore/iotile/core/utilities/workqueue_thread.py
|
WorkQueueThread.wait_stopped
|
def wait_stopped(self, timeout=None, force=False):
"""Wait for the thread to stop.
You must have previously called signal_stop or this function will
hang.
Args:
timeout (float): The maximum time to wait for the thread to stop
before raising a TimeoutExpiredError. If force is True,
TimeoutExpiredError is not raised and the thread is just
marked as a daemon thread so that it does not block cleanly
exiting the process.
force (bool): If true and the thread does not exit in timeout seconds
no error is raised since the thread is marked as daemon and will
be killed when the process exits.
"""
self.join(timeout)
if self.is_alive() and force is False:
raise TimeoutExpiredError("Error waiting for background thread to exit", timeout=timeout)
|
python
|
def wait_stopped(self, timeout=None, force=False):
"""Wait for the thread to stop.
You must have previously called signal_stop or this function will
hang.
Args:
timeout (float): The maximum time to wait for the thread to stop
before raising a TimeoutExpiredError. If force is True,
TimeoutExpiredError is not raised and the thread is just
marked as a daemon thread so that it does not block cleanly
exiting the process.
force (bool): If true and the thread does not exit in timeout seconds
no error is raised since the thread is marked as daemon and will
be killed when the process exits.
"""
self.join(timeout)
if self.is_alive() and force is False:
raise TimeoutExpiredError("Error waiting for background thread to exit", timeout=timeout)
|
[
"def",
"wait_stopped",
"(",
"self",
",",
"timeout",
"=",
"None",
",",
"force",
"=",
"False",
")",
":",
"self",
".",
"join",
"(",
"timeout",
")",
"if",
"self",
".",
"is_alive",
"(",
")",
"and",
"force",
"is",
"False",
":",
"raise",
"TimeoutExpiredError",
"(",
"\"Error waiting for background thread to exit\"",
",",
"timeout",
"=",
"timeout",
")"
] |
Wait for the thread to stop.
You must have previously called signal_stop or this function will
hang.
Args:
timeout (float): The maximum time to wait for the thread to stop
before raising a TimeoutExpiredError. If force is True,
TimeoutExpiredError is not raised and the thread is just
marked as a daemon thread so that it does not block cleanly
exiting the process.
force (bool): If true and the thread does not exit in timeout seconds
no error is raised since the thread is marked as daemon and will
be killed when the process exits.
|
[
"Wait",
"for",
"the",
"thread",
"to",
"stop",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/workqueue_thread.py#L325-L346
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/wix.py
|
generate
|
def generate(env):
"""Add Builders and construction variables for WiX to an Environment."""
if not exists(env):
return
env['WIXCANDLEFLAGS'] = ['-nologo']
env['WIXCANDLEINCLUDE'] = []
env['WIXCANDLECOM'] = '$WIXCANDLE $WIXCANDLEFLAGS -I $WIXCANDLEINCLUDE -o ${TARGET} ${SOURCE}'
env['WIXLIGHTFLAGS'].append( '-nologo' )
env['WIXLIGHTCOM'] = "$WIXLIGHT $WIXLIGHTFLAGS -out ${TARGET} ${SOURCES}"
env['WIXSRCSUF'] = '.wxs'
env['WIXOBJSUF'] = '.wixobj'
object_builder = SCons.Builder.Builder(
action = '$WIXCANDLECOM',
suffix = '$WIXOBJSUF',
src_suffix = '$WIXSRCSUF')
linker_builder = SCons.Builder.Builder(
action = '$WIXLIGHTCOM',
src_suffix = '$WIXOBJSUF',
src_builder = object_builder)
env['BUILDERS']['WiX'] = linker_builder
|
python
|
def generate(env):
"""Add Builders and construction variables for WiX to an Environment."""
if not exists(env):
return
env['WIXCANDLEFLAGS'] = ['-nologo']
env['WIXCANDLEINCLUDE'] = []
env['WIXCANDLECOM'] = '$WIXCANDLE $WIXCANDLEFLAGS -I $WIXCANDLEINCLUDE -o ${TARGET} ${SOURCE}'
env['WIXLIGHTFLAGS'].append( '-nologo' )
env['WIXLIGHTCOM'] = "$WIXLIGHT $WIXLIGHTFLAGS -out ${TARGET} ${SOURCES}"
env['WIXSRCSUF'] = '.wxs'
env['WIXOBJSUF'] = '.wixobj'
object_builder = SCons.Builder.Builder(
action = '$WIXCANDLECOM',
suffix = '$WIXOBJSUF',
src_suffix = '$WIXSRCSUF')
linker_builder = SCons.Builder.Builder(
action = '$WIXLIGHTCOM',
src_suffix = '$WIXOBJSUF',
src_builder = object_builder)
env['BUILDERS']['WiX'] = linker_builder
|
[
"def",
"generate",
"(",
"env",
")",
":",
"if",
"not",
"exists",
"(",
"env",
")",
":",
"return",
"env",
"[",
"'WIXCANDLEFLAGS'",
"]",
"=",
"[",
"'-nologo'",
"]",
"env",
"[",
"'WIXCANDLEINCLUDE'",
"]",
"=",
"[",
"]",
"env",
"[",
"'WIXCANDLECOM'",
"]",
"=",
"'$WIXCANDLE $WIXCANDLEFLAGS -I $WIXCANDLEINCLUDE -o ${TARGET} ${SOURCE}'",
"env",
"[",
"'WIXLIGHTFLAGS'",
"]",
".",
"append",
"(",
"'-nologo'",
")",
"env",
"[",
"'WIXLIGHTCOM'",
"]",
"=",
"\"$WIXLIGHT $WIXLIGHTFLAGS -out ${TARGET} ${SOURCES}\"",
"env",
"[",
"'WIXSRCSUF'",
"]",
"=",
"'.wxs'",
"env",
"[",
"'WIXOBJSUF'",
"]",
"=",
"'.wixobj'",
"object_builder",
"=",
"SCons",
".",
"Builder",
".",
"Builder",
"(",
"action",
"=",
"'$WIXCANDLECOM'",
",",
"suffix",
"=",
"'$WIXOBJSUF'",
",",
"src_suffix",
"=",
"'$WIXSRCSUF'",
")",
"linker_builder",
"=",
"SCons",
".",
"Builder",
".",
"Builder",
"(",
"action",
"=",
"'$WIXLIGHTCOM'",
",",
"src_suffix",
"=",
"'$WIXOBJSUF'",
",",
"src_builder",
"=",
"object_builder",
")",
"env",
"[",
"'BUILDERS'",
"]",
"[",
"'WiX'",
"]",
"=",
"linker_builder"
] |
Add Builders and construction variables for WiX to an Environment.
|
[
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"WiX",
"to",
"an",
"Environment",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/wix.py#L39-L63
|
train
|
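A usage sketch for the two-stage builder wired up above: candle compiles .wxs sources into .wixobj objects, and light links those into the installer. The src_builder chaining lets the WiX builder accept .wxs files directly. The tool name 'wix' and the SConstruct shape are illustrative assumptions.

# In an SConstruct file (Environment is provided by SCons):
env = Environment(tools=['default', 'wix'])
env.WiX('installer.msi', ['installer.wxs'])  # candle: .wxs -> .wixobj, then light: .wixobj -> .msi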
iotile/coretools
|
iotilesensorgraph/iotile/sg/sim/stimulus.py
|
SimulationStimulus.FromString
|
def FromString(cls, desc):
"""Create a new stimulus from a description string.
The string must have the format:
[time: ][system ]input X = Y
where X and Y are integers. The time, if given, must
be a time_interval, which is an integer followed by a
time unit such as second(s), minute(s), etc.
Args:
desc (str): A string description of the stimulus.
Returns:
SimulationStimulus: The parsed stimulus object.
"""
if language.stream is None:
language.get_language()
parse_exp = Optional(time_interval('time') - Literal(':').suppress()) - language.stream('stream') - Literal('=').suppress() - number('value')
try:
data = parse_exp.parseString(desc)
time = 0
if 'time' in data:
time = data['time'][0]
return SimulationStimulus(time, data['stream'][0], data['value'])
except (ParseException, ParseSyntaxException):
raise ArgumentError("Could not parse stimulus descriptor", descriptor=desc)
|
python
|
def FromString(cls, desc):
"""Create a new stimulus from a description string.
The string must have the format:
[time: ][system ]input X = Y
where X and Y are integers. The time, if given, must
be a time_interval, which is an integer followed by a
time unit such as second(s), minute(s), etc.
Args:
desc (str): A string description of the stimulus.
Returns:
SimulationStimulus: The parsed stimulus object.
"""
if language.stream is None:
language.get_language()
parse_exp = Optional(time_interval('time') - Literal(':').suppress()) - language.stream('stream') - Literal('=').suppress() - number('value')
try:
data = parse_exp.parseString(desc)
time = 0
if 'time' in data:
time = data['time'][0]
return SimulationStimulus(time, data['stream'][0], data['value'])
except (ParseException, ParseSyntaxException):
raise ArgumentError("Could not parse stimulus descriptor", descriptor=desc)
|
[
"def",
"FromString",
"(",
"cls",
",",
"desc",
")",
":",
"if",
"language",
".",
"stream",
"is",
"None",
":",
"language",
".",
"get_language",
"(",
")",
"parse_exp",
"=",
"Optional",
"(",
"time_interval",
"(",
"'time'",
")",
"-",
"Literal",
"(",
"':'",
")",
".",
"suppress",
"(",
")",
")",
"-",
"language",
".",
"stream",
"(",
"'stream'",
")",
"-",
"Literal",
"(",
"'='",
")",
".",
"suppress",
"(",
")",
"-",
"number",
"(",
"'value'",
")",
"try",
":",
"data",
"=",
"parse_exp",
".",
"parseString",
"(",
"desc",
")",
"time",
"=",
"0",
"if",
"'time'",
"in",
"data",
":",
"time",
"=",
"data",
"[",
"'time'",
"]",
"[",
"0",
"]",
"return",
"SimulationStimulus",
"(",
"time",
",",
"data",
"[",
"'stream'",
"]",
"[",
"0",
"]",
",",
"data",
"[",
"'value'",
"]",
")",
"except",
"(",
"ParseException",
",",
"ParseSyntaxException",
")",
":",
"raise",
"ArgumentError",
"(",
"\"Could not parse stimulus descriptor\"",
",",
"descriptor",
"=",
"desc",
")"
] |
Create a new stimulus from a description string.
The string must have the format:
[time: ][system ]input X = Y
where X and Y are integers. The time, if given, must
be a time_interval, which is an integer followed by a
time unit such as second(s), minute(s), etc.
Args:
desc (str): A string description of the stimulus.
Returns:
SimulationStimulus: The parsed stimulus object.
|
[
"Create",
"a",
"new",
"stimulus",
"from",
"a",
"description",
"string",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/sim/stimulus.py#L27-L56
|
train
|
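Illustrative descriptors matching the documented "[time: ][system ]input X = Y" format; the exact stream grammar comes from language.stream, so these strings are assumptions:

immediate = SimulationStimulus.FromString('input 1 = 5')             # fires at time 0
delayed = SimulationStimulus.FromString('2 seconds: input 1 = 9')    # fires after 2 seconds
# SimulationStimulus.FromString('input 1 := 9')                      # would raise ArgumentError (bad syntax)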
iotile/coretools
|
transport_plugins/awsiot/iotile_transport_awsiot/connection_manager.py
|
ConnectionManager.get_connection_id
|
def get_connection_id(self, conn_or_int_id):
"""Get the connection id.
Args:
conn_or_int_id (int, string): The external integer connection id or
an internal string connection id
Returns:
int: The external connection id associated with that connection.
Raises:
ArgumentError: When the key is not found in the list of active connections
or is invalid.
"""
key = conn_or_int_id
if isinstance(key, str):
table = self._int_connections
elif isinstance(key, int):
table = self._connections
else:
raise ArgumentError("You must supply either an int connection id or a string internal id to _get_connection_state", id=key)
try:
data = table[key]
except KeyError:
raise ArgumentError("Could not find connection by id", id=key)
return data['conn_id']
|
python
|
def get_connection_id(self, conn_or_int_id):
"""Get the connection id.
Args:
conn_or_int_id (int, string): The external integer connection id or
an internal string connection id
Returns:
int: The external connection id associated with that connection.
Raises:
ArgumentError: When the key is not found in the list of active connections
or is invalid.
"""
key = conn_or_int_id
if isinstance(key, str):
table = self._int_connections
elif isinstance(key, int):
table = self._connections
else:
raise ArgumentError("You must supply either an int connection id or a string internal id to _get_connection_state", id=key)
try:
data = table[key]
except KeyError:
raise ArgumentError("Could not find connection by id", id=key)
return data['conn_id']
|
[
"def",
"get_connection_id",
"(",
"self",
",",
"conn_or_int_id",
")",
":",
"key",
"=",
"conn_or_int_id",
"if",
"isinstance",
"(",
"key",
",",
"str",
")",
":",
"table",
"=",
"self",
".",
"_int_connections",
"elif",
"isinstance",
"(",
"key",
",",
"int",
")",
":",
"table",
"=",
"self",
".",
"_connections",
"else",
":",
"raise",
"ArgumentError",
"(",
"\"You must supply either an int connection id or a string internal id to _get_connection_state\"",
",",
"id",
"=",
"key",
")",
"try",
":",
"data",
"=",
"table",
"[",
"key",
"]",
"except",
"KeyError",
":",
"raise",
"ArgumentError",
"(",
"\"Could not find connection by id\"",
",",
"id",
"=",
"key",
")",
"return",
"data",
"[",
"'conn_id'",
"]"
] |
Get the connection id.
Args:
conn_or_int_id (int, string): The external integer connection id or
an internal string connection id
Returns:
int: The external connection id associated with that connection.
Raises:
ArgumentError: When the key is not found in the list of active connections
or is invalid.
|
[
"Get",
"the",
"connection",
"id",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/awsiot/iotile_transport_awsiot/connection_manager.py#L174-L203
|
train
|
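A minimal sketch of the dual-index pattern behind the two lookups above: one context dict per connection, reachable both by the external integer conn_id and by the transport's internal string id (names here are illustrative):

connections = {}      # int conn_id -> context dict
int_connections = {}  # internal string id -> the same context dict

ctx = {'conn_id': 7, 'state': 'Idle'}
connections[7] = ctx
int_connections['aa:bb:cc:dd'] = ctx

assert connections[7] is int_connections['aa:bb:cc:dd']
assert int_connections['aa:bb:cc:dd']['conn_id'] == 7  # what get_connection_id returns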
iotile/coretools
|
transport_plugins/awsiot/iotile_transport_awsiot/connection_manager.py
|
ConnectionManager._get_connection
|
def _get_connection(self, conn_or_int_id):
"""Get the data for a connection by either conn_id or internal_id
Args:
conn_or_int_id (int, string): The external integer connection id or
an internal string connection id
Returns:
dict: The context data associated with that connection or None if it cannot
be found.
"""
key = conn_or_int_id
if isinstance(key, str):
table = self._int_connections
elif isinstance(key, int):
table = self._connections
else:
return None
try:
data = table[key]
except KeyError:
return None
return data
|
python
|
def _get_connection(self, conn_or_int_id):
"""Get the data for a connection by either conn_id or internal_id
Args:
conn_or_int_id (int, string): The external integer connection id or
an internal string connection id
Returns:
dict: The context data associated with that connection or None if it cannot
be found.
"""
key = conn_or_int_id
if isinstance(key, str):
table = self._int_connections
elif isinstance(key, int):
table = self._connections
else:
return None
try:
data = table[key]
except KeyError:
return None
return data
|
[
"def",
"_get_connection",
"(",
"self",
",",
"conn_or_int_id",
")",
":",
"key",
"=",
"conn_or_int_id",
"if",
"isinstance",
"(",
"key",
",",
"str",
")",
":",
"table",
"=",
"self",
".",
"_int_connections",
"elif",
"isinstance",
"(",
"key",
",",
"int",
")",
":",
"table",
"=",
"self",
".",
"_connections",
"else",
":",
"return",
"None",
"try",
":",
"data",
"=",
"table",
"[",
"key",
"]",
"except",
"KeyError",
":",
"return",
"None",
"return",
"data"
] |
Get the data for a connection by either conn_id or internal_id
Args:
conn_or_int_id (int, string): The external integer connection id or
an internal string connection id
Returns:
dict: The context data associated with that connection or None if it cannot
be found.
|
[
"Get",
"the",
"data",
"for",
"a",
"connection",
"by",
"either",
"conn_id",
"or",
"internal_id"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/awsiot/iotile_transport_awsiot/connection_manager.py#L205-L234
|
train
|
iotile/coretools
|
transport_plugins/awsiot/iotile_transport_awsiot/connection_manager.py
|
ConnectionManager._get_connection_state
|
def _get_connection_state(self, conn_or_int_id):
"""Get a connection's state by either conn_id or internal_id
This routine must only be called from the internal worker thread.
Args:
conn_or_int_id (int, string): The external integer connection id or
an internal string connection id
"""
key = conn_or_int_id
if isinstance(key, str):
table = self._int_connections
elif isinstance(key, int):
table = self._connections
else:
raise ArgumentError("You must supply either an int connection id or a string internal id to _get_connection_state", id=key)
if key not in table:
return self.Disconnected
data = table[key]
return data['state']
|
python
|
def _get_connection_state(self, conn_or_int_id):
"""Get a connection's state by either conn_id or internal_id
This routine must only be called from the internal worker thread.
Args:
conn_or_int_id (int, string): The external integer connection id or
an internal string connection id
"""
key = conn_or_int_id
if isinstance(key, str):
table = self._int_connections
elif isinstance(key, int):
table = self._connections
else:
raise ArgumentError("You must supply either an int connection id or a string internal id to _get_connection_state", id=key)
if key not in table:
return self.Disconnected
data = table[key]
return data['state']
|
[
"def",
"_get_connection_state",
"(",
"self",
",",
"conn_or_int_id",
")",
":",
"key",
"=",
"conn_or_int_id",
"if",
"isinstance",
"(",
"key",
",",
"str",
")",
":",
"table",
"=",
"self",
".",
"_int_connections",
"elif",
"isinstance",
"(",
"key",
",",
"int",
")",
":",
"table",
"=",
"self",
".",
"_connections",
"else",
":",
"raise",
"ArgumentError",
"(",
"\"You must supply either an int connection id or a string internal id to _get_connection_state\"",
",",
"id",
"=",
"key",
")",
"if",
"key",
"not",
"in",
"table",
":",
"return",
"self",
".",
"Disconnected",
"data",
"=",
"table",
"[",
"key",
"]",
"return",
"data",
"[",
"'state'",
"]"
] |
Get a connection's state by either conn_id or internal_id
This routine must only be called from the internal worker thread.
Args:
conn_or_int_id (int, string): The external integer connection id or
an internal string connection id
|
[
"Get",
"a",
"connection",
"s",
"state",
"by",
"either",
"conn_id",
"or",
"internal_id"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/awsiot/iotile_transport_awsiot/connection_manager.py#L236-L258
|
train
|
iotile/coretools
|
transport_plugins/awsiot/iotile_transport_awsiot/connection_manager.py
|
ConnectionManager._check_timeouts
|
def _check_timeouts(self):
"""Check if any operations in progress need to be timed out
Adds the corresponding finish action that fails the request due to a
timeout.
"""
for conn_id, data in self._connections.items():
if 'timeout' in data and data['timeout'].expired:
if data['state'] == self.Connecting:
self.finish_connection(conn_id, False, 'Connection attempt timed out')
elif data['state'] == self.Disconnecting:
self.finish_disconnection(conn_id, False, 'Disconnection attempt timed out')
elif data['state'] == self.InProgress:
if data['microstate'] == 'rpc':
self.finish_operation(conn_id, False, 'RPC timed out without response', None, None)
elif data['microstate'] == 'open_interface':
self.finish_operation(conn_id, False, 'Open interface request timed out')
|
python
|
def _check_timeouts(self):
"""Check if any operations in progress need to be timed out
Adds the corresponding finish action that fails the request due to a
timeout.
"""
for conn_id, data in self._connections.items():
if 'timeout' in data and data['timeout'].expired:
if data['state'] == self.Connecting:
self.finish_connection(conn_id, False, 'Connection attempt timed out')
elif data['state'] == self.Disconnecting:
self.finish_disconnection(conn_id, False, 'Disconnection attempt timed out')
elif data['state'] == self.InProgress:
if data['microstate'] == 'rpc':
self.finish_operation(conn_id, False, 'RPC timed out without response', None, None)
elif data['microstate'] == 'open_interface':
self.finish_operation(conn_id, False, 'Open interface request timed out')
|
[
"def",
"_check_timeouts",
"(",
"self",
")",
":",
"for",
"conn_id",
",",
"data",
"in",
"self",
".",
"_connections",
".",
"items",
"(",
")",
":",
"if",
"'timeout'",
"in",
"data",
"and",
"data",
"[",
"'timeout'",
"]",
".",
"expired",
":",
"if",
"data",
"[",
"'state'",
"]",
"==",
"self",
".",
"Connecting",
":",
"self",
".",
"finish_connection",
"(",
"conn_id",
",",
"False",
",",
"'Connection attempt timed out'",
")",
"elif",
"data",
"[",
"'state'",
"]",
"==",
"self",
".",
"Disconnecting",
":",
"self",
".",
"finish_disconnection",
"(",
"conn_id",
",",
"False",
",",
"'Disconnection attempt timed out'",
")",
"elif",
"data",
"[",
"'state'",
"]",
"==",
"self",
".",
"InProgress",
":",
"if",
"data",
"[",
"'microstate'",
"]",
"==",
"'rpc'",
":",
"self",
".",
"finish_operation",
"(",
"conn_id",
",",
"False",
",",
"'RPC timed out without response'",
",",
"None",
",",
"None",
")",
"elif",
"data",
"[",
"'microstate'",
"]",
"==",
"'open_interface'",
":",
"self",
".",
"finish_operation",
"(",
"conn_id",
",",
"False",
",",
"'Open interface request timed out'",
")"
] |
Check if any operations in progress need to be timed out
Adds the corresponding finish action that fails the request due to a
timeout.
|
[
"Check",
"if",
"any",
"operations",
"in",
"progress",
"need",
"to",
"be",
"timed",
"out"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/awsiot/iotile_transport_awsiot/connection_manager.py#L260-L277
|
train
|
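The loop above polls data['timeout'].expired; a minimal stand-in for such a timeout object, using a monotonic clock (the library's real class may differ):

import time

class Timeout:
    def __init__(self, seconds):
        self._deadline = time.monotonic() + seconds

    @property
    def expired(self):
        return time.monotonic() >= self._deadline

t = Timeout(0.5)
assert not t.expired
time.sleep(0.6)
assert t.expired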
iotile/coretools
|
transport_plugins/awsiot/iotile_transport_awsiot/connection_manager.py
|
ConnectionManager.unexpected_disconnect
|
def unexpected_disconnect(self, conn_or_internal_id):
"""Notify that there was an unexpected disconnection of the device.
Any in progress operations are canceled cleanly and the device is transitioned
to a disconnected state.
Args:
conn_or_internal_id (string, int): Either an integer connection id or a string
internal_id
"""
data = {
'id': conn_or_internal_id
}
action = ConnectionAction('force_disconnect', data, sync=False)
self._actions.put(action)
|
python
|
def unexpected_disconnect(self, conn_or_internal_id):
"""Notify that there was an unexpected disconnection of the device.
Any in progress operations are canceled cleanly and the device is transitioned
to a disconnected state.
Args:
conn_or_internal_id (string, int): Either an integer connection id or a string
internal_id
"""
data = {
'id': conn_or_internal_id
}
action = ConnectionAction('force_disconnect', data, sync=False)
self._actions.put(action)
|
[
"def",
"unexpected_disconnect",
"(",
"self",
",",
"conn_or_internal_id",
")",
":",
"data",
"=",
"{",
"'id'",
":",
"conn_or_internal_id",
"}",
"action",
"=",
"ConnectionAction",
"(",
"'force_disconnect'",
",",
"data",
",",
"sync",
"=",
"False",
")",
"self",
".",
"_actions",
".",
"put",
"(",
"action",
")"
] |
Notify that there was an unexpected disconnection of the device.
Any in progress operations are canceled cleanly and the device is transitioned
to a disconnected state.
Args:
conn_or_internal_id (string, int): Either an integer connection id or a string
internal_id
|
[
"Notify",
"that",
"there",
"was",
"an",
"unexpected",
"disconnection",
"of",
"the",
"device",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/awsiot/iotile_transport_awsiot/connection_manager.py#L393-L409
|
train
|
iotile/coretools
|
transport_plugins/awsiot/iotile_transport_awsiot/connection_manager.py
|
ConnectionManager.finish_operation
|
def finish_operation(self, conn_or_internal_id, success, *args):
"""Finish an operation on a connection.
Args:
conn_or_internal_id (string, int): Either an integer connection id or a string
internal_id
success (bool): Whether the operation was successful
failure_reason (string): Optional reason why the operation failed
result (dict): Optional dictionary containing the results of the operation
"""
data = {
'id': conn_or_internal_id,
'success': success,
'callback_args': args
}
action = ConnectionAction('finish_operation', data, sync=False)
self._actions.put(action)
|
python
|
def finish_operation(self, conn_or_internal_id, success, *args):
"""Finish an operation on a connection.
Args:
conn_or_internal_id (string, int): Either an integer connection id or a string
internal_id
success (bool): Whether the operation was successful
failure_reason (string): Optional reason why the operation failed
result (dict): Optional dictionary containing the results of the operation
"""
data = {
'id': conn_or_internal_id,
'success': success,
'callback_args': args
}
action = ConnectionAction('finish_operation', data, sync=False)
self._actions.put(action)
|
[
"def",
"finish_operation",
"(",
"self",
",",
"conn_or_internal_id",
",",
"success",
",",
"*",
"args",
")",
":",
"data",
"=",
"{",
"'id'",
":",
"conn_or_internal_id",
",",
"'success'",
":",
"success",
",",
"'callback_args'",
":",
"args",
"}",
"action",
"=",
"ConnectionAction",
"(",
"'finish_operation'",
",",
"data",
",",
"sync",
"=",
"False",
")",
"self",
".",
"_actions",
".",
"put",
"(",
"action",
")"
] |
Finish an operation on a connection.
Args:
conn_or_internal_id (string, int): Either an integer connection id or a string
internal_id
success (bool): Whether the operation was successful
failure_reason (string): Optional reason why the operation failed
result (dict): Optional dictionary containing the results of the operation
|
[
"Finish",
"an",
"operation",
"on",
"a",
"connection",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/awsiot/iotile_transport_awsiot/connection_manager.py#L593-L611
|
train
|
iotile/coretools
|
transport_plugins/awsiot/iotile_transport_awsiot/connection_manager.py
|
ConnectionManager._finish_operation_action
|
def _finish_operation_action(self, action):
"""Finish an attempted operation.
Args:
action (ConnectionAction): the action object describing the result
of the operation that we are finishing
"""
success = action.data['success']
conn_key = action.data['id']
if self._get_connection_state(conn_key) != self.InProgress:
self._logger.error("Invalid finish_operation action on a connection whose state is not InProgress, conn_key=%s", str(conn_key))
return
# Cannot be None since we checked above to make sure it exists
data = self._get_connection(conn_key)
callback = data['callback']
conn_id = data['conn_id']
args = action.data['callback_args']
data['state'] = self.Idle
data['microstate'] = None
callback(conn_id, self.id, success, *args)
|
python
|
def _finish_operation_action(self, action):
"""Finish an attempted operation.
Args:
action (ConnectionAction): the action object describing the result
of the operation that we are finishing
"""
success = action.data['success']
conn_key = action.data['id']
if self._get_connection_state(conn_key) != self.InProgress:
self._logger.error("Invalid finish_operation action on a connection whose state is not InProgress, conn_key=%s", str(conn_key))
return
# Cannot be None since we checked above to make sure it exists
data = self._get_connection(conn_key)
callback = data['callback']
conn_id = data['conn_id']
args = action.data['callback_args']
data['state'] = self.Idle
data['microstate'] = None
callback(conn_id, self.id, success, *args)
|
[
"def",
"_finish_operation_action",
"(",
"self",
",",
"action",
")",
":",
"success",
"=",
"action",
".",
"data",
"[",
"'success'",
"]",
"conn_key",
"=",
"action",
".",
"data",
"[",
"'id'",
"]",
"if",
"self",
".",
"_get_connection_state",
"(",
"conn_key",
")",
"!=",
"self",
".",
"InProgress",
":",
"self",
".",
"_logger",
".",
"error",
"(",
"\"Invalid finish_operation action on a connection whose state is not InProgress, conn_key=%s\"",
",",
"str",
"(",
"conn_key",
")",
")",
"return",
"# Cannot be None since we checked above to make sure it exists",
"data",
"=",
"self",
".",
"_get_connection",
"(",
"conn_key",
")",
"callback",
"=",
"data",
"[",
"'callback'",
"]",
"conn_id",
"=",
"data",
"[",
"'conn_id'",
"]",
"args",
"=",
"action",
".",
"data",
"[",
"'callback_args'",
"]",
"data",
"[",
"'state'",
"]",
"=",
"self",
".",
"Idle",
"data",
"[",
"'microstate'",
"]",
"=",
"None",
"callback",
"(",
"conn_id",
",",
"self",
".",
"id",
",",
"success",
",",
"*",
"args",
")"
] |
Finish an attempted operation.
Args:
action (ConnectionAction): the action object describing the result
of the operation that we are finishing
|
[
"Finish",
"an",
"attempted",
"operation",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/awsiot/iotile_transport_awsiot/connection_manager.py#L613-L637
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Scanner/LaTeX.py
|
LaTeX.canonical_text
|
def canonical_text(self, text):
"""Standardize an input TeX-file contents.
Currently:
* removes comments, unwrapping comment-wrapped lines.
"""
out = []
line_continues_a_comment = False
for line in text.splitlines():
line,comment = self.comment_re.findall(line)[0]
if line_continues_a_comment == True:
out[-1] = out[-1] + line.lstrip()
else:
out.append(line)
line_continues_a_comment = len(comment) > 0
return '\n'.join(out).rstrip()+'\n'
|
python
|
def canonical_text(self, text):
"""Standardize an input TeX-file contents.
Currently:
* removes comments, unwrapping comment-wrapped lines.
"""
out = []
line_continues_a_comment = False
for line in text.splitlines():
line,comment = self.comment_re.findall(line)[0]
if line_continues_a_comment == True:
out[-1] = out[-1] + line.lstrip()
else:
out.append(line)
line_continues_a_comment = len(comment) > 0
return '\n'.join(out).rstrip()+'\n'
|
[
"def",
"canonical_text",
"(",
"self",
",",
"text",
")",
":",
"out",
"=",
"[",
"]",
"line_continues_a_comment",
"=",
"False",
"for",
"line",
"in",
"text",
".",
"splitlines",
"(",
")",
":",
"line",
",",
"comment",
"=",
"self",
".",
"comment_re",
".",
"findall",
"(",
"line",
")",
"[",
"0",
"]",
"if",
"line_continues_a_comment",
"==",
"True",
":",
"out",
"[",
"-",
"1",
"]",
"=",
"out",
"[",
"-",
"1",
"]",
"+",
"line",
".",
"lstrip",
"(",
")",
"else",
":",
"out",
".",
"append",
"(",
"line",
")",
"line_continues_a_comment",
"=",
"len",
"(",
"comment",
")",
">",
"0",
"return",
"'\\n'",
".",
"join",
"(",
"out",
")",
".",
"rstrip",
"(",
")",
"+",
"'\\n'"
] |
Standardize the contents of an input TeX file.
Currently:
* removes comments, unwrapping comment-wrapped lines.
|
[
"Standardize",
"an",
"input",
"TeX",
"-",
"file",
"contents",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Scanner/LaTeX.py#L326-L341
|
train
|
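A standalone re-creation of the comment-unwrapping behavior, with an assumed comment_re (the scanner's actual regex may handle escapes differently):

import re

# group 1: the line body (allowing escaped \%), group 2: the comment, if any
comment_re = re.compile(r'^((?:[^%\\]|\\.)*)(%.*)?$')

def canonical_text(text):
    out = []
    continues = False
    for line in text.splitlines():
        body, comment = comment_re.match(line).groups()
        comment = comment or ''
        if continues:
            out[-1] += body.lstrip()   # unwrap onto the previous line
        else:
            out.append(body)
        continues = len(comment) > 0
    return '\n'.join(out).rstrip() + '\n'

print(canonical_text('first % wrapped\nsecond\nthird\n'))
# first second
# third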
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Scanner/LaTeX.py
|
LaTeX.scan_recurse
|
def scan_recurse(self, node, path=()):
""" do a recursive scan of the top level target file
This lets us search for included files based on the
directory of the main file just as latex does"""
path_dict = dict(list(path))
queue = []
queue.extend( self.scan(node) )
seen = {}
# This is a hand-coded DSU (decorate-sort-undecorate, or
# Schwartzian transform) pattern. The sort key is the raw name
# of the file as specified on the \include, \input, etc. line.
# TODO: what about the comment in the original Classic scanner:
# """which lets
# us keep the sort order constant regardless of whether the file
# is actually found in a Repository or locally."""
nodes = []
source_dir = node.get_dir()
#for include in includes:
while queue:
include = queue.pop()
inc_type, inc_subdir, inc_filename = include
try:
if seen[inc_filename] == 1:
continue
except KeyError:
seen[inc_filename] = 1
#
# Handle multiple filenames in include[1]
#
n, i = self.find_include(include, source_dir, path_dict)
if n is None:
# Do not bother with 'usepackage' warnings, as they most
# likely refer to system-level files
if inc_type != 'usepackage':
SCons.Warnings.warn(SCons.Warnings.DependencyWarning,
"No dependency generated for file: %s (included from: %s) -- file not found" % (i, node))
else:
sortkey = self.sort_key(n)
nodes.append((sortkey, n))
# recurse down
queue.extend( self.scan(n, inc_subdir) )
return [pair[1] for pair in sorted(nodes)]
|
python
|
def scan_recurse(self, node, path=()):
""" do a recursive scan of the top level target file
This lets us search for included files based on the
directory of the main file just as latex does"""
path_dict = dict(list(path))
queue = []
queue.extend( self.scan(node) )
seen = {}
# This is a hand-coded DSU (decorate-sort-undecorate, or
# Schwartzian transform) pattern. The sort key is the raw name
# of the file as specified on the \include, \input, etc. line.
# TODO: what about the comment in the original Classic scanner:
# """which lets
# us keep the sort order constant regardless of whether the file
# is actually found in a Repository or locally."""
nodes = []
source_dir = node.get_dir()
#for include in includes:
while queue:
include = queue.pop()
inc_type, inc_subdir, inc_filename = include
try:
if seen[inc_filename] == 1:
continue
except KeyError:
seen[inc_filename] = 1
#
# Handle multiple filenames in include[1]
#
n, i = self.find_include(include, source_dir, path_dict)
if n is None:
# Do not bother with 'usepackage' warnings, as they most
# likely refer to system-level files
if inc_type != 'usepackage':
SCons.Warnings.warn(SCons.Warnings.DependencyWarning,
"No dependency generated for file: %s (included from: %s) -- file not found" % (i, node))
else:
sortkey = self.sort_key(n)
nodes.append((sortkey, n))
# recurse down
queue.extend( self.scan(n, inc_subdir) )
return [pair[1] for pair in sorted(nodes)]
|
[
"def",
"scan_recurse",
"(",
"self",
",",
"node",
",",
"path",
"=",
"(",
")",
")",
":",
"path_dict",
"=",
"dict",
"(",
"list",
"(",
"path",
")",
")",
"queue",
"=",
"[",
"]",
"queue",
".",
"extend",
"(",
"self",
".",
"scan",
"(",
"node",
")",
")",
"seen",
"=",
"{",
"}",
"# This is a hand-coded DSU (decorate-sort-undecorate, or",
"# Schwartzian transform) pattern. The sort key is the raw name",
"# of the file as specifed on the \\include, \\input, etc. line.",
"# TODO: what about the comment in the original Classic scanner:",
"# \"\"\"which lets",
"# us keep the sort order constant regardless of whether the file",
"# is actually found in a Repository or locally.\"\"\"",
"nodes",
"=",
"[",
"]",
"source_dir",
"=",
"node",
".",
"get_dir",
"(",
")",
"#for include in includes:",
"while",
"queue",
":",
"include",
"=",
"queue",
".",
"pop",
"(",
")",
"inc_type",
",",
"inc_subdir",
",",
"inc_filename",
"=",
"include",
"try",
":",
"if",
"seen",
"[",
"inc_filename",
"]",
"==",
"1",
":",
"continue",
"except",
"KeyError",
":",
"seen",
"[",
"inc_filename",
"]",
"=",
"1",
"#",
"# Handle multiple filenames in include[1]",
"#",
"n",
",",
"i",
"=",
"self",
".",
"find_include",
"(",
"include",
",",
"source_dir",
",",
"path_dict",
")",
"if",
"n",
"is",
"None",
":",
"# Do not bother with 'usepackage' warnings, as they most",
"# likely refer to system-level files",
"if",
"inc_type",
"!=",
"'usepackage'",
":",
"SCons",
".",
"Warnings",
".",
"warn",
"(",
"SCons",
".",
"Warnings",
".",
"DependencyWarning",
",",
"\"No dependency generated for file: %s (included from: %s) -- file not found\"",
"%",
"(",
"i",
",",
"node",
")",
")",
"else",
":",
"sortkey",
"=",
"self",
".",
"sort_key",
"(",
"n",
")",
"nodes",
".",
"append",
"(",
"(",
"sortkey",
",",
"n",
")",
")",
"# recurse down",
"queue",
".",
"extend",
"(",
"self",
".",
"scan",
"(",
"n",
",",
"inc_subdir",
")",
")",
"return",
"[",
"pair",
"[",
"1",
"]",
"for",
"pair",
"in",
"sorted",
"(",
"nodes",
")",
"]"
] |
do a recursive scan of the top level target file
This lets us search for included files based on the
directory of the main file just as latex does
|
[
"do",
"a",
"recursive",
"scan",
"of",
"the",
"top",
"level",
"target",
"file",
"This",
"lets",
"us",
"search",
"for",
"included",
"files",
"based",
"on",
"the",
"directory",
"of",
"the",
"main",
"file",
"just",
"as",
"latex",
"does"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Scanner/LaTeX.py#L383-L431
|
train
|
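The hand-coded DSU (decorate-sort-undecorate, or Schwartzian transform) pattern the comments refer to, shown in isolation with an illustrative sort key:

files = ['chap2.tex', 'Intro.tex', 'appendix.tex']
decorated = [(name.lower(), name) for name in files]  # decorate each item with its sort key
decorated.sort()                                      # sort on the key
ordered = [pair[1] for pair in decorated]             # undecorate
# ordered == ['appendix.tex', 'chap2.tex', 'Intro.tex']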
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Debug.py
|
caller_trace
|
def caller_trace(back=0):
"""
Trace caller stack and save info into global dicts, which
are printed automatically at the end of SCons execution.
"""
global caller_bases, caller_dicts
import traceback
tb = traceback.extract_stack(limit=3+back)
tb.reverse()
callee = tb[1][:3]
caller_bases[callee] = caller_bases.get(callee, 0) + 1
for caller in tb[2:]:
caller = callee + caller[:3]
try:
entry = caller_dicts[callee]
except KeyError:
caller_dicts[callee] = entry = {}
entry[caller] = entry.get(caller, 0) + 1
callee = caller
|
python
|
def caller_trace(back=0):
"""
Trace caller stack and save info into global dicts, which
are printed automatically at the end of SCons execution.
"""
global caller_bases, caller_dicts
import traceback
tb = traceback.extract_stack(limit=3+back)
tb.reverse()
callee = tb[1][:3]
caller_bases[callee] = caller_bases.get(callee, 0) + 1
for caller in tb[2:]:
caller = callee + caller[:3]
try:
entry = caller_dicts[callee]
except KeyError:
caller_dicts[callee] = entry = {}
entry[caller] = entry.get(caller, 0) + 1
callee = caller
|
[
"def",
"caller_trace",
"(",
"back",
"=",
"0",
")",
":",
"global",
"caller_bases",
",",
"caller_dicts",
"import",
"traceback",
"tb",
"=",
"traceback",
".",
"extract_stack",
"(",
"limit",
"=",
"3",
"+",
"back",
")",
"tb",
".",
"reverse",
"(",
")",
"callee",
"=",
"tb",
"[",
"1",
"]",
"[",
":",
"3",
"]",
"caller_bases",
"[",
"callee",
"]",
"=",
"caller_bases",
".",
"get",
"(",
"callee",
",",
"0",
")",
"+",
"1",
"for",
"caller",
"in",
"tb",
"[",
"2",
":",
"]",
":",
"caller",
"=",
"callee",
"+",
"caller",
"[",
":",
"3",
"]",
"try",
":",
"entry",
"=",
"caller_dicts",
"[",
"callee",
"]",
"except",
"KeyError",
":",
"caller_dicts",
"[",
"callee",
"]",
"=",
"entry",
"=",
"{",
"}",
"entry",
"[",
"caller",
"]",
"=",
"entry",
".",
"get",
"(",
"caller",
",",
"0",
")",
"+",
"1",
"callee",
"=",
"caller"
] |
Trace caller stack and save info into global dicts, which
are printed automatically at the end of SCons execution.
|
[
"Trace",
"caller",
"stack",
"and",
"save",
"info",
"into",
"global",
"dicts",
"which",
"are",
"printed",
"automatically",
"at",
"the",
"end",
"of",
"SCons",
"execution",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Debug.py#L144-L162
|
train
|
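What traceback.extract_stack(limit=...) yields, which is the raw material caller_trace aggregates into its global dicts (a small illustrative demo, not part of SCons):

import traceback

def outer():
    inner()

def inner():
    frames = traceback.extract_stack(limit=3)  # innermost frames only
    frames.reverse()
    for f in frames:
        print(f.filename, f.lineno, f.name)    # e.g. demo.py 7 inner / demo.py 4 outer / ...

outer()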
iotile/coretools
|
iotilecore/iotile/core/utilities/intelhex/__init__.py
|
diff_dumps
|
def diff_dumps(ih1, ih2, tofile=None, name1="a", name2="b", n_context=3):
"""Diff 2 IntelHex objects and produce unified diff output for their
hex dumps.
@param ih1 first IntelHex object to compare
@param ih2 second IntelHex object to compare
@param tofile file-like object to write output
@param name1 name of the first hex file to show in the diff header
@param name2 name of the second hex file to show in the diff header
@param n_context number of context lines in the unidiff output
"""
def prepare_lines(ih):
sio = StringIO()
ih.dump(sio)
dump = sio.getvalue()
lines = dump.splitlines()
return lines
a = prepare_lines(ih1)
b = prepare_lines(ih2)
import difflib
result = list(difflib.unified_diff(a, b, fromfile=name1, tofile=name2, n=n_context, lineterm=''))
if tofile is None:
tofile = sys.stdout
output = '\n'.join(result)+'\n'
tofile.write(output)
|
python
|
def diff_dumps(ih1, ih2, tofile=None, name1="a", name2="b", n_context=3):
"""Diff 2 IntelHex objects and produce unified diff output for their
hex dumps.
@param ih1 first IntelHex object to compare
@param ih2 second IntelHex object to compare
@param tofile file-like object to write output
@param name1 name of the first hex file to show in the diff header
@param name2 name of the second hex file to show in the diff header
@param n_context number of context lines in the unidiff output
"""
def prepare_lines(ih):
sio = StringIO()
ih.dump(sio)
dump = sio.getvalue()
lines = dump.splitlines()
return lines
a = prepare_lines(ih1)
b = prepare_lines(ih2)
import difflib
result = list(difflib.unified_diff(a, b, fromfile=name1, tofile=name2, n=n_context, lineterm=''))
if tofile is None:
tofile = sys.stdout
output = '\n'.join(result)+'\n'
tofile.write(output)
|
[
"def",
"diff_dumps",
"(",
"ih1",
",",
"ih2",
",",
"tofile",
"=",
"None",
",",
"name1",
"=",
"\"a\"",
",",
"name2",
"=",
"\"b\"",
",",
"n_context",
"=",
"3",
")",
":",
"def",
"prepare_lines",
"(",
"ih",
")",
":",
"sio",
"=",
"StringIO",
"(",
")",
"ih",
".",
"dump",
"(",
"sio",
")",
"dump",
"=",
"sio",
".",
"getvalue",
"(",
")",
"lines",
"=",
"dump",
".",
"splitlines",
"(",
")",
"return",
"lines",
"a",
"=",
"prepare_lines",
"(",
"ih1",
")",
"b",
"=",
"prepare_lines",
"(",
"ih2",
")",
"import",
"difflib",
"result",
"=",
"list",
"(",
"difflib",
".",
"unified_diff",
"(",
"a",
",",
"b",
",",
"fromfile",
"=",
"name1",
",",
"tofile",
"=",
"name2",
",",
"n",
"=",
"n_context",
",",
"lineterm",
"=",
"''",
")",
")",
"if",
"tofile",
"is",
"None",
":",
"tofile",
"=",
"sys",
".",
"stdout",
"output",
"=",
"'\\n'",
".",
"join",
"(",
"result",
")",
"+",
"'\\n'",
"tofile",
".",
"write",
"(",
"output",
")"
] |
Diff 2 IntelHex objects and produce unified diff output for their
hex dumps.
@param ih1 first IntelHex object to compare
@param ih2 second IntelHex object to compare
@param tofile file-like object to write output
@param name1 name of the first hex file to show in the diff header
@param name2 name of the second hex file to show in the diff header
@param n_context number of context lines in the unidiff output
|
[
"Diff",
"2",
"IntelHex",
"objects",
"and",
"produce",
"unified",
"diff",
"output",
"for",
"their",
"hex",
"dumps",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/intelhex/__init__.py#L1100-L1124
|
train
|
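A usage sketch; the import path assumes the standalone intelhex package rather than this vendored copy:

from intelhex import IntelHex, diff_dumps

old, new = IntelHex(), IntelHex()
old.frombytes(b'\x01\x02\x03')
new.frombytes(b'\x01\xff\x03')
diff_dumps(old, new, name1='old.hex', name2='new.hex')  # unified diff of the hex dumps to stdout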
iotile/coretools
|
iotilecore/iotile/core/utilities/intelhex/__init__.py
|
IntelHex._decode_record
|
def _decode_record(self, s, line=0):
'''Decode one record of HEX file.
@param s line with HEX record.
@param line line number (for error messages).
@raise EndOfFile if EOF record encountered.
'''
s = s.rstrip('\r\n')
if not s:
return # empty line
if s[0] == ':':
try:
bin = array('B', unhexlify(asbytes(s[1:])))
except (TypeError, ValueError):
# this might be raised by unhexlify when odd hexascii digits
raise HexRecordError(line=line)
length = len(bin)
if length < 5:
raise HexRecordError(line=line)
else:
raise HexRecordError(line=line)
record_length = bin[0]
if length != (5 + record_length):
raise RecordLengthError(line=line)
addr = bin[1]*256 + bin[2]
record_type = bin[3]
if not (0 <= record_type <= 5):
raise RecordTypeError(line=line)
crc = sum(bin)
crc &= 0x0FF
if crc != 0:
raise RecordChecksumError(line=line)
if record_type == 0:
# data record
addr += self._offset
for i in range_g(4, 4+record_length):
if not self._buf.get(addr, None) is None:
raise AddressOverlapError(address=addr, line=line)
self._buf[addr] = bin[i]
addr += 1 # FIXME: addr should be wrapped
# BUT after 02 record (at 64K boundary)
# and after 04 record (at 4G boundary)
elif record_type == 1:
# end of file record
if record_length != 0:
raise EOFRecordError(line=line)
raise _EndOfFile
elif record_type == 2:
# Extended 8086 Segment Record
if record_length != 2 or addr != 0:
raise ExtendedSegmentAddressRecordError(line=line)
self._offset = (bin[4]*256 + bin[5]) * 16
elif record_type == 4:
# Extended Linear Address Record
if record_length != 2 or addr != 0:
raise ExtendedLinearAddressRecordError(line=line)
self._offset = (bin[4]*256 + bin[5]) * 65536
elif record_type == 3:
# Start Segment Address Record
if record_length != 4 or addr != 0:
raise StartSegmentAddressRecordError(line=line)
if self.start_addr:
raise DuplicateStartAddressRecordError(line=line)
self.start_addr = {'CS': bin[4]*256 + bin[5],
'IP': bin[6]*256 + bin[7],
}
elif record_type == 5:
# Start Linear Address Record
if record_length != 4 or addr != 0:
raise StartLinearAddressRecordError(line=line)
if self.start_addr:
raise DuplicateStartAddressRecordError(line=line)
self.start_addr = {'EIP': (bin[4]*16777216 +
bin[5]*65536 +
bin[6]*256 +
bin[7]),
}
|
python
|
def _decode_record(self, s, line=0):
'''Decode one record of HEX file.
@param s line with HEX record.
@param line line number (for error messages).
@raise EndOfFile if EOF record encountered.
'''
s = s.rstrip('\r\n')
if not s:
return # empty line
if s[0] == ':':
try:
bin = array('B', unhexlify(asbytes(s[1:])))
except (TypeError, ValueError):
# this might be raised by unhexlify when odd hexascii digits
raise HexRecordError(line=line)
length = len(bin)
if length < 5:
raise HexRecordError(line=line)
else:
raise HexRecordError(line=line)
record_length = bin[0]
if length != (5 + record_length):
raise RecordLengthError(line=line)
addr = bin[1]*256 + bin[2]
record_type = bin[3]
if not (0 <= record_type <= 5):
raise RecordTypeError(line=line)
crc = sum(bin)
crc &= 0x0FF
if crc != 0:
raise RecordChecksumError(line=line)
if record_type == 0:
# data record
addr += self._offset
for i in range_g(4, 4+record_length):
if not self._buf.get(addr, None) is None:
raise AddressOverlapError(address=addr, line=line)
self._buf[addr] = bin[i]
addr += 1 # FIXME: addr should be wrapped
# BUT after 02 record (at 64K boundary)
# and after 04 record (at 4G boundary)
elif record_type == 1:
# end of file record
if record_length != 0:
raise EOFRecordError(line=line)
raise _EndOfFile
elif record_type == 2:
# Extended 8086 Segment Record
if record_length != 2 or addr != 0:
raise ExtendedSegmentAddressRecordError(line=line)
self._offset = (bin[4]*256 + bin[5]) * 16
elif record_type == 4:
# Extended Linear Address Record
if record_length != 2 or addr != 0:
raise ExtendedLinearAddressRecordError(line=line)
self._offset = (bin[4]*256 + bin[5]) * 65536
elif record_type == 3:
# Start Segment Address Record
if record_length != 4 or addr != 0:
raise StartSegmentAddressRecordError(line=line)
if self.start_addr:
raise DuplicateStartAddressRecordError(line=line)
self.start_addr = {'CS': bin[4]*256 + bin[5],
'IP': bin[6]*256 + bin[7],
}
elif record_type == 5:
# Start Linear Address Record
if record_length != 4 or addr != 0:
raise StartLinearAddressRecordError(line=line)
if self.start_addr:
raise DuplicateStartAddressRecordError(line=line)
self.start_addr = {'EIP': (bin[4]*16777216 +
bin[5]*65536 +
bin[6]*256 +
bin[7]),
}
|
[
"def",
"_decode_record",
"(",
"self",
",",
"s",
",",
"line",
"=",
"0",
")",
":",
"s",
"=",
"s",
".",
"rstrip",
"(",
"'\\r\\n'",
")",
"if",
"not",
"s",
":",
"return",
"# empty line",
"if",
"s",
"[",
"0",
"]",
"==",
"':'",
":",
"try",
":",
"bin",
"=",
"array",
"(",
"'B'",
",",
"unhexlify",
"(",
"asbytes",
"(",
"s",
"[",
"1",
":",
"]",
")",
")",
")",
"except",
"(",
"TypeError",
",",
"ValueError",
")",
":",
"# this might be raised by unhexlify when odd hexascii digits",
"raise",
"HexRecordError",
"(",
"line",
"=",
"line",
")",
"length",
"=",
"len",
"(",
"bin",
")",
"if",
"length",
"<",
"5",
":",
"raise",
"HexRecordError",
"(",
"line",
"=",
"line",
")",
"else",
":",
"raise",
"HexRecordError",
"(",
"line",
"=",
"line",
")",
"record_length",
"=",
"bin",
"[",
"0",
"]",
"if",
"length",
"!=",
"(",
"5",
"+",
"record_length",
")",
":",
"raise",
"RecordLengthError",
"(",
"line",
"=",
"line",
")",
"addr",
"=",
"bin",
"[",
"1",
"]",
"*",
"256",
"+",
"bin",
"[",
"2",
"]",
"record_type",
"=",
"bin",
"[",
"3",
"]",
"if",
"not",
"(",
"0",
"<=",
"record_type",
"<=",
"5",
")",
":",
"raise",
"RecordTypeError",
"(",
"line",
"=",
"line",
")",
"crc",
"=",
"sum",
"(",
"bin",
")",
"crc",
"&=",
"0x0FF",
"if",
"crc",
"!=",
"0",
":",
"raise",
"RecordChecksumError",
"(",
"line",
"=",
"line",
")",
"if",
"record_type",
"==",
"0",
":",
"# data record",
"addr",
"+=",
"self",
".",
"_offset",
"for",
"i",
"in",
"range_g",
"(",
"4",
",",
"4",
"+",
"record_length",
")",
":",
"if",
"not",
"self",
".",
"_buf",
".",
"get",
"(",
"addr",
",",
"None",
")",
"is",
"None",
":",
"raise",
"AddressOverlapError",
"(",
"address",
"=",
"addr",
",",
"line",
"=",
"line",
")",
"self",
".",
"_buf",
"[",
"addr",
"]",
"=",
"bin",
"[",
"i",
"]",
"addr",
"+=",
"1",
"# FIXME: addr should be wrapped",
"# BUT after 02 record (at 64K boundary)",
"# and after 04 record (at 4G boundary)",
"elif",
"record_type",
"==",
"1",
":",
"# end of file record",
"if",
"record_length",
"!=",
"0",
":",
"raise",
"EOFRecordError",
"(",
"line",
"=",
"line",
")",
"raise",
"_EndOfFile",
"elif",
"record_type",
"==",
"2",
":",
"# Extended 8086 Segment Record",
"if",
"record_length",
"!=",
"2",
"or",
"addr",
"!=",
"0",
":",
"raise",
"ExtendedSegmentAddressRecordError",
"(",
"line",
"=",
"line",
")",
"self",
".",
"_offset",
"=",
"(",
"bin",
"[",
"4",
"]",
"*",
"256",
"+",
"bin",
"[",
"5",
"]",
")",
"*",
"16",
"elif",
"record_type",
"==",
"4",
":",
"# Extended Linear Address Record",
"if",
"record_length",
"!=",
"2",
"or",
"addr",
"!=",
"0",
":",
"raise",
"ExtendedLinearAddressRecordError",
"(",
"line",
"=",
"line",
")",
"self",
".",
"_offset",
"=",
"(",
"bin",
"[",
"4",
"]",
"*",
"256",
"+",
"bin",
"[",
"5",
"]",
")",
"*",
"65536",
"elif",
"record_type",
"==",
"3",
":",
"# Start Segment Address Record",
"if",
"record_length",
"!=",
"4",
"or",
"addr",
"!=",
"0",
":",
"raise",
"StartSegmentAddressRecordError",
"(",
"line",
"=",
"line",
")",
"if",
"self",
".",
"start_addr",
":",
"raise",
"DuplicateStartAddressRecordError",
"(",
"line",
"=",
"line",
")",
"self",
".",
"start_addr",
"=",
"{",
"'CS'",
":",
"bin",
"[",
"4",
"]",
"*",
"256",
"+",
"bin",
"[",
"5",
"]",
",",
"'IP'",
":",
"bin",
"[",
"6",
"]",
"*",
"256",
"+",
"bin",
"[",
"7",
"]",
",",
"}",
"elif",
"record_type",
"==",
"5",
":",
"# Start Linear Address Record",
"if",
"record_length",
"!=",
"4",
"or",
"addr",
"!=",
"0",
":",
"raise",
"StartLinearAddressRecordError",
"(",
"line",
"=",
"line",
")",
"if",
"self",
".",
"start_addr",
":",
"raise",
"DuplicateStartAddressRecordError",
"(",
"line",
"=",
"line",
")",
"self",
".",
"start_addr",
"=",
"{",
"'EIP'",
":",
"(",
"bin",
"[",
"4",
"]",
"*",
"16777216",
"+",
"bin",
"[",
"5",
"]",
"*",
"65536",
"+",
"bin",
"[",
"6",
"]",
"*",
"256",
"+",
"bin",
"[",
"7",
"]",
")",
",",
"}"
] |
Decode one record of HEX file.
@param s line with HEX record.
@param line line number (for error messages).
@raise EndOfFile if EOF record encountered.
|
[
"Decode",
"one",
"record",
"of",
"HEX",
"file",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/intelhex/__init__.py#L109-L197
|
train
|
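The checksum rule enforced above (crc = sum(bin) & 0xFF must be zero), checked by hand on a sample data record from the Intel HEX specification:

from binascii import unhexlify

record = ':0300300002337A1E'
payload = unhexlify(record[1:])
assert sum(payload) & 0xFF == 0  # all bytes, including the trailing checksum, sum to 0 mod 256
record_length, record_type = payload[0], payload[3]
addr = payload[1] * 256 + payload[2]
assert (record_length, addr, record_type) == (3, 0x0030, 0)  # 3 data bytes at address 0x0030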
iotile/coretools
|
iotilecore/iotile/core/utilities/intelhex/__init__.py
|
IntelHex.loadhex
|
def loadhex(self, fobj):
"""Load hex file into internal buffer. This is not necessary
if object was initialized with source set. This will overwrite
addresses if object was already initialized.
@param fobj file name or file-like object
"""
if getattr(fobj, "read", None) is None:
fobj = open(fobj, "r")
fclose = fobj.close
else:
fclose = None
self._offset = 0
line = 0
try:
decode = self._decode_record
try:
for s in fobj:
line += 1
decode(s, line)
except _EndOfFile:
pass
finally:
if fclose:
fclose()
|
python
|
def loadhex(self, fobj):
"""Load hex file into internal buffer. This is not necessary
if object was initialized with source set. This will overwrite
addresses if object was already initialized.
@param fobj file name or file-like object
"""
if getattr(fobj, "read", None) is None:
fobj = open(fobj, "r")
fclose = fobj.close
else:
fclose = None
self._offset = 0
line = 0
try:
decode = self._decode_record
try:
for s in fobj:
line += 1
decode(s, line)
except _EndOfFile:
pass
finally:
if fclose:
fclose()
|
[
"def",
"loadhex",
"(",
"self",
",",
"fobj",
")",
":",
"if",
"getattr",
"(",
"fobj",
",",
"\"read\"",
",",
"None",
")",
"is",
"None",
":",
"fobj",
"=",
"open",
"(",
"fobj",
",",
"\"r\"",
")",
"fclose",
"=",
"fobj",
".",
"close",
"else",
":",
"fclose",
"=",
"None",
"self",
".",
"_offset",
"=",
"0",
"line",
"=",
"0",
"try",
":",
"decode",
"=",
"self",
".",
"_decode_record",
"try",
":",
"for",
"s",
"in",
"fobj",
":",
"line",
"+=",
"1",
"decode",
"(",
"s",
",",
"line",
")",
"except",
"_EndOfFile",
":",
"pass",
"finally",
":",
"if",
"fclose",
":",
"fclose",
"(",
")"
] |
Load hex file into internal buffer. This is not necessary
if object was initialized with source set. This will overwrite
addresses if object was already initialized.
@param fobj file name or file-like object
|
[
"Load",
"hex",
"file",
"into",
"internal",
"buffer",
".",
"This",
"is",
"not",
"necessary",
"if",
"object",
"was",
"initialized",
"with",
"source",
"set",
".",
"This",
"will",
"overwrite",
"addresses",
"if",
"object",
"was",
"already",
"initialized",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/intelhex/__init__.py#L199-L225
|
train
|
iotile/coretools
|
iotilecore/iotile/core/utilities/intelhex/__init__.py
|
IntelHex.loadbin
|
def loadbin(self, fobj, offset=0):
"""Load bin file into internal buffer. Not needed if source set in
constructor. This will overwrite addresses without warning
if object was already initialized.
@param fobj file name or file-like object
@param offset starting address offset
"""
fread = getattr(fobj, "read", None)
if fread is None:
f = open(fobj, "rb")
fread = f.read
fclose = f.close
else:
fclose = None
try:
self.frombytes(array('B', asbytes(fread())), offset=offset)
finally:
if fclose:
fclose()
|
python
|
def loadbin(self, fobj, offset=0):
"""Load bin file into internal buffer. Not needed if source set in
constructor. This will overwrite addresses without warning
if object was already initialized.
@param fobj file name or file-like object
@param offset starting address offset
"""
fread = getattr(fobj, "read", None)
if fread is None:
f = open(fobj, "rb")
fread = f.read
fclose = f.close
else:
fclose = None
try:
self.frombytes(array('B', asbytes(fread())), offset=offset)
finally:
if fclose:
fclose()
|
[
"def",
"loadbin",
"(",
"self",
",",
"fobj",
",",
"offset",
"=",
"0",
")",
":",
"fread",
"=",
"getattr",
"(",
"fobj",
",",
"\"read\"",
",",
"None",
")",
"if",
"fread",
"is",
"None",
":",
"f",
"=",
"open",
"(",
"fobj",
",",
"\"rb\"",
")",
"fread",
"=",
"f",
".",
"read",
"fclose",
"=",
"f",
".",
"close",
"else",
":",
"fclose",
"=",
"None",
"try",
":",
"self",
".",
"frombytes",
"(",
"array",
"(",
"'B'",
",",
"asbytes",
"(",
"fread",
"(",
")",
")",
")",
",",
"offset",
"=",
"offset",
")",
"finally",
":",
"if",
"fclose",
":",
"fclose",
"(",
")"
] |
Load bin file into internal buffer. Not needed if source set in
constructor. This will overwrite addresses without warning
if object was already initialized.
@param fobj file name or file-like object
@param offset starting address offset
|
[
"Load",
"bin",
"file",
"into",
"internal",
"buffer",
".",
"Not",
"needed",
"if",
"source",
"set",
"in",
"constructor",
".",
"This",
"will",
"overwrite",
"addresses",
"without",
"warning",
"if",
"object",
"was",
"already",
"initialized",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/intelhex/__init__.py#L227-L247
|
train
|
iotile/coretools
|
iotilecore/iotile/core/utilities/intelhex/__init__.py
|
IntelHex.loadfile
|
def loadfile(self, fobj, format):
"""Load data file into internal buffer. Preferred wrapper over
loadbin or loadhex.
@param fobj file name or file-like object
@param format file format ("hex" or "bin")
"""
if format == "hex":
self.loadhex(fobj)
elif format == "bin":
self.loadbin(fobj)
else:
raise ValueError('format should be either "hex" or "bin";'
' got %r instead' % format)
|
python
|
def loadfile(self, fobj, format):
"""Load data file into internal buffer. Preferred wrapper over
loadbin or loadhex.
@param fobj file name or file-like object
@param format file format ("hex" or "bin")
"""
if format == "hex":
self.loadhex(fobj)
elif format == "bin":
self.loadbin(fobj)
else:
raise ValueError('format should be either "hex" or "bin";'
' got %r instead' % format)
|
[
"def",
"loadfile",
"(",
"self",
",",
"fobj",
",",
"format",
")",
":",
"if",
"format",
"==",
"\"hex\"",
":",
"self",
".",
"loadhex",
"(",
"fobj",
")",
"elif",
"format",
"==",
"\"bin\"",
":",
"self",
".",
"loadbin",
"(",
"fobj",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'format should be either \"hex\" or \"bin\";'",
"' got %r instead'",
"%",
"format",
")"
] |
Load data file into internal buffer. Preferred wrapper over
loadbin or loadhex.
@param fobj file name or file-like object
@param format file format ("hex" or "bin")
|
[
"Load",
"data",
"file",
"into",
"internal",
"buffer",
".",
"Preferred",
"wrapper",
"over",
"loadbin",
"or",
"loadhex",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/intelhex/__init__.py#L249-L262
|
train
|
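A short sketch of IntelHex.loadfile from the record above acting as a format dispatcher, assuming intelhex is importable; the file names are hypothetical.

from intelhex import IntelHex

ih = IntelHex()
ih.loadfile("image.hex", format="hex")   # delegates to loadhex()

ih2 = IntelHex()
ih2.loadfile("image.bin", format="bin")  # delegates to loadbin()

# Any other format string raises ValueError:
try:
    ih2.loadfile("image.srec", format="srec")
except ValueError as err:
    print(err)   # format should be either "hex" or "bin"; got 'srec' instead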
iotile/coretools
|
iotilecore/iotile/core/utilities/intelhex/__init__.py
|
IntelHex._get_start_end
|
def _get_start_end(self, start=None, end=None, size=None):
"""Return default values for start and end if they are None.
If this IntelHex object is empty then it's error to
invoke this method with both start and end as None.
"""
if (start,end) == (None,None) and self._buf == {}:
raise EmptyIntelHexError
if size is not None:
if None not in (start, end):
raise ValueError("tobinarray: you can't use start,end and size"
" arguments in the same time")
if (start, end) == (None, None):
start = self.minaddr()
if start is not None:
end = start + size - 1
else:
start = end - size + 1
if start < 0:
raise ValueError("tobinarray: invalid size (%d) "
"for given end address (%d)" % (size,end))
else:
if start is None:
start = self.minaddr()
if end is None:
end = self.maxaddr()
if start > end:
start, end = end, start
return start, end
|
python
|
def _get_start_end(self, start=None, end=None, size=None):
"""Return default values for start and end if they are None.
If this IntelHex object is empty then it's error to
invoke this method with both start and end as None.
"""
if (start,end) == (None,None) and self._buf == {}:
raise EmptyIntelHexError
if size is not None:
if None not in (start, end):
raise ValueError("tobinarray: you can't use start,end and size"
" arguments in the same time")
if (start, end) == (None, None):
start = self.minaddr()
if start is not None:
end = start + size - 1
else:
start = end - size + 1
if start < 0:
raise ValueError("tobinarray: invalid size (%d) "
"for given end address (%d)" % (size,end))
else:
if start is None:
start = self.minaddr()
if end is None:
end = self.maxaddr()
if start > end:
start, end = end, start
return start, end
|
[
"def",
"_get_start_end",
"(",
"self",
",",
"start",
"=",
"None",
",",
"end",
"=",
"None",
",",
"size",
"=",
"None",
")",
":",
"if",
"(",
"start",
",",
"end",
")",
"==",
"(",
"None",
",",
"None",
")",
"and",
"self",
".",
"_buf",
"==",
"{",
"}",
":",
"raise",
"EmptyIntelHexError",
"if",
"size",
"is",
"not",
"None",
":",
"if",
"None",
"not",
"in",
"(",
"start",
",",
"end",
")",
":",
"raise",
"ValueError",
"(",
"\"tobinarray: you can't use start,end and size\"",
"\" arguments in the same time\"",
")",
"if",
"(",
"start",
",",
"end",
")",
"==",
"(",
"None",
",",
"None",
")",
":",
"start",
"=",
"self",
".",
"minaddr",
"(",
")",
"if",
"start",
"is",
"not",
"None",
":",
"end",
"=",
"start",
"+",
"size",
"-",
"1",
"else",
":",
"start",
"=",
"end",
"-",
"size",
"+",
"1",
"if",
"start",
"<",
"0",
":",
"raise",
"ValueError",
"(",
"\"tobinarray: invalid size (%d) \"",
"\"for given end address (%d)\"",
"%",
"(",
"size",
",",
"end",
")",
")",
"else",
":",
"if",
"start",
"is",
"None",
":",
"start",
"=",
"self",
".",
"minaddr",
"(",
")",
"if",
"end",
"is",
"None",
":",
"end",
"=",
"self",
".",
"maxaddr",
"(",
")",
"if",
"start",
">",
"end",
":",
"start",
",",
"end",
"=",
"end",
",",
"start",
"return",
"start",
",",
"end"
] |
Return default values for start and end if they are None.
If this IntelHex object is empty then it's error to
invoke this method with both start and end as None.
|
[
"Return",
"default",
"values",
"for",
"start",
"and",
"end",
"if",
"they",
"are",
"None",
".",
"If",
"this",
"IntelHex",
"object",
"is",
"empty",
"then",
"it",
"s",
"error",
"to",
"invoke",
"this",
"method",
"with",
"both",
"start",
"and",
"end",
"as",
"None",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/intelhex/__init__.py#L297-L324
|
train
|
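_get_start_end is private, but its inference rules are observable through the public tobinarray wrapper (listed in the next record). A sketch of those rules, assuming intelhex is importable and that frombytes accepts a bytes object:

from intelhex import IntelHex

ih = IntelHex()
ih.frombytes(b"\x01\x02\x03\x04", offset=0x100)

# start=None, end=None: both default to minaddr()/maxaddr().
print(list(ih.tobinarray()))                     # [1, 2, 3, 4]
# start + size: end is inferred as start + size - 1.
print(list(ih.tobinarray(start=0x100, size=2)))  # [1, 2]
# end + size: start is inferred as end - size + 1.
print(list(ih.tobinarray(end=0x103, size=2)))    # [3, 4]
# Supplying start, end and size together raises ValueError, and calling
# with no arguments on an empty object raises EmptyIntelHexError.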
iotile/coretools
|
iotilecore/iotile/core/utilities/intelhex/__init__.py
|
IntelHex.tobinarray
|
def tobinarray(self, start=None, end=None, pad=_DEPRECATED, size=None):
''' Convert this object to binary form as array. If start and end
unspecified, they will be inferred from the data.
@param start start address of output bytes.
@param end end address of output bytes (inclusive).
@param pad [DEPRECATED PARAMETER, please use self.padding instead]
fill empty spaces with this value
(if pad is None then this method uses self.padding).
@param size size of the block, used with start or end parameter.
@return array of unsigned char data.
'''
if not isinstance(pad, _DeprecatedParam):
print ("IntelHex.tobinarray: 'pad' parameter is deprecated.")
if pad is not None:
print ("Please, use IntelHex.padding attribute instead.")
else:
print ("Please, don't pass it explicitly.")
print ("Use syntax like this: ih.tobinarray(start=xxx, end=yyy, size=zzz)")
else:
pad = None
return self._tobinarray_really(start, end, pad, size)
|
python
|
def tobinarray(self, start=None, end=None, pad=_DEPRECATED, size=None):
''' Convert this object to binary form as array. If start and end
unspecified, they will be inferred from the data.
@param start start address of output bytes.
@param end end address of output bytes (inclusive).
@param pad [DEPRECATED PARAMETER, please use self.padding instead]
fill empty spaces with this value
(if pad is None then this method uses self.padding).
@param size size of the block, used with start or end parameter.
@return array of unsigned char data.
'''
if not isinstance(pad, _DeprecatedParam):
print ("IntelHex.tobinarray: 'pad' parameter is deprecated.")
if pad is not None:
print ("Please, use IntelHex.padding attribute instead.")
else:
print ("Please, don't pass it explicitly.")
print ("Use syntax like this: ih.tobinarray(start=xxx, end=yyy, size=zzz)")
else:
pad = None
return self._tobinarray_really(start, end, pad, size)
|
[
"def",
"tobinarray",
"(",
"self",
",",
"start",
"=",
"None",
",",
"end",
"=",
"None",
",",
"pad",
"=",
"_DEPRECATED",
",",
"size",
"=",
"None",
")",
":",
"if",
"not",
"isinstance",
"(",
"pad",
",",
"_DeprecatedParam",
")",
":",
"print",
"(",
"\"IntelHex.tobinarray: 'pad' parameter is deprecated.\"",
")",
"if",
"pad",
"is",
"not",
"None",
":",
"print",
"(",
"\"Please, use IntelHex.padding attribute instead.\"",
")",
"else",
":",
"print",
"(",
"\"Please, don't pass it explicitly.\"",
")",
"print",
"(",
"\"Use syntax like this: ih.tobinarray(start=xxx, end=yyy, size=zzz)\"",
")",
"else",
":",
"pad",
"=",
"None",
"return",
"self",
".",
"_tobinarray_really",
"(",
"start",
",",
"end",
",",
"pad",
",",
"size",
")"
] |
Convert this object to binary form as array. If start and end
unspecified, they will be inferred from the data.
@param start start address of output bytes.
@param end end address of output bytes (inclusive).
@param pad [DEPRECATED PARAMETER, please use self.padding instead]
fill empty spaces with this value
(if pad is None then this method uses self.padding).
@param size size of the block, used with start or end parameter.
@return array of unsigned char data.
|
[
"Convert",
"this",
"object",
"to",
"binary",
"form",
"as",
"array",
".",
"If",
"start",
"and",
"end",
"unspecified",
"they",
"will",
"be",
"inferred",
"from",
"the",
"data",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/intelhex/__init__.py#L326-L346
|
train
|
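A sketch of the non-deprecated calling convention for tobinarray from the record above, assuming intelhex is importable; the addresses and byte values are made up. Gaps are filled from the padding attribute named in the docstring, rather than the deprecated pad parameter:

from intelhex import IntelHex

ih = IntelHex()
ih.frombytes(b"\xaa\xbb", offset=0)
ih[4] = 0xcc          # leaves a gap at addresses 2 and 3
ih.padding = 0xff     # preferred over passing pad= (deprecated above)

print(ih.tobinarray().tolist())   # [170, 187, 255, 255, 204]
# Passing pad= explicitly still works, but prints the deprecation notice
# shown in the method body.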