repo | path | func_name | code | language | sha | url | partition
---|---|---|---|---|---|---|---
iotile/coretools
|
iotilegateway/iotilegateway/supervisor/states.py
|
ServiceState.get_message
|
def get_message(self, message_id):
"""Get a message by its persistent id.
Args:
message_id (int): The id of the message that we're looking for
"""
for message in self.messages:
if message.id == message_id:
return message
raise ArgumentError("Message ID not found", message_id=message_id)
|
python
|
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/states.py#L164-L175
|
train
|
iotile/coretools
|
iotilegateway/iotilegateway/supervisor/states.py
|
ServiceState.post_message
|
def post_message(self, level, message, count=1, timestamp=None, now_reference=None):
"""Post a new message for service.
Args:
level (int): The level of the message (info, warning, error)
message (string): The message contents
count (int): The number of times the message has been repeated
timestamp (float): An optional monotonic value in seconds for when the message was created
now_reference (float): If timestamp is not relative to monotonic() as called from this
module then this should be now() as seen by whoever created the timestamp.
Returns:
ServiceMessage: The posted message
"""
if len(self.messages) > 0 and self.messages[-1].message == message:
self.messages[-1].count += 1
else:
msg_object = ServiceMessage(level, message, self._last_message_id, timestamp, now_reference)
msg_object.count = count
self.messages.append(msg_object)
self._last_message_id += 1
return self.messages[-1]
|
python
|
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/states.py#L177-L200
|
train
|
iotile/coretools
|
iotilegateway/iotilegateway/supervisor/states.py
|
ServiceState.set_headline
|
def set_headline(self, level, message, timestamp=None, now_reference=None):
"""Set the persistent headline message for this service.
Args:
level (int): The level of the message (info, warning, error)
message (string): The message contents
timestamp (float): An optional monotonic value in seconds for when the message was created
now_reference (float): If timestamp is not relative to monotonic() as called from this
module then this should be now() as seen by whoever created the timestamp.
"""
if self.headline is not None and self.headline.message == message:
self.headline.created = monotonic()
self.headline.count += 1
return
msg_object = ServiceMessage(level, message, self._last_message_id, timestamp, now_reference)
self.headline = msg_object
self._last_message_id += 1
|
python
|
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/states.py#L202-L220
|
train
|
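Taken together, get_message, post_message and set_headline form a small message API. A minimal usage sketch follows, assuming a ServiceState instance is already available (its constructor is not part of these records) via a hypothetical get_service_state() helper.

```python
# `get_service_state()` is a hypothetical helper; ServiceState's constructor
# is not shown in the records above.
state = get_service_state()

# Posting the same text twice in a row collapses into one entry with count == 2.
first = state.post_message(0, "sensor online")
second = state.post_message(0, "sensor online")
assert first is second and second.count == 2

# Messages are looked up by their persistent id; unknown ids raise ArgumentError.
msg = state.get_message(first.id)

# The headline is stored separately from the rolling message list.
state.set_headline(2, "sensor fault detected")
```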
iotile/coretools
|
iotilebuild/iotile/build/config/site_scons/docbuild.py
|
generate_doxygen_file
|
def generate_doxygen_file(output_path, iotile):
"""Fill in our default doxygen template file with info from an IOTile
This populates things like name, version, etc.
Arguments:
output_path (str): a string path for where the filled template should go
iotile (IOTile): An IOTile object that can be queried for information
"""
mapping = {}
mapping['short_name'] = iotile.short_name
mapping['full_name'] = iotile.full_name
mapping['authors'] = iotile.authors
mapping['version'] = iotile.version
render_template('doxygen.txt.tpl', mapping, out_path=output_path)
|
python
|
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/site_scons/docbuild.py#L4-L21
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/dev/pull_release.py
|
pull
|
def pull(name, version, force=False):
"""Pull a released IOTile component into the current working directory
The component is found using whatever DependencyResolvers are installed and registered
as part of the default DependencyResolverChain. This is the same mechanism used in
iotile depends update, so any component that can be updated using iotile depends update
can be found and pulled using this method.
"""
chain = DependencyResolverChain()
ver = SemanticVersionRange.FromString(version)
chain.pull_release(name, ver, force=force)
|
python
|
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/dev/pull_release.py#L9-L21
|
train
|
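A short sketch of calling pull() directly; the import path is inferred from the file path above, and the component name and version range are placeholders.

```python
# Import path inferred from iotilebuild/iotile/build/dev/pull_release.py (assumption).
from iotile.build.dev.pull_release import pull

# Pull a released component into the current working directory.
# "my_company/lib_example" and "^1.0.0" are hypothetical values.
pull("my_company/lib_example", "^1.0.0", force=True)
```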
iotile/coretools
|
iotilecore/iotile/core/hw/transport/adapter/sync_wrapper.py
|
SynchronousLegacyWrapper.add_callback
|
def add_callback(self, name, func):
"""Add a callback when device events happen.
Args:
name (str): currently supports 'on_scan', 'on_report', 'on_trace' and 'on_disconnect'
func (callable): the function that should be called
"""
if name == 'on_scan':
events = ['device_seen']
def callback(_conn_string, _conn_id, _name, event):
func(self.id, event, event.get('validity_period', 60))
elif name == 'on_report':
events = ['report', 'broadcast']
def callback(_conn_string, conn_id, _name, event):
func(conn_id, event)
elif name == 'on_trace':
events = ['trace']
def callback(_conn_string, conn_id, _name, event):
func(conn_id, event)
elif name == 'on_disconnect':
events = ['disconnection']
def callback(_conn_string, conn_id, _name, _event):
func(self.id, conn_id)
else:
raise ArgumentError("Unknown callback type {}".format(name))
self._adapter.register_monitor([None], events, callback)
|
python
|
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/transport/adapter/sync_wrapper.py#L71-L98
|
train
|
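The callback signatures follow from how add_callback invokes func above: an 'on_scan' handler receives (adapter_id, device_info, validity_period) and an 'on_disconnect' handler receives (adapter_id, conn_id). A sketch, assuming a SynchronousLegacyWrapper instance named wrapper already exists:

```python
def on_scan(adapter_id, device_info, validity_period):
    # device_info is the 'device_seen' event payload; validity_period defaults to 60.
    print("saw device", device_info, "valid for", validity_period, "seconds")

def on_disconnect(adapter_id, conn_id):
    print("connection", conn_id, "dropped on adapter", adapter_id)

# `wrapper` is an already-constructed SynchronousLegacyWrapper (assumption).
wrapper.add_callback('on_scan', on_scan)
wrapper.add_callback('on_disconnect', on_disconnect)
```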
iotile/coretools
|
iotilecore/iotile/core/hw/transport/adapter/sync_wrapper.py
|
SynchronousLegacyWrapper.disconnect_async
|
def disconnect_async(self, conn_id, callback):
"""Asynchronously disconnect from a device."""
future = self._loop.launch_coroutine(self._adapter.disconnect(conn_id))
future.add_done_callback(lambda x: self._callback_future(conn_id, x, callback))
|
python
|
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/transport/adapter/sync_wrapper.py#L120-L124
|
train
|
iotile/coretools
|
iotilecore/iotile/core/hw/transport/adapter/sync_wrapper.py
|
SynchronousLegacyWrapper.send_script_async
|
def send_script_async(self, conn_id, data, progress_callback, callback):
"""Asynchronously send a script to the device."""
def monitor_callback(_conn_string, _conn_id, _event_name, event):
if event.get('operation') != 'script':
return
progress_callback(event.get('finished'), event.get('total'))
async def _install_monitor():
try:
conn_string = self._adapter._get_property(conn_id, 'connection_string')
return self._adapter.register_monitor([conn_string], ['progress'], monitor_callback)
except: #pylint:disable=bare-except;This is a legacy shim that must always ensure it doesn't raise.
self._logger.exception("Error installing script progress monitor")
return None
monitor_id = self._loop.run_coroutine(_install_monitor())
if monitor_id is None:
callback(conn_id, self.id, False, 'could not install progress monitor')
return
future = self._loop.launch_coroutine(self._adapter.send_script(conn_id, data))
future.add_done_callback(lambda x: self._callback_future(conn_id, x, callback, monitors=[monitor_id]))
|
python
|
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/transport/adapter/sync_wrapper.py#L218-L241
|
train
|
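The two callbacks passed to send_script_async have shapes that can be read off the body above: progress_callback receives (finished, total) from 'progress' events, and the completion callback receives (conn_id, adapter_id, success, reason) on the failure path shown (the success path goes through _callback_future, which this record does not include). A hedged sketch:

```python
def progress(finished, total):
    # Called for each 'progress' event whose operation is 'script'.
    print("script progress: %d/%d" % (finished, total))

def done(conn_id, adapter_id, success, reason):
    # Signature inferred from the failure path above; the success path is an assumption.
    print("script finished:", success, reason)

# `wrapper`, `conn_id` and `script_bytes` are assumed to already exist.
wrapper.send_script_async(conn_id, script_bytes, progress, done)
```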
iotile/coretools
|
transport_plugins/awsiot/iotile_transport_awsiot/topic_validator.py
|
MQTTTopicValidator.lock
|
def lock(self, key, client):
"""Set the key that will be used to ensure messages come from one party
Args:
key (string): The key used to validate future messages
client (string): A string that will be returned to indicate who
locked this device.
"""
self.key = key
self.client = client
|
python
|
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/awsiot/iotile_transport_awsiot/topic_validator.py#L27-L37
|
train
|
iotile/coretools
|
iotileemulate/iotile/emulate/virtual/state_log.py
|
EmulationStateLog.track_change
|
def track_change(self, tile, property_name, value, formatter=None):
"""Record that a change happened on a given tile's property.
This will add a StateChange object to our list of changes if we
are recording changes; otherwise, it will drop the change.
Args:
tile (int): The address of the tile that the change happened on.
property_name (str): The name of the property that changed.
value (object): The new value assigned to the property.
formatter (callable): Optional function to convert value to a
string. This function will only be called if track_changes()
is enabled and `name` is on the whitelist for properties that
should be tracked. If `formatter` is not passed or is None,
it will default to `str`.
"""
if not self.tracking:
return
if len(self._whitelist) > 0 and (tile, property_name) not in self._whitelist:
return
if formatter is None:
formatter = str
change = StateChange(monotonic(), tile, property_name, value, formatter(value))
with self._lock:
self.changes.append(change)
|
python
|
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/virtual/state_log.py#L21-L50
|
train
|
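A small sketch of track_change with a custom formatter. It assumes an EmulationStateLog instance named log whose tracking is already enabled; neither the constructor nor the method that turns tracking on appears in this record.

```python
# `log` is an EmulationStateLog with tracking enabled (assumption).
# Changes are dropped if tracking is off or (tile, property) fails the whitelist.
log.track_change(8, 'status', 0x1F, formatter=hex)   # stored string value "0x1f"

# Without a formatter the stored string value falls back to str(value).
log.track_change(8, 'status', 32)                     # stored string value "32"
```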
iotile/coretools
|
iotileemulate/iotile/emulate/virtual/state_log.py
|
EmulationStateLog.dump
|
def dump(self, out_path, header=True):
"""Save this list of changes as a csv file at out_path.
The format of the output file will be a CSV with 4 columns:
timestamp, tile address, property, string_value
There will be a single header row starting the CSV output unless
header=False is passed.
Args:
out_path (str): The path where we should save our current list of
changes.
header (bool): Whether we should include a header row in the csv
file. Defaults to True.
"""
# See https://stackoverflow.com/a/3348664/9739119 for why this is necessary
if sys.version_info[0] < 3:
mode = "wb"
else:
mode = "w"
with open(out_path, mode) as outfile:
writer = csv.writer(outfile, quoting=csv.QUOTE_MINIMAL)
if header:
writer.writerow(["Timestamp", "Tile Address", "Property Name", "Value"])
for entry in self.changes:
writer.writerow([entry.time, entry.tile, entry.property, entry.string_value])
|
python
|
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/virtual/state_log.py#L80-L108
|
train
|
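A sketch pairing the recorded changes with dump(): dump() writes one CSV row per change plus an optional header row. The log instance is assumed as in the previous example.

```python
import csv

# `log` is an EmulationStateLog holding some recorded changes (assumption).
log.dump("changes.csv")                   # header row included by default
log.dump("changes_raw.csv", header=False)

with open("changes.csv") as infile:
    for row in csv.reader(infile):
        # Columns: Timestamp, Tile Address, Property Name, Value
        print(row)
```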
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/pdftex.py
|
generate
|
def generate(env):
"""Add Builders and construction variables for pdftex to an Environment."""
global PDFTeXAction
if PDFTeXAction is None:
PDFTeXAction = SCons.Action.Action('$PDFTEXCOM', '$PDFTEXCOMSTR')
global PDFLaTeXAction
if PDFLaTeXAction is None:
PDFLaTeXAction = SCons.Action.Action("$PDFLATEXCOM", "$PDFLATEXCOMSTR")
global PDFTeXLaTeXAction
if PDFTeXLaTeXAction is None:
PDFTeXLaTeXAction = SCons.Action.Action(PDFTeXLaTeXFunction,
strfunction=SCons.Tool.tex.TeXLaTeXStrFunction)
env.AppendUnique(LATEXSUFFIXES=SCons.Tool.LaTeXSuffixes)
from . import pdf
pdf.generate(env)
bld = env['BUILDERS']['PDF']
bld.add_action('.tex', PDFTeXLaTeXAction)
bld.add_emitter('.tex', SCons.Tool.tex.tex_pdf_emitter)
# Add the epstopdf builder after the pdftex builder
# so pdftex is the default for no source suffix
pdf.generate2(env)
SCons.Tool.tex.generate_common(env)
|
python
|
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/pdftex.py#L71-L99
|
train
|
iotile/coretools
|
iotilecore/iotile/core/hw/virtual/tile_based_device.py
|
TileBasedVirtualDevice.stop
|
def stop(self):
"""Stop running this virtual device including any worker threads."""
for tile in self._tiles.values():
tile.signal_stop()
for tile in self._tiles.values():
tile.wait_stopped()
super(TileBasedVirtualDevice, self).stop()
|
python
|
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/virtual/tile_based_device.py#L53-L62
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConf.py
|
SetCacheMode
|
def SetCacheMode(mode):
"""Set the Configure cache mode. mode must be one of "auto", "force",
or "cache"."""
global cache_mode
if mode == "auto":
cache_mode = AUTO
elif mode == "force":
cache_mode = FORCE
elif mode == "cache":
cache_mode = CACHE
else:
raise ValueError("SCons.SConf.SetCacheMode: Unknown mode " + mode)
|
python
|
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConf.py#L79-L90
|
train
|
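SetCacheMode accepts exactly the three literal strings; anything else raises ValueError. A quick sketch, assuming SCons (or the vendored copy above) is importable as SCons.SConf:

```python
import SCons.SConf

SCons.SConf.SetCacheMode("force")    # always re-run configure checks
SCons.SConf.SetCacheMode("cache")    # always trust cached results
SCons.SConf.SetCacheMode("auto")     # default behaviour

try:
    SCons.SConf.SetCacheMode("never")  # not a valid mode
except ValueError as err:
    print(err)
```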
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConf.py
|
CreateConfigHBuilder
|
def CreateConfigHBuilder(env):
"""Called if necessary just before the building targets phase begins."""
action = SCons.Action.Action(_createConfigH,
_stringConfigH)
sconfigHBld = SCons.Builder.Builder(action=action)
env.Append( BUILDERS={'SConfigHBuilder':sconfigHBld} )
for k in list(_ac_config_hs.keys()):
env.SConfigHBuilder(k, env.Value(_ac_config_hs[k]))
|
python
|
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConf.py#L128-L135
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConf.py
|
CheckHeader
|
def CheckHeader(context, header, include_quotes = '<>', language = None):
"""
A test for a C or C++ header file.
"""
prog_prefix, hdr_to_check = \
createIncludesFromHeaders(header, 1, include_quotes)
res = SCons.Conftest.CheckHeader(context, hdr_to_check, prog_prefix,
language = language,
include_quotes = include_quotes)
context.did_show_result = 1
return not res
|
python
|
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConf.py#L944-L954
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConf.py
|
CheckLib
|
def CheckLib(context, library = None, symbol = "main",
header = None, language = None, autoadd = 1):
"""
A test for a library. See also CheckLibWithHeader.
Note that library may also be None to test whether the given symbol
compiles without flags.
"""
if library == []:
library = [None]
if not SCons.Util.is_List(library):
library = [library]
# ToDo: accept path for the library
res = SCons.Conftest.CheckLib(context, library, symbol, header = header,
language = language, autoadd = autoadd)
context.did_show_result = 1
return not res
|
python
|
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConf.py#L994-L1012
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConf.py
|
CheckProg
|
def CheckProg(context, prog_name):
"""Simple check if a program exists in the path. Returns the path
for the application, or None if not found.
"""
res = SCons.Conftest.CheckProg(context, prog_name)
context.did_show_result = 1
return res
|
python
|
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConf.py#L1040-L1046
|
train
|
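CheckHeader, CheckLib and CheckProg are normally reached as methods of a Configure context inside an SConstruct rather than called directly with a context argument. A hedged SConstruct sketch:

```python
# SConstruct (sketch) -- Environment and Configure are provided by SCons.
env = Environment()
conf = Configure(env)

if not conf.CheckHeader('math.h'):
    print('math.h is missing')

if not conf.CheckLib('m', symbol='cos'):
    print('libm with cos() is missing')

gcc_path = conf.CheckProg('gcc')   # returns the program path, or None if not found
if gcc_path:
    print('gcc found at', gcc_path)

env = conf.Finish()
```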
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConf.py
|
SConfBuildTask.display_cached_string
|
def display_cached_string(self, bi):
"""
Logs the original builder messages, given the SConfBuildInfo instance
bi.
"""
if not isinstance(bi, SConfBuildInfo):
SCons.Warnings.warn(SConfWarning,
"The stored build information has an unexpected class: %s" % bi.__class__)
else:
self.display("The original builder output was:\n" +
(" |" + str(bi.string)).replace("\n", "\n |"))
|
python
|
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConf.py#L231-L241
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConf.py
|
SConfBase.Define
|
def Define(self, name, value = None, comment = None):
"""
Define a preprocessor symbol name, with the optional given value, in the
current config header.
If value is None (default), then #define name is written. If value is not
None, then #define name value is written.
comment is a string which will be put as a C comment in the header, to explain the meaning of the value
(appropriate C comments will be added automatically).
"""
lines = []
if comment:
comment_str = "/* %s */" % comment
lines.append(comment_str)
if value is not None:
define_str = "#define %s %s" % (name, value)
else:
define_str = "#define %s" % name
lines.append(define_str)
lines.append('')
self.config_h_text = self.config_h_text + '\n'.join(lines)
|
python
|
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConf.py#L453-L476
|
train
|
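What Define() appends to the generated config header follows directly from the string formatting above; a sketch via a Configure context with a config header enabled:

```python
# Inside an SConstruct (sketch).
env = Environment()
conf = Configure(env, config_h='config.h')

conf.Define('WITH_FOO')                                  # -> #define WITH_FOO
conf.Define('FOO_VERSION', '"1.2.3"')                    # -> #define FOO_VERSION "1.2.3"
conf.Define('MAX_RETRIES', 5, 'Upper bound on retries')
# -> /* Upper bound on retries */
#    #define MAX_RETRIES 5

env = conf.Finish()
```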
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConf.py
|
SConfBase.BuildNodes
|
def BuildNodes(self, nodes):
"""
Tries to build the given nodes immediately. Returns 1 on success,
0 on error.
"""
if self.logstream is not None:
# override stdout / stderr to write in log file
oldStdout = sys.stdout
sys.stdout = self.logstream
oldStderr = sys.stderr
sys.stderr = self.logstream
# the engine assumes the current path is the SConstruct directory ...
old_fs_dir = SConfFS.getcwd()
old_os_dir = os.getcwd()
SConfFS.chdir(SConfFS.Top, change_os_dir=1)
# Because we take responsibility here for writing out our
# own .sconsign info (see SConfBuildTask.execute(), above),
# we override the store_info() method with a null place-holder
# so we really control how it gets written.
for n in nodes:
n.store_info = 0
if not hasattr(n, 'attributes'):
n.attributes = SCons.Node.Node.Attrs()
n.attributes.keep_targetinfo = 1
ret = 1
try:
# ToDo: use user options for calc
save_max_drift = SConfFS.get_max_drift()
SConfFS.set_max_drift(0)
tm = SCons.Taskmaster.Taskmaster(nodes, SConfBuildTask)
# we don't want to build tests in parallel
jobs = SCons.Job.Jobs(1, tm )
jobs.run()
for n in nodes:
state = n.get_state()
if (state != SCons.Node.executed and
state != SCons.Node.up_to_date):
# the node could not be built. we return 0 in this case
ret = 0
finally:
SConfFS.set_max_drift(save_max_drift)
os.chdir(old_os_dir)
SConfFS.chdir(old_fs_dir, change_os_dir=0)
if self.logstream is not None:
# restore stdout / stderr
sys.stdout = oldStdout
sys.stderr = oldStderr
return ret
|
python
|
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConf.py#L478-L529
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConf.py
|
SConfBase.pspawn_wrapper
|
def pspawn_wrapper(self, sh, escape, cmd, args, env):
"""Wrapper function for handling piped spawns.
This looks to the calling interface (in Action.py) like a "normal"
spawn, but associates the call with the PSPAWN variable from
the construction environment and with the streams to which we
want the output logged. This gets slid into the construction
environment as the SPAWN variable so Action.py doesn't have to
know or care whether it's spawning a piped command or not.
"""
return self.pspawn(sh, escape, cmd, args, env, self.logstream, self.logstream)
|
python
|
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConf.py#L531-L541
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConf.py
|
SConfBase._startup
|
def _startup(self):
"""Private method. Set up logstream, and set the environment
variables necessary for a piped build
"""
global _ac_config_logs
global sconf_global
global SConfFS
self.lastEnvFs = self.env.fs
self.env.fs = SConfFS
self._createDir(self.confdir)
self.confdir.up().add_ignore( [self.confdir] )
if self.logfile is not None and not dryrun:
# truncate logfile, if SConf.Configure is called for the first time
# in a build
if self.logfile in _ac_config_logs:
log_mode = "a"
else:
_ac_config_logs[self.logfile] = None
log_mode = "w"
fp = open(str(self.logfile), log_mode)
self.logstream = SCons.Util.Unbuffered(fp)
# logfile may stay in a build directory, so we tell
# the build system not to override it with an eventually
# existing file with the same name in the source directory
self.logfile.dir.add_ignore( [self.logfile] )
tb = traceback.extract_stack()[-3-self.depth]
old_fs_dir = SConfFS.getcwd()
SConfFS.chdir(SConfFS.Top, change_os_dir=0)
self.logstream.write('file %s,line %d:\n\tConfigure(confdir = %s)\n' %
(tb[0], tb[1], str(self.confdir)) )
SConfFS.chdir(old_fs_dir)
else:
self.logstream = None
# we use a special builder to create source files from TEXT
action = SCons.Action.Action(_createSource,
_stringSource)
sconfSrcBld = SCons.Builder.Builder(action=action)
self.env.Append( BUILDERS={'SConfSourceBuilder':sconfSrcBld} )
self.config_h_text = _ac_config_hs.get(self.config_h, "")
self.active = 1
# only one SConf instance should be active at a time ...
sconf_global = self
|
python
|
def _startup(self):
"""Private method. Set up logstream, and set the environment
variables necessary for a piped build
"""
global _ac_config_logs
global sconf_global
global SConfFS
self.lastEnvFs = self.env.fs
self.env.fs = SConfFS
self._createDir(self.confdir)
self.confdir.up().add_ignore( [self.confdir] )
if self.logfile is not None and not dryrun:
# truncate logfile, if SConf.Configure is called for the first time
# in a build
if self.logfile in _ac_config_logs:
log_mode = "a"
else:
_ac_config_logs[self.logfile] = None
log_mode = "w"
fp = open(str(self.logfile), log_mode)
self.logstream = SCons.Util.Unbuffered(fp)
# logfile may stay in a build directory, so we tell
# the build system not to override it with an eventually
# existing file with the same name in the source directory
self.logfile.dir.add_ignore( [self.logfile] )
tb = traceback.extract_stack()[-3-self.depth]
old_fs_dir = SConfFS.getcwd()
SConfFS.chdir(SConfFS.Top, change_os_dir=0)
self.logstream.write('file %s,line %d:\n\tConfigure(confdir = %s)\n' %
(tb[0], tb[1], str(self.confdir)) )
SConfFS.chdir(old_fs_dir)
else:
self.logstream = None
# we use a special builder to create source files from TEXT
action = SCons.Action.Action(_createSource,
_stringSource)
sconfSrcBld = SCons.Builder.Builder(action=action)
self.env.Append( BUILDERS={'SConfSourceBuilder':sconfSrcBld} )
self.config_h_text = _ac_config_hs.get(self.config_h, "")
self.active = 1
# only one SConf instance should be active at a time ...
sconf_global = self
|
[
"def",
"_startup",
"(",
"self",
")",
":",
"global",
"_ac_config_logs",
"global",
"sconf_global",
"global",
"SConfFS",
"self",
".",
"lastEnvFs",
"=",
"self",
".",
"env",
".",
"fs",
"self",
".",
"env",
".",
"fs",
"=",
"SConfFS",
"self",
".",
"_createDir",
"(",
"self",
".",
"confdir",
")",
"self",
".",
"confdir",
".",
"up",
"(",
")",
".",
"add_ignore",
"(",
"[",
"self",
".",
"confdir",
"]",
")",
"if",
"self",
".",
"logfile",
"is",
"not",
"None",
"and",
"not",
"dryrun",
":",
"# truncate logfile, if SConf.Configure is called for the first time",
"# in a build",
"if",
"self",
".",
"logfile",
"in",
"_ac_config_logs",
":",
"log_mode",
"=",
"\"a\"",
"else",
":",
"_ac_config_logs",
"[",
"self",
".",
"logfile",
"]",
"=",
"None",
"log_mode",
"=",
"\"w\"",
"fp",
"=",
"open",
"(",
"str",
"(",
"self",
".",
"logfile",
")",
",",
"log_mode",
")",
"self",
".",
"logstream",
"=",
"SCons",
".",
"Util",
".",
"Unbuffered",
"(",
"fp",
")",
"# logfile may stay in a build directory, so we tell",
"# the build system not to override it with a eventually",
"# existing file with the same name in the source directory",
"self",
".",
"logfile",
".",
"dir",
".",
"add_ignore",
"(",
"[",
"self",
".",
"logfile",
"]",
")",
"tb",
"=",
"traceback",
".",
"extract_stack",
"(",
")",
"[",
"-",
"3",
"-",
"self",
".",
"depth",
"]",
"old_fs_dir",
"=",
"SConfFS",
".",
"getcwd",
"(",
")",
"SConfFS",
".",
"chdir",
"(",
"SConfFS",
".",
"Top",
",",
"change_os_dir",
"=",
"0",
")",
"self",
".",
"logstream",
".",
"write",
"(",
"'file %s,line %d:\\n\\tConfigure(confdir = %s)\\n'",
"%",
"(",
"tb",
"[",
"0",
"]",
",",
"tb",
"[",
"1",
"]",
",",
"str",
"(",
"self",
".",
"confdir",
")",
")",
")",
"SConfFS",
".",
"chdir",
"(",
"old_fs_dir",
")",
"else",
":",
"self",
".",
"logstream",
"=",
"None",
"# we use a special builder to create source files from TEXT",
"action",
"=",
"SCons",
".",
"Action",
".",
"Action",
"(",
"_createSource",
",",
"_stringSource",
")",
"sconfSrcBld",
"=",
"SCons",
".",
"Builder",
".",
"Builder",
"(",
"action",
"=",
"action",
")",
"self",
".",
"env",
".",
"Append",
"(",
"BUILDERS",
"=",
"{",
"'SConfSourceBuilder'",
":",
"sconfSrcBld",
"}",
")",
"self",
".",
"config_h_text",
"=",
"_ac_config_hs",
".",
"get",
"(",
"self",
".",
"config_h",
",",
"\"\"",
")",
"self",
".",
"active",
"=",
"1",
"# only one SConf instance should be active at a time ...",
"sconf_global",
"=",
"self"
] |
Private method. Set up logstream, and set the environment
variables necessary for a piped build
|
[
"Private",
"method",
".",
"Set",
"up",
"logstream",
"and",
"set",
"the",
"environment",
"variables",
"necessary",
"for",
"a",
"piped",
"build"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConf.py#L685-L729
|
train
|
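The _ac_config_logs bookkeeping above implements a simple first-use truncation policy: the first Configure() call in a build opens its logfile with mode "w", while later calls append with "a". A minimal standalone sketch of that pattern; the helper name open_config_log is illustrative only.

_seen_logs = {}

def open_config_log(path):
    # Truncate on the first open of a given logfile in this run, append afterwards,
    # mirroring the _ac_config_logs dictionary used by SConfBase._startup above.
    mode = "a" if path in _seen_logs else "w"
    _seen_logs[path] = None
    return open(path, mode)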
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConf.py
|
SConfBase._shutdown
|
def _shutdown(self):
"""Private method. Reset to non-piped spawn"""
global sconf_global, _ac_config_hs
if not self.active:
raise SCons.Errors.UserError("Finish may be called only once!")
if self.logstream is not None and not dryrun:
self.logstream.write("\n")
self.logstream.close()
self.logstream = None
# remove the SConfSourceBuilder from the environment
blds = self.env['BUILDERS']
del blds['SConfSourceBuilder']
self.env.Replace( BUILDERS=blds )
self.active = 0
sconf_global = None
if not self.config_h is None:
_ac_config_hs[self.config_h] = self.config_h_text
self.env.fs = self.lastEnvFs
|
python
|
def _shutdown(self):
"""Private method. Reset to non-piped spawn"""
global sconf_global, _ac_config_hs
if not self.active:
raise SCons.Errors.UserError("Finish may be called only once!")
if self.logstream is not None and not dryrun:
self.logstream.write("\n")
self.logstream.close()
self.logstream = None
# remove the SConfSourceBuilder from the environment
blds = self.env['BUILDERS']
del blds['SConfSourceBuilder']
self.env.Replace( BUILDERS=blds )
self.active = 0
sconf_global = None
if not self.config_h is None:
_ac_config_hs[self.config_h] = self.config_h_text
self.env.fs = self.lastEnvFs
|
[
"def",
"_shutdown",
"(",
"self",
")",
":",
"global",
"sconf_global",
",",
"_ac_config_hs",
"if",
"not",
"self",
".",
"active",
":",
"raise",
"SCons",
".",
"Errors",
".",
"UserError",
"(",
"\"Finish may be called only once!\"",
")",
"if",
"self",
".",
"logstream",
"is",
"not",
"None",
"and",
"not",
"dryrun",
":",
"self",
".",
"logstream",
".",
"write",
"(",
"\"\\n\"",
")",
"self",
".",
"logstream",
".",
"close",
"(",
")",
"self",
".",
"logstream",
"=",
"None",
"# remove the SConfSourceBuilder from the environment",
"blds",
"=",
"self",
".",
"env",
"[",
"'BUILDERS'",
"]",
"del",
"blds",
"[",
"'SConfSourceBuilder'",
"]",
"self",
".",
"env",
".",
"Replace",
"(",
"BUILDERS",
"=",
"blds",
")",
"self",
".",
"active",
"=",
"0",
"sconf_global",
"=",
"None",
"if",
"not",
"self",
".",
"config_h",
"is",
"None",
":",
"_ac_config_hs",
"[",
"self",
".",
"config_h",
"]",
"=",
"self",
".",
"config_h_text",
"self",
".",
"env",
".",
"fs",
"=",
"self",
".",
"lastEnvFs"
] |
Private method. Reset to non-piped spawn
|
[
"Private",
"method",
".",
"Reset",
"to",
"non",
"-",
"piped",
"spawn"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConf.py#L731-L749
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConf.py
|
CheckContext.Result
|
def Result(self, res):
"""Inform about the result of the test. If res is not a string, displays
'yes' or 'no' depending on whether res is evaluated as true or false.
The result is only displayed when self.did_show_result is not set.
"""
if isinstance(res, str):
text = res
elif res:
text = "yes"
else:
text = "no"
if self.did_show_result == 0:
# Didn't show result yet, do it now.
self.Display(text + "\n")
self.did_show_result = 1
|
python
|
def Result(self, res):
"""Inform about the result of the test. If res is not a string, displays
'yes' or 'no' depending on whether res is evaluated as true or false.
The result is only displayed when self.did_show_result is not set.
"""
if isinstance(res, str):
text = res
elif res:
text = "yes"
else:
text = "no"
if self.did_show_result == 0:
# Didn't show result yet, do it now.
self.Display(text + "\n")
self.did_show_result = 1
|
[
"def",
"Result",
"(",
"self",
",",
"res",
")",
":",
"if",
"isinstance",
"(",
"res",
",",
"str",
")",
":",
"text",
"=",
"res",
"elif",
"res",
":",
"text",
"=",
"\"yes\"",
"else",
":",
"text",
"=",
"\"no\"",
"if",
"self",
".",
"did_show_result",
"==",
"0",
":",
"# Didn't show result yet, do it now.",
"self",
".",
"Display",
"(",
"text",
"+",
"\"\\n\"",
")",
"self",
".",
"did_show_result",
"=",
"1"
] |
Inform about the result of the test. If res is not a string, displays
'yes' or 'no' depending on whether res is evaluated as true or false.
The result is only displayed when self.did_show_result is not set.
|
[
"Inform",
"about",
"the",
"result",
"of",
"the",
"test",
".",
"If",
"res",
"is",
"not",
"a",
"string",
"displays",
"yes",
"or",
"no",
"depending",
"on",
"whether",
"res",
"is",
"evaluated",
"as",
"true",
"or",
"false",
".",
"The",
"result",
"is",
"only",
"displayed",
"when",
"self",
".",
"did_show_result",
"is",
"not",
"set",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConf.py#L795-L810
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/intelc.py
|
linux_ver_normalize
|
def linux_ver_normalize(vstr):
"""Normalize a Linux compiler version number.
Intel changed from "80" to "9.0" in 2005, so we assume if the number
is greater than 60 it's an old-style number and otherwise new-style.
Always returns an old-style float like 80 or 90 for compatibility with Windows.
Shades of Y2K!"""
# Check for version number like 9.1.026: return 91.026
# XXX needs to be updated for 2011+ versions (like 2011.11.344 which is compiler v12.1.5)
m = re.match(r'([0-9]+)\.([0-9]+)\.([0-9]+)', vstr)
if m:
vmaj,vmin,build = m.groups()
return float(vmaj) * 10. + float(vmin) + float(build) / 1000.;
else:
f = float(vstr)
if is_windows:
return f
else:
if f < 60: return f * 10.0
else: return f
|
python
|
def linux_ver_normalize(vstr):
"""Normalize a Linux compiler version number.
Intel changed from "80" to "9.0" in 2005, so we assume if the number
is greater than 60 it's an old-style number and otherwise new-style.
Always returns an old-style float like 80 or 90 for compatibility with Windows.
Shades of Y2K!"""
# Check for version number like 9.1.026: return 91.026
# XXX needs to be updated for 2011+ versions (like 2011.11.344 which is compiler v12.1.5)
m = re.match(r'([0-9]+)\.([0-9]+)\.([0-9]+)', vstr)
if m:
vmaj,vmin,build = m.groups()
return float(vmaj) * 10. + float(vmin) + float(build) / 1000.;
else:
f = float(vstr)
if is_windows:
return f
else:
if f < 60: return f * 10.0
else: return f
|
[
"def",
"linux_ver_normalize",
"(",
"vstr",
")",
":",
"# Check for version number like 9.1.026: return 91.026",
"# XXX needs to be updated for 2011+ versions (like 2011.11.344 which is compiler v12.1.5)",
"m",
"=",
"re",
".",
"match",
"(",
"r'([0-9]+)\\.([0-9]+)\\.([0-9]+)'",
",",
"vstr",
")",
"if",
"m",
":",
"vmaj",
",",
"vmin",
",",
"build",
"=",
"m",
".",
"groups",
"(",
")",
"return",
"float",
"(",
"vmaj",
")",
"*",
"10.",
"+",
"float",
"(",
"vmin",
")",
"+",
"float",
"(",
"build",
")",
"/",
"1000.",
"else",
":",
"f",
"=",
"float",
"(",
"vstr",
")",
"if",
"is_windows",
":",
"return",
"f",
"else",
":",
"if",
"f",
"<",
"60",
":",
"return",
"f",
"*",
"10.0",
"else",
":",
"return",
"f"
] |
Normalize a Linux compiler version number.
Intel changed from "80" to "9.0" in 2005, so we assume if the number
is greater than 60 it's an old-style number and otherwise new-style.
Always returns an old-style float like 80 or 90 for compatibility with Windows.
Shades of Y2K!
|
[
"Normalize",
"a",
"Linux",
"compiler",
"version",
"number",
".",
"Intel",
"changed",
"from",
"80",
"to",
"9",
".",
"0",
"in",
"2005",
"so",
"we",
"assume",
"if",
"the",
"number",
"is",
"greater",
"than",
"60",
"it",
"s",
"an",
"old",
"-",
"style",
"number",
"and",
"otherwise",
"new",
"-",
"style",
".",
"Always",
"returns",
"an",
"old",
"-",
"style",
"float",
"like",
"80",
"or",
"90",
"for",
"compatibility",
"with",
"Windows",
".",
"Shades",
"of",
"Y2K!"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/intelc.py#L64-L82
|
train
|
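A small standalone restatement of the normalization rule documented above, useful as a sanity check. The name _normalize is illustrative and the non-Windows branch is assumed (is_windows=False by default).

import re

def _normalize(vstr, is_windows=False):
    # Dotted versions like "9.1.026" become 91.026 (major*10 + minor + build/1000).
    m = re.match(r'([0-9]+)\.([0-9]+)\.([0-9]+)', vstr)
    if m:
        vmaj, vmin, build = m.groups()
        return float(vmaj) * 10. + float(vmin) + float(build) / 1000.
    f = float(vstr)
    if is_windows:
        return f
    # New-style numbers (< 60) are scaled up; old-style numbers pass through.
    return f * 10.0 if f < 60 else f

assert abs(_normalize("9.1.026") - 91.026) < 1e-9
assert _normalize("80") == 80.0   # old-style stays as-is
assert _normalize("9.0") == 90.0  # new-style scaled to old-style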
iotile/coretools
|
iotilesensorgraph/iotile/sg/node_descriptor.py
|
parse_node_descriptor
|
def parse_node_descriptor(desc, model):
"""Parse a string node descriptor.
The function creates an SGNode object without connecting its inputs and outputs
and returns a 3-tuple:
SGNode, [(input X, trigger X)], <processing function name>
Args:
desc (str): A description of the node to be created.
model (str): A device model for the node to be created that sets any
device specific limits on how the node is set up.
"""
try:
data = graph_node.parseString(desc)
except ParseException:
raise # TODO: Fix this to properly encapsulate the parse error
stream_desc = u' '.join(data['node'])
stream = DataStream.FromString(stream_desc)
node = SGNode(stream, model)
inputs = []
if 'input_a' in data:
input_a = data['input_a']
stream_a = DataStreamSelector.FromString(u' '.join(input_a['input_stream']))
trigger_a = None
if 'type' in input_a:
trigger_a = InputTrigger(input_a['type'], input_a['op'], int(input_a['reference'], 0))
inputs.append((stream_a, trigger_a))
if 'input_b' in data:
input_a = data['input_b']
stream_a = DataStreamSelector.FromString(u' '.join(input_a['input_stream']))
trigger_a = None
if 'type' in input_a:
trigger_a = InputTrigger(input_a['type'], input_a['op'], int(input_a['reference'], 0))
inputs.append((stream_a, trigger_a))
if 'combiner' in data and str(data['combiner']) == u'||':
node.trigger_combiner = SGNode.OrTriggerCombiner
else:
node.trigger_combiner = SGNode.AndTriggerCombiner
processing = data['processor']
return node, inputs, processing
|
python
|
def parse_node_descriptor(desc, model):
"""Parse a string node descriptor.
The function creates an SGNode object without connecting its inputs and outputs
and returns a 3-tuple:
SGNode, [(input X, trigger X)], <processing function name>
Args:
desc (str): A description of the node to be created.
model (str): A device model for the node to be created that sets any
device specific limits on how the node is set up.
"""
try:
data = graph_node.parseString(desc)
except ParseException:
raise # TODO: Fix this to properly encapsulate the parse error
stream_desc = u' '.join(data['node'])
stream = DataStream.FromString(stream_desc)
node = SGNode(stream, model)
inputs = []
if 'input_a' in data:
input_a = data['input_a']
stream_a = DataStreamSelector.FromString(u' '.join(input_a['input_stream']))
trigger_a = None
if 'type' in input_a:
trigger_a = InputTrigger(input_a['type'], input_a['op'], int(input_a['reference'], 0))
inputs.append((stream_a, trigger_a))
if 'input_b' in data:
input_a = data['input_b']
stream_a = DataStreamSelector.FromString(u' '.join(input_a['input_stream']))
trigger_a = None
if 'type' in input_a:
trigger_a = InputTrigger(input_a['type'], input_a['op'], int(input_a['reference'], 0))
inputs.append((stream_a, trigger_a))
if 'combiner' in data and str(data['combiner']) == u'||':
node.trigger_combiner = SGNode.OrTriggerCombiner
else:
node.trigger_combiner = SGNode.AndTriggerCombiner
processing = data['processor']
return node, inputs, processing
|
[
"def",
"parse_node_descriptor",
"(",
"desc",
",",
"model",
")",
":",
"try",
":",
"data",
"=",
"graph_node",
".",
"parseString",
"(",
"desc",
")",
"except",
"ParseException",
":",
"raise",
"# TODO: Fix this to properly encapsulate the parse error",
"stream_desc",
"=",
"u' '",
".",
"join",
"(",
"data",
"[",
"'node'",
"]",
")",
"stream",
"=",
"DataStream",
".",
"FromString",
"(",
"stream_desc",
")",
"node",
"=",
"SGNode",
"(",
"stream",
",",
"model",
")",
"inputs",
"=",
"[",
"]",
"if",
"'input_a'",
"in",
"data",
":",
"input_a",
"=",
"data",
"[",
"'input_a'",
"]",
"stream_a",
"=",
"DataStreamSelector",
".",
"FromString",
"(",
"u' '",
".",
"join",
"(",
"input_a",
"[",
"'input_stream'",
"]",
")",
")",
"trigger_a",
"=",
"None",
"if",
"'type'",
"in",
"input_a",
":",
"trigger_a",
"=",
"InputTrigger",
"(",
"input_a",
"[",
"'type'",
"]",
",",
"input_a",
"[",
"'op'",
"]",
",",
"int",
"(",
"input_a",
"[",
"'reference'",
"]",
",",
"0",
")",
")",
"inputs",
".",
"append",
"(",
"(",
"stream_a",
",",
"trigger_a",
")",
")",
"if",
"'input_b'",
"in",
"data",
":",
"input_a",
"=",
"data",
"[",
"'input_b'",
"]",
"stream_a",
"=",
"DataStreamSelector",
".",
"FromString",
"(",
"u' '",
".",
"join",
"(",
"input_a",
"[",
"'input_stream'",
"]",
")",
")",
"trigger_a",
"=",
"None",
"if",
"'type'",
"in",
"input_a",
":",
"trigger_a",
"=",
"InputTrigger",
"(",
"input_a",
"[",
"'type'",
"]",
",",
"input_a",
"[",
"'op'",
"]",
",",
"int",
"(",
"input_a",
"[",
"'reference'",
"]",
",",
"0",
")",
")",
"inputs",
".",
"append",
"(",
"(",
"stream_a",
",",
"trigger_a",
")",
")",
"if",
"'combiner'",
"in",
"data",
"and",
"str",
"(",
"data",
"[",
"'combiner'",
"]",
")",
"==",
"u'||'",
":",
"node",
".",
"trigger_combiner",
"=",
"SGNode",
".",
"OrTriggerCombiner",
"else",
":",
"node",
".",
"trigger_combiner",
"=",
"SGNode",
".",
"AndTriggerCombiner",
"processing",
"=",
"data",
"[",
"'processor'",
"]",
"return",
"node",
",",
"inputs",
",",
"processing"
] |
Parse a string node descriptor.
The function creates an SGNode object without connecting its inputs and outputs
and returns a 3-tuple:
SGNode, [(input X, trigger X)], <processing function name>
Args:
desc (str): A description of the node to be created.
model (str): A device model for the node to be created that sets any
device specific limits on how the node is set up.
|
[
"Parse",
"a",
"string",
"node",
"descriptor",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/node_descriptor.py#L32-L84
|
train
|
iotile/coretools
|
iotilesensorgraph/iotile/sg/node_descriptor.py
|
create_binary_descriptor
|
def create_binary_descriptor(descriptor):
"""Convert a string node descriptor into a 20-byte binary descriptor.
This is the inverse operation of parse_binary_descriptor and composing
the two operations is a noop.
Args:
descriptor (str): A string node descriptor
Returns:
bytes: A 20-byte binary node descriptor.
"""
func_names = {0: 'copy_latest_a', 1: 'average_a',
2: 'copy_all_a', 3: 'sum_a',
4: 'copy_count_a', 5: 'trigger_streamer',
6: 'call_rpc', 7: 'subtract_afromb'}
func_codes = {y: x for x, y in func_names.items()}
node, inputs, processing = parse_node_descriptor(descriptor, DeviceModel())
func_code = func_codes.get(processing)
if func_code is None:
raise ArgumentError("Unknown processing function", function=processing)
stream_a, trigger_a = inputs[0]
stream_a = stream_a.encode()
if len(inputs) == 2:
stream_b, trigger_b = inputs[1]
stream_b = stream_b.encode()
else:
stream_b, trigger_b = 0xFFFF, None
if trigger_a is None:
trigger_a = TrueTrigger()
if trigger_b is None:
trigger_b = TrueTrigger()
ref_a = 0
if isinstance(trigger_a, InputTrigger):
ref_a = trigger_a.reference
ref_b = 0
if isinstance(trigger_b, InputTrigger):
ref_b = trigger_b.reference
trigger_a = _create_binary_trigger(trigger_a)
trigger_b = _create_binary_trigger(trigger_b)
combiner = node.trigger_combiner
bin_desc = struct.pack("<LLHHHBBBB2x", ref_a, ref_b, node.stream.encode(), stream_a, stream_b, func_code, trigger_a, trigger_b, combiner)
return bin_desc
|
python
|
def create_binary_descriptor(descriptor):
"""Convert a string node descriptor into a 20-byte binary descriptor.
This is the inverse operation of parse_binary_descriptor and composing
the two operations is a noop.
Args:
descriptor (str): A string node descriptor
Returns:
bytes: A 20-byte binary node descriptor.
"""
func_names = {0: 'copy_latest_a', 1: 'average_a',
2: 'copy_all_a', 3: 'sum_a',
4: 'copy_count_a', 5: 'trigger_streamer',
6: 'call_rpc', 7: 'subtract_afromb'}
func_codes = {y: x for x, y in func_names.items()}
node, inputs, processing = parse_node_descriptor(descriptor, DeviceModel())
func_code = func_codes.get(processing)
if func_code is None:
raise ArgumentError("Unknown processing function", function=processing)
stream_a, trigger_a = inputs[0]
stream_a = stream_a.encode()
if len(inputs) == 2:
stream_b, trigger_b = inputs[1]
stream_b = stream_b.encode()
else:
stream_b, trigger_b = 0xFFFF, None
if trigger_a is None:
trigger_a = TrueTrigger()
if trigger_b is None:
trigger_b = TrueTrigger()
ref_a = 0
if isinstance(trigger_a, InputTrigger):
ref_a = trigger_a.reference
ref_b = 0
if isinstance(trigger_b, InputTrigger):
ref_b = trigger_b.reference
trigger_a = _create_binary_trigger(trigger_a)
trigger_b = _create_binary_trigger(trigger_b)
combiner = node.trigger_combiner
bin_desc = struct.pack("<LLHHHBBBB2x", ref_a, ref_b, node.stream.encode(), stream_a, stream_b, func_code, trigger_a, trigger_b, combiner)
return bin_desc
|
[
"def",
"create_binary_descriptor",
"(",
"descriptor",
")",
":",
"func_names",
"=",
"{",
"0",
":",
"'copy_latest_a'",
",",
"1",
":",
"'average_a'",
",",
"2",
":",
"'copy_all_a'",
",",
"3",
":",
"'sum_a'",
",",
"4",
":",
"'copy_count_a'",
",",
"5",
":",
"'trigger_streamer'",
",",
"6",
":",
"'call_rpc'",
",",
"7",
":",
"'subtract_afromb'",
"}",
"func_codes",
"=",
"{",
"y",
":",
"x",
"for",
"x",
",",
"y",
"in",
"func_names",
".",
"items",
"(",
")",
"}",
"node",
",",
"inputs",
",",
"processing",
"=",
"parse_node_descriptor",
"(",
"descriptor",
",",
"DeviceModel",
"(",
")",
")",
"func_code",
"=",
"func_codes",
".",
"get",
"(",
"processing",
")",
"if",
"func_code",
"is",
"None",
":",
"raise",
"ArgumentError",
"(",
"\"Unknown processing function\"",
",",
"function",
"=",
"processing",
")",
"stream_a",
",",
"trigger_a",
"=",
"inputs",
"[",
"0",
"]",
"stream_a",
"=",
"stream_a",
".",
"encode",
"(",
")",
"if",
"len",
"(",
"inputs",
")",
"==",
"2",
":",
"stream_b",
",",
"trigger_b",
"=",
"inputs",
"[",
"1",
"]",
"stream_b",
"=",
"stream_b",
".",
"encode",
"(",
")",
"else",
":",
"stream_b",
",",
"trigger_b",
"=",
"0xFFFF",
",",
"None",
"if",
"trigger_a",
"is",
"None",
":",
"trigger_a",
"=",
"TrueTrigger",
"(",
")",
"if",
"trigger_b",
"is",
"None",
":",
"trigger_b",
"=",
"TrueTrigger",
"(",
")",
"ref_a",
"=",
"0",
"if",
"isinstance",
"(",
"trigger_a",
",",
"InputTrigger",
")",
":",
"ref_a",
"=",
"trigger_a",
".",
"reference",
"ref_b",
"=",
"0",
"if",
"isinstance",
"(",
"trigger_b",
",",
"InputTrigger",
")",
":",
"ref_b",
"=",
"trigger_b",
".",
"reference",
"trigger_a",
"=",
"_create_binary_trigger",
"(",
"trigger_a",
")",
"trigger_b",
"=",
"_create_binary_trigger",
"(",
"trigger_b",
")",
"combiner",
"=",
"node",
".",
"trigger_combiner",
"bin_desc",
"=",
"struct",
".",
"pack",
"(",
"\"<LLHHHBBBB2x\"",
",",
"ref_a",
",",
"ref_b",
",",
"node",
".",
"stream",
".",
"encode",
"(",
")",
",",
"stream_a",
",",
"stream_b",
",",
"func_code",
",",
"trigger_a",
",",
"trigger_b",
",",
"combiner",
")",
"return",
"bin_desc"
] |
Convert a string node descriptor into a 20-byte binary descriptor.
This is the inverse operation of parse_binary_descriptor and composing
the two operations is a noop.
Args:
descriptor (str): A string node descriptor
Returns:
bytes: A 20-byte binary node descriptor.
|
[
"Convert",
"a",
"string",
"node",
"descriptor",
"into",
"a",
"20",
"-",
"byte",
"binary",
"descriptor",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/node_descriptor.py#L87-L142
|
train
|
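The "<LLHHHBBBB2x" format string used by create_binary_descriptor above fixes the 20-byte layout, which can be checked independently with the struct module. The packed values below are arbitrary placeholders, not real stream encodings.

import struct

# "<LLHHHBBBB2x": ref_a (4) + ref_b (4) + output stream (2) + input a (2) +
# input b (2) + processing function (1) + trigger a (1) + trigger b (1) +
# combiner (1) + 2 pad bytes = 20 bytes total.
FORMAT = "<LLHHHBBBB2x"
assert struct.calcsize(FORMAT) == 20

# Packing placeholder values round-trips through the same layout.
packed = struct.pack(FORMAT, 0, 0, 0x5001, 0x100A, 0xFFFF, 0, 11, 11, 0)
assert len(packed) == 20
assert struct.unpack(FORMAT, packed) == (0, 0, 0x5001, 0x100A, 0xFFFF, 0, 11, 11, 0)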
iotile/coretools
|
iotilesensorgraph/iotile/sg/node_descriptor.py
|
parse_binary_descriptor
|
def parse_binary_descriptor(bindata):
"""Convert a binary node descriptor into a string descriptor.
Binary node descriptors are 20-byte binary structures that encode all
information needed to create a graph node. They are used to communicate
that information to an embedded device in an efficient format. This
function exists to turn such a compressed node description back into
an understandable string.
Args:
bindata (bytes): The raw binary structure that contains the node
description.
Returns:
str: The corresponding string description of the same sensor_graph node
"""
func_names = {0: 'copy_latest_a', 1: 'average_a',
2: 'copy_all_a', 3: 'sum_a',
4: 'copy_count_a', 5: 'trigger_streamer',
6: 'call_rpc', 7: 'subtract_afromb'}
if len(bindata) != 20:
raise ArgumentError("Invalid binary node descriptor with incorrect size", size=len(bindata), expected=20, bindata=bindata)
a_trig, b_trig, stream_id, a_id, b_id, proc, a_cond, b_cond, trig_combiner = struct.unpack("<LLHHHBBBB2x", bindata)
node_stream = DataStream.FromEncoded(stream_id)
if a_id == 0xFFFF:
raise ArgumentError("Invalid binary node descriptor with invalid first input", input_selector=a_id)
a_selector = DataStreamSelector.FromEncoded(a_id)
a_trigger = _process_binary_trigger(a_trig, a_cond)
b_selector = None
b_trigger = None
if b_id != 0xFFFF:
b_selector = DataStreamSelector.FromEncoded(b_id)
b_trigger = _process_binary_trigger(b_trig, b_cond)
if trig_combiner == SGNode.AndTriggerCombiner:
comb = '&&'
elif trig_combiner == SGNode.OrTriggerCombiner:
comb = '||'
else:
raise ArgumentError("Invalid trigger combiner in binary node descriptor", combiner=trig_combiner)
if proc not in func_names:
raise ArgumentError("Unknown processing function", function_id=proc, known_functions=func_names)
func_name = func_names[proc]
# Handle one input nodes
if b_selector is None:
return '({} {}) => {} using {}'.format(a_selector, a_trigger, node_stream, func_name)
return '({} {} {} {} {}) => {} using {}'.format(a_selector, a_trigger, comb,
b_selector, b_trigger,
node_stream, func_name)
|
python
|
def parse_binary_descriptor(bindata):
"""Convert a binary node descriptor into a string descriptor.
Binary node descriptors are 20-byte binary structures that encode all
information needed to create a graph node. They are used to communicate
that information to an embedded device in an efficient format. This
function exists to turn such a compressed node description back into
an understandable string.
Args:
bindata (bytes): The raw binary structure that contains the node
description.
Returns:
str: The corresponding string description of the same sensor_graph node
"""
func_names = {0: 'copy_latest_a', 1: 'average_a',
2: 'copy_all_a', 3: 'sum_a',
4: 'copy_count_a', 5: 'trigger_streamer',
6: 'call_rpc', 7: 'subtract_afromb'}
if len(bindata) != 20:
raise ArgumentError("Invalid binary node descriptor with incorrect size", size=len(bindata), expected=20, bindata=bindata)
a_trig, b_trig, stream_id, a_id, b_id, proc, a_cond, b_cond, trig_combiner = struct.unpack("<LLHHHBBBB2x", bindata)
node_stream = DataStream.FromEncoded(stream_id)
if a_id == 0xFFFF:
raise ArgumentError("Invalid binary node descriptor with invalid first input", input_selector=a_id)
a_selector = DataStreamSelector.FromEncoded(a_id)
a_trigger = _process_binary_trigger(a_trig, a_cond)
b_selector = None
b_trigger = None
if b_id != 0xFFFF:
b_selector = DataStreamSelector.FromEncoded(b_id)
b_trigger = _process_binary_trigger(b_trig, b_cond)
if trig_combiner == SGNode.AndTriggerCombiner:
comb = '&&'
elif trig_combiner == SGNode.OrTriggerCombiner:
comb = '||'
else:
raise ArgumentError("Invalid trigger combiner in binary node descriptor", combiner=trig_combiner)
if proc not in func_names:
raise ArgumentError("Unknown processing function", function_id=proc, known_functions=func_names)
func_name = func_names[proc]
# Handle one input nodes
if b_selector is None:
return '({} {}) => {} using {}'.format(a_selector, a_trigger, node_stream, func_name)
return '({} {} {} {} {}) => {} using {}'.format(a_selector, a_trigger, comb,
b_selector, b_trigger,
node_stream, func_name)
|
[
"def",
"parse_binary_descriptor",
"(",
"bindata",
")",
":",
"func_names",
"=",
"{",
"0",
":",
"'copy_latest_a'",
",",
"1",
":",
"'average_a'",
",",
"2",
":",
"'copy_all_a'",
",",
"3",
":",
"'sum_a'",
",",
"4",
":",
"'copy_count_a'",
",",
"5",
":",
"'trigger_streamer'",
",",
"6",
":",
"'call_rpc'",
",",
"7",
":",
"'subtract_afromb'",
"}",
"if",
"len",
"(",
"bindata",
")",
"!=",
"20",
":",
"raise",
"ArgumentError",
"(",
"\"Invalid binary node descriptor with incorrect size\"",
",",
"size",
"=",
"len",
"(",
"bindata",
")",
",",
"expected",
"=",
"20",
",",
"bindata",
"=",
"bindata",
")",
"a_trig",
",",
"b_trig",
",",
"stream_id",
",",
"a_id",
",",
"b_id",
",",
"proc",
",",
"a_cond",
",",
"b_cond",
",",
"trig_combiner",
"=",
"struct",
".",
"unpack",
"(",
"\"<LLHHHBBBB2x\"",
",",
"bindata",
")",
"node_stream",
"=",
"DataStream",
".",
"FromEncoded",
"(",
"stream_id",
")",
"if",
"a_id",
"==",
"0xFFFF",
":",
"raise",
"ArgumentError",
"(",
"\"Invalid binary node descriptor with invalid first input\"",
",",
"input_selector",
"=",
"a_id",
")",
"a_selector",
"=",
"DataStreamSelector",
".",
"FromEncoded",
"(",
"a_id",
")",
"a_trigger",
"=",
"_process_binary_trigger",
"(",
"a_trig",
",",
"a_cond",
")",
"b_selector",
"=",
"None",
"b_trigger",
"=",
"None",
"if",
"b_id",
"!=",
"0xFFFF",
":",
"b_selector",
"=",
"DataStreamSelector",
".",
"FromEncoded",
"(",
"b_id",
")",
"b_trigger",
"=",
"_process_binary_trigger",
"(",
"b_trig",
",",
"b_cond",
")",
"if",
"trig_combiner",
"==",
"SGNode",
".",
"AndTriggerCombiner",
":",
"comb",
"=",
"'&&'",
"elif",
"trig_combiner",
"==",
"SGNode",
".",
"OrTriggerCombiner",
":",
"comb",
"=",
"'||'",
"else",
":",
"raise",
"ArgumentError",
"(",
"\"Invalid trigger combiner in binary node descriptor\"",
",",
"combiner",
"=",
"trig_combiner",
")",
"if",
"proc",
"not",
"in",
"func_names",
":",
"raise",
"ArgumentError",
"(",
"\"Unknown processing function\"",
",",
"function_id",
"=",
"proc",
",",
"known_functions",
"=",
"func_names",
")",
"func_name",
"=",
"func_names",
"[",
"proc",
"]",
"# Handle one input nodes",
"if",
"b_selector",
"is",
"None",
":",
"return",
"'({} {}) => {} using {}'",
".",
"format",
"(",
"a_selector",
",",
"a_trigger",
",",
"node_stream",
",",
"func_name",
")",
"return",
"'({} {} {} {} {}) => {} using {}'",
".",
"format",
"(",
"a_selector",
",",
"a_trigger",
",",
"comb",
",",
"b_selector",
",",
"b_trigger",
",",
"node_stream",
",",
"func_name",
")"
] |
Convert a binary node descriptor into a string descriptor.
Binary node descriptors are 20-byte binary structures that encode all
information needed to create a graph node. They are used to communicate
that information to an embedded device in an efficient format. This
function exists to turn such a compressed node description back into
an understandable string.
Args:
bindata (bytes): The raw binary structure that contains the node
description.
Returns:
str: The corresponding string description of the same sensor_graph node
|
[
"Convert",
"a",
"binary",
"node",
"descriptor",
"into",
"a",
"string",
"descriptor",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/node_descriptor.py#L145-L204
|
train
|
iotile/coretools
|
iotilesensorgraph/iotile/sg/node_descriptor.py
|
_process_binary_trigger
|
def _process_binary_trigger(trigger_value, condition):
"""Create an InputTrigger object."""
ops = {
0: ">",
1: "<",
2: ">=",
3: "<=",
4: "==",
5: 'always'
}
sources = {
0: 'value',
1: 'count'
}
encoded_source = condition & 0b1
encoded_op = condition >> 1
oper = ops.get(encoded_op, None)
source = sources.get(encoded_source, None)
if oper is None:
raise ArgumentError("Unknown operation in binary trigger", condition=condition, operation=encoded_op, known_ops=ops)
if source is None:
raise ArgumentError("Unknown value source in binary trigger", source=source, known_sources=sources)
if oper == 'always':
return TrueTrigger()
return InputTrigger(source, oper, trigger_value)
|
python
|
def _process_binary_trigger(trigger_value, condition):
"""Create an InputTrigger object."""
ops = {
0: ">",
1: "<",
2: ">=",
3: "<=",
4: "==",
5: 'always'
}
sources = {
0: 'value',
1: 'count'
}
encoded_source = condition & 0b1
encoded_op = condition >> 1
oper = ops.get(encoded_op, None)
source = sources.get(encoded_source, None)
if oper is None:
raise ArgumentError("Unknown operation in binary trigger", condition=condition, operation=encoded_op, known_ops=ops)
if source is None:
raise ArgumentError("Unknown value source in binary trigger", source=source, known_sources=sources)
if oper == 'always':
return TrueTrigger()
return InputTrigger(source, oper, trigger_value)
|
[
"def",
"_process_binary_trigger",
"(",
"trigger_value",
",",
"condition",
")",
":",
"ops",
"=",
"{",
"0",
":",
"\">\"",
",",
"1",
":",
"\"<\"",
",",
"2",
":",
"\">=\"",
",",
"3",
":",
"\"<=\"",
",",
"4",
":",
"\"==\"",
",",
"5",
":",
"'always'",
"}",
"sources",
"=",
"{",
"0",
":",
"'value'",
",",
"1",
":",
"'count'",
"}",
"encoded_source",
"=",
"condition",
"&",
"0b1",
"encoded_op",
"=",
"condition",
">>",
"1",
"oper",
"=",
"ops",
".",
"get",
"(",
"encoded_op",
",",
"None",
")",
"source",
"=",
"sources",
".",
"get",
"(",
"encoded_source",
",",
"None",
")",
"if",
"oper",
"is",
"None",
":",
"raise",
"ArgumentError",
"(",
"\"Unknown operation in binary trigger\"",
",",
"condition",
"=",
"condition",
",",
"operation",
"=",
"encoded_op",
",",
"known_ops",
"=",
"ops",
")",
"if",
"source",
"is",
"None",
":",
"raise",
"ArgumentError",
"(",
"\"Unknown value source in binary trigger\"",
",",
"source",
"=",
"source",
",",
"known_sources",
"=",
"sources",
")",
"if",
"oper",
"==",
"'always'",
":",
"return",
"TrueTrigger",
"(",
")",
"return",
"InputTrigger",
"(",
"source",
",",
"oper",
",",
"trigger_value",
")"
] |
Create an InputTrigger object.
|
[
"Create",
"an",
"InputTrigger",
"object",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/node_descriptor.py#L207-L238
|
train
|
iotile/coretools
|
iotilesensorgraph/iotile/sg/node_descriptor.py
|
_create_binary_trigger
|
def _create_binary_trigger(trigger):
"""Create an 8-bit binary trigger from an InputTrigger, TrueTrigger, FalseTrigger."""
ops = {
0: ">",
1: "<",
2: ">=",
3: "<=",
4: "==",
5: 'always'
}
op_codes = {y: x for x, y in ops.items()}
source = 0
if isinstance(trigger, TrueTrigger):
op_code = op_codes['always']
elif isinstance(trigger, FalseTrigger):
raise ArgumentError("Cannot express a never trigger in binary descriptor", trigger=trigger)
else:
op_code = op_codes[trigger.comp_string]
if trigger.use_count:
source = 1
return (op_code << 1) | source
|
python
|
def _create_binary_trigger(trigger):
"""Create an 8-bit binary trigger from an InputTrigger, TrueTrigger, FalseTrigger."""
ops = {
0: ">",
1: "<",
2: ">=",
3: "<=",
4: "==",
5: 'always'
}
op_codes = {y: x for x, y in ops.items()}
source = 0
if isinstance(trigger, TrueTrigger):
op_code = op_codes['always']
elif isinstance(trigger, FalseTrigger):
raise ArgumentError("Cannot express a never trigger in binary descriptor", trigger=trigger)
else:
op_code = op_codes[trigger.comp_string]
if trigger.use_count:
source = 1
return (op_code << 1) | source
|
[
"def",
"_create_binary_trigger",
"(",
"trigger",
")",
":",
"ops",
"=",
"{",
"0",
":",
"\">\"",
",",
"1",
":",
"\"<\"",
",",
"2",
":",
"\">=\"",
",",
"3",
":",
"\"<=\"",
",",
"4",
":",
"\"==\"",
",",
"5",
":",
"'always'",
"}",
"op_codes",
"=",
"{",
"y",
":",
"x",
"for",
"x",
",",
"y",
"in",
"ops",
".",
"items",
"(",
")",
"}",
"source",
"=",
"0",
"if",
"isinstance",
"(",
"trigger",
",",
"TrueTrigger",
")",
":",
"op_code",
"=",
"op_codes",
"[",
"'always'",
"]",
"elif",
"isinstance",
"(",
"trigger",
",",
"FalseTrigger",
")",
":",
"raise",
"ArgumentError",
"(",
"\"Cannot express a never trigger in binary descriptor\"",
",",
"trigger",
"=",
"trigger",
")",
"else",
":",
"op_code",
"=",
"op_codes",
"[",
"trigger",
".",
"comp_string",
"]",
"if",
"trigger",
".",
"use_count",
":",
"source",
"=",
"1",
"return",
"(",
"op_code",
"<<",
"1",
")",
"|",
"source"
] |
Create an 8-bit binary trigger from an InputTrigger, TrueTrigger, FalseTrigger.
|
[
"Create",
"an",
"8",
"-",
"bit",
"binary",
"trigger",
"from",
"an",
"InputTrigger",
"TrueTrigger",
"FalseTrigger",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/node_descriptor.py#L241-L266
|
train
|
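A self-contained sketch of the condition byte handled by the two trigger helpers above: bit 0 selects the value source and the remaining bits select the comparison operation. The encode/decode helper names below are illustrative.

OPS = {0: ">", 1: "<", 2: ">=", 3: "<=", 4: "==", 5: "always"}
SOURCES = {0: "value", 1: "count"}

def encode_condition(op_code, source_code):
    # Mirrors _create_binary_trigger: operation in the high bits, source in bit 0.
    return (op_code << 1) | source_code

def decode_condition(condition):
    # Mirrors _process_binary_trigger: low bit is the source, the rest is the op.
    return OPS[condition >> 1], SOURCES[condition & 0b1]

assert decode_condition(encode_condition(2, 1)) == (">=", "count")
assert decode_condition(encode_condition(5, 0)) == ("always", "value")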
iotile/coretools
|
iotilecore/iotile/core/hw/reports/report.py
|
IOTileReading._try_assign_utc_time
|
def _try_assign_utc_time(self, raw_time, time_base):
"""Try to assign a UTC time to this reading."""
# Check if the raw time is encoded UTC since y2k or just uptime
if raw_time != IOTileEvent.InvalidRawTime and (raw_time & (1 << 31)):
y2k_offset = self.raw_time ^ (1 << 31)
return self._Y2KReference + datetime.timedelta(seconds=y2k_offset)
if time_base is not None:
return time_base + datetime.timedelta(seconds=raw_time)
return None
|
python
|
def _try_assign_utc_time(self, raw_time, time_base):
"""Try to assign a UTC time to this reading."""
# Check if the raw time is encoded UTC since y2k or just uptime
if raw_time != IOTileEvent.InvalidRawTime and (raw_time & (1 << 31)):
y2k_offset = self.raw_time ^ (1 << 31)
return self._Y2KReference + datetime.timedelta(seconds=y2k_offset)
if time_base is not None:
return time_base + datetime.timedelta(seconds=raw_time)
return None
|
[
"def",
"_try_assign_utc_time",
"(",
"self",
",",
"raw_time",
",",
"time_base",
")",
":",
"# Check if the raw time is encoded UTC since y2k or just uptime",
"if",
"raw_time",
"!=",
"IOTileEvent",
".",
"InvalidRawTime",
"and",
"(",
"raw_time",
"&",
"(",
"1",
"<<",
"31",
")",
")",
":",
"y2k_offset",
"=",
"self",
".",
"raw_time",
"^",
"(",
"1",
"<<",
"31",
")",
"return",
"self",
".",
"_Y2KReference",
"+",
"datetime",
".",
"timedelta",
"(",
"seconds",
"=",
"y2k_offset",
")",
"if",
"time_base",
"is",
"not",
"None",
":",
"return",
"time_base",
"+",
"datetime",
".",
"timedelta",
"(",
"seconds",
"=",
"raw_time",
")",
"return",
"None"
] |
Try to assign a UTC time to this reading.
|
[
"Try",
"to",
"assign",
"a",
"UTC",
"time",
"to",
"this",
"reading",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/reports/report.py#L52-L63
|
train
|
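A hedged sketch of the raw-time decoding performed by _try_assign_utc_time above: when bit 31 is set, the low 31 bits are read as seconds since the year-2000 reference, otherwise the value is an uptime offset added to a caller-supplied base. The 2000-01-01 UTC epoch and the function name raw_time_to_utc are assumptions for illustration, and the InvalidRawTime guard from the real method is omitted.

import datetime

Y2K = datetime.datetime(2000, 1, 1)  # assumed reference epoch for _Y2KReference

def raw_time_to_utc(raw_time, time_base=None):
    if raw_time & (1 << 31):
        # High bit set: the remaining 31 bits are seconds since the y2k reference.
        return Y2K + datetime.timedelta(seconds=raw_time ^ (1 << 31))
    if time_base is not None:
        # Otherwise the raw time is an uptime offset from a known base time.
        return time_base + datetime.timedelta(seconds=raw_time)
    return None

# 600,000,000 seconds after 2000-01-01, tagged with the high bit.
print(raw_time_to_utc((1 << 31) | 600000000))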
iotile/coretools
|
iotilecore/iotile/core/hw/reports/report.py
|
IOTileReading.asdict
|
def asdict(self):
"""Encode the data in this reading into a dictionary.
Returns:
dict: A dictionary containing the information from this reading.
"""
timestamp_str = None
if self.reading_time is not None:
timestamp_str = self.reading_time.isoformat()
return {
'stream': self.stream,
'device_timestamp': self.raw_time,
'streamer_local_id': self.reading_id,
'timestamp': timestamp_str,
'value': self.value
}
|
python
|
def asdict(self):
"""Encode the data in this reading into a dictionary.
Returns:
dict: A dictionary containing the information from this reading.
"""
timestamp_str = None
if self.reading_time is not None:
timestamp_str = self.reading_time.isoformat()
return {
'stream': self.stream,
'device_timestamp': self.raw_time,
'streamer_local_id': self.reading_id,
'timestamp': timestamp_str,
'value': self.value
}
|
[
"def",
"asdict",
"(",
"self",
")",
":",
"timestamp_str",
"=",
"None",
"if",
"self",
".",
"reading_time",
"is",
"not",
"None",
":",
"timestamp_str",
"=",
"self",
".",
"reading_time",
".",
"isoformat",
"(",
")",
"return",
"{",
"'stream'",
":",
"self",
".",
"stream",
",",
"'device_timestamp'",
":",
"self",
".",
"raw_time",
",",
"'streamer_local_id'",
":",
"self",
".",
"reading_id",
",",
"'timestamp'",
":",
"timestamp_str",
",",
"'value'",
":",
"self",
".",
"value",
"}"
] |
Encode the data in this reading into a dictionary.
Returns:
dict: A dictionary containing the information from this reading.
|
[
"Encode",
"the",
"data",
"in",
"this",
"reading",
"into",
"a",
"dictionary",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/reports/report.py#L65-L82
|
train
|
iotile/coretools
|
iotilecore/iotile/core/hw/reports/report.py
|
IOTileEvent.asdict
|
def asdict(self):
"""Encode the data in this event into a dictionary.
The dictionary returned from this method is a reference to the data
stored in the IOTileEvent, not a copy. It should be treated as read
only.
Returns:
dict: A dictionary containing the information from this event.
"""
return {
'stream': self.stream,
'device_timestamp': self.raw_time,
'streamer_local_id': self.reading_id,
'timestamp': self.reading_time,
'extra_data': self.summary_data,
'data': self.raw_data
}
|
python
|
def asdict(self):
"""Encode the data in this event into a dictionary.
The dictionary returned from this method is a reference to the data
stored in the IOTileEvent, not a copy. It should be treated as read
only.
Returns:
dict: A dictionary containing the information from this event.
"""
return {
'stream': self.stream,
'device_timestamp': self.raw_time,
'streamer_local_id': self.reading_id,
'timestamp': self.reading_time,
'extra_data': self.summary_data,
'data': self.raw_data
}
|
[
"def",
"asdict",
"(",
"self",
")",
":",
"return",
"{",
"'stream'",
":",
"self",
".",
"stream",
",",
"'device_timestamp'",
":",
"self",
".",
"raw_time",
",",
"'streamer_local_id'",
":",
"self",
".",
"reading_id",
",",
"'timestamp'",
":",
"self",
".",
"reading_time",
",",
"'extra_data'",
":",
"self",
".",
"summary_data",
",",
"'data'",
":",
"self",
".",
"raw_data",
"}"
] |
Encode the data in this event into a dictionary.
The dictionary returned from this method is a reference to the data
stored in the IOTileEvent, not a copy. It should be treated as read
only.
Returns:
dict: A dictionary containing the information from this event.
|
[
"Encode",
"the",
"data",
"in",
"this",
"event",
"into",
"a",
"dictionary",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/reports/report.py#L163-L181
|
train
|
iotile/coretools
|
iotilecore/iotile/core/hw/reports/report.py
|
IOTileReport.save
|
def save(self, path):
"""Save a binary copy of this report
Args:
path (string): The path where we should save the binary copy of the report
"""
data = self.encode()
with open(path, "wb") as out:
out.write(data)
|
python
|
def save(self, path):
"""Save a binary copy of this report
Args:
path (string): The path where we should save the binary copy of the report
"""
data = self.encode()
with open(path, "wb") as out:
out.write(data)
|
[
"def",
"save",
"(",
"self",
",",
"path",
")",
":",
"data",
"=",
"self",
".",
"encode",
"(",
")",
"with",
"open",
"(",
"path",
",",
"\"wb\"",
")",
"as",
"out",
":",
"out",
".",
"write",
"(",
"data",
")"
] |
Save a binary copy of this report
Args:
path (string): The path where we should save the binary copy of the report
|
[
"Save",
"a",
"binary",
"copy",
"of",
"this",
"report"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/reports/report.py#L294-L304
|
train
|
iotile/coretools
|
iotilecore/iotile/core/hw/reports/report.py
|
IOTileReport.serialize
|
def serialize(self):
"""Turn this report into a dictionary that encodes all information including received timestamp"""
info = {}
info['received_time'] = self.received_time
info['encoded_report'] = bytes(self.encode())
# Handle python 2 / python 3 differences
report_format = info['encoded_report'][0]
if not isinstance(report_format, int):
report_format = ord(report_format)
info['report_format'] = report_format # Report format is the first byte of the encoded report
info['origin'] = self.origin
return info
|
python
|
def serialize(self):
"""Turn this report into a dictionary that encodes all information including received timestamp"""
info = {}
info['received_time'] = self.received_time
info['encoded_report'] = bytes(self.encode())
# Handle python 2 / python 3 differences
report_format = info['encoded_report'][0]
if not isinstance(report_format, int):
report_format = ord(report_format)
info['report_format'] = report_format # Report format is the first byte of the encoded report
info['origin'] = self.origin
return info
|
[
"def",
"serialize",
"(",
"self",
")",
":",
"info",
"=",
"{",
"}",
"info",
"[",
"'received_time'",
"]",
"=",
"self",
".",
"received_time",
"info",
"[",
"'encoded_report'",
"]",
"=",
"bytes",
"(",
"self",
".",
"encode",
"(",
")",
")",
"# Handle python 2 / python 3 differences",
"report_format",
"=",
"info",
"[",
"'encoded_report'",
"]",
"[",
"0",
"]",
"if",
"not",
"isinstance",
"(",
"report_format",
",",
"int",
")",
":",
"report_format",
"=",
"ord",
"(",
"report_format",
")",
"info",
"[",
"'report_format'",
"]",
"=",
"report_format",
"# Report format is the first byte of the encoded report",
"info",
"[",
"'origin'",
"]",
"=",
"self",
".",
"origin",
"return",
"info"
] |
Turn this report into a dictionary that encodes all information including received timestamp
|
[
"Turn",
"this",
"report",
"into",
"a",
"dictionary",
"that",
"encodes",
"all",
"information",
"including",
"received",
"timestamp"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/reports/report.py#L306-L320
|
train
|
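The Python 2 / Python 3 branch in serialize() above exists because indexing a bytes object yields an int on Python 3 but a length-one str on Python 2. A minimal illustration of the normalization as it behaves on Python 3:

encoded_report = b"\x01" + b"rest of the encoded report"
report_format = encoded_report[0]        # int on Python 3, str on Python 2
if not isinstance(report_format, int):
    report_format = ord(report_format)   # only needed on Python 2
assert report_format == 1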
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Node/Alias.py
|
Alias.get_contents
|
def get_contents(self):
"""The contents of an alias is the concatenation
of the content signatures of all its sources."""
childsigs = [n.get_csig() for n in self.children()]
return ''.join(childsigs)
|
python
|
def get_contents(self):
"""The contents of an alias is the concatenation
of the content signatures of all its sources."""
childsigs = [n.get_csig() for n in self.children()]
return ''.join(childsigs)
|
[
"def",
"get_contents",
"(",
"self",
")",
":",
"childsigs",
"=",
"[",
"n",
".",
"get_csig",
"(",
")",
"for",
"n",
"in",
"self",
".",
"children",
"(",
")",
"]",
"return",
"''",
".",
"join",
"(",
"childsigs",
")"
] |
The contents of an alias is the concatenation
of the content signatures of all its sources.
|
[
"The",
"contents",
"of",
"an",
"alias",
"is",
"the",
"concatenation",
"of",
"the",
"content",
"signatures",
"of",
"all",
"its",
"sources",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Node/Alias.py#L130-L134
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/cxx.py
|
generate
|
def generate(env):
"""
Add Builders and construction variables for Visual Age C++ compilers
to an Environment.
"""
import SCons.Tool
import SCons.Tool.cc
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
for suffix in CXXSuffixes:
static_obj.add_action(suffix, SCons.Defaults.CXXAction)
shared_obj.add_action(suffix, SCons.Defaults.ShCXXAction)
static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter)
SCons.Tool.cc.add_common_cc_variables(env)
if 'CXX' not in env:
env['CXX'] = env.Detect(compilers) or compilers[0]
env['CXXFLAGS'] = SCons.Util.CLVar('')
env['CXXCOM'] = '$CXX -o $TARGET -c $CXXFLAGS $CCFLAGS $_CCCOMCOM $SOURCES'
env['SHCXX'] = '$CXX'
env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS')
env['SHCXXCOM'] = '$SHCXX -o $TARGET -c $SHCXXFLAGS $SHCCFLAGS $_CCCOMCOM $SOURCES'
env['CPPDEFPREFIX'] = '-D'
env['CPPDEFSUFFIX'] = ''
env['INCPREFIX'] = '-I'
env['INCSUFFIX'] = ''
env['SHOBJSUFFIX'] = '.os'
env['OBJSUFFIX'] = '.o'
env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0
env['CXXFILESUFFIX'] = '.cc'
|
python
|
def generate(env):
"""
Add Builders and construction variables for Visual Age C++ compilers
to an Environment.
"""
import SCons.Tool
import SCons.Tool.cc
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
for suffix in CXXSuffixes:
static_obj.add_action(suffix, SCons.Defaults.CXXAction)
shared_obj.add_action(suffix, SCons.Defaults.ShCXXAction)
static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter)
SCons.Tool.cc.add_common_cc_variables(env)
if 'CXX' not in env:
env['CXX'] = env.Detect(compilers) or compilers[0]
env['CXXFLAGS'] = SCons.Util.CLVar('')
env['CXXCOM'] = '$CXX -o $TARGET -c $CXXFLAGS $CCFLAGS $_CCCOMCOM $SOURCES'
env['SHCXX'] = '$CXX'
env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS')
env['SHCXXCOM'] = '$SHCXX -o $TARGET -c $SHCXXFLAGS $SHCCFLAGS $_CCCOMCOM $SOURCES'
env['CPPDEFPREFIX'] = '-D'
env['CPPDEFSUFFIX'] = ''
env['INCPREFIX'] = '-I'
env['INCSUFFIX'] = ''
env['SHOBJSUFFIX'] = '.os'
env['OBJSUFFIX'] = '.o'
env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0
env['CXXFILESUFFIX'] = '.cc'
|
[
"def",
"generate",
"(",
"env",
")",
":",
"import",
"SCons",
".",
"Tool",
"import",
"SCons",
".",
"Tool",
".",
"cc",
"static_obj",
",",
"shared_obj",
"=",
"SCons",
".",
"Tool",
".",
"createObjBuilders",
"(",
"env",
")",
"for",
"suffix",
"in",
"CXXSuffixes",
":",
"static_obj",
".",
"add_action",
"(",
"suffix",
",",
"SCons",
".",
"Defaults",
".",
"CXXAction",
")",
"shared_obj",
".",
"add_action",
"(",
"suffix",
",",
"SCons",
".",
"Defaults",
".",
"ShCXXAction",
")",
"static_obj",
".",
"add_emitter",
"(",
"suffix",
",",
"SCons",
".",
"Defaults",
".",
"StaticObjectEmitter",
")",
"shared_obj",
".",
"add_emitter",
"(",
"suffix",
",",
"SCons",
".",
"Defaults",
".",
"SharedObjectEmitter",
")",
"SCons",
".",
"Tool",
".",
"cc",
".",
"add_common_cc_variables",
"(",
"env",
")",
"if",
"'CXX'",
"not",
"in",
"env",
":",
"env",
"[",
"'CXX'",
"]",
"=",
"env",
".",
"Detect",
"(",
"compilers",
")",
"or",
"compilers",
"[",
"0",
"]",
"env",
"[",
"'CXXFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"''",
")",
"env",
"[",
"'CXXCOM'",
"]",
"=",
"'$CXX -o $TARGET -c $CXXFLAGS $CCFLAGS $_CCCOMCOM $SOURCES'",
"env",
"[",
"'SHCXX'",
"]",
"=",
"'$CXX'",
"env",
"[",
"'SHCXXFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"'$CXXFLAGS'",
")",
"env",
"[",
"'SHCXXCOM'",
"]",
"=",
"'$SHCXX -o $TARGET -c $SHCXXFLAGS $SHCCFLAGS $_CCCOMCOM $SOURCES'",
"env",
"[",
"'CPPDEFPREFIX'",
"]",
"=",
"'-D'",
"env",
"[",
"'CPPDEFSUFFIX'",
"]",
"=",
"''",
"env",
"[",
"'INCPREFIX'",
"]",
"=",
"'-I'",
"env",
"[",
"'INCSUFFIX'",
"]",
"=",
"''",
"env",
"[",
"'SHOBJSUFFIX'",
"]",
"=",
"'.os'",
"env",
"[",
"'OBJSUFFIX'",
"]",
"=",
"'.o'",
"env",
"[",
"'STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'",
"]",
"=",
"0",
"env",
"[",
"'CXXFILESUFFIX'",
"]",
"=",
"'.cc'"
] |
Add Builders and construction variables for Visual Age C++ compilers
to an Environment.
|
[
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"Visual",
"Age",
"C",
"++",
"compilers",
"to",
"an",
"Environment",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/cxx.py#L58-L91
|
train
|
iotile/coretools
|
iotilesensorgraph/iotile/sg/streamer.py
|
DataStreamer.link_to_storage
|
def link_to_storage(self, sensor_log):
"""Attach this DataStreamer to an underlying SensorLog.
Calling this method is required if you want to use this DataStreamer
to generate reports from the underlying data in the SensorLog.
You can call it multiple times and it will unlink itself from any
previous SensorLog each time.
Args:
sensor_log (SensorLog): Actually create a StreamWalker to go along with this
streamer so that we can check if it's triggered.
"""
if self.walker is not None:
self._sensor_log.destroy_walker(self.walker)
self.walker = None
self.walker = sensor_log.create_walker(self.selector)
self._sensor_log = sensor_log
|
python
|
def link_to_storage(self, sensor_log):
"""Attach this DataStreamer to an underlying SensorLog.
Calling this method is required if you want to use this DataStreamer
to generate reports from the underlying data in the SensorLog.
You can call it multiple times and it will unlink itself from any
previous SensorLog each time.
Args:
sensor_log (SensorLog): Actually create a StreamWalker to go along with this
streamer so that we can check if it's triggered.
"""
if self.walker is not None:
self._sensor_log.destroy_walker(self.walker)
self.walker = None
self.walker = sensor_log.create_walker(self.selector)
self._sensor_log = sensor_log
|
[
"def",
"link_to_storage",
"(",
"self",
",",
"sensor_log",
")",
":",
"if",
"self",
".",
"walker",
"is",
"not",
"None",
":",
"self",
".",
"_sensor_log",
".",
"destroy_walker",
"(",
"self",
".",
"walker",
")",
"self",
".",
"walker",
"=",
"None",
"self",
".",
"walker",
"=",
"sensor_log",
".",
"create_walker",
"(",
"self",
".",
"selector",
")",
"self",
".",
"_sensor_log",
"=",
"sensor_log"
] |
Attach this DataStreamer to an underlying SensorLog.
Calling this method is required if you want to use this DataStreamer
to generate reports from the underlying data in the SensorLog.
You can call it multiple times and it will unlink itself from any
previous SensorLog each time.
Args:
sensor_log (SensorLog): Actually create a StreamWalker to go along with this
streamer so that we can check if it's triggered.
|
[
"Attach",
"this",
"DataStreamer",
"to",
"an",
"underlying",
"SensorLog",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/streamer.py#L63-L82
|
train
|
iotile/coretools
|
iotilesensorgraph/iotile/sg/streamer.py
|
DataStreamer.triggered
|
def triggered(self, manual=False):
"""Check if this streamer should generate a report.
Streamers can be triggered automatically whenever they have data
or they can be triggered manually. This method returns True if the
streamer is currently triggered.
A streamer is triggered if it:
- (has data AND is automatic) OR
- (has data AND is manually triggered)
Args:
manual (bool): Indicate that the streamer has been manually triggered.
Returns:
bool: Whether the streamer can generate a report right now.
"""
if self.walker is None:
raise InternalError("You can only check if a streamer is triggered if you create it with a SensorLog")
if not self.automatic and not manual:
return False
return self.has_data()
|
python
|
def triggered(self, manual=False):
"""Check if this streamer should generate a report.
Streamers can be triggered automatically whenever they have data
or they can be triggered manually. This method returns True if the
streamer is currently triggered.
A streamer is triggered if it:
- (has data AND is automatic) OR
- (has data AND is manually triggered)
Args:
manual (bool): Indicate that the streamer has been manually triggered.
Returns:
bool: Whether the streamer can generate a report right now.
"""
if self.walker is None:
raise InternalError("You can only check if a streamer is triggered if you create it with a SensorLog")
if not self.automatic and not manual:
return False
return self.has_data()
|
[
"def",
"triggered",
"(",
"self",
",",
"manual",
"=",
"False",
")",
":",
"if",
"self",
".",
"walker",
"is",
"None",
":",
"raise",
"InternalError",
"(",
"\"You can only check if a streamer is triggered if you create it with a SensorLog\"",
")",
"if",
"not",
"self",
".",
"automatic",
"and",
"not",
"manual",
":",
"return",
"False",
"return",
"self",
".",
"has_data",
"(",
")"
] |
Check if this streamer should generate a report.
Streamers can be triggered automatically whenever they have data
or they can be triggered manually. This method returns True if the
streamer is currently triggered.
A streamer is triggered if it:
- (has data AND is automatic) OR
- (has data AND is manually triggered)
Args:
manual (bool): Indicate that the streamer has been manually triggered.
Returns:
bool: Whether the streamer can generate a report right now.
|
[
"Check",
"if",
"this",
"streamer",
"should",
"generate",
"a",
"report",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/streamer.py#L96-L120
|
train
|
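A minimal standalone sketch of the trigger rule documented above (it restates the logic rather than importing DataStreamer): a streamer only reports when it has data, and a non-automatic streamer additionally needs a manual trigger.
def is_triggered(has_data, automatic, manual=False):
    # mirrors DataStreamer.triggered(): manual only overrides the automatic check
    if not automatic and not manual:
        return False
    return has_data
assert is_triggered(True, automatic=True) is True
assert is_triggered(True, automatic=False) is False                  # waits for a manual trigger
assert is_triggered(True, automatic=False, manual=True) is True
assert is_triggered(False, automatic=True) is False                  # no data, no report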
iotile/coretools
|
iotilesensorgraph/iotile/sg/streamer.py
|
DataStreamer.build_report
|
def build_report(self, device_id, max_size=None, device_uptime=0, report_id=None, auth_chain=None):
"""Build a report with all of the readings in this streamer.
This method will produce an IOTileReport subclass and, if necessary,
sign it using the passed authentication chain.
Args:
device_id (int): The UUID of the device to generate a report for.
max_size (int): Optional maximum number of bytes that the report can be
device_uptime (int): The device's uptime to use as the sent timestamp of the report
report_id (int): The report id to use if the report type requires serialization.
auth_chain (AuthChain): An auth chain class to use to sign the report if the report
type requires signing.
Returns:
StreamerReport: The report, its highest id and the number of readings in it.
The highest reading id and number of readings are returned
separately from the report itself because, depending on the format
of the report (such as whether it is encrypted or does not contain
reading ids), these details may not be recoverable from the report
itself.
Raises:
InternalError: If there was no SensorLog passed when this streamer was created.
StreamEmptyError: If there is no data to generate a report from. This can only happen
if a call to triggered() returned False.
ArgumentError: If the report requires additional metadata that was not passed like a
signing key or report_id.
"""
if self.walker is None or self.index is None:
raise InternalError("You can only build a report with a DataStreamer if you create it with a SensorLog and a streamer index")
if self.requires_signing() and auth_chain is None:
raise ArgumentError("You must pass an auth chain to sign this report.")
if self.requires_id() and report_id is None:
raise ArgumentError("You must pass a report_id to serialize this report")
if self.format == 'individual':
reading = self.walker.pop()
highest_id = reading.reading_id
if self.report_type == 'telegram':
return StreamerReport(IndividualReadingReport.FromReadings(device_id, [reading]), 1, highest_id)
elif self.report_type == 'broadcast':
return StreamerReport(BroadcastReport.FromReadings(device_id, [reading], device_uptime), 1, highest_id)
elif self.format == 'hashedlist':
max_readings = (max_size - 20 - 24) // 16
if max_readings <= 0:
raise InternalError("max_size is too small to hold even a single reading", max_size=max_size)
readings = []
highest_id = 0
try:
while len(readings) < max_readings:
reading = self.walker.pop()
readings.append(reading)
if reading.reading_id > highest_id:
highest_id = reading.reading_id
except StreamEmptyError:
if len(readings) == 0:
raise
return StreamerReport(SignedListReport.FromReadings(device_id, readings, report_id=report_id, selector=self.selector.encode(),
streamer=self.index, sent_timestamp=device_uptime), len(readings), highest_id)
raise InternalError("Streamer report format or type is not supported currently", report_format=self.format, report_type=self.report_type)
|
python
|
def build_report(self, device_id, max_size=None, device_uptime=0, report_id=None, auth_chain=None):
"""Build a report with all of the readings in this streamer.
This method will produce an IOTileReport subclass and, if necessary,
sign it using the passed authentication chain.
Args:
device_id (int): The UUID of the device to generate a report for.
max_size (int): Optional maximum number of bytes that the report can be
device_uptime (int): The device's uptime to use as the sent timestamp of the report
report_id (int): The report id to use if the report type requires serialization.
auth_chain (AuthChain): An auth chain class to use to sign the report if the report
type requires signing.
Returns:
StreamerReport: The report, its highest id and the number of readings in it.
The highest reading id and number of readings are returned
separately from the report itself because, depending on the format
of the report (such as whether it is encrypted or does not contain
reading ids), these details may not be recoverable from the report
itself.
Raises:
InternalError: If there was no SensorLog passed when this streamer was created.
StreamEmptyError: If there is no data to generate a report from. This can only happen
if a call to triggered() returned False.
ArgumentError: If the report requires additional metadata that was not passed like a
signing key or report_id.
"""
if self.walker is None or self.index is None:
raise InternalError("You can only build a report with a DataStreamer if you create it with a SensorLog and a streamer index")
if self.requires_signing() and auth_chain is None:
raise ArgumentError("You must pass an auth chain to sign this report.")
if self.requires_id() and report_id is None:
raise ArgumentError("You must pass a report_id to serialize this report")
if self.format == 'individual':
reading = self.walker.pop()
highest_id = reading.reading_id
if self.report_type == 'telegram':
return StreamerReport(IndividualReadingReport.FromReadings(device_id, [reading]), 1, highest_id)
elif self.report_type == 'broadcast':
return StreamerReport(BroadcastReport.FromReadings(device_id, [reading], device_uptime), 1, highest_id)
elif self.format == 'hashedlist':
max_readings = (max_size - 20 - 24) // 16
if max_readings <= 0:
raise InternalError("max_size is too small to hold even a single reading", max_size=max_size)
readings = []
highest_id = 0
try:
while len(readings) < max_readings:
reading = self.walker.pop()
readings.append(reading)
if reading.reading_id > highest_id:
highest_id = reading.reading_id
except StreamEmptyError:
if len(readings) == 0:
raise
return StreamerReport(SignedListReport.FromReadings(device_id, readings, report_id=report_id, selector=self.selector.encode(),
streamer=self.index, sent_timestamp=device_uptime), len(readings), highest_id)
raise InternalError("Streamer report format or type is not supported currently", report_format=self.format, report_type=self.report_type)
|
[
"def",
"build_report",
"(",
"self",
",",
"device_id",
",",
"max_size",
"=",
"None",
",",
"device_uptime",
"=",
"0",
",",
"report_id",
"=",
"None",
",",
"auth_chain",
"=",
"None",
")",
":",
"if",
"self",
".",
"walker",
"is",
"None",
"or",
"self",
".",
"index",
"is",
"None",
":",
"raise",
"InternalError",
"(",
"\"You can only build a report with a DataStreamer if you create it with a SensorLog and a streamer index\"",
")",
"if",
"self",
".",
"requires_signing",
"(",
")",
"and",
"auth_chain",
"is",
"None",
":",
"raise",
"ArgumentError",
"(",
"\"You must pass an auth chain to sign this report.\"",
")",
"if",
"self",
".",
"requires_id",
"(",
")",
"and",
"report_id",
"is",
"None",
":",
"raise",
"ArgumentError",
"(",
"\"You must pass a report_id to serialize this report\"",
")",
"if",
"self",
".",
"format",
"==",
"'individual'",
":",
"reading",
"=",
"self",
".",
"walker",
".",
"pop",
"(",
")",
"highest_id",
"=",
"reading",
".",
"reading_id",
"if",
"self",
".",
"report_type",
"==",
"'telegram'",
":",
"return",
"StreamerReport",
"(",
"IndividualReadingReport",
".",
"FromReadings",
"(",
"device_id",
",",
"[",
"reading",
"]",
")",
",",
"1",
",",
"highest_id",
")",
"elif",
"self",
".",
"report_type",
"==",
"'broadcast'",
":",
"return",
"StreamerReport",
"(",
"BroadcastReport",
".",
"FromReadings",
"(",
"device_id",
",",
"[",
"reading",
"]",
",",
"device_uptime",
")",
",",
"1",
",",
"highest_id",
")",
"elif",
"self",
".",
"format",
"==",
"'hashedlist'",
":",
"max_readings",
"=",
"(",
"max_size",
"-",
"20",
"-",
"24",
")",
"//",
"16",
"if",
"max_readings",
"<=",
"0",
":",
"raise",
"InternalError",
"(",
"\"max_size is too small to hold even a single reading\"",
",",
"max_size",
"=",
"max_size",
")",
"readings",
"=",
"[",
"]",
"highest_id",
"=",
"0",
"try",
":",
"while",
"len",
"(",
"readings",
")",
"<",
"max_readings",
":",
"reading",
"=",
"self",
".",
"walker",
".",
"pop",
"(",
")",
"readings",
".",
"append",
"(",
"reading",
")",
"if",
"reading",
".",
"reading_id",
">",
"highest_id",
":",
"highest_id",
"=",
"reading",
".",
"reading_id",
"except",
"StreamEmptyError",
":",
"if",
"len",
"(",
"readings",
")",
"==",
"0",
":",
"raise",
"return",
"StreamerReport",
"(",
"SignedListReport",
".",
"FromReadings",
"(",
"device_id",
",",
"readings",
",",
"report_id",
"=",
"report_id",
",",
"selector",
"=",
"self",
".",
"selector",
".",
"encode",
"(",
")",
",",
"streamer",
"=",
"self",
".",
"index",
",",
"sent_timestamp",
"=",
"device_uptime",
")",
",",
"len",
"(",
"readings",
")",
",",
"highest_id",
")",
"raise",
"InternalError",
"(",
"\"Streamer report format or type is not supported currently\"",
",",
"report_format",
"=",
"self",
".",
"format",
",",
"report_type",
"=",
"self",
".",
"report_type",
")"
] |
Build a report with all of the readings in this streamer.
This method will produce an IOTileReport subclass and, if necessary,
sign it using the passed authentication chain.
Args:
device_id (int): The UUID of the device to generate a report for.
max_size (int): Optional maximum number of bytes that the report can be
device_uptime (int): The device's uptime to use as the sent timestamp of the report
report_id (int): The report id to use if the report type requires serialization.
auth_chain (AuthChain): An auth chain class to use to sign the report if the report
type requires signing.
Returns:
StreamerReport: The report, its highest id and the number of readings in it.
The highest reading id and number of readings are returned
separately from the report itself because, depending on the format
of the report (such as whether it is encrypted or does not contain
reading ids), these details may not be recoverable from the report
itself.
Raises:
InternalError: If there was no SensorLog passed when this streamer was created.
StreamEmptyError: If there is no data to generate a report from. This can only happen
if a call to triggered() returned False.
ArgumentError: If the report requires additional metadata that was not passed like a
signing key or report_id.
|
[
"Build",
"a",
"report",
"with",
"all",
"of",
"the",
"readings",
"in",
"this",
"streamer",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/streamer.py#L140-L209
|
train
|
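A worked example of the hashedlist sizing arithmetic used above, assuming the same fixed overhead of 20 header bytes plus 24 footer bytes and 16 bytes per reading:
def max_hashedlist_readings(max_size):
    # same formula as build_report(): usable payload divided by the 16-byte reading size
    return (max_size - 20 - 24) // 16
assert max_hashedlist_readings(100) == 3   # 100 - 44 = 56 usable bytes -> 3 readings
assert max_hashedlist_readings(44) == 0    # would raise InternalError in build_report()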
iotile/coretools
|
iotilesensorgraph/iotile/sg/slot.py
|
SlotIdentifier.matches
|
def matches(self, address, name=None):
"""Check if this slot identifier matches the given tile.
Matching can happen either by address or by module name (not currently implemented).
Returns:
bool: True if there is a match, otherwise False.
"""
if self.controller:
return address == 8
return self.address == address
|
python
|
def matches(self, address, name=None):
"""Check if this slot identifier matches the given tile.
Matching can happen either by address or by module name (not currently implemented).
Returns:
bool: True if there is a match, otherwise False.
"""
if self.controller:
return address == 8
return self.address == address
|
[
"def",
"matches",
"(",
"self",
",",
"address",
",",
"name",
"=",
"None",
")",
":",
"if",
"self",
".",
"controller",
":",
"return",
"address",
"==",
"8",
"return",
"self",
".",
"address",
"==",
"address"
] |
Check if this slot identifier matches the given tile.
Matching can happen either by address or by module name (not currently implemented).
Returns:
bool: True if there is a match, otherwise False.
|
[
"Check",
"if",
"this",
"slot",
"identifier",
"matches",
"the",
"given",
"tile",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/slot.py#L64-L76
|
train
|
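A small sketch of the matching rule above; the controller is assumed to always live at the fixed tile address 8, so a controller selector ignores its own slot number.
def slot_matches(is_controller, own_address, tile_address):
    # mirrors SlotIdentifier.matches(); name-based matching is not implemented there either
    if is_controller:
        return tile_address == 8
    return own_address == tile_address
assert slot_matches(True, None, 8) is True
assert slot_matches(False, 11, 11) is True
assert slot_matches(False, 11, 12) is False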
iotile/coretools
|
iotilesensorgraph/iotile/sg/slot.py
|
SlotIdentifier.FromString
|
def FromString(cls, desc):
"""Create a slot identifier from a string description.
The string needs to be either:
controller
OR
slot <X> where X is an integer that can be converted with int(X, 0)
Args:
desc (str): The string description of the slot
Returns:
SlotIdentifier
"""
desc = str(desc)
if desc == u'controller':
return SlotIdentifier(controller=True)
words = desc.split()
if len(words) != 2 or words[0] != u'slot':
raise ArgumentError(u"Illegal slot identifier", descriptor=desc)
try:
slot_id = int(words[1], 0)
except ValueError:
raise ArgumentError(u"Could not convert slot identifier to number", descriptor=desc, number=words[1])
return SlotIdentifier(slot=slot_id)
|
python
|
def FromString(cls, desc):
"""Create a slot identifier from a string description.
The string needs to be either:
controller
OR
slot <X> where X is an integer that can be converted with int(X, 0)
Args:
desc (str): The string description of the slot
Returns:
SlotIdentifier
"""
desc = str(desc)
if desc == u'controller':
return SlotIdentifier(controller=True)
words = desc.split()
if len(words) != 2 or words[0] != u'slot':
raise ArgumentError(u"Illegal slot identifier", descriptor=desc)
try:
slot_id = int(words[1], 0)
except ValueError:
raise ArgumentError(u"Could not convert slot identifier to number", descriptor=desc, number=words[1])
return SlotIdentifier(slot=slot_id)
|
[
"def",
"FromString",
"(",
"cls",
",",
"desc",
")",
":",
"desc",
"=",
"str",
"(",
"desc",
")",
"if",
"desc",
"==",
"u'controller'",
":",
"return",
"SlotIdentifier",
"(",
"controller",
"=",
"True",
")",
"words",
"=",
"desc",
".",
"split",
"(",
")",
"if",
"len",
"(",
"words",
")",
"!=",
"2",
"or",
"words",
"[",
"0",
"]",
"!=",
"u'slot'",
":",
"raise",
"ArgumentError",
"(",
"u\"Illegal slot identifier\"",
",",
"descriptor",
"=",
"desc",
")",
"try",
":",
"slot_id",
"=",
"int",
"(",
"words",
"[",
"1",
"]",
",",
"0",
")",
"except",
"ValueError",
":",
"raise",
"ArgumentError",
"(",
"u\"Could not convert slot identifier to number\"",
",",
"descriptor",
"=",
"desc",
",",
"number",
"=",
"words",
"[",
"1",
"]",
")",
"return",
"SlotIdentifier",
"(",
"slot",
"=",
"slot_id",
")"
] |
Create a slot identifier from a string description.
The string needs to be either:
controller
OR
slot <X> where X is an integer that can be converted with int(X, 0)
Args:
desc (str): The string description of the slot
Returns:
SlotIdentifier
|
[
"Create",
"a",
"slot",
"identifier",
"from",
"a",
"string",
"description",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/slot.py#L79-L109
|
train
|
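A standalone sketch of the descriptor grammar accepted above; it restates the parsing rules instead of importing SlotIdentifier, and int(x, 0) is what lets hex or octal slot numbers through.
def parse_slot_descriptor(desc):
    desc = str(desc)
    if desc == 'controller':
        return ('controller', None)
    words = desc.split()
    if len(words) != 2 or words[0] != 'slot':
        raise ValueError('Illegal slot identifier: %s' % desc)
    return ('slot', int(words[1], 0))
assert parse_slot_descriptor('controller') == ('controller', None)
assert parse_slot_descriptor('slot 3') == ('slot', 3)
assert parse_slot_descriptor('slot 0x10') == ('slot', 16)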
iotile/coretools
|
iotilesensorgraph/iotile/sg/slot.py
|
SlotIdentifier.FromEncoded
|
def FromEncoded(cls, bindata):
"""Create a slot identifier from an encoded binary descriptor.
These binary descriptors are used to communicate slot targeting
to an embedded device. They are exactly 8 bytes in length.
Args:
bindata (bytes): The 8-byte binary descriptor.
Returns:
SlotIdentifier
"""
if len(bindata) != 8:
raise ArgumentError("Invalid binary slot descriptor with invalid length", length=len(bindata), expected=8, data=bindata)
slot, match_op = struct.unpack("<B6xB", bindata)
match_name = cls.KNOWN_MATCH_CODES.get(match_op)
if match_name is None:
raise ArgumentError("Unknown match operation specified in binary slot descriptor", operation=match_op, known_match_ops=cls.KNOWN_MATCH_CODES)
if match_name == 'match_controller':
return SlotIdentifier(controller=True)
if match_name == 'match_slot':
return SlotIdentifier(slot=slot)
raise ArgumentError("Unsupported match operation in binary slot descriptor", match_op=match_name)
|
python
|
def FromEncoded(cls, bindata):
"""Create a slot identifier from an encoded binary descriptor.
These binary descriptors are used to communicate slot targeting
to an embedded device. They are exactly 8 bytes in length.
Args:
bindata (bytes): The 8-byte binary descriptor.
Returns:
SlotIdentifier
"""
if len(bindata) != 8:
raise ArgumentError("Invalid binary slot descriptor with invalid length", length=len(bindata), expected=8, data=bindata)
slot, match_op = struct.unpack("<B6xB", bindata)
match_name = cls.KNOWN_MATCH_CODES.get(match_op)
if match_name is None:
raise ArgumentError("Unknown match operation specified in binary slot descriptor", operation=match_op, known_match_ops=cls.KNOWN_MATCH_CODES)
if match_name == 'match_controller':
return SlotIdentifier(controller=True)
if match_name == 'match_slot':
return SlotIdentifier(slot=slot)
raise ArgumentError("Unsupported match operation in binary slot descriptor", match_op=match_name)
|
[
"def",
"FromEncoded",
"(",
"cls",
",",
"bindata",
")",
":",
"if",
"len",
"(",
"bindata",
")",
"!=",
"8",
":",
"raise",
"ArgumentError",
"(",
"\"Invalid binary slot descriptor with invalid length\"",
",",
"length",
"=",
"len",
"(",
"bindata",
")",
",",
"expected",
"=",
"8",
",",
"data",
"=",
"bindata",
")",
"slot",
",",
"match_op",
"=",
"struct",
".",
"unpack",
"(",
"\"<B6xB\"",
",",
"bindata",
")",
"match_name",
"=",
"cls",
".",
"KNOWN_MATCH_CODES",
".",
"get",
"(",
"match_op",
")",
"if",
"match_name",
"is",
"None",
":",
"raise",
"ArgumentError",
"(",
"\"Unknown match operation specified in binary slot descriptor\"",
",",
"operation",
"=",
"match_op",
",",
"known_match_ops",
"=",
"cls",
".",
"KNOWN_MATCH_CODES",
")",
"if",
"match_name",
"==",
"'match_controller'",
":",
"return",
"SlotIdentifier",
"(",
"controller",
"=",
"True",
")",
"if",
"match_name",
"==",
"'match_slot'",
":",
"return",
"SlotIdentifier",
"(",
"slot",
"=",
"slot",
")",
"raise",
"ArgumentError",
"(",
"\"Unsupported match operation in binary slot descriptor\"",
",",
"match_op",
"=",
"match_name",
")"
] |
Create a slot identifier from an encoded binary descriptor.
These binary descriptors are used to communicate slot targeting
to an embedded device. They are exactly 8 bytes in length.
Args:
bindata (bytes): The 8-byte binary descriptor.
Returns:
SlotIdentifier
|
[
"Create",
"a",
"slot",
"identifier",
"from",
"an",
"encoded",
"binary",
"descriptor",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/slot.py#L112-L140
|
train
|
iotile/coretools
|
iotilesensorgraph/iotile/sg/slot.py
|
SlotIdentifier.encode
|
def encode(self):
"""Encode this slot identifier into a binary descriptor.
Returns:
bytes: The 8-byte encoded slot identifier
"""
slot = 0
match_op = self.KNOWN_MATCH_NAMES['match_controller']
if not self.controller:
slot = self.slot
match_op = self.KNOWN_MATCH_NAMES['match_slot']
return struct.pack("<B6xB", slot, match_op)
|
python
|
def encode(self):
"""Encode this slot identifier into a binary descriptor.
Returns:
bytes: The 8-byte encoded slot identifier
"""
slot = 0
match_op = self.KNOWN_MATCH_NAMES['match_controller']
if not self.controller:
slot = self.slot
match_op = self.KNOWN_MATCH_NAMES['match_slot']
return struct.pack("<B6xB", slot, match_op)
|
[
"def",
"encode",
"(",
"self",
")",
":",
"slot",
"=",
"0",
"match_op",
"=",
"self",
".",
"KNOWN_MATCH_NAMES",
"[",
"'match_controller'",
"]",
"if",
"not",
"self",
".",
"controller",
":",
"slot",
"=",
"self",
".",
"slot",
"match_op",
"=",
"self",
".",
"KNOWN_MATCH_NAMES",
"[",
"'match_slot'",
"]",
"return",
"struct",
".",
"pack",
"(",
"\"<B6xB\"",
",",
"slot",
",",
"match_op",
")"
] |
Encode this slot identifier into a binary descriptor.
Returns:
bytes: The 8-byte encoded slot identifier
|
[
"Encode",
"this",
"slot",
"identifier",
"into",
"a",
"binary",
"descriptor",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/slot.py#L142-L156
|
train
|
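A round-trip sketch of the 8-byte binary layout shared by encode() and FromEncoded(): byte 0 carries the slot number, bytes 1-6 are padding, and byte 7 carries the match opcode. The opcode value below is a placeholder, not the real entry from KNOWN_MATCH_NAMES.
import struct
MATCH_SLOT = 2                                  # hypothetical opcode, for illustration only
packed = struct.pack('<B6xB', 5, MATCH_SLOT)    # slot 5
assert len(packed) == 8
slot, match_op = struct.unpack('<B6xB', packed)
assert (slot, match_op) == (5, MATCH_SLOT)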
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/Main.py
|
_scons_syntax_error
|
def _scons_syntax_error(e):
"""Handle syntax errors. Print out a message and show where the error
occurred.
"""
etype, value, tb = sys.exc_info()
lines = traceback.format_exception_only(etype, value)
for line in lines:
sys.stderr.write(line+'\n')
sys.exit(2)
|
python
|
def _scons_syntax_error(e):
"""Handle syntax errors. Print out a message and show where the error
occurred.
"""
etype, value, tb = sys.exc_info()
lines = traceback.format_exception_only(etype, value)
for line in lines:
sys.stderr.write(line+'\n')
sys.exit(2)
|
[
"def",
"_scons_syntax_error",
"(",
"e",
")",
":",
"etype",
",",
"value",
",",
"tb",
"=",
"sys",
".",
"exc_info",
"(",
")",
"lines",
"=",
"traceback",
".",
"format_exception_only",
"(",
"etype",
",",
"value",
")",
"for",
"line",
"in",
"lines",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"line",
"+",
"'\\n'",
")",
"sys",
".",
"exit",
"(",
"2",
")"
] |
Handle syntax errors. Print out a message and show where the error
occurred.
|
[
"Handle",
"syntax",
"errors",
".",
"Print",
"out",
"a",
"message",
"and",
"show",
"where",
"the",
"error",
"occurred",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/Main.py#L548-L556
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/Main.py
|
find_deepest_user_frame
|
def find_deepest_user_frame(tb):
"""
Find the deepest stack frame that is not part of SCons.
Input is a "pre-processed" stack trace in the form
returned by traceback.extract_tb() or traceback.extract_stack()
"""
tb.reverse()
# find the deepest traceback frame that is not part
# of SCons:
for frame in tb:
filename = frame[0]
if filename.find(os.sep+'SCons'+os.sep) == -1:
return frame
return tb[0]
|
python
|
def find_deepest_user_frame(tb):
"""
Find the deepest stack frame that is not part of SCons.
Input is a "pre-processed" stack trace in the form
returned by traceback.extract_tb() or traceback.extract_stack()
"""
tb.reverse()
# find the deepest traceback frame that is not part
# of SCons:
for frame in tb:
filename = frame[0]
if filename.find(os.sep+'SCons'+os.sep) == -1:
return frame
return tb[0]
|
[
"def",
"find_deepest_user_frame",
"(",
"tb",
")",
":",
"tb",
".",
"reverse",
"(",
")",
"# find the deepest traceback frame that is not part",
"# of SCons:",
"for",
"frame",
"in",
"tb",
":",
"filename",
"=",
"frame",
"[",
"0",
"]",
"if",
"filename",
".",
"find",
"(",
"os",
".",
"sep",
"+",
"'SCons'",
"+",
"os",
".",
"sep",
")",
"==",
"-",
"1",
":",
"return",
"frame",
"return",
"tb",
"[",
"0",
"]"
] |
Find the deepest stack frame that is not part of SCons.
Input is a "pre-processed" stack trace in the form
returned by traceback.extract_tb() or traceback.extract_stack()
|
[
"Find",
"the",
"deepest",
"stack",
"frame",
"that",
"is",
"not",
"part",
"of",
"SCons",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/Main.py#L558-L574
|
train
|
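A self-contained sketch of the frame filter above: walk the extracted traceback from deepest to shallowest and return the first frame whose filename does not live inside an SCons directory.
import os
def deepest_user_frame(frames):
    # frames is in extract_tb()/extract_stack() order: shallowest first
    marker = os.sep + 'SCons' + os.sep
    for frame in reversed(frames):
        if marker not in frame[0]:
            return frame
    return frames[-1]    # fall back to the deepest frame, as the original does
frames = [(os.path.join('lib', 'SCons', 'Script', 'Main.py'), 10, 'main', ''),
          (os.path.join('project', 'SConstruct'), 5, '<module>', '')]
assert deepest_user_frame(frames)[1] == 5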
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/Main.py
|
_scons_user_error
|
def _scons_user_error(e):
"""Handle user errors. Print out a message and a description of the
error, along with the line number and routine where it occurred.
The file and line number will be the deepest stack frame that is
not part of SCons itself.
"""
global print_stacktrace
etype, value, tb = sys.exc_info()
if print_stacktrace:
traceback.print_exception(etype, value, tb)
filename, lineno, routine, dummy = find_deepest_user_frame(traceback.extract_tb(tb))
sys.stderr.write("\nscons: *** %s\n" % value)
sys.stderr.write('File "%s", line %d, in %s\n' % (filename, lineno, routine))
sys.exit(2)
|
python
|
def _scons_user_error(e):
"""Handle user errors. Print out a message and a description of the
error, along with the line number and routine where it occurred.
The file and line number will be the deepest stack frame that is
not part of SCons itself.
"""
global print_stacktrace
etype, value, tb = sys.exc_info()
if print_stacktrace:
traceback.print_exception(etype, value, tb)
filename, lineno, routine, dummy = find_deepest_user_frame(traceback.extract_tb(tb))
sys.stderr.write("\nscons: *** %s\n" % value)
sys.stderr.write('File "%s", line %d, in %s\n' % (filename, lineno, routine))
sys.exit(2)
|
[
"def",
"_scons_user_error",
"(",
"e",
")",
":",
"global",
"print_stacktrace",
"etype",
",",
"value",
",",
"tb",
"=",
"sys",
".",
"exc_info",
"(",
")",
"if",
"print_stacktrace",
":",
"traceback",
".",
"print_exception",
"(",
"etype",
",",
"value",
",",
"tb",
")",
"filename",
",",
"lineno",
",",
"routine",
",",
"dummy",
"=",
"find_deepest_user_frame",
"(",
"traceback",
".",
"extract_tb",
"(",
"tb",
")",
")",
"sys",
".",
"stderr",
".",
"write",
"(",
"\"\\nscons: *** %s\\n\"",
"%",
"value",
")",
"sys",
".",
"stderr",
".",
"write",
"(",
"'File \"%s\", line %d, in %s\\n'",
"%",
"(",
"filename",
",",
"lineno",
",",
"routine",
")",
")",
"sys",
".",
"exit",
"(",
"2",
")"
] |
Handle user errors. Print out a message and a description of the
error, along with the line number and routine where it occurred.
The file and line number will be the deepest stack frame that is
not part of SCons itself.
|
[
"Handle",
"user",
"errors",
".",
"Print",
"out",
"a",
"message",
"and",
"a",
"description",
"of",
"the",
"error",
"along",
"with",
"the",
"line",
"number",
"and",
"routine",
"where",
"it",
"occured",
".",
"The",
"file",
"and",
"line",
"number",
"will",
"be",
"the",
"deepest",
"stack",
"frame",
"that",
"is",
"not",
"part",
"of",
"SCons",
"itself",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/Main.py#L576-L589
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/Main.py
|
_scons_user_warning
|
def _scons_user_warning(e):
"""Handle user warnings. Print out a message and a description of
the warning, along with the line number and routine where it occurred.
The file and line number will be the deepest stack frame that is
not part of SCons itself.
"""
etype, value, tb = sys.exc_info()
filename, lineno, routine, dummy = find_deepest_user_frame(traceback.extract_tb(tb))
sys.stderr.write("\nscons: warning: %s\n" % e)
sys.stderr.write('File "%s", line %d, in %s\n' % (filename, lineno, routine))
|
python
|
def _scons_user_warning(e):
"""Handle user warnings. Print out a message and a description of
the warning, along with the line number and routine where it occurred.
The file and line number will be the deepest stack frame that is
not part of SCons itself.
"""
etype, value, tb = sys.exc_info()
filename, lineno, routine, dummy = find_deepest_user_frame(traceback.extract_tb(tb))
sys.stderr.write("\nscons: warning: %s\n" % e)
sys.stderr.write('File "%s", line %d, in %s\n' % (filename, lineno, routine))
|
[
"def",
"_scons_user_warning",
"(",
"e",
")",
":",
"etype",
",",
"value",
",",
"tb",
"=",
"sys",
".",
"exc_info",
"(",
")",
"filename",
",",
"lineno",
",",
"routine",
",",
"dummy",
"=",
"find_deepest_user_frame",
"(",
"traceback",
".",
"extract_tb",
"(",
"tb",
")",
")",
"sys",
".",
"stderr",
".",
"write",
"(",
"\"\\nscons: warning: %s\\n\"",
"%",
"e",
")",
"sys",
".",
"stderr",
".",
"write",
"(",
"'File \"%s\", line %d, in %s\\n'",
"%",
"(",
"filename",
",",
"lineno",
",",
"routine",
")",
")"
] |
Handle user warnings. Print out a message and a description of
the warning, along with the line number and routine where it occurred.
The file and line number will be the deepest stack frame that is
not part of SCons itself.
|
[
"Handle",
"user",
"warnings",
".",
"Print",
"out",
"a",
"message",
"and",
"a",
"description",
"of",
"the",
"warning",
"along",
"with",
"the",
"line",
"number",
"and",
"routine",
"where",
"it",
"occured",
".",
"The",
"file",
"and",
"line",
"number",
"will",
"be",
"the",
"deepest",
"stack",
"frame",
"that",
"is",
"not",
"part",
"of",
"SCons",
"itself",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/Main.py#L591-L600
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/Main.py
|
_SConstruct_exists
|
def _SConstruct_exists(dirname='', repositories=[], filelist=None):
"""This function checks that an SConstruct file exists in a directory.
If so, it returns the path of the file. By default, it checks the
current directory.
"""
if not filelist:
filelist = ['SConstruct', 'Sconstruct', 'sconstruct']
for file in filelist:
sfile = os.path.join(dirname, file)
if os.path.isfile(sfile):
return sfile
if not os.path.isabs(sfile):
for rep in repositories:
if os.path.isfile(os.path.join(rep, sfile)):
return sfile
return None
|
python
|
def _SConstruct_exists(dirname='', repositories=[], filelist=None):
"""This function checks that an SConstruct file exists in a directory.
If so, it returns the path of the file. By default, it checks the
current directory.
"""
if not filelist:
filelist = ['SConstruct', 'Sconstruct', 'sconstruct']
for file in filelist:
sfile = os.path.join(dirname, file)
if os.path.isfile(sfile):
return sfile
if not os.path.isabs(sfile):
for rep in repositories:
if os.path.isfile(os.path.join(rep, sfile)):
return sfile
return None
|
[
"def",
"_SConstruct_exists",
"(",
"dirname",
"=",
"''",
",",
"repositories",
"=",
"[",
"]",
",",
"filelist",
"=",
"None",
")",
":",
"if",
"not",
"filelist",
":",
"filelist",
"=",
"[",
"'SConstruct'",
",",
"'Sconstruct'",
",",
"'sconstruct'",
"]",
"for",
"file",
"in",
"filelist",
":",
"sfile",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dirname",
",",
"file",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"sfile",
")",
":",
"return",
"sfile",
"if",
"not",
"os",
".",
"path",
".",
"isabs",
"(",
"sfile",
")",
":",
"for",
"rep",
"in",
"repositories",
":",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"os",
".",
"path",
".",
"join",
"(",
"rep",
",",
"sfile",
")",
")",
":",
"return",
"sfile",
"return",
"None"
] |
This function checks that an SConstruct file exists in a directory.
If so, it returns the path of the file. By default, it checks the
current directory.
|
[
"This",
"function",
"checks",
"that",
"an",
"SConstruct",
"file",
"exists",
"in",
"a",
"directory",
".",
"If",
"so",
"it",
"returns",
"the",
"path",
"of",
"the",
"file",
".",
"By",
"default",
"it",
"checks",
"the",
"current",
"directory",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/Main.py#L618-L633
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/Main.py
|
BuildTask.make_ready
|
def make_ready(self):
"""Make a task ready for execution"""
SCons.Taskmaster.OutOfDateTask.make_ready(self)
if self.out_of_date and self.options.debug_explain:
explanation = self.out_of_date[0].explain()
if explanation:
sys.stdout.write("scons: " + explanation)
|
python
|
def make_ready(self):
"""Make a task ready for execution"""
SCons.Taskmaster.OutOfDateTask.make_ready(self)
if self.out_of_date and self.options.debug_explain:
explanation = self.out_of_date[0].explain()
if explanation:
sys.stdout.write("scons: " + explanation)
|
[
"def",
"make_ready",
"(",
"self",
")",
":",
"SCons",
".",
"Taskmaster",
".",
"OutOfDateTask",
".",
"make_ready",
"(",
"self",
")",
"if",
"self",
".",
"out_of_date",
"and",
"self",
".",
"options",
".",
"debug_explain",
":",
"explanation",
"=",
"self",
".",
"out_of_date",
"[",
"0",
"]",
".",
"explain",
"(",
")",
"if",
"explanation",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"\"scons: \"",
"+",
"explanation",
")"
] |
Make a task ready for execution
|
[
"Make",
"a",
"task",
"ready",
"for",
"execution"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/Main.py#L306-L312
|
train
|
iotile/coretools
|
iotileemulate/iotile/emulate/reference/reference_controller.py
|
_unpack_version
|
def _unpack_version(tag_data):
"""Parse a packed version info struct into tag and major.minor version.
The tag and version are parsed out according to 20 bits for tag and
6 bits each for major and minor. The more interesting part is the
blacklisting performed for tags that are known to be untrustworthy.
In particular, the following applies to tags.
- tags < 1024 are reserved for development and have only locally defined
meaning. They are not for use in production.
- tags in [1024, 2048) are production tags but were used inconsistently
in the early days of Arch and hence cannot be trusted to correspond with
an actual device model.
- tags >= 2048 are reserved for supported production device variants.
- the tag and version 0 (0.0) is reserved for an unknown wildcard that
does not convey any information except that the tag and version are
not known.
"""
tag = tag_data & ((1 << 20) - 1)
version_data = tag_data >> 20
major = (version_data >> 6) & ((1 << 6) - 1)
minor = (version_data >> 0) & ((1 << 6) - 1)
return (tag, "{}.{}".format(major, minor))
|
python
|
def _unpack_version(tag_data):
"""Parse a packed version info struct into tag and major.minor version.
The tag and version are parsed out according to 20 bits for tag and
6 bits each for major and minor. The more interesting part is the
blacklisting performed for tags that are known to be untrustworthy.
In particular, the following applies to tags.
- tags < 1024 are reserved for development and have only locally defined
meaning. They are not for use in production.
- tags in [1024, 2048) are production tags but were used inconsistently
in the early days of Arch and hence cannot be trusted to correspond with
an actual device model.
- tags >= 2048 are reserved for supported production device variants.
- the tag and version 0 (0.0) is reserved for an unknown wildcard that
does not convey any information except that the tag and version are
not known.
"""
tag = tag_data & ((1 << 20) - 1)
version_data = tag_data >> 20
major = (version_data >> 6) & ((1 << 6) - 1)
minor = (version_data >> 0) & ((1 << 6) - 1)
return (tag, "{}.{}".format(major, minor))
|
[
"def",
"_unpack_version",
"(",
"tag_data",
")",
":",
"tag",
"=",
"tag_data",
"&",
"(",
"(",
"1",
"<<",
"20",
")",
"-",
"1",
")",
"version_data",
"=",
"tag_data",
">>",
"20",
"major",
"=",
"(",
"version_data",
">>",
"6",
")",
"&",
"(",
"(",
"1",
"<<",
"6",
")",
"-",
"1",
")",
"minor",
"=",
"(",
"version_data",
">>",
"0",
")",
"&",
"(",
"(",
"1",
"<<",
"6",
")",
"-",
"1",
")",
"return",
"(",
"tag",
",",
"\"{}.{}\"",
".",
"format",
"(",
"major",
",",
"minor",
")",
")"
] |
Parse a packed version info struct into tag and major.minor version.
The tag and version are parsed out according to 20 bits for tag and
6 bits each for major and minor. The more interesting part is the
blacklisting performed for tags that are known to be untrustworthy.
In particular, the following applies to tags.
- tags < 1024 are reserved for development and have only locally defined
meaning. They are not for use in production.
- tags in [1024, 2048) are production tags but were used inconsistently
in the early days of Arch and hence cannot be trusted to correspond with
an actual device model.
- tags >= 2048 are reserved for supported production device variants.
- the tag and version 0 (0.0) is reserved for an unknown wildcard that
does not convey any information except that the tag and version are
not known.
|
[
"Parse",
"a",
"packed",
"version",
"info",
"struct",
"into",
"tag",
"and",
"major",
".",
"minor",
"version",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/reference/reference_controller.py#L322-L348
|
train
|
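A worked example of the 20/6/6-bit layout described above: the tag sits in the low 20 bits, then minor and major occupy 6 bits each. The packer here is a sketch written to mirror the documented unpacking, not a copy of the repo's _pack_version.
def pack_version(tag, major, minor):
    return (((major << 6) | minor) << 20) | tag
def unpack_version(tag_data):
    tag = tag_data & ((1 << 20) - 1)
    version_data = tag_data >> 20
    return tag, (version_data >> 6) & 0x3F, version_data & 0x3F
assert unpack_version(pack_version(2050, 1, 3)) == (2050, 1, 3)   # production tag 2050, version 1.3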
iotile/coretools
|
iotileemulate/iotile/emulate/reference/reference_controller.py
|
ReferenceController._handle_reset
|
def _handle_reset(self):
"""Reset this controller tile.
This process will call _handle_reset() for all of the controller
subsystem mixins in order to make sure they all return to their proper
reset state.
It will then reset all of the peripheral tiles to emulate the behavior
of a physical POD where the controller tile cuts power to all
peripheral tiles on reset for a clean boot.
This will clear all subsystems of this controller to their reset
states.
The order of these calls is important to guarantee that everything is
in the correct state before resetting the next subsystem.
The behavior of this function is different depending on whether
deferred is True or False. If it's true, this function will only
clear the config database and then queue all of the config streaming
rpcs to itself to load in all of our config variables. Once these
have been sent, it will reset the rest of the controller subsystems.
"""
self._logger.info("Resetting controller")
self._device.reset_count += 1
super(ReferenceController, self)._handle_reset()
# Load in all default values into our config variables before streaming
# updated data into them.
self.reset_config_variables()
|
python
|
def _handle_reset(self):
"""Reset this controller tile.
This process will call _handle_reset() for all of the controller
subsystem mixins in order to make sure they all return to their proper
reset state.
It will then reset all of the peripheral tiles to emulate the behavior
of a physical POD where the controller tile cuts power to all
peripheral tiles on reset for a clean boot.
This will clear all subsystems of this controller to their reset
states.
The order of these calls is important to guarantee that everything is
in the correct state before resetting the next subsystem.
The behavior of this function is different depending on whether
deferred is True or False. If it's true, this function will only
clear the config database and then queue all of the config streaming
rpcs to itself to load in all of our config variables. Once these
have been sent, it will reset the rest of the controller subsystems.
"""
self._logger.info("Resetting controller")
self._device.reset_count += 1
super(ReferenceController, self)._handle_reset()
# Load in all default values into our config variables before streaming
# updated data into them.
self.reset_config_variables()
|
[
"def",
"_handle_reset",
"(",
"self",
")",
":",
"self",
".",
"_logger",
".",
"info",
"(",
"\"Resetting controller\"",
")",
"self",
".",
"_device",
".",
"reset_count",
"+=",
"1",
"super",
"(",
"ReferenceController",
",",
"self",
")",
".",
"_handle_reset",
"(",
")",
"# Load in all default values into our config variables before streaming",
"# updated data into them.",
"self",
".",
"reset_config_variables",
"(",
")"
] |
Reset this controller tile.
This process will call _handle_reset() for all of the controller
subsystem mixins in order to make sure they all return to their proper
reset state.
It will then reset all of the peripheral tiles to emulate the behavior
of a physical POD where the controller tile cuts power to all
peripheral tiles on reset for a clean boot.
This will clear all subsystems of this controller to their reset
states.
The order of these calls is important to guarantee that everything is
in the correct state before resetting the next subsystem.
The behavior of this function is different depending on whether
deferred is True or False. If it's true, this function will only
clear the config database and then queue all of the config streaming
rpcs to itself to load in all of our config variables. Once these
have been sent, it will reset the rest of the controller subsystems.
|
[
"Reset",
"this",
"controller",
"tile",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/reference/reference_controller.py#L78-L109
|
train
|
iotile/coretools
|
iotileemulate/iotile/emulate/reference/reference_controller.py
|
ReferenceController._reset_vector
|
async def _reset_vector(self):
"""Initialize the controller's subsystems inside the emulation thread."""
# Send ourselves all of our config variable assignments
config_rpcs = self.config_database.stream_matching(8, self.name)
for rpc in config_rpcs:
await self._device.emulator.await_rpc(*rpc)
config_assignments = self.latch_config_variables()
self._logger.info("Latched config variables at reset for controller: %s", config_assignments)
for system in self._post_config_subsystems:
try:
system.clear_to_reset(config_assignments)
await asyncio.wait_for(system.initialize(), timeout=2.0)
except:
self._logger.exception("Error initializing %s", system)
raise
self._logger.info("Finished clearing controller to reset condition")
# Now reset all of the tiles
for address, _ in self._device.iter_tiles(include_controller=False):
self._logger.info("Sending reset signal to tile at address %d", address)
try:
await self._device.emulator.await_rpc(address, rpcs.RESET)
except TileNotFoundError:
pass
except:
self._logger.exception("Error sending reset signal to tile at address %d", address)
raise
self.initialized.set()
|
python
|
async def _reset_vector(self):
"""Initialize the controller's subsystems inside the emulation thread."""
# Send ourselves all of our config variable assignments
config_rpcs = self.config_database.stream_matching(8, self.name)
for rpc in config_rpcs:
await self._device.emulator.await_rpc(*rpc)
config_assignments = self.latch_config_variables()
self._logger.info("Latched config variables at reset for controller: %s", config_assignments)
for system in self._post_config_subsystems:
try:
system.clear_to_reset(config_assignments)
await asyncio.wait_for(system.initialize(), timeout=2.0)
except:
self._logger.exception("Error initializing %s", system)
raise
self._logger.info("Finished clearing controller to reset condition")
# Now reset all of the tiles
for address, _ in self._device.iter_tiles(include_controller=False):
self._logger.info("Sending reset signal to tile at address %d", address)
try:
await self._device.emulator.await_rpc(address, rpcs.RESET)
except TileNotFoundError:
pass
except:
self._logger.exception("Error sending reset signal to tile at address %d", address)
raise
self.initialized.set()
|
[
"async",
"def",
"_reset_vector",
"(",
"self",
")",
":",
"# Send ourselves all of our config variable assignments",
"config_rpcs",
"=",
"self",
".",
"config_database",
".",
"stream_matching",
"(",
"8",
",",
"self",
".",
"name",
")",
"for",
"rpc",
"in",
"config_rpcs",
":",
"await",
"self",
".",
"_device",
".",
"emulator",
".",
"await_rpc",
"(",
"*",
"rpc",
")",
"config_assignments",
"=",
"self",
".",
"latch_config_variables",
"(",
")",
"self",
".",
"_logger",
".",
"info",
"(",
"\"Latched config variables at reset for controller: %s\"",
",",
"config_assignments",
")",
"for",
"system",
"in",
"self",
".",
"_post_config_subsystems",
":",
"try",
":",
"system",
".",
"clear_to_reset",
"(",
"config_assignments",
")",
"await",
"asyncio",
".",
"wait_for",
"(",
"system",
".",
"initialize",
"(",
")",
",",
"timeout",
"=",
"2.0",
")",
"except",
":",
"self",
".",
"_logger",
".",
"exception",
"(",
"\"Error initializing %s\"",
",",
"system",
")",
"raise",
"self",
".",
"_logger",
".",
"info",
"(",
"\"Finished clearing controller to reset condition\"",
")",
"# Now reset all of the tiles",
"for",
"address",
",",
"_",
"in",
"self",
".",
"_device",
".",
"iter_tiles",
"(",
"include_controller",
"=",
"False",
")",
":",
"self",
".",
"_logger",
".",
"info",
"(",
"\"Sending reset signal to tile at address %d\"",
",",
"address",
")",
"try",
":",
"await",
"self",
".",
"_device",
".",
"emulator",
".",
"await_rpc",
"(",
"address",
",",
"rpcs",
".",
"RESET",
")",
"except",
"TileNotFoundError",
":",
"pass",
"except",
":",
"self",
".",
"_logger",
".",
"exception",
"(",
"\"Error sending reset signal to tile at address %d\"",
",",
"address",
")",
"raise",
"self",
".",
"initialized",
".",
"set",
"(",
")"
] |
Initialize the controller's subsystems inside the emulation thread.
|
[
"Initialize",
"the",
"controller",
"s",
"subsystems",
"inside",
"the",
"emulation",
"thread",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/reference/reference_controller.py#L111-L144
|
train
|
iotile/coretools
|
iotileemulate/iotile/emulate/reference/reference_controller.py
|
ReferenceController.hardware_version
|
def hardware_version(self):
"""Get a hardware identification string."""
hardware_string = self.hardware_string
if not isinstance(hardware_string, bytes):
hardware_string = self.hardware_string.encode('utf-8')
if len(hardware_string) > 10:
self._logger.warn("Truncating hardware string that was longer than 10 bytes: %s", self.hardware_string)
if len(hardware_string) < 10:
hardware_string += b'\0'*(10 - len(hardware_string))
return [hardware_string]
|
python
|
def hardware_version(self):
"""Get a hardware identification string."""
hardware_string = self.hardware_string
if not isinstance(hardware_string, bytes):
hardware_string = self.hardware_string.encode('utf-8')
if len(hardware_string) > 10:
self._logger.warn("Truncating hardware string that was longer than 10 bytes: %s", self.hardware_string)
if len(hardware_string) < 10:
hardware_string += b'\0'*(10 - len(hardware_string))
return [hardware_string]
|
[
"def",
"hardware_version",
"(",
"self",
")",
":",
"hardware_string",
"=",
"self",
".",
"hardware_string",
"if",
"not",
"isinstance",
"(",
"hardware_string",
",",
"bytes",
")",
":",
"hardware_string",
"=",
"self",
".",
"hardware_string",
".",
"encode",
"(",
"'utf-8'",
")",
"if",
"len",
"(",
"hardware_string",
")",
">",
"10",
":",
"self",
".",
"_logger",
".",
"warn",
"(",
"\"Truncating hardware string that was longer than 10 bytes: %s\"",
",",
"self",
".",
"hardware_string",
")",
"if",
"len",
"(",
"hardware_string",
")",
"<",
"10",
":",
"hardware_string",
"+=",
"b'\\0'",
"*",
"(",
"10",
"-",
"len",
"(",
"hardware_string",
")",
")",
"return",
"[",
"hardware_string",
"]"
] |
Get a hardware identification string.
|
[
"Get",
"a",
"hardware",
"identification",
"string",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/reference/reference_controller.py#L199-L213
|
train
|
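A sketch of the fixed-width handling documented above: the hardware string is encoded to bytes and null-padded out to exactly 10 bytes; the original only logs a warning when the string is longer than that. The sample string is illustrative, not the real controller's value.
def pad_hardware_string(hardware_string):
    if not isinstance(hardware_string, bytes):
        hardware_string = hardware_string.encode('utf-8')
    if len(hardware_string) < 10:
        hardware_string += b'\0' * (10 - len(hardware_string))
    return hardware_string
assert pad_hardware_string('refcn1') == b'refcn1\0\0\0\0'
assert len(pad_hardware_string(b'0123456789')) == 10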
iotile/coretools
|
iotileemulate/iotile/emulate/reference/reference_controller.py
|
ReferenceController.controller_info
|
def controller_info(self):
"""Get the controller UUID, app tag and os tag."""
return [self._device.iotile_id, _pack_version(*self.os_info), _pack_version(*self.app_info)]
|
python
|
def controller_info(self):
"""Get the controller UUID, app tag and os tag."""
return [self._device.iotile_id, _pack_version(*self.os_info), _pack_version(*self.app_info)]
|
[
"def",
"controller_info",
"(",
"self",
")",
":",
"return",
"[",
"self",
".",
"_device",
".",
"iotile_id",
",",
"_pack_version",
"(",
"*",
"self",
".",
"os_info",
")",
",",
"_pack_version",
"(",
"*",
"self",
".",
"app_info",
")",
"]"
] |
Get the controller UUID, app tag and os tag.
|
[
"Get",
"the",
"controller",
"UUID",
"app",
"tag",
"and",
"os",
"tag",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/reference/reference_controller.py#L216-L219
|
train
|
iotile/coretools
|
iotileemulate/iotile/emulate/reference/reference_controller.py
|
ReferenceController.load_sgf
|
def load_sgf(self, sgf_data):
"""Load, persist a sensor_graph file.
The data passed in `sgf_data` can either be a path or the already
loaded sgf lines as a string. It is determined to be sgf lines if
there is a '\n' character in the data, otherwise it is interpreted as
a path.
Note that this scenario just loads the sensor_graph directly into the
persisted sensor_graph inside the device. You will still need to
reset the device for the sensor_graph to be enabled and run.
Args:
sgf_data (str): Either the path to an sgf file or its contents
as a string.
"""
if '\n' not in sgf_data:
with open(sgf_data, "r") as infile:
sgf_data = infile.read()
model = DeviceModel()
parser = SensorGraphFileParser()
parser.parse_file(data=sgf_data)
parser.compile(model)
opt = SensorGraphOptimizer()
opt.optimize(parser.sensor_graph, model=model)
sensor_graph = parser.sensor_graph
self._logger.info("Loading sensor_graph with %d nodes, %d streamers and %d configs",
len(sensor_graph.nodes), len(sensor_graph.streamers), len(sensor_graph.config_database))
# Directly load the sensor_graph into our persisted storage
self.sensor_graph.persisted_nodes = sensor_graph.dump_nodes()
self.sensor_graph.persisted_streamers = sensor_graph.dump_streamers()
self.sensor_graph.persisted_constants = []
for stream, value in sorted(sensor_graph.constant_database.items(), key=lambda x: x[0].encode()):
reading = IOTileReading(stream.encode(), 0, value)
self.sensor_graph.persisted_constants.append((stream, reading))
self.sensor_graph.persisted_exists = True
# Clear all config variables and load in those from this sgf file
self.config_database.clear()
for slot in sorted(sensor_graph.config_database, key=lambda x: x.encode()):
for conf_var, (conf_type, conf_val) in sorted(sensor_graph.config_database[slot].items()):
self.config_database.add_direct(slot, conf_var, conf_type, conf_val)
# If we have an app tag and version set program them in
app_tag = sensor_graph.metadata_database.get('app_tag')
app_version = sensor_graph.metadata_database.get('app_version')
if app_tag is not None:
if app_version is None:
app_version = "0.0"
self.app_info = (app_tag, app_version)
|
python
|
def load_sgf(self, sgf_data):
"""Load, persist a sensor_graph file.
The data passed in `sgf_data` can either be a path or the already
loaded sgf lines as a string. It is determined to be sgf lines if
there is a '\n' character in the data, otherwise it is interpreted as
a path.
Note that this scenario just loads the sensor_graph directly into the
persisted sensor_graph inside the device. You will still need to
reset the device for the sensor_graph to be enabled and run.
Args:
sgf_data (str): Either the path to an sgf file or its contents
as a string.
"""
if '\n' not in sgf_data:
with open(sgf_data, "r") as infile:
sgf_data = infile.read()
model = DeviceModel()
parser = SensorGraphFileParser()
parser.parse_file(data=sgf_data)
parser.compile(model)
opt = SensorGraphOptimizer()
opt.optimize(parser.sensor_graph, model=model)
sensor_graph = parser.sensor_graph
self._logger.info("Loading sensor_graph with %d nodes, %d streamers and %d configs",
len(sensor_graph.nodes), len(sensor_graph.streamers), len(sensor_graph.config_database))
# Directly load the sensor_graph into our persisted storage
self.sensor_graph.persisted_nodes = sensor_graph.dump_nodes()
self.sensor_graph.persisted_streamers = sensor_graph.dump_streamers()
self.sensor_graph.persisted_constants = []
for stream, value in sorted(sensor_graph.constant_database.items(), key=lambda x: x[0].encode()):
reading = IOTileReading(stream.encode(), 0, value)
self.sensor_graph.persisted_constants.append((stream, reading))
self.sensor_graph.persisted_exists = True
# Clear all config variables and load in those from this sgf file
self.config_database.clear()
for slot in sorted(sensor_graph.config_database, key=lambda x: x.encode()):
for conf_var, (conf_type, conf_val) in sorted(sensor_graph.config_database[slot].items()):
self.config_database.add_direct(slot, conf_var, conf_type, conf_val)
# If we have an app tag and version set program them in
app_tag = sensor_graph.metadata_database.get('app_tag')
app_version = sensor_graph.metadata_database.get('app_version')
if app_tag is not None:
if app_version is None:
app_version = "0.0"
self.app_info = (app_tag, app_version)
|
[
"def",
"load_sgf",
"(",
"self",
",",
"sgf_data",
")",
":",
"if",
"'\\n'",
"not",
"in",
"sgf_data",
":",
"with",
"open",
"(",
"sgf_data",
",",
"\"r\"",
")",
"as",
"infile",
":",
"sgf_data",
"=",
"infile",
".",
"read",
"(",
")",
"model",
"=",
"DeviceModel",
"(",
")",
"parser",
"=",
"SensorGraphFileParser",
"(",
")",
"parser",
".",
"parse_file",
"(",
"data",
"=",
"sgf_data",
")",
"parser",
".",
"compile",
"(",
"model",
")",
"opt",
"=",
"SensorGraphOptimizer",
"(",
")",
"opt",
".",
"optimize",
"(",
"parser",
".",
"sensor_graph",
",",
"model",
"=",
"model",
")",
"sensor_graph",
"=",
"parser",
".",
"sensor_graph",
"self",
".",
"_logger",
".",
"info",
"(",
"\"Loading sensor_graph with %d nodes, %d streamers and %d configs\"",
",",
"len",
"(",
"sensor_graph",
".",
"nodes",
")",
",",
"len",
"(",
"sensor_graph",
".",
"streamers",
")",
",",
"len",
"(",
"sensor_graph",
".",
"config_database",
")",
")",
"# Directly load the sensor_graph into our persisted storage",
"self",
".",
"sensor_graph",
".",
"persisted_nodes",
"=",
"sensor_graph",
".",
"dump_nodes",
"(",
")",
"self",
".",
"sensor_graph",
".",
"persisted_streamers",
"=",
"sensor_graph",
".",
"dump_streamers",
"(",
")",
"self",
".",
"sensor_graph",
".",
"persisted_constants",
"=",
"[",
"]",
"for",
"stream",
",",
"value",
"in",
"sorted",
"(",
"sensor_graph",
".",
"constant_database",
".",
"items",
"(",
")",
",",
"key",
"=",
"lambda",
"x",
":",
"x",
"[",
"0",
"]",
".",
"encode",
"(",
")",
")",
":",
"reading",
"=",
"IOTileReading",
"(",
"stream",
".",
"encode",
"(",
")",
",",
"0",
",",
"value",
")",
"self",
".",
"sensor_graph",
".",
"persisted_constants",
".",
"append",
"(",
"(",
"stream",
",",
"reading",
")",
")",
"self",
".",
"sensor_graph",
".",
"persisted_exists",
"=",
"True",
"# Clear all config variables and load in those from this sgf file",
"self",
".",
"config_database",
".",
"clear",
"(",
")",
"for",
"slot",
"in",
"sorted",
"(",
"sensor_graph",
".",
"config_database",
",",
"key",
"=",
"lambda",
"x",
":",
"x",
".",
"encode",
"(",
")",
")",
":",
"for",
"conf_var",
",",
"(",
"conf_type",
",",
"conf_val",
")",
"in",
"sorted",
"(",
"sensor_graph",
".",
"config_database",
"[",
"slot",
"]",
".",
"items",
"(",
")",
")",
":",
"self",
".",
"config_database",
".",
"add_direct",
"(",
"slot",
",",
"conf_var",
",",
"conf_type",
",",
"conf_val",
")",
"# If we have an app tag and version set program them in",
"app_tag",
"=",
"sensor_graph",
".",
"metadata_database",
".",
"get",
"(",
"'app_tag'",
")",
"app_version",
"=",
"sensor_graph",
".",
"metadata_database",
".",
"get",
"(",
"'app_version'",
")",
"if",
"app_tag",
"is",
"not",
"None",
":",
"if",
"app_version",
"is",
"None",
":",
"app_version",
"=",
"\"0.0\"",
"self",
".",
"app_info",
"=",
"(",
"app_tag",
",",
"app_version",
")"
] |
Load, persist a sensor_graph file.
The data passed in `sgf_data` can either be a path or the already
loaded sgf lines as a string. It is determined to be sgf lines if
there is a '\n' character in the data, otherwise it is interpreted as
a path.
Note that this scenario just loads the sensor_graph directly into the
persisted sensor_graph inside the device. You will still need to
reset the device for the sensor_graph to be enabled and run.
Args:
sgf_data (str): Either the path to an sgf file or its contents
as a string.
|
[
"Load",
"persist",
"a",
"sensor_graph",
"file",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/reference/reference_controller.py#L237-L297
|
train
|
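A tiny sketch of the path-versus-contents heuristic used above: any argument containing a newline is treated as sgf source text, anything else as a filename to read. The sample snippet string is illustrative only.
def looks_like_sgf_source(sgf_data):
    return '\n' in sgf_data
assert looks_like_sgf_source('every 10 minutes\n{\n}\n') is True   # inline sgf text
assert looks_like_sgf_source('device_graph.sgf') is False          # treated as a path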
iotile/coretools
|
iotilebuild/iotile/build/config/site_scons/cfileparser.py
|
ParsedCFile._parse_file
|
def _parse_file(self):
"""Preprocess and parse C file into an AST"""
# We need to set the CPU type to pull in the right register definitions
# only preprocess the file (-E) and get rid of gcc extensions that aren't
# supported in ISO C.
args = utilities.build_includes(self.arch.includes())
# args.append('-mcpu=%s' % self.arch.property('chip'))
args.append('-E')
args.append('-D__attribute__(x)=')
args.append('-D__extension__=')
self.ast = parse_file(self.filepath, use_cpp=True, cpp_path='arm-none-eabi-gcc', cpp_args=args)
|
python
|
def _parse_file(self):
"""Preprocess and parse C file into an AST"""
# We need to set the CPU type to pull in the right register definitions
# only preprocess the file (-E) and get rid of gcc extensions that aren't
# supported in ISO C.
args = utilities.build_includes(self.arch.includes())
# args.append('-mcpu=%s' % self.arch.property('chip'))
args.append('-E')
args.append('-D__attribute__(x)=')
args.append('-D__extension__=')
self.ast = parse_file(self.filepath, use_cpp=True, cpp_path='arm-none-eabi-gcc', cpp_args=args)
|
[
"def",
"_parse_file",
"(",
"self",
")",
":",
"# We need to set the CPU type to pull in the right register definitions",
"# only preprocess the file (-E) and get rid of gcc extensions that aren't",
"# supported in ISO C.",
"args",
"=",
"utilities",
".",
"build_includes",
"(",
"self",
".",
"arch",
".",
"includes",
"(",
")",
")",
"# args.append('-mcpu=%s' % self.arch.property('chip'))",
"args",
".",
"append",
"(",
"'-E'",
")",
"args",
".",
"append",
"(",
"'-D__attribute__(x)='",
")",
"args",
".",
"append",
"(",
"'-D__extension__='",
")",
"self",
".",
"ast",
"=",
"parse_file",
"(",
"self",
".",
"filepath",
",",
"use_cpp",
"=",
"True",
",",
"cpp_path",
"=",
"'arm-none-eabi-gcc'",
",",
"cpp_args",
"=",
"args",
")"
] |
Preprocess and parse C file into an AST
|
[
"Preprocess",
"and",
"parse",
"C",
"file",
"into",
"an",
"AST"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/site_scons/cfileparser.py#L35-L47
|
train
|
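A self-contained sketch of the same preprocessing approach using pycparser directly. The source file name, the include directory and the availability of arm-none-eabi-gcc on PATH are all assumptions made for illustration:

from pycparser import parse_file

# Preprocess only (-E) and strip GCC extensions that are not ISO C,
# mirroring the arguments built in _parse_file above.
cpp_args = ['-E', '-D__attribute__(x)=', '-D__extension__=', '-Iinclude']
ast = parse_file('demo.c', use_cpp=True,
                 cpp_path='arm-none-eabi-gcc', cpp_args=cpp_args)
ast.show()  # dump the parsed translation unit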
iotile/coretools
|
iotilecore/iotile/core/hw/transport/adapterstream.py
|
_clear_queue
|
def _clear_queue(to_clear):
"""Clear all items from a queue safely."""
while not to_clear.empty():
try:
to_clear.get(False)
to_clear.task_done()
except queue.Empty:
continue
|
python
|
def _clear_queue(to_clear):
"""Clear all items from a queue safely."""
while not to_clear.empty():
try:
to_clear.get(False)
to_clear.task_done()
except queue.Empty:
continue
|
[
"def",
"_clear_queue",
"(",
"to_clear",
")",
":",
"while",
"not",
"to_clear",
".",
"empty",
"(",
")",
":",
"try",
":",
"to_clear",
".",
"get",
"(",
"False",
")",
"to_clear",
".",
"task_done",
"(",
")",
"except",
"queue",
".",
"Empty",
":",
"continue"
] |
Clear all items from a queue safely.
|
[
"Clear",
"all",
"items",
"from",
"a",
"queue",
"safely",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/transport/adapterstream.py#L562-L570
|
train
|
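A small usage sketch of the helper above, assuming it is called from within the same module; the queue contents are placeholders:

import queue

pending = queue.Queue()
for item in ('a', 'b', 'c'):
    pending.put(item)

_clear_queue(pending)   # drains every item and marks it done
assert pending.empty()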
iotile/coretools
|
iotilecore/iotile/core/hw/transport/adapterstream.py
|
_RecordedRPC.finish
|
def finish(self, status, response):
"""Mark the end of a recorded RPC."""
self.response = binascii.hexlify(response).decode('utf-8')
self.status = status
self.runtime = monotonic() - self._start_time
|
python
|
def finish(self, status, response):
"""Mark the end of a recorded RPC."""
self.response = binascii.hexlify(response).decode('utf-8')
self.status = status
self.runtime = monotonic() - self._start_time
|
[
"def",
"finish",
"(",
"self",
",",
"status",
",",
"response",
")",
":",
"self",
".",
"response",
"=",
"binascii",
".",
"hexlify",
"(",
"response",
")",
".",
"decode",
"(",
"'utf-8'",
")",
"self",
".",
"status",
"=",
"status",
"self",
".",
"runtime",
"=",
"monotonic",
"(",
")",
"-",
"self",
".",
"_start_time"
] |
Mark the end of a recorded RPC.
|
[
"Mark",
"the",
"end",
"of",
"a",
"recorded",
"RPC",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/transport/adapterstream.py#L43-L48
|
train
|
iotile/coretools
|
iotilecore/iotile/core/hw/transport/adapterstream.py
|
_RecordedRPC.serialize
|
def serialize(self):
"""Convert this recorded RPC into a string."""
return "{},{: <26},{:2d},{:#06x},{:#04x},{:5.0f},{: <40},{: <40},{}".\
format(self.connection, self.start_stamp.isoformat(), self.address, self.rpc_id,
self.status, self.runtime * 1000, self.call, self.response, self.error)
|
python
|
def serialize(self):
"""Convert this recorded RPC into a string."""
return "{},{: <26},{:2d},{:#06x},{:#04x},{:5.0f},{: <40},{: <40},{}".\
format(self.connection, self.start_stamp.isoformat(), self.address, self.rpc_id,
self.status, self.runtime * 1000, self.call, self.response, self.error)
|
[
"def",
"serialize",
"(",
"self",
")",
":",
"return",
"\"{},{: <26},{:2d},{:#06x},{:#04x},{:5.0f},{: <40},{: <40},{}\"",
".",
"format",
"(",
"self",
".",
"connection",
",",
"self",
".",
"start_stamp",
".",
"isoformat",
"(",
")",
",",
"self",
".",
"address",
",",
"self",
".",
"rpc_id",
",",
"self",
".",
"status",
",",
"self",
".",
"runtime",
"*",
"1000",
",",
"self",
".",
"call",
",",
"self",
".",
"response",
",",
"self",
".",
"error",
")"
] |
Convert this recorded RPC into a string.
|
[
"Convert",
"this",
"recorded",
"RPC",
"into",
"a",
"string",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/transport/adapterstream.py#L50-L55
|
train
|
iotile/coretools
|
iotilecore/iotile/core/hw/transport/adapterstream.py
|
AdapterStream.scan
|
def scan(self, wait=None):
"""Return the devices that have been found for this device adapter.
If the adapter indicates that we need to explicitly tell it to probe for devices, probe now.
By default we return the list of seen devices immediately, however there are two cases where
we will sleep here for a fixed period of time to let devices show up in our result list:
- If we are probing then we wait for 'minimum_scan_time'
- If we are told an explicit wait time that overrides everything and we wait that long
"""
min_scan = self.adapter.get_config('minimum_scan_time', 0.0)
probe_required = self.adapter.get_config('probe_required', False)
# Figure out how long and if we need to wait before returning our scan results
wait_time = None
elapsed = monotonic() - self._start_time
if elapsed < min_scan:
wait_time = min_scan - elapsed
# If we need to probe for devices rather than letting them just bubble up, start the probe
# and then use our min_scan_time to wait for them to arrive via the normal _on_scan event
if probe_required:
self._loop.run_coroutine(self.adapter.probe())
wait_time = min_scan
# If an explicit wait is specified that overrides everything else
if wait is not None:
wait_time = wait
if wait_time is not None:
sleep(wait_time)
to_remove = set()
now = monotonic()
with self._scan_lock:
for name, value in self._scanned_devices.items():
if value['expiration_time'] < now:
to_remove.add(name)
for name in to_remove:
del self._scanned_devices[name]
devices = sorted(self._scanned_devices.values(), key=lambda x: x['uuid'])
return devices
|
python
|
def scan(self, wait=None):
"""Return the devices that have been found for this device adapter.
If the adapter indicates that we need to explicitly tell it to probe for devices, probe now.
By default we return the list of seen devices immediately, however there are two cases where
we will sleep here for a fixed period of time to let devices show up in our result list:
- If we are probing then we wait for 'minimum_scan_time'
- If we are told an explicit wait time that overrides everything and we wait that long
"""
min_scan = self.adapter.get_config('minimum_scan_time', 0.0)
probe_required = self.adapter.get_config('probe_required', False)
# Figure out how long and if we need to wait before returning our scan results
wait_time = None
elapsed = monotonic() - self._start_time
if elapsed < min_scan:
wait_time = min_scan - elapsed
# If we need to probe for devices rather than letting them just bubble up, start the probe
# and then use our min_scan_time to wait for them to arrive via the normal _on_scan event
if probe_required:
self._loop.run_coroutine(self.adapter.probe())
wait_time = min_scan
# If an explicit wait is specified that overrides everything else
if wait is not None:
wait_time = wait
if wait_time is not None:
sleep(wait_time)
to_remove = set()
now = monotonic()
with self._scan_lock:
for name, value in self._scanned_devices.items():
if value['expiration_time'] < now:
to_remove.add(name)
for name in to_remove:
del self._scanned_devices[name]
devices = sorted(self._scanned_devices.values(), key=lambda x: x['uuid'])
return devices
|
[
"def",
"scan",
"(",
"self",
",",
"wait",
"=",
"None",
")",
":",
"min_scan",
"=",
"self",
".",
"adapter",
".",
"get_config",
"(",
"'minimum_scan_time'",
",",
"0.0",
")",
"probe_required",
"=",
"self",
".",
"adapter",
".",
"get_config",
"(",
"'probe_required'",
",",
"False",
")",
"# Figure out how long and if we need to wait before returning our scan results",
"wait_time",
"=",
"None",
"elapsed",
"=",
"monotonic",
"(",
")",
"-",
"self",
".",
"_start_time",
"if",
"elapsed",
"<",
"min_scan",
":",
"wait_time",
"=",
"min_scan",
"-",
"elapsed",
"# If we need to probe for devices rather than letting them just bubble up, start the probe",
"# and then use our min_scan_time to wait for them to arrive via the normal _on_scan event",
"if",
"probe_required",
":",
"self",
".",
"_loop",
".",
"run_coroutine",
"(",
"self",
".",
"adapter",
".",
"probe",
"(",
")",
")",
"wait_time",
"=",
"min_scan",
"# If an explicit wait is specified that overrides everything else",
"if",
"wait",
"is",
"not",
"None",
":",
"wait_time",
"=",
"wait",
"if",
"wait_time",
"is",
"not",
"None",
":",
"sleep",
"(",
"wait_time",
")",
"to_remove",
"=",
"set",
"(",
")",
"now",
"=",
"monotonic",
"(",
")",
"with",
"self",
".",
"_scan_lock",
":",
"for",
"name",
",",
"value",
"in",
"self",
".",
"_scanned_devices",
".",
"items",
"(",
")",
":",
"if",
"value",
"[",
"'expiration_time'",
"]",
"<",
"now",
":",
"to_remove",
".",
"add",
"(",
"name",
")",
"for",
"name",
"in",
"to_remove",
":",
"del",
"self",
".",
"_scanned_devices",
"[",
"name",
"]",
"devices",
"=",
"sorted",
"(",
"self",
".",
"_scanned_devices",
".",
"values",
"(",
")",
",",
"key",
"=",
"lambda",
"x",
":",
"x",
"[",
"'uuid'",
"]",
")",
"return",
"devices"
] |
Return the devices that have been found for this device adapter.
If the adapter indicates that we need to explicitly tell it to probe for devices, probe now.
By default we return the list of seen devices immediately, however there are two cases where
we will sleep here for a fixed period of time to let devices show up in our result list:
- If we are probing then we wait for 'minimum_scan_time'
- If we are told an explicit wait time that overrides everything and we wait that long
|
[
"Return",
"the",
"devices",
"that",
"have",
"been",
"found",
"for",
"this",
"device",
"adapter",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/transport/adapterstream.py#L113-L160
|
train
|
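A hedged usage sketch of scan(). `stream` is assumed to be an AdapterStream constructed elsewhere around a real device adapter, and the wait value is illustrative:

# Force a 2 second scan window, then list what was seen.
for device in stream.scan(wait=2.0):
    print(device['uuid'], device['connection_string'])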
iotile/coretools
|
iotilecore/iotile/core/hw/transport/adapterstream.py
|
AdapterStream.connect
|
def connect(self, uuid_value, wait=None):
"""Connect to a specific device by its uuid
Attempt to connect to a device that we have previously scanned using its UUID.
If wait is not None, then it is used in the same way as scan(wait) to override
default wait times with an explicit value.
Args:
uuid_value (int): The unique id of the device that we would like to connect to.
wait (float): Optional amount of time to force the device adapter to wait before
attempting to connect.
"""
if self.connected:
raise HardwareError("Cannot connect when we are already connected")
if uuid_value not in self._scanned_devices:
self.scan(wait=wait)
with self._scan_lock:
if uuid_value not in self._scanned_devices:
raise HardwareError("Could not find device to connect to by UUID", uuid=uuid_value)
connstring = self._scanned_devices[uuid_value]['connection_string']
self.connect_direct(connstring)
|
python
|
def connect(self, uuid_value, wait=None):
"""Connect to a specific device by its uuid
Attempt to connect to a device that we have previously scanned using its UUID.
If wait is not None, then it is used in the same way as scan(wait) to override
default wait times with an explicit value.
Args:
uuid_value (int): The unique id of the device that we would like to connect to.
wait (float): Optional amount of time to force the device adapter to wait before
attempting to connect.
"""
if self.connected:
raise HardwareError("Cannot connect when we are already connected")
if uuid_value not in self._scanned_devices:
self.scan(wait=wait)
with self._scan_lock:
if uuid_value not in self._scanned_devices:
raise HardwareError("Could not find device to connect to by UUID", uuid=uuid_value)
connstring = self._scanned_devices[uuid_value]['connection_string']
self.connect_direct(connstring)
|
[
"def",
"connect",
"(",
"self",
",",
"uuid_value",
",",
"wait",
"=",
"None",
")",
":",
"if",
"self",
".",
"connected",
":",
"raise",
"HardwareError",
"(",
"\"Cannot connect when we are already connected\"",
")",
"if",
"uuid_value",
"not",
"in",
"self",
".",
"_scanned_devices",
":",
"self",
".",
"scan",
"(",
"wait",
"=",
"wait",
")",
"with",
"self",
".",
"_scan_lock",
":",
"if",
"uuid_value",
"not",
"in",
"self",
".",
"_scanned_devices",
":",
"raise",
"HardwareError",
"(",
"\"Could not find device to connect to by UUID\"",
",",
"uuid",
"=",
"uuid_value",
")",
"connstring",
"=",
"self",
".",
"_scanned_devices",
"[",
"uuid_value",
"]",
"[",
"'connection_string'",
"]",
"self",
".",
"connect_direct",
"(",
"connstring",
")"
] |
Connect to a specific device by its uuid
Attempt to connect to a device that we have previously scanned using its UUID.
If wait is not None, then it is used in the same way as scan(wait) to override
default wait times with an explicit value.
Args:
uuid_value (int): The unique id of the device that we would like to connect to.
wait (float): Optional amount of time to force the device adapter to wait before
attempting to connect.
|
[
"Connect",
"to",
"a",
"specific",
"device",
"by",
"its",
"uuid"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/transport/adapterstream.py#L162-L187
|
train
|
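A minimal flow combining scan() and connect(); again `stream` is an assumed, already constructed AdapterStream:

devices = stream.scan(wait=2.0)
if devices:
    # Raises HardwareError if the device cannot be found after scanning.
    stream.connect(devices[0]['uuid'])
    # ... interact with the device ...
    stream.disconnect()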
iotile/coretools
|
iotilecore/iotile/core/hw/transport/adapterstream.py
|
AdapterStream.connect_direct
|
def connect_direct(self, connection_string, no_rpc=False, force=False):
"""Directly connect to a device using its stream specific connection string.
Normally, all connections to a device include opening the RPC
interface to send RPCs. However, there are certain, very specific,
circumstances when you would not want to or be able to open the RPC
interface (such as when you are using the debug interface on a bare
MCU that has not been programmed yet). In those cases you can pass
no_rpc=True to not attempt to open the RPC interface. If you do not
open the RPC interface at connection time, there is no public
interface to open it later, so you must disconnect and reconnect to
the device in order to open the interface.
Args:
connection_string (str): The connection string that identifies the desired device.
no_rpc (bool): Do not open the RPC interface on the device (default=False).
force (bool): Whether to force another connection even if we think we are currently
connected. This is for internal use and not designed to be set externally.
"""
if not force and self.connected:
raise HardwareError("Cannot connect when we are already connected to '%s'" % self.connection_string)
self._loop.run_coroutine(self.adapter.connect(0, connection_string))
try:
if no_rpc:
self._logger.info("Not opening RPC interface on device %s", self.connection_string)
else:
self._loop.run_coroutine(self.adapter.open_interface(0, 'rpc'))
except HardwareError as exc:
self._logger.exception("Error opening RPC interface on device %s", connection_string)
self._loop.run_coroutine(self.adapter.disconnect(0))
raise exc
except Exception as exc:
self._logger.exception("Error opening RPC interface on device %s", connection_string)
self._loop.run_coroutine(self.adapter.disconnect(0))
raise HardwareError("Could not open RPC interface on device due to an exception: %s" % str(exc)) from exc
self.connected = True
self.connection_string = connection_string
self.connection_interrupted = False
|
python
|
def connect_direct(self, connection_string, no_rpc=False, force=False):
"""Directly connect to a device using its stream specific connection string.
Normally, all connections to a device include opening the RPC
interface to send RPCs. However, there are certain, very specific,
circumstances when you would not want to or be able to open the RPC
interface (such as when you are using the debug interface on a bare
MCU that has not been programmed yet). In those cases you can pass
no_rpc=True to not attempt to open the RPC interface. If you do not
open the RPC interface at connection time, there is no public
interface to open it later, so you must disconnect and reconnect to
the device in order to open the interface.
Args:
connection_string (str): The connection string that identifies the desired device.
no_rpc (bool): Do not open the RPC interface on the device (default=False).
force (bool): Whether to force another connection even if we think we are currently
connected. This is for internal use and not designed to be set externally.
"""
if not force and self.connected:
raise HardwareError("Cannot connect when we are already connected to '%s'" % self.connection_string)
self._loop.run_coroutine(self.adapter.connect(0, connection_string))
try:
if no_rpc:
self._logger.info("Not opening RPC interface on device %s", self.connection_string)
else:
self._loop.run_coroutine(self.adapter.open_interface(0, 'rpc'))
except HardwareError as exc:
self._logger.exception("Error opening RPC interface on device %s", connection_string)
self._loop.run_coroutine(self.adapter.disconnect(0))
raise exc
except Exception as exc:
self._logger.exception("Error opening RPC interface on device %s", connection_string)
self._loop.run_coroutine(self.adapter.disconnect(0))
raise HardwareError("Could not open RPC interface on device due to an exception: %s" % str(exc)) from exc
self.connected = True
self.connection_string = connection_string
self.connection_interrupted = False
|
[
"def",
"connect_direct",
"(",
"self",
",",
"connection_string",
",",
"no_rpc",
"=",
"False",
",",
"force",
"=",
"False",
")",
":",
"if",
"not",
"force",
"and",
"self",
".",
"connected",
":",
"raise",
"HardwareError",
"(",
"\"Cannot connect when we are already connected to '%s'\"",
"%",
"self",
".",
"connection_string",
")",
"self",
".",
"_loop",
".",
"run_coroutine",
"(",
"self",
".",
"adapter",
".",
"connect",
"(",
"0",
",",
"connection_string",
")",
")",
"try",
":",
"if",
"no_rpc",
":",
"self",
".",
"_logger",
".",
"info",
"(",
"\"Not opening RPC interface on device %s\"",
",",
"self",
".",
"connection_string",
")",
"else",
":",
"self",
".",
"_loop",
".",
"run_coroutine",
"(",
"self",
".",
"adapter",
".",
"open_interface",
"(",
"0",
",",
"'rpc'",
")",
")",
"except",
"HardwareError",
"as",
"exc",
":",
"self",
".",
"_logger",
".",
"exception",
"(",
"\"Error opening RPC interface on device %s\"",
",",
"connection_string",
")",
"self",
".",
"_loop",
".",
"run_coroutine",
"(",
"self",
".",
"adapter",
".",
"disconnect",
"(",
"0",
")",
")",
"raise",
"exc",
"except",
"Exception",
"as",
"exc",
":",
"self",
".",
"_logger",
".",
"exception",
"(",
"\"Error opening RPC interface on device %s\"",
",",
"connection_string",
")",
"self",
".",
"_loop",
".",
"run_coroutine",
"(",
"self",
".",
"adapter",
".",
"disconnect",
"(",
"0",
")",
")",
"raise",
"HardwareError",
"(",
"\"Could not open RPC interface on device due to an exception: %s\"",
"%",
"str",
"(",
"exc",
")",
")",
"from",
"exc",
"self",
".",
"connected",
"=",
"True",
"self",
".",
"connection_string",
"=",
"connection_string",
"self",
".",
"connection_interrupted",
"=",
"False"
] |
Directly connect to a device using its stream specific connection string.
Normally, all connections to a device include opening the RPC
interface to send RPCs. However, there are certain, very specific,
circumstances when you would not want to or be able to open the RPC
interface (such as when you are using the debug interface on a bare
MCU that has not been programmed yet). In those cases you can pass
no_rpc=True to not attempt to open the RPC interface. If you do not
open the RPC interface at connection time, there is no public
interface to open it later, so you must disconnect and reconnect to
the device in order to open the interface.
Args:
connection_string (str): The connection string that identifies the desired device.
no_rpc (bool): Do not open the RPC interface on the device (default=False).
force (bool): Whether to force another connection even if we think we are currently
connected. This is for internal use and not designed to be set externally.
|
[
"Directly",
"connect",
"to",
"a",
"device",
"using",
"its",
"stream",
"specific",
"connection",
"string",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/transport/adapterstream.py#L189-L230
|
train
|
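A sketch of the debug-only connection path the docstring describes. The connection string format is adapter specific, so the value below is purely illustrative:

# Skip the RPC interface, e.g. for a bare MCU that only exposes debug.
stream.connect_direct('device/0x1234', no_rpc=True)
stream.enable_debug()
# To use RPCs later you must disconnect and reconnect without no_rpc.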
iotile/coretools
|
iotilecore/iotile/core/hw/transport/adapterstream.py
|
AdapterStream.disconnect
|
def disconnect(self):
"""Disconnect from the device that we are currently connected to."""
if not self.connected:
raise HardwareError("Cannot disconnect when we are not connected")
# Close the streaming and tracing interfaces when we disconnect
self._reports = None
self._traces = None
self._loop.run_coroutine(self.adapter.disconnect(0))
self.connected = False
self.connection_interrupted = False
self.connection_string = None
|
python
|
def disconnect(self):
"""Disconnect from the device that we are currently connected to."""
if not self.connected:
raise HardwareError("Cannot disconnect when we are not connected")
# Close the streaming and tracing interfaces when we disconnect
self._reports = None
self._traces = None
self._loop.run_coroutine(self.adapter.disconnect(0))
self.connected = False
self.connection_interrupted = False
self.connection_string = None
|
[
"def",
"disconnect",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"connected",
":",
"raise",
"HardwareError",
"(",
"\"Cannot disconnect when we are not connected\"",
")",
"# Close the streaming and tracing interfaces when we disconnect",
"self",
".",
"_reports",
"=",
"None",
"self",
".",
"_traces",
"=",
"None",
"self",
".",
"_loop",
".",
"run_coroutine",
"(",
"self",
".",
"adapter",
".",
"disconnect",
"(",
"0",
")",
")",
"self",
".",
"connected",
"=",
"False",
"self",
".",
"connection_interrupted",
"=",
"False",
"self",
".",
"connection_string",
"=",
"None"
] |
Disconnect from the device that we are currently connected to.
|
[
"Disconnect",
"from",
"the",
"device",
"that",
"we",
"are",
"currently",
"connected",
"to",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/transport/adapterstream.py#L232-L245
|
train
|
iotile/coretools
|
iotilecore/iotile/core/hw/transport/adapterstream.py
|
AdapterStream._try_reconnect
|
def _try_reconnect(self):
"""Try to recover an interrupted connection."""
try:
if self.connection_interrupted:
self.connect_direct(self.connection_string, force=True)
self.connection_interrupted = False
self.connected = True
# Reenable streaming interface if that was open before as well
if self._reports is not None:
self._loop.run_coroutine(self.adapter.open_interface(0, 'streaming'))
# Reenable tracing interface if that was open before as well
if self._traces is not None:
self._loop.run_coroutine(self.adapter.open_interface(0, 'tracing'))
except HardwareError as exc:
self._logger.exception("Error reconnecting to device after an unexpected disconnect")
raise HardwareError("Device disconnected unexpectedly and we could not reconnect", reconnect_error=exc) from exc
|
python
|
def _try_reconnect(self):
"""Try to recover an interrupted connection."""
try:
if self.connection_interrupted:
self.connect_direct(self.connection_string, force=True)
self.connection_interrupted = False
self.connected = True
# Reenable streaming interface if that was open before as well
if self._reports is not None:
self._loop.run_coroutine(self.adapter.open_interface(0, 'streaming'))
# Reenable tracing interface if that was open before as well
if self._traces is not None:
self._loop.run_coroutine(self.adapter.open_interface(0, 'tracing'))
except HardwareError as exc:
self._logger.exception("Error reconnecting to device after an unexpected disconnect")
raise HardwareError("Device disconnected unexpectedly and we could not reconnect", reconnect_error=exc) from exc
|
[
"def",
"_try_reconnect",
"(",
"self",
")",
":",
"try",
":",
"if",
"self",
".",
"connection_interrupted",
":",
"self",
".",
"connect_direct",
"(",
"self",
".",
"connection_string",
",",
"force",
"=",
"True",
")",
"self",
".",
"connection_interrupted",
"=",
"False",
"self",
".",
"connected",
"=",
"True",
"# Reenable streaming interface if that was open before as well",
"if",
"self",
".",
"_reports",
"is",
"not",
"None",
":",
"self",
".",
"_loop",
".",
"run_coroutine",
"(",
"self",
".",
"adapter",
".",
"open_interface",
"(",
"0",
",",
"'streaming'",
")",
")",
"# Reenable tracing interface if that was open before as well",
"if",
"self",
".",
"_traces",
"is",
"not",
"None",
":",
"self",
".",
"_loop",
".",
"run_coroutine",
"(",
"self",
".",
"adapter",
".",
"open_interface",
"(",
"0",
",",
"'tracing'",
")",
")",
"except",
"HardwareError",
"as",
"exc",
":",
"self",
".",
"_logger",
".",
"exception",
"(",
"\"Error reconnecting to device after an unexpected disconnect\"",
")",
"raise",
"HardwareError",
"(",
"\"Device disconnected unexpectedly and we could not reconnect\"",
",",
"reconnect_error",
"=",
"exc",
")",
"from",
"exc"
] |
Try to recover an interrupted connection.
|
[
"Try",
"to",
"recover",
"an",
"interrupted",
"connection",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/transport/adapterstream.py#L247-L265
|
train
|
iotile/coretools
|
iotilecore/iotile/core/hw/transport/adapterstream.py
|
AdapterStream.send_rpc
|
def send_rpc(self, address, rpc_id, call_payload, timeout=3.0):
"""Send an rpc to our connected device.
The device must already be connected and the rpc interface open. This
method will synchronously send an RPC and wait for the response. Any
RPC errors will be raised as exceptions and if there were no errors, the
RPC's response payload will be returned as a binary bytearray.
See :meth:`AbstractDeviceAdapter.send_rpc` for documentation of the possible
exceptions that can be raised here.
Args:
address (int): The tile address containing the RPC
rpc_id (int): The ID of the RPC that we wish to call.
call_payload (bytes): The payload containing encoded arguments for the
RPC.
timeout (float): The maximum number of seconds to wait for the RPC to
finish. Defaults to 3s.
Returns:
bytearray: The RPC's response payload.
"""
if not self.connected:
raise HardwareError("Cannot send an RPC if we are not in a connected state")
if timeout is None:
timeout = 3.0
status = -1
payload = b''
recording = None
if self.connection_interrupted:
self._try_reconnect()
if self._record is not None:
recording = _RecordedRPC(self.connection_string, address, rpc_id, call_payload)
recording.start()
try:
payload = self._loop.run_coroutine(self.adapter.send_rpc(0, address, rpc_id, call_payload, timeout))
status, payload = pack_rpc_response(payload, None)
except VALID_RPC_EXCEPTIONS as exc:
status, payload = pack_rpc_response(payload, exc)
if self._record is not None:
recording.finish(status, payload)
self._recording.append(recording)
if self.connection_interrupted:
self._try_reconnect()
return unpack_rpc_response(status, payload, rpc_id, address)
|
python
|
def send_rpc(self, address, rpc_id, call_payload, timeout=3.0):
"""Send an rpc to our connected device.
The device must already be connected and the rpc interface open. This
method will synchronously send an RPC and wait for the response. Any
RPC errors will be raised as exceptions and if there were no errors, the
RPC's response payload will be returned as a binary bytearray.
See :meth:`AbstractDeviceAdapter.send_rpc` for documentation of the possible
exceptions that can be raised here.
Args:
address (int): The tile address containing the RPC
rpc_id (int): The ID of the RPC that we wish to call.
call_payload (bytes): The payload containing encoded arguments for the
RPC.
timeout (float): The maximum number of seconds to wait for the RPC to
finish. Defaults to 3s.
Returns:
bytearray: The RPC's response payload.
"""
if not self.connected:
raise HardwareError("Cannot send an RPC if we are not in a connected state")
if timeout is None:
timeout = 3.0
status = -1
payload = b''
recording = None
if self.connection_interrupted:
self._try_reconnect()
if self._record is not None:
recording = _RecordedRPC(self.connection_string, address, rpc_id, call_payload)
recording.start()
try:
payload = self._loop.run_coroutine(self.adapter.send_rpc(0, address, rpc_id, call_payload, timeout))
status, payload = pack_rpc_response(payload, None)
except VALID_RPC_EXCEPTIONS as exc:
status, payload = pack_rpc_response(payload, exc)
if self._record is not None:
recording.finish(status, payload)
self._recording.append(recording)
if self.connection_interrupted:
self._try_reconnect()
return unpack_rpc_response(status, payload, rpc_id, address)
|
[
"def",
"send_rpc",
"(",
"self",
",",
"address",
",",
"rpc_id",
",",
"call_payload",
",",
"timeout",
"=",
"3.0",
")",
":",
"if",
"not",
"self",
".",
"connected",
":",
"raise",
"HardwareError",
"(",
"\"Cannot send an RPC if we are not in a connected state\"",
")",
"if",
"timeout",
"is",
"None",
":",
"timeout",
"=",
"3.0",
"status",
"=",
"-",
"1",
"payload",
"=",
"b''",
"recording",
"=",
"None",
"if",
"self",
".",
"connection_interrupted",
":",
"self",
".",
"_try_reconnect",
"(",
")",
"if",
"self",
".",
"_record",
"is",
"not",
"None",
":",
"recording",
"=",
"_RecordedRPC",
"(",
"self",
".",
"connection_string",
",",
"address",
",",
"rpc_id",
",",
"call_payload",
")",
"recording",
".",
"start",
"(",
")",
"try",
":",
"payload",
"=",
"self",
".",
"_loop",
".",
"run_coroutine",
"(",
"self",
".",
"adapter",
".",
"send_rpc",
"(",
"0",
",",
"address",
",",
"rpc_id",
",",
"call_payload",
",",
"timeout",
")",
")",
"status",
",",
"payload",
"=",
"pack_rpc_response",
"(",
"payload",
",",
"None",
")",
"except",
"VALID_RPC_EXCEPTIONS",
"as",
"exc",
":",
"status",
",",
"payload",
"=",
"pack_rpc_response",
"(",
"payload",
",",
"exc",
")",
"if",
"self",
".",
"_record",
"is",
"not",
"None",
":",
"recording",
".",
"finish",
"(",
"status",
",",
"payload",
")",
"self",
".",
"_recording",
".",
"append",
"(",
"recording",
")",
"if",
"self",
".",
"connection_interrupted",
":",
"self",
".",
"_try_reconnect",
"(",
")",
"return",
"unpack_rpc_response",
"(",
"status",
",",
"payload",
",",
"rpc_id",
",",
"address",
")"
] |
Send an rpc to our connected device.
The device must already be connected and the rpc interface open. This
method will synchronously send an RPC and wait for the response. Any
RPC errors will be raised as exceptions and if there were no errors, the
RPC's response payload will be returned as a binary bytearray.
See :meth:`AbstractDeviceAdapter.send_rpc` for documentation of the possible
exceptions that can be raised here.
Args:
address (int): The tile address containing the RPC
rpc_id (int): The ID of the RPC that we wish to call.
call_payload (bytes): The payload containing encoded arguments for the
RPC.
timeout (float): The maximum number of seconds to wait for the RPC to
finish. Defaults to 3s.
Returns:
bytearray: The RPC's response payload.
|
[
"Send",
"an",
"rpc",
"to",
"our",
"connected",
"device",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/transport/adapterstream.py#L267-L320
|
train
|
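A hedged example of calling an RPC through the stream. The tile address, RPC id and payload layout are illustrative and do not correspond to any specific tile firmware:

import struct

call_payload = struct.pack("<H", 1)                # one 16-bit argument
response = stream.send_rpc(8, 0x0004, call_payload, timeout=5.0)
value, = struct.unpack("<L", response[:4])         # decode a 32-bit result
print(value)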
iotile/coretools
|
iotilecore/iotile/core/hw/transport/adapterstream.py
|
AdapterStream.send_highspeed
|
def send_highspeed(self, data, progress_callback):
"""Send a script to a device at highspeed, reporting progress.
This method takes a binary blob and downloads it to the device as fast
as possible, calling the passed progress_callback periodically with
updates on how far it has gotten.
Args:
data (bytes): The binary blob that should be sent to the device at highspeed.
progress_callback (callable): A function that will be called periodically to
report progress. The signature must be callback(done_count, total_count)
where done_count and total_count will be passed as integers.
"""
if not self.connected:
raise HardwareError("Cannot send a script if we are not in a connected state")
if isinstance(data, str) and not isinstance(data, bytes):
raise ArgumentError("You must send bytes or bytearray to _send_highspeed", type=type(data))
if not isinstance(data, bytes):
data = bytes(data)
try:
self._on_progress = progress_callback
self._loop.run_coroutine(self.adapter.send_script(0, data))
finally:
self._on_progress = None
|
python
|
def send_highspeed(self, data, progress_callback):
"""Send a script to a device at highspeed, reporting progress.
This method takes a binary blob and downloads it to the device as fast
as possible, calling the passed progress_callback periodically with
updates on how far it has gotten.
Args:
data (bytes): The binary blob that should be sent to the device at highspeed.
progress_callback (callable): A function that will be called periodically to
report progress. The signature must be callback(done_count, total_count)
where done_count and total_count will be passed as integers.
"""
if not self.connected:
raise HardwareError("Cannot send a script if we are not in a connected state")
if isinstance(data, str) and not isinstance(data, bytes):
raise ArgumentError("You must send bytes or bytearray to _send_highspeed", type=type(data))
if not isinstance(data, bytes):
data = bytes(data)
try:
self._on_progress = progress_callback
self._loop.run_coroutine(self.adapter.send_script(0, data))
finally:
self._on_progress = None
|
[
"def",
"send_highspeed",
"(",
"self",
",",
"data",
",",
"progress_callback",
")",
":",
"if",
"not",
"self",
".",
"connected",
":",
"raise",
"HardwareError",
"(",
"\"Cannot send a script if we are not in a connected state\"",
")",
"if",
"isinstance",
"(",
"data",
",",
"str",
")",
"and",
"not",
"isinstance",
"(",
"data",
",",
"bytes",
")",
":",
"raise",
"ArgumentError",
"(",
"\"You must send bytes or bytearray to _send_highspeed\"",
",",
"type",
"=",
"type",
"(",
"data",
")",
")",
"if",
"not",
"isinstance",
"(",
"data",
",",
"bytes",
")",
":",
"data",
"=",
"bytes",
"(",
"data",
")",
"try",
":",
"self",
".",
"_on_progress",
"=",
"progress_callback",
"self",
".",
"_loop",
".",
"run_coroutine",
"(",
"self",
".",
"adapter",
".",
"send_script",
"(",
"0",
",",
"data",
")",
")",
"finally",
":",
"self",
".",
"_on_progress",
"=",
"None"
] |
Send a script to a device at highspeed, reporting progress.
This method takes a binary blob and downloads it to the device as fast
as possible, calling the passed progress_callback periodically with
updates on how far it has gotten.
Args:
data (bytes): The binary blob that should be sent to the device at highspeed.
progress_callback (callable): A function that will be called periodically to
report progress. The signature must be callback(done_count, total_count)
where done_count and total_count will be passed as integers.
|
[
"Send",
"a",
"script",
"to",
"a",
"device",
"at",
"highspeed",
"reporting",
"progress",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/transport/adapterstream.py#L322-L349
|
train
|
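A sketch of the documented progress callback signature; the script payload is a placeholder rather than a real update script:

def report_progress(done_count, total_count):
    print("%d/%d chunks sent" % (done_count, total_count))

script = bytes(1024)          # placeholder binary blob of zeros
stream.send_highspeed(script, report_progress)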
iotile/coretools
|
iotilecore/iotile/core/hw/transport/adapterstream.py
|
AdapterStream.enable_streaming
|
def enable_streaming(self):
"""Open the streaming interface and accumute reports in a queue.
This method is safe to call multiple times in a single device
connection. There is no way to check if the streaming interface is
opened or to close it once it is opened (apart from disconnecting from
the device).
The first time this method is called, it will open the streaming
interface and return a queue that will be filled asynchronously with
reports as they are received. Subsequent calls will just empty the
queue and return the same queue without interacting with the device at
all.
Returns:
queue.Queue: A queue that will be filled with reports from the device.
"""
if not self.connected:
raise HardwareError("Cannot enable streaming if we are not in a connected state")
if self._reports is not None:
_clear_queue(self._reports)
return self._reports
self._reports = queue.Queue()
self._loop.run_coroutine(self.adapter.open_interface(0, 'streaming'))
return self._reports
|
python
|
def enable_streaming(self):
"""Open the streaming interface and accumute reports in a queue.
This method is safe to call multiple times in a single device
connection. There is no way to check if the streaming interface is
opened or to close it once it is opened (apart from disconnecting from
the device).
The first time this method is called, it will open the streaming
interface and return a queue that will be filled asynchronously with
reports as they are received. Subsequent calls will just empty the
queue and return the same queue without interacting with the device at
all.
Returns:
queue.Queue: A queue that will be filled with reports from the device.
"""
if not self.connected:
raise HardwareError("Cannot enable streaming if we are not in a connected state")
if self._reports is not None:
_clear_queue(self._reports)
return self._reports
self._reports = queue.Queue()
self._loop.run_coroutine(self.adapter.open_interface(0, 'streaming'))
return self._reports
|
[
"def",
"enable_streaming",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"connected",
":",
"raise",
"HardwareError",
"(",
"\"Cannot enable streaming if we are not in a connected state\"",
")",
"if",
"self",
".",
"_reports",
"is",
"not",
"None",
":",
"_clear_queue",
"(",
"self",
".",
"_reports",
")",
"return",
"self",
".",
"_reports",
"self",
".",
"_reports",
"=",
"queue",
".",
"Queue",
"(",
")",
"self",
".",
"_loop",
".",
"run_coroutine",
"(",
"self",
".",
"adapter",
".",
"open_interface",
"(",
"0",
",",
"'streaming'",
")",
")",
"return",
"self",
".",
"_reports"
] |
Open the streaming interface and accumulate reports in a queue.
This method is safe to call multiple times in a single device
connection. There is no way to check if the streaming interface is
opened or to close it once it is opened (apart from disconnecting from
the device).
The first time this method is called, it will open the streaming
interface and return a queue that will be filled asynchronously with
reports as they are received. Subsequent calls will just empty the
queue and return the same queue without interacting with the device at
all.
Returns:
queue.Queue: A queue that will be filled with reports from the device.
|
[
"Open",
"the",
"streaming",
"interface",
"and",
"accumute",
"reports",
"in",
"a",
"queue",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/transport/adapterstream.py#L351-L379
|
train
|
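A usage sketch of the report queue returned above; the 10 second timeout is arbitrary and `stream` is an assumed AdapterStream instance:

import queue

reports = stream.enable_streaming()
try:
    report = reports.get(timeout=10.0)
    print("received report:", report)
except queue.Empty:
    print("no report arrived within 10 seconds")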
iotile/coretools
|
iotilecore/iotile/core/hw/transport/adapterstream.py
|
AdapterStream.enable_tracing
|
def enable_tracing(self):
"""Open the tracing interface and accumulate traces in a queue.
This method is safe to call multiple times in a single device
connection. There is no way to check if the tracing interface is
opened or to close it once it is opened (apart from disconnecting from
the device).
The first time this method is called, it will open the tracing
interface and return a queue that will be filled asynchronously with
reports as they are received. Subsequent calls will just empty the
queue and return the same queue without interacting with the device at
all.
Returns:
queue.Queue: A queue that will be filled with trace data from the device.
The trace data will be in disjoint bytes objects in the queue
"""
if not self.connected:
raise HardwareError("Cannot enable tracing if we are not in a connected state")
if self._traces is not None:
_clear_queue(self._traces)
return self._traces
self._traces = queue.Queue()
self._loop.run_coroutine(self.adapter.open_interface(0, 'tracing'))
return self._traces
|
python
|
def enable_tracing(self):
"""Open the tracing interface and accumulate traces in a queue.
This method is safe to call multiple times in a single device
connection. There is no way to check if the tracing interface is
opened or to close it once it is opened (apart from disconnecting from
the device).
The first time this method is called, it will open the tracing
interface and return a queue that will be filled asynchronously with
reports as they are received. Subsequent calls will just empty the
queue and return the same queue without interacting with the device at
all.
Returns:
queue.Queue: A queue that will be filled with trace data from the device.
The trace data will be in disjoint bytes objects in the queue
"""
if not self.connected:
raise HardwareError("Cannot enable tracing if we are not in a connected state")
if self._traces is not None:
_clear_queue(self._traces)
return self._traces
self._traces = queue.Queue()
self._loop.run_coroutine(self.adapter.open_interface(0, 'tracing'))
return self._traces
|
[
"def",
"enable_tracing",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"connected",
":",
"raise",
"HardwareError",
"(",
"\"Cannot enable tracing if we are not in a connected state\"",
")",
"if",
"self",
".",
"_traces",
"is",
"not",
"None",
":",
"_clear_queue",
"(",
"self",
".",
"_traces",
")",
"return",
"self",
".",
"_traces",
"self",
".",
"_traces",
"=",
"queue",
".",
"Queue",
"(",
")",
"self",
".",
"_loop",
".",
"run_coroutine",
"(",
"self",
".",
"adapter",
".",
"open_interface",
"(",
"0",
",",
"'tracing'",
")",
")",
"return",
"self",
".",
"_traces"
] |
Open the tracing interface and accumulate traces in a queue.
This method is safe to call multiple times in a single device
connection. There is no way to check if the tracing interface is
opened or to close it once it is opened (apart from disconnecting from
the device).
The first time this method is called, it will open the tracing
interface and return a queue that will be filled asynchronously with
reports as they are received. Subsequent calls will just empty the
queue and return the same queue without interacting with the device at
all.
Returns:
queue.Queue: A queue that will be filled with trace data from the device.
The trace data will be in disjoint bytes objects in the queue
|
[
"Open",
"the",
"tracing",
"interface",
"and",
"accumulate",
"traces",
"in",
"a",
"queue",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/transport/adapterstream.py#L381-L411
|
train
|
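Trace data arrives as disjoint bytes objects, so a consumer typically concatenates them. A minimal sketch, assuming tracing was already started on the device by some earlier RPC:

traces = stream.enable_tracing()
collected = bytearray()
while not traces.empty():
    collected += traces.get()
print("%d trace bytes so far" % len(collected))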
iotile/coretools
|
iotilecore/iotile/core/hw/transport/adapterstream.py
|
AdapterStream.enable_broadcasting
|
def enable_broadcasting(self):
"""Begin accumulating broadcast reports received from all devices.
This method will allocate a queue to receive broadcast reports that
will be filled asynchronously as broadcast reports are received.
Returns:
queue.Queue: A queue that will be filled with broadcast reports.
"""
if self._broadcast_reports is not None:
_clear_queue(self._broadcast_reports)
return self._broadcast_reports
self._broadcast_reports = queue.Queue()
return self._broadcast_reports
|
python
|
def enable_broadcasting(self):
"""Begin accumulating broadcast reports received from all devices.
This method will allocate a queue to receive broadcast reports that
will be filled asynchronously as broadcast reports are received.
Returns:
queue.Queue: A queue that will be filled with broadcast reports.
"""
if self._broadcast_reports is not None:
_clear_queue(self._broadcast_reports)
return self._broadcast_reports
self._broadcast_reports = queue.Queue()
return self._broadcast_reports
|
[
"def",
"enable_broadcasting",
"(",
"self",
")",
":",
"if",
"self",
".",
"_broadcast_reports",
"is",
"not",
"None",
":",
"_clear_queue",
"(",
"self",
".",
"_broadcast_reports",
")",
"return",
"self",
".",
"_broadcast_reports",
"self",
".",
"_broadcast_reports",
"=",
"queue",
".",
"Queue",
"(",
")",
"return",
"self",
".",
"_broadcast_reports"
] |
Begin accumulating broadcast reports received from all devices.
This method will allocate a queue to receive broadcast reports that
will be filled asynchronously as broadcast reports are received.
Returns:
queue.Queue: A queue that will be filled with broadcast reports.
|
[
"Begin",
"accumulating",
"broadcast",
"reports",
"received",
"from",
"all",
"devices",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/transport/adapterstream.py#L413-L428
|
train
|
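Broadcast reports do not require a connection, so the returned queue can simply be polled. A brief sketch with an arbitrary timeout:

import queue

broadcasts = stream.enable_broadcasting()
try:
    report = broadcasts.get(timeout=5.0)
    print("broadcast report received:", report)
except queue.Empty:
    pass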
iotile/coretools
|
iotilecore/iotile/core/hw/transport/adapterstream.py
|
AdapterStream.enable_debug
|
def enable_debug(self):
"""Open the debug interface on the connected device."""
if not self.connected:
raise HardwareError("Cannot enable debug if we are not in a connected state")
self._loop.run_coroutine(self.adapter.open_interface(0, 'debug'))
|
python
|
def enable_debug(self):
"""Open the debug interface on the connected device."""
if not self.connected:
raise HardwareError("Cannot enable debug if we are not in a connected state")
self._loop.run_coroutine(self.adapter.open_interface(0, 'debug'))
|
[
"def",
"enable_debug",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"connected",
":",
"raise",
"HardwareError",
"(",
"\"Cannot enable debug if we are not in a connected state\"",
")",
"self",
".",
"_loop",
".",
"run_coroutine",
"(",
"self",
".",
"adapter",
".",
"open_interface",
"(",
"0",
",",
"'debug'",
")",
")"
] |
Open the debug interface on the connected device.
|
[
"Open",
"the",
"debug",
"interface",
"on",
"the",
"connected",
"device",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/transport/adapterstream.py#L430-L436
|
train
|
iotile/coretools
|
iotilecore/iotile/core/hw/transport/adapterstream.py
|
AdapterStream.debug_command
|
def debug_command(self, cmd, args=None, progress_callback=None):
"""Send a debug command to the connected device.
This generic method will send a named debug command with the given
arguments to the connected device. Debug commands are typically used
for things like forcible reflashing of firmware or other, debug-style,
operations. Not all transport protocols support debug commands and
the supported operations vary depending on the transport protocol.
Args:
cmd (str): The name of the debug command to send.
args (dict): Any arguments required by the given debug command
progress_callback (callable): A function that will be called periodically to
report progress. The signature must be callback(done_count, total_count)
where done_count and total_count will be passed as integers.
Returns:
object: The return value of the debug command, if there is one.
"""
if args is None:
args = {}
try:
self._on_progress = progress_callback
return self._loop.run_coroutine(self.adapter.debug(0, cmd, args))
finally:
self._on_progress = None
|
python
|
def debug_command(self, cmd, args=None, progress_callback=None):
"""Send a debug command to the connected device.
This generic method will send a named debug command with the given
arguments to the connected device. Debug commands are typically used
for things like forcible reflashing of firmware or other, debug-style,
operations. Not all transport protocols support debug commands and
the supported operations vary depending on the transport protocol.
Args:
cmd (str): The name of the debug command to send.
args (dict): Any arguments required by the given debug command
progress_callback (callable): A function that will be called periodically to
report progress. The signature must be callback(done_count, total_count)
where done_count and total_count will be passed as integers.
Returns:
object: The return value of the debug command, if there is one.
"""
if args is None:
args = {}
try:
self._on_progress = progress_callback
return self._loop.run_coroutine(self.adapter.debug(0, cmd, args))
finally:
self._on_progress = None
|
[
"def",
"debug_command",
"(",
"self",
",",
"cmd",
",",
"args",
"=",
"None",
",",
"progress_callback",
"=",
"None",
")",
":",
"if",
"args",
"is",
"None",
":",
"args",
"=",
"{",
"}",
"try",
":",
"self",
".",
"_on_progress",
"=",
"progress_callback",
"return",
"self",
".",
"_loop",
".",
"run_coroutine",
"(",
"self",
".",
"adapter",
".",
"debug",
"(",
"0",
",",
"cmd",
",",
"args",
")",
")",
"finally",
":",
"self",
".",
"_on_progress",
"=",
"None"
] |
Send a debug command to the connected device.
This generic method will send a named debug command with the given
arguments to the connected device. Debug commands are typically used
for things like forcible reflashing of firmware or other, debug-style,
operations. Not all transport protocols support debug commands and
the supported operations vary depending on the transport protocol.
Args:
cmd (str): The name of the debug command to send.
args (dict): Any arguments required by the given debug command
progress_callback (callable): A function that will be called periodically to
report progress. The signature must be callback(done_count, total_count)
where done_count and total_count will be passed as integers.
Returns:
object: The return value of the debug command, if there is one.
|
[
"Send",
"a",
"debug",
"command",
"to",
"the",
"connected",
"device",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/transport/adapterstream.py#L438-L465
|
train
|
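Debug commands are transport specific, so the command name and arguments below are hypothetical and only show the calling convention:

def report_progress(done_count, total_count):
    print("%d/%d" % (done_count, total_count))

stream.enable_debug()
result = stream.debug_command('dump_ram',                     # hypothetical command name
                              {'address': 0x0, 'length': 64},  # hypothetical arguments
                              progress_callback=report_progress)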
iotile/coretools
|
iotilecore/iotile/core/hw/transport/adapterstream.py
|
AdapterStream.close
|
def close(self):
"""Close this adapter stream.
This method may only be called once in the lifetime of an
AdapterStream and it will shutdown the underlying device adapter,
disconnect all devices and stop all background activity.
If this stream is configured to save a record of all RPCs, the RPCs
will be logged to a file at this point.
"""
try:
self._loop.run_coroutine(self.adapter.stop())
finally:
self._save_recording()
|
python
|
def close(self):
"""Close this adapter stream.
This method may only be called once in the lifetime of an
AdapterStream and it will shutdown the underlying device adapter,
disconnect all devices and stop all background activity.
If this stream is configured to save a record of all RPCs, the RPCs
will be logged to a file at this point.
"""
try:
self._loop.run_coroutine(self.adapter.stop())
finally:
self._save_recording()
|
[
"def",
"close",
"(",
"self",
")",
":",
"try",
":",
"self",
".",
"_loop",
".",
"run_coroutine",
"(",
"self",
".",
"adapter",
".",
"stop",
"(",
")",
")",
"finally",
":",
"self",
".",
"_save_recording",
"(",
")"
] |
Close this adapter stream.
This method may only be called once in the lifetime of an
AdapterStream and it will shutdown the underlying device adapter,
disconnect all devices and stop all background activity.
If this stream is configured to save a record of all RPCs, the RPCs
will be logged to a file at this point.
|
[
"Close",
"this",
"adapter",
"stream",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/transport/adapterstream.py#L467-L481
|
train
|
iotile/coretools
|
iotilecore/iotile/core/hw/transport/adapterstream.py
|
AdapterStream._on_scan
|
def _on_scan(self, info):
"""Callback called when a new device is discovered on this CMDStream
Args:
info (dict): Information about the scanned device
"""
device_id = info['uuid']
expiration_time = info.get('validity_period', 60)
infocopy = deepcopy(info)
infocopy['expiration_time'] = monotonic() + expiration_time
with self._scan_lock:
self._scanned_devices[device_id] = infocopy
|
python
|
def _on_scan(self, info):
"""Callback called when a new device is discovered on this CMDStream
Args:
info (dict): Information about the scanned device
"""
device_id = info['uuid']
expiration_time = info.get('validity_period', 60)
infocopy = deepcopy(info)
infocopy['expiration_time'] = monotonic() + expiration_time
with self._scan_lock:
self._scanned_devices[device_id] = infocopy
|
[
"def",
"_on_scan",
"(",
"self",
",",
"info",
")",
":",
"device_id",
"=",
"info",
"[",
"'uuid'",
"]",
"expiration_time",
"=",
"info",
".",
"get",
"(",
"'validity_period'",
",",
"60",
")",
"infocopy",
"=",
"deepcopy",
"(",
"info",
")",
"infocopy",
"[",
"'expiration_time'",
"]",
"=",
"monotonic",
"(",
")",
"+",
"expiration_time",
"with",
"self",
".",
"_scan_lock",
":",
"self",
".",
"_scanned_devices",
"[",
"device_id",
"]",
"=",
"infocopy"
] |
Callback called when a new device is discovered on this CMDStream
Args:
info (dict): Information about the scanned device
|
[
"Callback",
"called",
"when",
"a",
"new",
"device",
"is",
"discovered",
"on",
"this",
"CMDStream"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/transport/adapterstream.py#L518-L532
|
train
|
iotile/coretools
|
iotilecore/iotile/core/hw/transport/adapterstream.py
|
AdapterStream._on_disconnect
|
def _on_disconnect(self):
"""Callback when a device is disconnected unexpectedly.
Args:
adapter_id (int): An ID for the adapter that was connected to the device
connection_id (int): An ID for the connection that has become disconnected
"""
self._logger.info("Connection to device %s was interrupted", self.connection_string)
self.connection_interrupted = True
|
python
|
def _on_disconnect(self):
"""Callback when a device is disconnected unexpectedly.
Args:
adapter_id (int): An ID for the adapter that was connected to the device
connection_id (int): An ID for the connection that has become disconnected
"""
self._logger.info("Connection to device %s was interrupted", self.connection_string)
self.connection_interrupted = True
|
[
"def",
"_on_disconnect",
"(",
"self",
")",
":",
"self",
".",
"_logger",
".",
"info",
"(",
"\"Connection to device %s was interrupted\"",
",",
"self",
".",
"connection_string",
")",
"self",
".",
"connection_interrupted",
"=",
"True"
] |
Callback when a device is disconnected unexpectedly.
Args:
adapter_id (int): An ID for the adapter that was connected to the device
connection_id (int): An ID for the connection that has become disconnected
|
[
"Callback",
"when",
"a",
"device",
"is",
"disconnected",
"unexpectedly",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/transport/adapterstream.py#L534-L543
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/midl.py
|
midl_emitter
|
def midl_emitter(target, source, env):
"""Produces a list of outputs from the MIDL compiler"""
base, _ = SCons.Util.splitext(str(target[0]))
tlb = target[0]
incl = base + '.h'
interface = base + '_i.c'
targets = [tlb, incl, interface]
midlcom = env['MIDLCOM']
if midlcom.find('/proxy') != -1:
proxy = base + '_p.c'
targets.append(proxy)
if midlcom.find('/dlldata') != -1:
dlldata = base + '_data.c'
targets.append(dlldata)
return (targets, source)
|
python
|
def midl_emitter(target, source, env):
"""Produces a list of outputs from the MIDL compiler"""
base, _ = SCons.Util.splitext(str(target[0]))
tlb = target[0]
incl = base + '.h'
interface = base + '_i.c'
targets = [tlb, incl, interface]
midlcom = env['MIDLCOM']
if midlcom.find('/proxy') != -1:
proxy = base + '_p.c'
targets.append(proxy)
if midlcom.find('/dlldata') != -1:
dlldata = base + '_data.c'
targets.append(dlldata)
return (targets, source)
|
[
"def",
"midl_emitter",
"(",
"target",
",",
"source",
",",
"env",
")",
":",
"base",
",",
"_",
"=",
"SCons",
".",
"Util",
".",
"splitext",
"(",
"str",
"(",
"target",
"[",
"0",
"]",
")",
")",
"tlb",
"=",
"target",
"[",
"0",
"]",
"incl",
"=",
"base",
"+",
"'.h'",
"interface",
"=",
"base",
"+",
"'_i.c'",
"targets",
"=",
"[",
"tlb",
",",
"incl",
",",
"interface",
"]",
"midlcom",
"=",
"env",
"[",
"'MIDLCOM'",
"]",
"if",
"midlcom",
".",
"find",
"(",
"'/proxy'",
")",
"!=",
"-",
"1",
":",
"proxy",
"=",
"base",
"+",
"'_p.c'",
"targets",
".",
"append",
"(",
"proxy",
")",
"if",
"midlcom",
".",
"find",
"(",
"'/dlldata'",
")",
"!=",
"-",
"1",
":",
"dlldata",
"=",
"base",
"+",
"'_data.c'",
"targets",
".",
"append",
"(",
"dlldata",
")",
"return",
"(",
"targets",
",",
"source",
")"
] |
Produces a list of outputs from the MIDL compiler
|
[
"Produces",
"a",
"list",
"of",
"outputs",
"from",
"the",
"MIDL",
"compiler"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/midl.py#L44-L61
|
train
|
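The emitter derives its extra targets purely from the base name of the first target and the flags present in MIDLCOM. A standalone illustration of that naming, independent of SCons (the 'foo' names are made up):

import os

base, _ = os.path.splitext('foo.tlb')
# With both /proxy and /dlldata present in MIDLCOM the emitter reports:
targets = ['foo.tlb', base + '.h', base + '_i.c', base + '_p.c', base + '_data.c']
print(targets)   # ['foo.tlb', 'foo.h', 'foo_i.c', 'foo_p.c', 'foo_data.c']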
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/midl.py
|
generate
|
def generate(env):
"""Add Builders and construction variables for midl to an Environment."""
env['MIDL'] = 'MIDL.EXE'
env['MIDLFLAGS'] = SCons.Util.CLVar('/nologo')
env['MIDLCOM'] = '$MIDL $MIDLFLAGS /tlb ${TARGETS[0]} /h ${TARGETS[1]} /iid ${TARGETS[2]} /proxy ${TARGETS[3]} /dlldata ${TARGETS[4]} $SOURCE 2> NUL'
env['BUILDERS']['TypeLibrary'] = midl_builder
|
python
|
def generate(env):
"""Add Builders and construction variables for midl to an Environment."""
env['MIDL'] = 'MIDL.EXE'
env['MIDLFLAGS'] = SCons.Util.CLVar('/nologo')
env['MIDLCOM'] = '$MIDL $MIDLFLAGS /tlb ${TARGETS[0]} /h ${TARGETS[1]} /iid ${TARGETS[2]} /proxy ${TARGETS[3]} /dlldata ${TARGETS[4]} $SOURCE 2> NUL'
env['BUILDERS']['TypeLibrary'] = midl_builder
|
[
"def",
"generate",
"(",
"env",
")",
":",
"env",
"[",
"'MIDL'",
"]",
"=",
"'MIDL.EXE'",
"env",
"[",
"'MIDLFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"'/nologo'",
")",
"env",
"[",
"'MIDLCOM'",
"]",
"=",
"'$MIDL $MIDLFLAGS /tlb ${TARGETS[0]} /h ${TARGETS[1]} /iid ${TARGETS[2]} /proxy ${TARGETS[3]} /dlldata ${TARGETS[4]} $SOURCE 2> NUL'",
"env",
"[",
"'BUILDERS'",
"]",
"[",
"'TypeLibrary'",
"]",
"=",
"midl_builder"
] |
Add Builders and construction variables for midl to an Environment.
|
[
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"midl",
"to",
"an",
"Environment",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/midl.py#L73-L79
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConsign.py
|
Base.set_entry
|
def set_entry(self, filename, obj):
"""
Set the entry.
"""
self.entries[filename] = obj
self.dirty = True
|
python
|
def set_entry(self, filename, obj):
"""
Set the entry.
"""
self.entries[filename] = obj
self.dirty = True
|
[
"def",
"set_entry",
"(",
"self",
",",
"filename",
",",
"obj",
")",
":",
"self",
".",
"entries",
"[",
"filename",
"]",
"=",
"obj",
"self",
".",
"dirty",
"=",
"True"
] |
Set the entry.
|
[
"Set",
"the",
"entry",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConsign.py#L187-L192
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConsign.py
|
DirFile.write
|
def write(self, sync=1):
"""
Write the .sconsign file to disk.
Try to write to a temporary file first, and rename it if we
succeed. If we can't write to the temporary file, it's
probably because the directory isn't writable (and if so,
how did we build anything in this directory, anyway?), so
try to write directly to the .sconsign file as a backup.
If we can't rename, try to copy the temporary contents back
to the .sconsign file. Either way, always try to remove
the temporary file at the end.
"""
if not self.dirty:
return
self.merge()
temp = os.path.join(self.dir.get_internal_path(), '.scons%d' % os.getpid())
try:
file = open(temp, 'wb')
fname = temp
except IOError:
try:
file = open(self.sconsign, 'wb')
fname = self.sconsign
except IOError:
return
for key, entry in self.entries.items():
entry.convert_to_sconsign()
pickle.dump(self.entries, file, PICKLE_PROTOCOL)
file.close()
if fname != self.sconsign:
try:
mode = os.stat(self.sconsign)[0]
os.chmod(self.sconsign, 0o666)
os.unlink(self.sconsign)
except (IOError, OSError):
# Try to carry on in the face of either OSError
# (things like permission issues) or IOError (disk
# or network issues). If there's a really dangerous
# issue, it should get re-raised by the calls below.
pass
try:
os.rename(fname, self.sconsign)
except OSError:
# An OSError failure to rename may indicate something
# like the directory has no write permission, but
# the .sconsign file itself might still be writable,
# so try writing on top of it directly. An IOError
# here, or in any of the following calls, would get
# raised, indicating something like a potentially
# serious disk or network issue.
open(self.sconsign, 'wb').write(open(fname, 'rb').read())
os.chmod(self.sconsign, mode)
try:
os.unlink(temp)
except (IOError, OSError):
pass
|
python
|
def write(self, sync=1):
"""
Write the .sconsign file to disk.
Try to write to a temporary file first, and rename it if we
succeed. If we can't write to the temporary file, it's
probably because the directory isn't writable (and if so,
how did we build anything in this directory, anyway?), so
try to write directly to the .sconsign file as a backup.
If we can't rename, try to copy the temporary contents back
to the .sconsign file. Either way, always try to remove
the temporary file at the end.
"""
if not self.dirty:
return
self.merge()
temp = os.path.join(self.dir.get_internal_path(), '.scons%d' % os.getpid())
try:
file = open(temp, 'wb')
fname = temp
except IOError:
try:
file = open(self.sconsign, 'wb')
fname = self.sconsign
except IOError:
return
for key, entry in self.entries.items():
entry.convert_to_sconsign()
pickle.dump(self.entries, file, PICKLE_PROTOCOL)
file.close()
if fname != self.sconsign:
try:
mode = os.stat(self.sconsign)[0]
os.chmod(self.sconsign, 0o666)
os.unlink(self.sconsign)
except (IOError, OSError):
# Try to carry on in the face of either OSError
# (things like permission issues) or IOError (disk
# or network issues). If there's a really dangerous
# issue, it should get re-raised by the calls below.
pass
try:
os.rename(fname, self.sconsign)
except OSError:
# An OSError failure to rename may indicate something
# like the directory has no write permission, but
# the .sconsign file itself might still be writable,
# so try writing on top of it directly. An IOError
# here, or in any of the following calls, would get
# raised, indicating something like a potentially
# serious disk or network issue.
open(self.sconsign, 'wb').write(open(fname, 'rb').read())
os.chmod(self.sconsign, mode)
try:
os.unlink(temp)
except (IOError, OSError):
pass
|
[
"def",
"write",
"(",
"self",
",",
"sync",
"=",
"1",
")",
":",
"if",
"not",
"self",
".",
"dirty",
":",
"return",
"self",
".",
"merge",
"(",
")",
"temp",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"dir",
".",
"get_internal_path",
"(",
")",
",",
"'.scons%d'",
"%",
"os",
".",
"getpid",
"(",
")",
")",
"try",
":",
"file",
"=",
"open",
"(",
"temp",
",",
"'wb'",
")",
"fname",
"=",
"temp",
"except",
"IOError",
":",
"try",
":",
"file",
"=",
"open",
"(",
"self",
".",
"sconsign",
",",
"'wb'",
")",
"fname",
"=",
"self",
".",
"sconsign",
"except",
"IOError",
":",
"return",
"for",
"key",
",",
"entry",
"in",
"self",
".",
"entries",
".",
"items",
"(",
")",
":",
"entry",
".",
"convert_to_sconsign",
"(",
")",
"pickle",
".",
"dump",
"(",
"self",
".",
"entries",
",",
"file",
",",
"PICKLE_PROTOCOL",
")",
"file",
".",
"close",
"(",
")",
"if",
"fname",
"!=",
"self",
".",
"sconsign",
":",
"try",
":",
"mode",
"=",
"os",
".",
"stat",
"(",
"self",
".",
"sconsign",
")",
"[",
"0",
"]",
"os",
".",
"chmod",
"(",
"self",
".",
"sconsign",
",",
"0o666",
")",
"os",
".",
"unlink",
"(",
"self",
".",
"sconsign",
")",
"except",
"(",
"IOError",
",",
"OSError",
")",
":",
"# Try to carry on in the face of either OSError",
"# (things like permission issues) or IOError (disk",
"# or network issues). If there's a really dangerous",
"# issue, it should get re-raised by the calls below.",
"pass",
"try",
":",
"os",
".",
"rename",
"(",
"fname",
",",
"self",
".",
"sconsign",
")",
"except",
"OSError",
":",
"# An OSError failure to rename may indicate something",
"# like the directory has no write permission, but",
"# the .sconsign file itself might still be writable,",
"# so try writing on top of it directly. An IOError",
"# here, or in any of the following calls, would get",
"# raised, indicating something like a potentially",
"# serious disk or network issue.",
"open",
"(",
"self",
".",
"sconsign",
",",
"'wb'",
")",
".",
"write",
"(",
"open",
"(",
"fname",
",",
"'rb'",
")",
".",
"read",
"(",
")",
")",
"os",
".",
"chmod",
"(",
"self",
".",
"sconsign",
",",
"mode",
")",
"try",
":",
"os",
".",
"unlink",
"(",
"temp",
")",
"except",
"(",
"IOError",
",",
"OSError",
")",
":",
"pass"
] |
Write the .sconsign file to disk.
Try to write to a temporary file first, and rename it if we
succeed. If we can't write to the temporary file, it's
probably because the directory isn't writable (and if so,
how did we build anything in this directory, anyway?), so
try to write directly to the .sconsign file as a backup.
If we can't rename, try to copy the temporary contents back
to the .sconsign file. Either way, always try to remove
the temporary file at the end.
|
[
"Write",
"the",
".",
"sconsign",
"file",
"to",
"disk",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/SConsign.py#L343-L401
|
train
|
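Editorial aside, not part of the record above: the docstring describes a write-to-temp-then-rename strategy for keeping the .sconsign file consistent. The sketch below shows the same pattern in isolation, using os.replace for the atomic rename and a simplified direct-write fallback; the function name is invented for the example.

import os
import pickle

def atomic_pickle_write(path, data):
    # Write to a temp file in the same directory, then rename it over the target
    # so readers never observe a half-written file.
    temp = os.path.join(os.path.dirname(path) or '.', '.tmp%d' % os.getpid())
    try:
        with open(temp, 'wb') as handle:
            pickle.dump(data, handle)
        os.replace(temp, path)  # atomic when temp and path share a filesystem
    except OSError:
        # Mirror the fallback above: if the temp file cannot be used, try writing
        # the target directly rather than failing outright.
        with open(path, 'wb') as handle:
            pickle.dump(data, handle)
    finally:
        if os.path.exists(temp):
            os.unlink(temp)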
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/sgilink.py
|
generate
|
def generate(env):
"""Add Builders and construction variables for MIPSPro to an Environment."""
link.generate(env)
env['LINK'] = env.Detect(linkers) or 'cc'
env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared')
# __RPATH is set to $_RPATH in the platform specification if that
# platform supports it.
env['RPATHPREFIX'] = '-rpath '
env['RPATHSUFFIX'] = ''
env['_RPATH'] = '${_concat(RPATHPREFIX, RPATH, RPATHSUFFIX, __env__)}'
|
python
|
def generate(env):
"""Add Builders and construction variables for MIPSPro to an Environment."""
link.generate(env)
env['LINK'] = env.Detect(linkers) or 'cc'
env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared')
# __RPATH is set to $_RPATH in the platform specification if that
# platform supports it.
env['RPATHPREFIX'] = '-rpath '
env['RPATHSUFFIX'] = ''
env['_RPATH'] = '${_concat(RPATHPREFIX, RPATH, RPATHSUFFIX, __env__)}'
|
[
"def",
"generate",
"(",
"env",
")",
":",
"link",
".",
"generate",
"(",
"env",
")",
"env",
"[",
"'LINK'",
"]",
"=",
"env",
".",
"Detect",
"(",
"linkers",
")",
"or",
"'cc'",
"env",
"[",
"'SHLINKFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"'$LINKFLAGS -shared'",
")",
"# __RPATH is set to $_RPATH in the platform specification if that",
"# platform supports it.",
"env",
"[",
"'RPATHPREFIX'",
"]",
"=",
"'-rpath '",
"env",
"[",
"'RPATHSUFFIX'",
"]",
"=",
"''",
"env",
"[",
"'_RPATH'",
"]",
"=",
"'${_concat(RPATHPREFIX, RPATH, RPATHSUFFIX, __env__)}'"
] |
Add Builders and construction variables for MIPSPro to an Environment.
|
[
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"MIPSPro",
"to",
"an",
"Environment",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/sgilink.py#L42-L53
|
train
|
iotile/coretools
|
iotilegateway/iotilegateway/supervisor/server.py
|
IOTileSupervisor.start
|
async def start(self):
"""Start the supervisor server."""
await self.server.start()
self.port = self.server.port
|
python
|
async def start(self):
"""Start the supervisor server."""
await self.server.start()
self.port = self.server.port
|
[
"async",
"def",
"start",
"(",
"self",
")",
":",
"await",
"self",
".",
"server",
".",
"start",
"(",
")",
"self",
".",
"port",
"=",
"self",
".",
"server",
".",
"port"
] |
Start the supervisor server.
|
[
"Start",
"the",
"supervisor",
"server",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/server.py#L86-L90
|
train
|
iotile/coretools
|
iotilegateway/iotilegateway/supervisor/server.py
|
IOTileSupervisor.prepare_conn
|
async def prepare_conn(self, conn):
"""Setup a new connection from a client."""
client_id = str(uuid.uuid4())
monitor = functools.partial(self.send_event, client_id)
self._logger.info("New client connection: %s", client_id)
self.service_manager.add_monitor(monitor)
self.clients[client_id] = dict(connection=conn, monitor=monitor)
return client_id
|
python
|
async def prepare_conn(self, conn):
"""Setup a new connection from a client."""
client_id = str(uuid.uuid4())
monitor = functools.partial(self.send_event, client_id)
self._logger.info("New client connection: %s", client_id)
self.service_manager.add_monitor(monitor)
self.clients[client_id] = dict(connection=conn, monitor=monitor)
return client_id
|
[
"async",
"def",
"prepare_conn",
"(",
"self",
",",
"conn",
")",
":",
"client_id",
"=",
"str",
"(",
"uuid",
".",
"uuid4",
"(",
")",
")",
"monitor",
"=",
"functools",
".",
"partial",
"(",
"self",
".",
"send_event",
",",
"client_id",
")",
"self",
".",
"_logger",
".",
"info",
"(",
"\"New client connection: %s\"",
",",
"client_id",
")",
"self",
".",
"service_manager",
".",
"add_monitor",
"(",
"monitor",
")",
"self",
".",
"clients",
"[",
"client_id",
"]",
"=",
"dict",
"(",
"connection",
"=",
"conn",
",",
"monitor",
"=",
"monitor",
")",
"return",
"client_id"
] |
Setup a new connection from a client.
|
[
"Setup",
"a",
"new",
"connection",
"from",
"a",
"client",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/server.py#L97-L108
|
train
|
iotile/coretools
|
iotilegateway/iotilegateway/supervisor/server.py
|
IOTileSupervisor.teardown_conn
|
async def teardown_conn(self, context):
"""Teardown a connection from a client."""
client_id = context.user_data
self._logger.info("Tearing down client connection: %s", client_id)
if client_id not in self.clients:
self._logger.warning("client_id %s did not exist in teardown_conn", client_id)
else:
del self.clients[client_id]
|
python
|
async def teardown_conn(self, context):
"""Teardown a connection from a client."""
client_id = context.user_data
self._logger.info("Tearing down client connection: %s", client_id)
if client_id not in self.clients:
self._logger.warning("client_id %s did not exist in teardown_conn", client_id)
else:
del self.clients[client_id]
|
[
"async",
"def",
"teardown_conn",
"(",
"self",
",",
"context",
")",
":",
"client_id",
"=",
"context",
".",
"user_data",
"self",
".",
"_logger",
".",
"info",
"(",
"\"Tearing down client connection: %s\"",
",",
"client_id",
")",
"if",
"client_id",
"not",
"in",
"self",
".",
"clients",
":",
"self",
".",
"_logger",
".",
"warning",
"(",
"\"client_id %s did not exist in teardown_conn\"",
",",
"client_id",
")",
"else",
":",
"del",
"self",
".",
"clients",
"[",
"client_id",
"]"
] |
Teardown a connection from a client.
|
[
"Teardown",
"a",
"connection",
"from",
"a",
"client",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/server.py#L110-L119
|
train
|
iotile/coretools
|
iotilegateway/iotilegateway/supervisor/server.py
|
IOTileSupervisor.send_event
|
async def send_event(self, client_id, service_name, event_name, event_info, directed_client=None):
"""Send an event to a client."""
if directed_client is not None and directed_client != client_id:
return
client_info = self.clients.get(client_id)
if client_info is None:
self._logger.warning("Attempted to send event to invalid client id: %s", client_id)
return
conn = client_info['connection']
event = dict(service=service_name)
if event_info is not None:
event['payload'] = event_info
self._logger.debug("Sending event: %s", event)
await self.server.send_event(conn, event_name, event)
|
python
|
async def send_event(self, client_id, service_name, event_name, event_info, directed_client=None):
"""Send an event to a client."""
if directed_client is not None and directed_client != client_id:
return
client_info = self.clients.get(client_id)
if client_info is None:
self._logger.warning("Attempted to send event to invalid client id: %s", client_id)
return
conn = client_info['connection']
event = dict(service=service_name)
if event_info is not None:
event['payload'] = event_info
self._logger.debug("Sending event: %s", event)
await self.server.send_event(conn, event_name, event)
|
[
"async",
"def",
"send_event",
"(",
"self",
",",
"client_id",
",",
"service_name",
",",
"event_name",
",",
"event_info",
",",
"directed_client",
"=",
"None",
")",
":",
"if",
"directed_client",
"is",
"not",
"None",
"and",
"directed_client",
"!=",
"client_id",
":",
"return",
"client_info",
"=",
"self",
".",
"clients",
".",
"get",
"(",
"client_id",
")",
"if",
"client_info",
"is",
"None",
":",
"self",
".",
"_logger",
".",
"warning",
"(",
"\"Attempted to send event to invalid client id: %s\"",
",",
"client_id",
")",
"return",
"conn",
"=",
"client_info",
"[",
"'connection'",
"]",
"event",
"=",
"dict",
"(",
"service",
"=",
"service_name",
")",
"if",
"event_info",
"is",
"not",
"None",
":",
"event",
"[",
"'payload'",
"]",
"=",
"event_info",
"self",
".",
"_logger",
".",
"debug",
"(",
"\"Sending event: %s\"",
",",
"event",
")",
"await",
"self",
".",
"server",
".",
"send_event",
"(",
"conn",
",",
"event_name",
",",
"event",
")"
] |
Send an event to a client.
|
[
"Send",
"an",
"event",
"to",
"a",
"client",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/server.py#L121-L140
|
train
|
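Illustrative note, not part of the record above: send_event wraps the service name and optional payload into a small event dict before handing it to the websocket server. The helper below reproduces just that assembly step; the service name and payload are made up.

def build_event(service_name, event_info):
    # Same assembly as send_event above: the payload key is only attached when present.
    event = dict(service=service_name)
    if event_info is not None:
        event['payload'] = event_info
    return event

print(build_event('pump_service', {'new_status': 2}))
# {'service': 'pump_service', 'payload': {'new_status': 2}}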
iotile/coretools
|
iotilegateway/iotilegateway/supervisor/server.py
|
IOTileSupervisor.send_rpc
|
async def send_rpc(self, msg, _context):
"""Send an RPC to a service on behalf of a client."""
service = msg.get('name')
rpc_id = msg.get('rpc_id')
payload = msg.get('payload')
timeout = msg.get('timeout')
response_id = await self.service_manager.send_rpc_command(service, rpc_id, payload,
timeout)
try:
result = await self.service_manager.rpc_results.get(response_id, timeout=timeout)
except asyncio.TimeoutError:
self._logger.warning("RPC 0x%04X on service %s timed out after %f seconds",
rpc_id, service, timeout)
result = dict(result='timeout', response=b'')
return result
|
python
|
async def send_rpc(self, msg, _context):
"""Send an RPC to a service on behalf of a client."""
service = msg.get('name')
rpc_id = msg.get('rpc_id')
payload = msg.get('payload')
timeout = msg.get('timeout')
response_id = await self.service_manager.send_rpc_command(service, rpc_id, payload,
timeout)
try:
result = await self.service_manager.rpc_results.get(response_id, timeout=timeout)
except asyncio.TimeoutError:
self._logger.warning("RPC 0x%04X on service %s timed out after %f seconds",
rpc_id, service, timeout)
result = dict(result='timeout', response=b'')
return result
|
[
"async",
"def",
"send_rpc",
"(",
"self",
",",
"msg",
",",
"_context",
")",
":",
"service",
"=",
"msg",
".",
"get",
"(",
"'name'",
")",
"rpc_id",
"=",
"msg",
".",
"get",
"(",
"'rpc_id'",
")",
"payload",
"=",
"msg",
".",
"get",
"(",
"'payload'",
")",
"timeout",
"=",
"msg",
".",
"get",
"(",
"'timeout'",
")",
"response_id",
"=",
"await",
"self",
".",
"service_manager",
".",
"send_rpc_command",
"(",
"service",
",",
"rpc_id",
",",
"payload",
",",
"timeout",
")",
"try",
":",
"result",
"=",
"await",
"self",
".",
"service_manager",
".",
"rpc_results",
".",
"get",
"(",
"response_id",
",",
"timeout",
"=",
"timeout",
")",
"except",
"asyncio",
".",
"TimeoutError",
":",
"self",
".",
"_logger",
".",
"warning",
"(",
"\"RPC 0x%04X on service %s timed out after %f seconds\"",
",",
"rpc_id",
",",
"service",
",",
"timeout",
")",
"result",
"=",
"dict",
"(",
"result",
"=",
"'timeout'",
",",
"response",
"=",
"b''",
")",
"return",
"result"
] |
Send an RPC to a service on behalf of a client.
|
[
"Send",
"an",
"RPC",
"to",
"a",
"service",
"on",
"behalf",
"of",
"a",
"client",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/server.py#L142-L160
|
train
|
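For context (not part of the record above), the msg dict consumed by send_rpc is built by the client side of the supervisor API. A hypothetical request would be shaped like the dict below; the field names match what the handler reads with msg.get(), while the service name and payload bytes are invented.

rpc_request = {
    'name': 'example_service',  # service that should execute the RPC
    'rpc_id': 0x8000,           # 16-bit RPC identifier
    'payload': b'\x00\x01',     # raw argument bytes for the RPC
    'timeout': 5.0,             # seconds to wait before a 'timeout' result
}

# On timeout the handler returns dict(result='timeout', response=b''), so callers
# should check 'result' before decoding the response bytes.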
iotile/coretools
|
iotilegateway/iotilegateway/supervisor/server.py
|
IOTileSupervisor.respond_rpc
|
async def respond_rpc(self, msg, _context):
"""Respond to an RPC previously sent to a service."""
rpc_id = msg.get('response_uuid')
result = msg.get('result')
payload = msg.get('response')
self.service_manager.send_rpc_response(rpc_id, result, payload)
|
python
|
async def respond_rpc(self, msg, _context):
"""Respond to an RPC previously sent to a service."""
rpc_id = msg.get('response_uuid')
result = msg.get('result')
payload = msg.get('response')
self.service_manager.send_rpc_response(rpc_id, result, payload)
|
[
"async",
"def",
"respond_rpc",
"(",
"self",
",",
"msg",
",",
"_context",
")",
":",
"rpc_id",
"=",
"msg",
".",
"get",
"(",
"'response_uuid'",
")",
"result",
"=",
"msg",
".",
"get",
"(",
"'result'",
")",
"payload",
"=",
"msg",
".",
"get",
"(",
"'response'",
")",
"self",
".",
"service_manager",
".",
"send_rpc_response",
"(",
"rpc_id",
",",
"result",
",",
"payload",
")"
] |
Respond to an RPC previously sent to a service.
|
[
"Respond",
"to",
"an",
"RPC",
"previously",
"sent",
"to",
"a",
"service",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/server.py#L162-L169
|
train
|
iotile/coretools
|
iotilegateway/iotilegateway/supervisor/server.py
|
IOTileSupervisor.set_agent
|
async def set_agent(self, msg, context):
"""Mark a client as the RPC agent for a service."""
service = msg.get('name')
client = context.user_data
self.service_manager.set_agent(service, client)
|
python
|
async def set_agent(self, msg, context):
"""Mark a client as the RPC agent for a service."""
service = msg.get('name')
client = context.user_data
self.service_manager.set_agent(service, client)
|
[
"async",
"def",
"set_agent",
"(",
"self",
",",
"msg",
",",
"context",
")",
":",
"service",
"=",
"msg",
".",
"get",
"(",
"'name'",
")",
"client",
"=",
"context",
".",
"user_data",
"self",
".",
"service_manager",
".",
"set_agent",
"(",
"service",
",",
"client",
")"
] |
Mark a client as the RPC agent for a service.
|
[
"Mark",
"a",
"client",
"as",
"the",
"RPC",
"agent",
"for",
"a",
"service",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/server.py#L176-L182
|
train
|
iotile/coretools
|
iotilegateway/iotilegateway/supervisor/server.py
|
IOTileSupervisor.service_messages
|
async def service_messages(self, msg, _context):
"""Get all messages for a service."""
msgs = self.service_manager.service_messages(msg.get('name'))
return [x.to_dict() for x in msgs]
|
python
|
async def service_messages(self, msg, _context):
"""Get all messages for a service."""
msgs = self.service_manager.service_messages(msg.get('name'))
return [x.to_dict() for x in msgs]
|
[
"async",
"def",
"service_messages",
"(",
"self",
",",
"msg",
",",
"_context",
")",
":",
"msgs",
"=",
"self",
".",
"service_manager",
".",
"service_messages",
"(",
"msg",
".",
"get",
"(",
"'name'",
")",
")",
"return",
"[",
"x",
".",
"to_dict",
"(",
")",
"for",
"x",
"in",
"msgs",
"]"
] |
Get all messages for a service.
|
[
"Get",
"all",
"messages",
"for",
"a",
"service",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/server.py#L219-L223
|
train
|
iotile/coretools
|
iotilegateway/iotilegateway/supervisor/server.py
|
IOTileSupervisor.service_headline
|
async def service_headline(self, msg, _context):
"""Get the headline for a service."""
headline = self.service_manager.service_headline(msg.get('name'))
if headline is not None:
headline = headline.to_dict()
return headline
|
python
|
async def service_headline(self, msg, _context):
"""Get the headline for a service."""
headline = self.service_manager.service_headline(msg.get('name'))
if headline is not None:
headline = headline.to_dict()
return headline
|
[
"async",
"def",
"service_headline",
"(",
"self",
",",
"msg",
",",
"_context",
")",
":",
"headline",
"=",
"self",
".",
"service_manager",
".",
"service_headline",
"(",
"msg",
".",
"get",
"(",
"'name'",
")",
")",
"if",
"headline",
"is",
"not",
"None",
":",
"headline",
"=",
"headline",
".",
"to_dict",
"(",
")",
"return",
"headline"
] |
Get the headline for a service.
|
[
"Get",
"the",
"headline",
"for",
"a",
"service",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/server.py#L225-L232
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/nasm.py
|
generate
|
def generate(env):
"""Add Builders and construction variables for nasm to an Environment."""
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
for suffix in ASSuffixes:
static_obj.add_action(suffix, SCons.Defaults.ASAction)
static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
for suffix in ASPPSuffixes:
static_obj.add_action(suffix, SCons.Defaults.ASPPAction)
static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
env['AS'] = 'nasm'
env['ASFLAGS'] = SCons.Util.CLVar('')
env['ASPPFLAGS'] = '$ASFLAGS'
env['ASCOM'] = '$AS $ASFLAGS -o $TARGET $SOURCES'
env['ASPPCOM'] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o $TARGET $SOURCES'
|
python
|
def generate(env):
"""Add Builders and construction variables for nasm to an Environment."""
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
for suffix in ASSuffixes:
static_obj.add_action(suffix, SCons.Defaults.ASAction)
static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
for suffix in ASPPSuffixes:
static_obj.add_action(suffix, SCons.Defaults.ASPPAction)
static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
env['AS'] = 'nasm'
env['ASFLAGS'] = SCons.Util.CLVar('')
env['ASPPFLAGS'] = '$ASFLAGS'
env['ASCOM'] = '$AS $ASFLAGS -o $TARGET $SOURCES'
env['ASPPCOM'] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o $TARGET $SOURCES'
|
[
"def",
"generate",
"(",
"env",
")",
":",
"static_obj",
",",
"shared_obj",
"=",
"SCons",
".",
"Tool",
".",
"createObjBuilders",
"(",
"env",
")",
"for",
"suffix",
"in",
"ASSuffixes",
":",
"static_obj",
".",
"add_action",
"(",
"suffix",
",",
"SCons",
".",
"Defaults",
".",
"ASAction",
")",
"static_obj",
".",
"add_emitter",
"(",
"suffix",
",",
"SCons",
".",
"Defaults",
".",
"StaticObjectEmitter",
")",
"for",
"suffix",
"in",
"ASPPSuffixes",
":",
"static_obj",
".",
"add_action",
"(",
"suffix",
",",
"SCons",
".",
"Defaults",
".",
"ASPPAction",
")",
"static_obj",
".",
"add_emitter",
"(",
"suffix",
",",
"SCons",
".",
"Defaults",
".",
"StaticObjectEmitter",
")",
"env",
"[",
"'AS'",
"]",
"=",
"'nasm'",
"env",
"[",
"'ASFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"''",
")",
"env",
"[",
"'ASPPFLAGS'",
"]",
"=",
"'$ASFLAGS'",
"env",
"[",
"'ASCOM'",
"]",
"=",
"'$AS $ASFLAGS -o $TARGET $SOURCES'",
"env",
"[",
"'ASPPCOM'",
"]",
"=",
"'$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o $TARGET $SOURCES'"
] |
Add Builders and construction variables for nasm to an Environment.
|
[
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"nasm",
"to",
"an",
"Environment",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/nasm.py#L47-L63
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/sunlink.py
|
generate
|
def generate(env):
"""Add Builders and construction variables for Forte to an Environment."""
link.generate(env)
env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -G')
env['RPATHPREFIX'] = '-R'
env['RPATHSUFFIX'] = ''
env['_RPATH'] = '${_concat(RPATHPREFIX, RPATH, RPATHSUFFIX, __env__)}'
# Support for versioned libraries
link._setup_versioned_lib_variables(env, tool = 'sunlink', use_soname = True)
env['LINKCALLBACKS'] = link._versioned_lib_callbacks()
|
python
|
def generate(env):
"""Add Builders and construction variables for Forte to an Environment."""
link.generate(env)
env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -G')
env['RPATHPREFIX'] = '-R'
env['RPATHSUFFIX'] = ''
env['_RPATH'] = '${_concat(RPATHPREFIX, RPATH, RPATHSUFFIX, __env__)}'
# Support for versioned libraries
link._setup_versioned_lib_variables(env, tool = 'sunlink', use_soname = True)
env['LINKCALLBACKS'] = link._versioned_lib_callbacks()
|
[
"def",
"generate",
"(",
"env",
")",
":",
"link",
".",
"generate",
"(",
"env",
")",
"env",
"[",
"'SHLINKFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"'$LINKFLAGS -G'",
")",
"env",
"[",
"'RPATHPREFIX'",
"]",
"=",
"'-R'",
"env",
"[",
"'RPATHSUFFIX'",
"]",
"=",
"''",
"env",
"[",
"'_RPATH'",
"]",
"=",
"'${_concat(RPATHPREFIX, RPATH, RPATHSUFFIX, __env__)}'",
"# Support for versioned libraries",
"link",
".",
"_setup_versioned_lib_variables",
"(",
"env",
",",
"tool",
"=",
"'sunlink'",
",",
"use_soname",
"=",
"True",
")",
"env",
"[",
"'LINKCALLBACKS'",
"]",
"=",
"link",
".",
"_versioned_lib_callbacks",
"(",
")"
] |
Add Builders and construction variables for Forte to an Environment.
|
[
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"Forte",
"to",
"an",
"Environment",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/sunlink.py#L59-L71
|
train
|
iotile/coretools
|
iotilecore/iotile/core/utilities/schema_verify/verifier.py
|
Verifier._get_short_description
|
def _get_short_description(self):
"""Return the first line of a multiline description
Returns:
string: The short description, otherwise None
"""
if self.description is None:
return None
lines = [x for x in self.description.split('\n')]
if len(lines) == 1:
return lines[0]
elif len(lines) >= 3 and lines[1] == '':
return lines[0]
return None
|
python
|
def _get_short_description(self):
"""Return the first line of a multiline description
Returns:
string: The short description, otherwise None
"""
if self.description is None:
return None
lines = [x for x in self.description.split('\n')]
if len(lines) == 1:
return lines[0]
elif len(lines) >= 3 and lines[1] == '':
return lines[0]
return None
|
[
"def",
"_get_short_description",
"(",
"self",
")",
":",
"if",
"self",
".",
"description",
"is",
"None",
":",
"return",
"None",
"lines",
"=",
"[",
"x",
"for",
"x",
"in",
"self",
".",
"description",
".",
"split",
"(",
"'\\n'",
")",
"]",
"if",
"len",
"(",
"lines",
")",
"==",
"1",
":",
"return",
"lines",
"[",
"0",
"]",
"elif",
"len",
"(",
"lines",
")",
">=",
"3",
"and",
"lines",
"[",
"1",
"]",
"==",
"''",
":",
"return",
"lines",
"[",
"0",
"]",
"return",
"None"
] |
Return the first line of a multiline description
Returns:
string: The short description, otherwise None
|
[
"Return",
"the",
"first",
"line",
"of",
"a",
"multiline",
"description"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/schema_verify/verifier.py#L70-L86
|
train
|
iotile/coretools
|
iotilecore/iotile/core/utilities/schema_verify/verifier.py
|
Verifier._get_long_description
|
def _get_long_description(self):
"""Return the subsequent lines of a multiline description
Returns:
string: The long description, otherwise None
"""
if self.description is None:
return None
lines = [x for x in self.description.split('\n')]
if len(lines) == 1:
return None
elif len(lines) >= 3 and lines[1] == '':
return '\n'.join(lines[2:])
return self.description
|
python
|
def _get_long_description(self):
"""Return the subsequent lines of a multiline description
Returns:
string: The long description, otherwise None
"""
if self.description is None:
return None
lines = [x for x in self.description.split('\n')]
if len(lines) == 1:
return None
elif len(lines) >= 3 and lines[1] == '':
return '\n'.join(lines[2:])
return self.description
|
[
"def",
"_get_long_description",
"(",
"self",
")",
":",
"if",
"self",
".",
"description",
"is",
"None",
":",
"return",
"None",
"lines",
"=",
"[",
"x",
"for",
"x",
"in",
"self",
".",
"description",
".",
"split",
"(",
"'\\n'",
")",
"]",
"if",
"len",
"(",
"lines",
")",
"==",
"1",
":",
"return",
"None",
"elif",
"len",
"(",
"lines",
")",
">=",
"3",
"and",
"lines",
"[",
"1",
"]",
"==",
"''",
":",
"return",
"'\\n'",
".",
"join",
"(",
"lines",
"[",
"2",
":",
"]",
")",
"return",
"self",
".",
"description"
] |
Return the subsequent lines of a multiline description
Returns:
string: The long description, otherwise None
|
[
"Return",
"the",
"subsequent",
"lines",
"of",
"a",
"multiline",
"description"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/schema_verify/verifier.py#L88-L105
|
train
|
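Taken together (editorial sketch, not part of the records above), _get_short_description and _get_long_description split a description on its first blank line: the first line becomes the short form and everything after the separator becomes the long form. The snippet below reproduces that test for the blank-separator case with an invented description.

description = ("Verify a sensor reading\n"
               "\n"
               "Readings must be dictionaries\n"
               "with a UTC timestamp.")

lines = description.split('\n')
# Same test both helpers use: a lone line, or a first line followed by a blank separator.
short_desc = lines[0] if len(lines) == 1 or (len(lines) >= 3 and lines[1] == '') else None
long_desc = '\n'.join(lines[2:]) if len(lines) >= 3 and lines[1] == '' else None

print(short_desc)  # Verify a sensor reading
print(long_desc)   # prints the two remaining lines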
iotile/coretools
|
iotilecore/iotile/core/utilities/schema_verify/verifier.py
|
Verifier.wrap_lines
|
def wrap_lines(self, text, indent_level, indent_size=4):
"""Indent a multiline string
Args:
text (string): The string to indent
indent_level (int): The number of indent_size spaces to prepend
to each line
indent_size (int): The number of spaces to prepend for each indent
level
Returns:
string: The indented block of text
"""
indent = ' '*indent_size*indent_level
lines = text.split('\n')
wrapped_lines = []
for line in lines:
if line == '':
wrapped_lines.append(line)
else:
wrapped_lines.append(indent + line)
return '\n'.join(wrapped_lines)
|
python
|
def wrap_lines(self, text, indent_level, indent_size=4):
"""Indent a multiline string
Args:
text (string): The string to indent
indent_level (int): The number of indent_size spaces to prepend
to each line
indent_size (int): The number of spaces to prepend for each indent
level
Returns:
string: The indented block of text
"""
indent = ' '*indent_size*indent_level
lines = text.split('\n')
wrapped_lines = []
for line in lines:
if line == '':
wrapped_lines.append(line)
else:
wrapped_lines.append(indent + line)
return '\n'.join(wrapped_lines)
|
[
"def",
"wrap_lines",
"(",
"self",
",",
"text",
",",
"indent_level",
",",
"indent_size",
"=",
"4",
")",
":",
"indent",
"=",
"' '",
"*",
"indent_size",
"*",
"indent_level",
"lines",
"=",
"text",
".",
"split",
"(",
"'\\n'",
")",
"wrapped_lines",
"=",
"[",
"]",
"for",
"line",
"in",
"lines",
":",
"if",
"line",
"==",
"''",
":",
"wrapped_lines",
".",
"append",
"(",
"line",
")",
"else",
":",
"wrapped_lines",
".",
"append",
"(",
"indent",
"+",
"line",
")",
"return",
"'\\n'",
".",
"join",
"(",
"wrapped_lines",
")"
] |
Indent a multiline string
Args:
text (string): The string to indent
indent_level (int): The number of indent_size spaces to prepend
to each line
indent_size (int): The number of spaces to prepend for each indent
level
Returns:
string: The indented block of text
|
[
"Indent",
"a",
"multiline",
"string"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/schema_verify/verifier.py#L107-L132
|
train
|
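Illustrative usage (not part of the record above): wrap_lines prefixes every non-empty line with indent_level * indent_size spaces and leaves blank lines untouched. The standalone equivalent below shows the effect without needing a Verifier instance.

def indent_block(text, indent_level, indent_size=4):
    # Same behavior as wrap_lines: blank lines pass through unindented.
    indent = ' ' * indent_size * indent_level
    return '\n'.join(line if line == '' else indent + line
                     for line in text.split('\n'))

print(indent_block('first line\n\nsecond line', 1))
#     first line
# (blank line stays empty)
#     second line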
iotile/coretools
|
iotilecore/iotile/core/utilities/schema_verify/verifier.py
|
Verifier.format_name
|
def format_name(self, name, indent_size=4):
"""Format the name of this verifier
The name will be formatted as:
<name>: <short description>
long description if one is given followed by \n
otherwise no long description
Args:
name (string): A name for this validator
indent_size (int): The number of spaces to indent the
description
Returns:
string: The formatted name block with a short and or long
description appended.
"""
name_block = ''
if self.short_desc is None:
name_block += name + '\n'
else:
name_block += name + ': ' + self.short_desc + '\n'
if self.long_desc is not None:
name_block += self.wrap_lines(self.long_desc, 1, indent_size=indent_size)
name_block += '\n'
return name_block
|
python
|
def format_name(self, name, indent_size=4):
"""Format the name of this verifier
The name will be formatted as:
<name>: <short description>
long description if one is given followed by \n
otherwise no long description
Args:
name (string): A name for this validator
indent_size (int): The number of spaces to indent the
description
Returns:
string: The formatted name block with a short and or long
description appended.
"""
name_block = ''
if self.short_desc is None:
name_block += name + '\n'
else:
name_block += name + ': ' + self.short_desc + '\n'
if self.long_desc is not None:
name_block += self.wrap_lines(self.long_desc, 1, indent_size=indent_size)
name_block += '\n'
return name_block
|
[
"def",
"format_name",
"(",
"self",
",",
"name",
",",
"indent_size",
"=",
"4",
")",
":",
"name_block",
"=",
"''",
"if",
"self",
".",
"short_desc",
"is",
"None",
":",
"name_block",
"+=",
"name",
"+",
"'\\n'",
"else",
":",
"name_block",
"+=",
"name",
"+",
"': '",
"+",
"self",
".",
"short_desc",
"+",
"'\\n'",
"if",
"self",
".",
"long_desc",
"is",
"not",
"None",
":",
"name_block",
"+=",
"self",
".",
"wrap_lines",
"(",
"self",
".",
"long_desc",
",",
"1",
",",
"indent_size",
"=",
"indent_size",
")",
"name_block",
"+=",
"'\\n'",
"return",
"name_block"
] |
Format the name of this verifier
The name will be formatted as:
<name>: <short description>
long description if one is given followed by \n
otherwise no long description
Args:
name (string): A name for this validator
indent_size (int): The number of spaces to indent the
description
Returns:
string: The formatted name block with a short and or long
description appended.
|
[
"Format",
"the",
"name",
"of",
"this",
"verifier"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/schema_verify/verifier.py#L134-L162
|
train
|
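Editorial sketch (not part of the record above): the name block produced by format_name is "<name>: <short description>" followed by an indented long description when one exists. The mirror below takes plain arguments instead of a Verifier instance; the example strings are invented.

def render_name_block(name, short_desc, long_desc, indent_size=4):
    # Same layout rules as format_name above.
    block = name + '\n' if short_desc is None else name + ': ' + short_desc + '\n'
    if long_desc is not None:
        indent = ' ' * indent_size
        block += '\n'.join(line if line == '' else indent + line
                           for line in long_desc.split('\n'))
        block += '\n'
    return block

print(render_name_block('reading',
                        'Verify a sensor reading',
                        'Readings must be dictionaries\nwith a UTC timestamp.'))
# reading: Verify a sensor reading
#     Readings must be dictionaries
#     with a UTC timestamp.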
iotile/coretools
|
iotilecore/iotile/core/utilities/schema_verify/verifier.py
|
Verifier.trim_whitespace
|
def trim_whitespace(self, text):
"""Remove leading whitespace from each line of a multiline string
Args:
text (string): The text to be unindented
Returns:
string: The unindented block of text
"""
lines = text.split('\n')
new_lines = [x.lstrip() for x in lines]
return '\n'.join(new_lines)
|
python
|
def trim_whitespace(self, text):
"""Remove leading whitespace from each line of a multiline string
Args:
text (string): The text to be unindented
Returns:
string: The unindented block of text
"""
lines = text.split('\n')
new_lines = [x.lstrip() for x in lines]
return '\n'.join(new_lines)
|
[
"def",
"trim_whitespace",
"(",
"self",
",",
"text",
")",
":",
"lines",
"=",
"text",
".",
"split",
"(",
"'\\n'",
")",
"new_lines",
"=",
"[",
"x",
".",
"lstrip",
"(",
")",
"for",
"x",
"in",
"lines",
"]",
"return",
"'\\n'",
".",
"join",
"(",
"new_lines",
")"
] |
Remove leading whitespace from each line of a multiline string
Args:
text (string): The text to be unindented
Returns:
string: The unindented block of text
|
[
"Remove",
"leading",
"whitespace",
"from",
"each",
"line",
"of",
"a",
"multiline",
"string"
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/schema_verify/verifier.py#L164-L177
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py
|
__extend_targets_sources
|
def __extend_targets_sources(target, source):
""" Prepare the lists of target and source files. """
if not SCons.Util.is_List(target):
target = [target]
if not source:
source = target[:]
elif not SCons.Util.is_List(source):
source = [source]
if len(target) < len(source):
target.extend(source[len(target):])
return target, source
|
python
|
def __extend_targets_sources(target, source):
""" Prepare the lists of target and source files. """
if not SCons.Util.is_List(target):
target = [target]
if not source:
source = target[:]
elif not SCons.Util.is_List(source):
source = [source]
if len(target) < len(source):
target.extend(source[len(target):])
return target, source
|
[
"def",
"__extend_targets_sources",
"(",
"target",
",",
"source",
")",
":",
"if",
"not",
"SCons",
".",
"Util",
".",
"is_List",
"(",
"target",
")",
":",
"target",
"=",
"[",
"target",
"]",
"if",
"not",
"source",
":",
"source",
"=",
"target",
"[",
":",
"]",
"elif",
"not",
"SCons",
".",
"Util",
".",
"is_List",
"(",
"source",
")",
":",
"source",
"=",
"[",
"source",
"]",
"if",
"len",
"(",
"target",
")",
"<",
"len",
"(",
"source",
")",
":",
"target",
".",
"extend",
"(",
"source",
"[",
"len",
"(",
"target",
")",
":",
"]",
")",
"return",
"target",
",",
"source"
] |
Prepare the lists of target and source files.
|
[
"Prepare",
"the",
"lists",
"of",
"target",
"and",
"source",
"files",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py#L77-L88
|
train
|
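Quick illustration (not part of the record above) of how __extend_targets_sources normalizes its inputs; isinstance(..., list) stands in for SCons.Util.is_List and the file names are invented.

def extend_targets_sources(target, source):
    # Plain-Python mirror of the normalization above.
    if not isinstance(target, list):
        target = [target]
    if not source:
        source = target[:]
    elif not isinstance(source, list):
        source = [source]
    if len(target) < len(source):
        target.extend(source[len(target):])
    return target, source

# A single target with two sources gets padded so every source has a target slot.
print(extend_targets_sources('manual.html', ['manual.xml', 'extra.xml']))
# (['manual.html', 'extra.xml'], ['manual.xml', 'extra.xml'])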
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py
|
__select_builder
|
def __select_builder(lxml_builder, libxml2_builder, cmdline_builder):
""" Selects a builder, based on which Python modules are present. """
if prefer_xsltproc:
return cmdline_builder
if not has_libxml2:
# At the moment we prefer libxml2 over lxml, the latter can lead
# to conflicts when installed together with libxml2.
if has_lxml:
return lxml_builder
else:
return cmdline_builder
return libxml2_builder
|
python
|
def __select_builder(lxml_builder, libxml2_builder, cmdline_builder):
""" Selects a builder, based on which Python modules are present. """
if prefer_xsltproc:
return cmdline_builder
if not has_libxml2:
# At the moment we prefer libxml2 over lxml, the latter can lead
# to conflicts when installed together with libxml2.
if has_lxml:
return lxml_builder
else:
return cmdline_builder
return libxml2_builder
|
[
"def",
"__select_builder",
"(",
"lxml_builder",
",",
"libxml2_builder",
",",
"cmdline_builder",
")",
":",
"if",
"prefer_xsltproc",
":",
"return",
"cmdline_builder",
"if",
"not",
"has_libxml2",
":",
"# At the moment we prefer libxml2 over lxml, the latter can lead",
"# to conflicts when installed together with libxml2.",
"if",
"has_lxml",
":",
"return",
"lxml_builder",
"else",
":",
"return",
"cmdline_builder",
"return",
"libxml2_builder"
] |
Selects a builder, based on which Python modules are present.
|
[
"Selects",
"a",
"builder",
"based",
"on",
"which",
"Python",
"modules",
"are",
"present",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py#L98-L111
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py
|
__ensure_suffix
|
def __ensure_suffix(t, suffix):
""" Ensure that the target t has the given suffix. """
tpath = str(t)
if not tpath.endswith(suffix):
return tpath+suffix
return t
|
python
|
def __ensure_suffix(t, suffix):
""" Ensure that the target t has the given suffix. """
tpath = str(t)
if not tpath.endswith(suffix):
return tpath+suffix
return t
|
[
"def",
"__ensure_suffix",
"(",
"t",
",",
"suffix",
")",
":",
"tpath",
"=",
"str",
"(",
"t",
")",
"if",
"not",
"tpath",
".",
"endswith",
"(",
"suffix",
")",
":",
"return",
"tpath",
"+",
"suffix",
"return",
"t"
] |
Ensure that the target t has the given suffix.
|
[
"Ensure",
"that",
"the",
"target",
"t",
"has",
"the",
"given",
"suffix",
"."
] |
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/docbook/__init__.py#L113-L119
|
train
|