Dataset columns:
repo: string (lengths 7 to 55)
path: string (lengths 4 to 127)
func_name: string (lengths 1 to 88)
original_string: string (lengths 75 to 19.8k)
language: string (1 distinct value)
code: string (lengths 75 to 19.8k)
code_tokens: list
docstring: string (lengths 3 to 17.3k)
docstring_tokens: list
sha: string (length 40)
url: string (lengths 87 to 242)
partition: string (1 distinct value)
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py
Task._exception_raise
def _exception_raise(self): """ Raises a pending exception that was recorded while getting a Task ready for execution. """ exc = self.exc_info()[:] try: exc_type, exc_value, exc_traceback = exc except ValueError: exc_type, exc_value = exc exc_traceback = None # raise exc_type(exc_value).with_traceback(exc_traceback) if sys.version_info[0] == 2: exec("raise exc_type, exc_value, exc_traceback") else: # sys.version_info[0] == 3: if isinstance(exc_value, Exception): #hasattr(exc_value, 'with_traceback'): # If exc_value is an exception, then just reraise exec("raise exc_value.with_traceback(exc_traceback)") else: # else we'll create an exception using the value and raise that exec("raise exc_type(exc_value).with_traceback(exc_traceback)")
python
def _exception_raise(self): """ Raises a pending exception that was recorded while getting a Task ready for execution. """ exc = self.exc_info()[:] try: exc_type, exc_value, exc_traceback = exc except ValueError: exc_type, exc_value = exc exc_traceback = None # raise exc_type(exc_value).with_traceback(exc_traceback) if sys.version_info[0] == 2: exec("raise exc_type, exc_value, exc_traceback") else: # sys.version_info[0] == 3: if isinstance(exc_value, Exception): #hasattr(exc_value, 'with_traceback'): # If exc_value is an exception, then just reraise exec("raise exc_value.with_traceback(exc_traceback)") else: # else we'll create an exception using the value and raise that exec("raise exc_type(exc_value).with_traceback(exc_traceback)")
[ "def", "_exception_raise", "(", "self", ")", ":", "exc", "=", "self", ".", "exc_info", "(", ")", "[", ":", "]", "try", ":", "exc_type", ",", "exc_value", ",", "exc_traceback", "=", "exc", "except", "ValueError", ":", "exc_type", ",", "exc_value", "=", "exc", "exc_traceback", "=", "None", "# raise exc_type(exc_value).with_traceback(exc_traceback)", "if", "sys", ".", "version_info", "[", "0", "]", "==", "2", ":", "exec", "(", "\"raise exc_type, exc_value, exc_traceback\"", ")", "else", ":", "# sys.version_info[0] == 3:", "if", "isinstance", "(", "exc_value", ",", "Exception", ")", ":", "#hasattr(exc_value, 'with_traceback'):", "# If exc_value is an exception, then just reraise", "exec", "(", "\"raise exc_value.with_traceback(exc_traceback)\"", ")", "else", ":", "# else we'll create an exception using the value and raise that", "exec", "(", "\"raise exc_type(exc_value).with_traceback(exc_traceback)\"", ")" ]
Raises a pending exception that was recorded while getting a Task ready for execution.
[ "Raises", "a", "pending", "exception", "that", "was", "recorded", "while", "getting", "a", "Task", "ready", "for", "execution", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L535-L556
train
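The exec()-based branching in Task._exception_raise above is a Python 2/3 compatibility shim: the Python 2 three-argument raise statement is a syntax error under Python 3, so both spellings are hidden inside strings. Once Python 2 support is dropped, the re-raise can be written directly, as the function's own commented-out line already hints. The helper below is an illustrative sketch of that simplification, not code from this repository.

def reraise(exc_type, exc_value, exc_traceback):
    # Re-raise a recorded exception on Python 3, preserving its original traceback.
    if isinstance(exc_value, BaseException):
        raise exc_value.with_traceback(exc_traceback)
    # exc_value was a bare value (e.g. a string), so wrap it in an exception first.
    raise exc_type(exc_value).with_traceback(exc_traceback)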
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py
Taskmaster.no_next_candidate
def no_next_candidate(self): """ Stops Taskmaster processing by not returning a next candidate. Note that we have to clean-up the Taskmaster candidate list because the cycle detection depends on the fact all nodes have been processed somehow. """ while self.candidates: candidates = self.candidates self.candidates = [] self.will_not_build(candidates) return None
python
def no_next_candidate(self): """ Stops Taskmaster processing by not returning a next candidate. Note that we have to clean-up the Taskmaster candidate list because the cycle detection depends on the fact all nodes have been processed somehow. """ while self.candidates: candidates = self.candidates self.candidates = [] self.will_not_build(candidates) return None
[ "def", "no_next_candidate", "(", "self", ")", ":", "while", "self", ".", "candidates", ":", "candidates", "=", "self", ".", "candidates", "self", ".", "candidates", "=", "[", "]", "self", ".", "will_not_build", "(", "candidates", ")", "return", "None" ]
Stops Taskmaster processing by not returning a next candidate. Note that we have to clean-up the Taskmaster candidate list because the cycle detection depends on the fact all nodes have been processed somehow.
[ "Stops", "Taskmaster", "processing", "by", "not", "returning", "a", "next", "candidate", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L660-L672
train
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py
Taskmaster._validate_pending_children
def _validate_pending_children(self): """ Validate the content of the pending_children set. Assert if an internal error is found. This function is used strictly for debugging the taskmaster by checking that no invariants are violated. It is not used in normal operation. The pending_children set is used to detect cycles in the dependency graph. We call a "pending child" a child that is found in the "pending" state when checking the dependencies of its parent node. A pending child can occur when the Taskmaster completes a loop through a cycle. For example, let's imagine a graph made of three nodes (A, B and C) making a cycle. The evaluation starts at node A. The Taskmaster first considers whether node A's child B is up-to-date. Then, recursively, node B needs to check whether node C is up-to-date. This leaves us with a dependency graph looking like:: Next candidate \ \ Node A (Pending) --> Node B(Pending) --> Node C (NoState) ^ | | | +-------------------------------------+ Now, when the Taskmaster examines the Node C's child Node A, it finds that Node A is in the "pending" state. Therefore, Node A is a pending child of node C. Pending children indicate that the Taskmaster has potentially loop back through a cycle. We say potentially because it could also occur when a DAG is evaluated in parallel. For example, consider the following graph:: Node A (Pending) --> Node B(Pending) --> Node C (Pending) --> ... | ^ | | +----------> Node D (NoState) --------+ / Next candidate / The Taskmaster first evaluates the nodes A, B, and C and starts building some children of node C. Assuming, that the maximum parallel level has not been reached, the Taskmaster will examine Node D. It will find that Node C is a pending child of Node D. In summary, evaluating a graph with a cycle will always involve a pending child at one point. A pending child might indicate either a cycle or a diamond-shaped DAG. Only a fraction of the nodes ends-up being a "pending child" of another node. This keeps the pending_children set small in practice. We can differentiate between the two cases if we wait until the end of the build. At this point, all the pending children nodes due to a diamond-shaped DAG will have been properly built (or will have failed to build). But, the pending children involved in a cycle will still be in the pending state. The taskmaster removes nodes from the pending_children set as soon as a pending_children node moves out of the pending state. This also helps to keep the pending_children set small. """ for n in self.pending_children: assert n.state in (NODE_PENDING, NODE_EXECUTING), \ (str(n), StateString[n.state]) assert len(n.waiting_parents) != 0, (str(n), len(n.waiting_parents)) for p in n.waiting_parents: assert p.ref_count > 0, (str(n), str(p), p.ref_count)
python
def _validate_pending_children(self): """ Validate the content of the pending_children set. Assert if an internal error is found. This function is used strictly for debugging the taskmaster by checking that no invariants are violated. It is not used in normal operation. The pending_children set is used to detect cycles in the dependency graph. We call a "pending child" a child that is found in the "pending" state when checking the dependencies of its parent node. A pending child can occur when the Taskmaster completes a loop through a cycle. For example, let's imagine a graph made of three nodes (A, B and C) making a cycle. The evaluation starts at node A. The Taskmaster first considers whether node A's child B is up-to-date. Then, recursively, node B needs to check whether node C is up-to-date. This leaves us with a dependency graph looking like:: Next candidate \ \ Node A (Pending) --> Node B(Pending) --> Node C (NoState) ^ | | | +-------------------------------------+ Now, when the Taskmaster examines the Node C's child Node A, it finds that Node A is in the "pending" state. Therefore, Node A is a pending child of node C. Pending children indicate that the Taskmaster has potentially loop back through a cycle. We say potentially because it could also occur when a DAG is evaluated in parallel. For example, consider the following graph:: Node A (Pending) --> Node B(Pending) --> Node C (Pending) --> ... | ^ | | +----------> Node D (NoState) --------+ / Next candidate / The Taskmaster first evaluates the nodes A, B, and C and starts building some children of node C. Assuming, that the maximum parallel level has not been reached, the Taskmaster will examine Node D. It will find that Node C is a pending child of Node D. In summary, evaluating a graph with a cycle will always involve a pending child at one point. A pending child might indicate either a cycle or a diamond-shaped DAG. Only a fraction of the nodes ends-up being a "pending child" of another node. This keeps the pending_children set small in practice. We can differentiate between the two cases if we wait until the end of the build. At this point, all the pending children nodes due to a diamond-shaped DAG will have been properly built (or will have failed to build). But, the pending children involved in a cycle will still be in the pending state. The taskmaster removes nodes from the pending_children set as soon as a pending_children node moves out of the pending state. This also helps to keep the pending_children set small. """ for n in self.pending_children: assert n.state in (NODE_PENDING, NODE_EXECUTING), \ (str(n), StateString[n.state]) assert len(n.waiting_parents) != 0, (str(n), len(n.waiting_parents)) for p in n.waiting_parents: assert p.ref_count > 0, (str(n), str(p), p.ref_count)
[ "def", "_validate_pending_children", "(", "self", ")", ":", "for", "n", "in", "self", ".", "pending_children", ":", "assert", "n", ".", "state", "in", "(", "NODE_PENDING", ",", "NODE_EXECUTING", ")", ",", "(", "str", "(", "n", ")", ",", "StateString", "[", "n", ".", "state", "]", ")", "assert", "len", "(", "n", ".", "waiting_parents", ")", "!=", "0", ",", "(", "str", "(", "n", ")", ",", "len", "(", "n", ".", "waiting_parents", ")", ")", "for", "p", "in", "n", ".", "waiting_parents", ":", "assert", "p", ".", "ref_count", ">", "0", ",", "(", "str", "(", "n", ")", ",", "str", "(", "p", ")", ",", "p", ".", "ref_count", ")" ]
Validate the content of the pending_children set. Assert if an internal error is found. This function is used strictly for debugging the taskmaster by checking that no invariants are violated. It is not used in normal operation. The pending_children set is used to detect cycles in the dependency graph. We call a "pending child" a child that is found in the "pending" state when checking the dependencies of its parent node. A pending child can occur when the Taskmaster completes a loop through a cycle. For example, let's imagine a graph made of three nodes (A, B and C) making a cycle. The evaluation starts at node A. The Taskmaster first considers whether node A's child B is up-to-date. Then, recursively, node B needs to check whether node C is up-to-date. This leaves us with a dependency graph looking like:: Next candidate \ \ Node A (Pending) --> Node B(Pending) --> Node C (NoState) ^ | | | +-------------------------------------+ Now, when the Taskmaster examines the Node C's child Node A, it finds that Node A is in the "pending" state. Therefore, Node A is a pending child of node C. Pending children indicate that the Taskmaster has potentially loop back through a cycle. We say potentially because it could also occur when a DAG is evaluated in parallel. For example, consider the following graph:: Node A (Pending) --> Node B(Pending) --> Node C (Pending) --> ... | ^ | | +----------> Node D (NoState) --------+ / Next candidate / The Taskmaster first evaluates the nodes A, B, and C and starts building some children of node C. Assuming, that the maximum parallel level has not been reached, the Taskmaster will examine Node D. It will find that Node C is a pending child of Node D. In summary, evaluating a graph with a cycle will always involve a pending child at one point. A pending child might indicate either a cycle or a diamond-shaped DAG. Only a fraction of the nodes ends-up being a "pending child" of another node. This keeps the pending_children set small in practice. We can differentiate between the two cases if we wait until the end of the build. At this point, all the pending children nodes due to a diamond-shaped DAG will have been properly built (or will have failed to build). But, the pending children involved in a cycle will still be in the pending state. The taskmaster removes nodes from the pending_children set as soon as a pending_children node moves out of the pending state. This also helps to keep the pending_children set small.
[ "Validate", "the", "content", "of", "the", "pending_children", "set", ".", "Assert", "if", "an", "internal", "error", "is", "found", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L674-L749
train
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py
Taskmaster.next_task
def next_task(self): """ Returns the next task to be executed. This simply asks for the next Node to be evaluated, and then wraps it in the specific Task subclass with which we were initialized. """ node = self._find_next_ready_node() if node is None: return None executor = node.get_executor() if executor is None: return None tlist = executor.get_all_targets() task = self.tasker(self, tlist, node in self.original_top, node) try: task.make_ready() except Exception as e : # We had a problem just trying to get this task ready (like # a child couldn't be linked to a VariantDir when deciding # whether this node is current). Arrange to raise the # exception when the Task is "executed." self.ready_exc = sys.exc_info() if self.ready_exc: task.exception_set(self.ready_exc) self.ready_exc = None return task
python
def next_task(self): """ Returns the next task to be executed. This simply asks for the next Node to be evaluated, and then wraps it in the specific Task subclass with which we were initialized. """ node = self._find_next_ready_node() if node is None: return None executor = node.get_executor() if executor is None: return None tlist = executor.get_all_targets() task = self.tasker(self, tlist, node in self.original_top, node) try: task.make_ready() except Exception as e : # We had a problem just trying to get this task ready (like # a child couldn't be linked to a VariantDir when deciding # whether this node is current). Arrange to raise the # exception when the Task is "executed." self.ready_exc = sys.exc_info() if self.ready_exc: task.exception_set(self.ready_exc) self.ready_exc = None return task
[ "def", "next_task", "(", "self", ")", ":", "node", "=", "self", ".", "_find_next_ready_node", "(", ")", "if", "node", "is", "None", ":", "return", "None", "executor", "=", "node", ".", "get_executor", "(", ")", "if", "executor", "is", "None", ":", "return", "None", "tlist", "=", "executor", ".", "get_all_targets", "(", ")", "task", "=", "self", ".", "tasker", "(", "self", ",", "tlist", ",", "node", "in", "self", ".", "original_top", ",", "node", ")", "try", ":", "task", ".", "make_ready", "(", ")", "except", "Exception", "as", "e", ":", "# We had a problem just trying to get this task ready (like", "# a child couldn't be linked to a VariantDir when deciding", "# whether this node is current). Arrange to raise the", "# exception when the Task is \"executed.\"", "self", ".", "ready_exc", "=", "sys", ".", "exc_info", "(", ")", "if", "self", ".", "ready_exc", ":", "task", ".", "exception_set", "(", "self", ".", "ready_exc", ")", "self", ".", "ready_exc", "=", "None", "return", "task" ]
Returns the next task to be executed. This simply asks for the next Node to be evaluated, and then wraps it in the specific Task subclass with which we were initialized.
[ "Returns", "the", "next", "task", "to", "be", "executed", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L952-L985
train
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py
Taskmaster.cleanup
def cleanup(self): """ Check for dependency cycles. """ if not self.pending_children: return nclist = [(n, find_cycle([n], set())) for n in self.pending_children] genuine_cycles = [ node for node,cycle in nclist if cycle or node.get_state() != NODE_EXECUTED ] if not genuine_cycles: # All of the "cycles" found were single nodes in EXECUTED state, # which is to say, they really weren't cycles. Just return. return desc = 'Found dependency cycle(s):\n' for node, cycle in nclist: if cycle: desc = desc + " " + " -> ".join(map(str, cycle)) + "\n" else: desc = desc + \ " Internal Error: no cycle found for node %s (%s) in state %s\n" % \ (node, repr(node), StateString[node.get_state()]) raise SCons.Errors.UserError(desc)
python
def cleanup(self): """ Check for dependency cycles. """ if not self.pending_children: return nclist = [(n, find_cycle([n], set())) for n in self.pending_children] genuine_cycles = [ node for node,cycle in nclist if cycle or node.get_state() != NODE_EXECUTED ] if not genuine_cycles: # All of the "cycles" found were single nodes in EXECUTED state, # which is to say, they really weren't cycles. Just return. return desc = 'Found dependency cycle(s):\n' for node, cycle in nclist: if cycle: desc = desc + " " + " -> ".join(map(str, cycle)) + "\n" else: desc = desc + \ " Internal Error: no cycle found for node %s (%s) in state %s\n" % \ (node, repr(node), StateString[node.get_state()]) raise SCons.Errors.UserError(desc)
[ "def", "cleanup", "(", "self", ")", ":", "if", "not", "self", ".", "pending_children", ":", "return", "nclist", "=", "[", "(", "n", ",", "find_cycle", "(", "[", "n", "]", ",", "set", "(", ")", ")", ")", "for", "n", "in", "self", ".", "pending_children", "]", "genuine_cycles", "=", "[", "node", "for", "node", ",", "cycle", "in", "nclist", "if", "cycle", "or", "node", ".", "get_state", "(", ")", "!=", "NODE_EXECUTED", "]", "if", "not", "genuine_cycles", ":", "# All of the \"cycles\" found were single nodes in EXECUTED state,", "# which is to say, they really weren't cycles. Just return.", "return", "desc", "=", "'Found dependency cycle(s):\\n'", "for", "node", ",", "cycle", "in", "nclist", ":", "if", "cycle", ":", "desc", "=", "desc", "+", "\" \"", "+", "\" -> \"", ".", "join", "(", "map", "(", "str", ",", "cycle", ")", ")", "+", "\"\\n\"", "else", ":", "desc", "=", "desc", "+", "\" Internal Error: no cycle found for node %s (%s) in state %s\\n\"", "%", "(", "node", ",", "repr", "(", "node", ")", ",", "StateString", "[", "node", ".", "get_state", "(", ")", "]", ")", "raise", "SCons", ".", "Errors", ".", "UserError", "(", "desc", ")" ]
Check for dependency cycles.
[ "Check", "for", "dependency", "cycles", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L1037-L1064
train
iotile/coretools
iotilebuild/iotile/build/dev/resolverchain.py
DependencyResolverChain.instantiate_resolver
def instantiate_resolver(self, name, args): """Directly instantiate a dependency resolver by name with the given arguments Args: name (string): The name of the class that we want to instantiate args (dict): The arguments to pass to the resolver factory Returns: DependencyResolver """ if name not in self._known_resolvers: raise ArgumentError("Attempting to instantiate unknown dependency resolver", name=name) return self._known_resolvers[name](args)
python
def instantiate_resolver(self, name, args): """Directly instantiate a dependency resolver by name with the given arguments Args: name (string): The name of the class that we want to instantiate args (dict): The arguments to pass to the resolver factory Returns: DependencyResolver """ if name not in self._known_resolvers: raise ArgumentError("Attempting to instantiate unknown dependency resolver", name=name) return self._known_resolvers[name](args)
[ "def", "instantiate_resolver", "(", "self", ",", "name", ",", "args", ")", ":", "if", "name", "not", "in", "self", ".", "_known_resolvers", ":", "raise", "ArgumentError", "(", "\"Attempting to instantiate unknown dependency resolver\"", ",", "name", "=", "name", ")", "return", "self", ".", "_known_resolvers", "[", "name", "]", "(", "args", ")" ]
Directly instantiate a dependency resolver by name with the given arguments Args: name (string): The name of the class that we want to instantiate args (dict): The arguments to pass to the resolver factory Returns: DependencyResolver
[ "Directly", "instantiate", "a", "dependency", "resolver", "by", "name", "with", "the", "given", "arguments" ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/dev/resolverchain.py#L53-L66
train
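A minimal usage sketch for instantiate_resolver follows. The resolver name and argument dictionary are hypothetical, and constructing DependencyResolverChain with no arguments is an assumption; the constructor and the ArgumentError import path are not part of this excerpt.

chain = DependencyResolverChain()  # assumed no-argument construction (constructor not shown here)
try:
    # Hypothetical resolver name and factory arguments, for illustration only.
    resolver = chain.instantiate_resolver('registry_resolver', {'registry_path': '~/.iotile/registry'})
except ArgumentError:
    # Raised when the name is not among the registered resolver classes.
    resolver = None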
iotile/coretools
iotilebuild/iotile/build/dev/resolverchain.py
DependencyResolverChain.pull_release
def pull_release(self, name, version, destfolder=".", force=False): """Download and unpack a released iotile component by name and version range If the folder that would be created already exists, this command fails unless you pass force=True Args: name (string): The name of the component to download version (SemanticVersionRange): The valid versions of the component to fetch destfolder (string): The folder into which to unpack the result, defaults to the current working directory force (bool): Forcibly overwrite whatever is currently in the folder that would be fetched. Raises: ExternalError: If the destination folder exists and force is not specified ArgumentError: If the specified component could not be found with the required version """ unique_id = name.replace('/', '_') depdict = { 'name': name, 'unique_id': unique_id, 'required_version': version, 'required_version_string': str(version) } destdir = os.path.join(destfolder, unique_id) if os.path.exists(destdir): if not force: raise ExternalError("Output directory exists and force was not specified, aborting", output_directory=destdir) shutil.rmtree(destdir) result = self.update_dependency(None, depdict, destdir) if result != "installed": raise ArgumentError("Could not find component to satisfy name/version combination")
python
def pull_release(self, name, version, destfolder=".", force=False): """Download and unpack a released iotile component by name and version range If the folder that would be created already exists, this command fails unless you pass force=True Args: name (string): The name of the component to download version (SemanticVersionRange): The valid versions of the component to fetch destfolder (string): The folder into which to unpack the result, defaults to the current working directory force (bool): Forcibly overwrite whatever is currently in the folder that would be fetched. Raises: ExternalError: If the destination folder exists and force is not specified ArgumentError: If the specified component could not be found with the required version """ unique_id = name.replace('/', '_') depdict = { 'name': name, 'unique_id': unique_id, 'required_version': version, 'required_version_string': str(version) } destdir = os.path.join(destfolder, unique_id) if os.path.exists(destdir): if not force: raise ExternalError("Output directory exists and force was not specified, aborting", output_directory=destdir) shutil.rmtree(destdir) result = self.update_dependency(None, depdict, destdir) if result != "installed": raise ArgumentError("Could not find component to satisfy name/version combination")
[ "def", "pull_release", "(", "self", ",", "name", ",", "version", ",", "destfolder", "=", "\".\"", ",", "force", "=", "False", ")", ":", "unique_id", "=", "name", ".", "replace", "(", "'/'", ",", "'_'", ")", "depdict", "=", "{", "'name'", ":", "name", ",", "'unique_id'", ":", "unique_id", ",", "'required_version'", ":", "version", ",", "'required_version_string'", ":", "str", "(", "version", ")", "}", "destdir", "=", "os", ".", "path", ".", "join", "(", "destfolder", ",", "unique_id", ")", "if", "os", ".", "path", ".", "exists", "(", "destdir", ")", ":", "if", "not", "force", ":", "raise", "ExternalError", "(", "\"Output directory exists and force was not specified, aborting\"", ",", "output_directory", "=", "destdir", ")", "shutil", ".", "rmtree", "(", "destdir", ")", "result", "=", "self", ".", "update_dependency", "(", "None", ",", "depdict", ",", "destdir", ")", "if", "result", "!=", "\"installed\"", ":", "raise", "ArgumentError", "(", "\"Could not find component to satisfy name/version combination\"", ")" ]
Download and unpack a released iotile component by name and version range If the folder that would be created already exists, this command fails unless you pass force=True Args: name (string): The name of the component to download version (SemanticVersionRange): The valid versions of the component to fetch destfolder (string): The folder into which to unpack the result, defaults to the current working directory force (bool): Forcibly overwrite whatever is currently in the folder that would be fetched. Raises: ExternalError: If the destination folder exists and force is not specified ArgumentError: If the specified component could not be found with the required version
[ "Download", "and", "unpack", "a", "released", "iotile", "component", "by", "name", "and", "version", "range" ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/dev/resolverchain.py#L68-L106
train
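The sketch below shows one way pull_release might be invoked. The component name is made up, and SemanticVersionRange.FromString is an assumed convenience constructor; the docstring above only requires that version be a SemanticVersionRange.

# Hypothetical invocation; adjust the version-range construction to whatever
# SemanticVersionRange actually provides in your version of coretools.
version_range = SemanticVersionRange.FromString('^1.0.0')
chain.pull_release('example_org/proxy_component', version_range,
                   destfolder='downloaded_deps', force=True)
# Unpacks into downloaded_deps/example_org_proxy_component, replacing it because force=True.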
iotile/coretools
iotilebuild/iotile/build/dev/resolverchain.py
DependencyResolverChain.update_dependency
def update_dependency(self, tile, depinfo, destdir=None): """Attempt to install or update a dependency to the latest version. Args: tile (IOTile): An IOTile object describing the tile that has the dependency depinfo (dict): a dictionary from tile.dependencies specifying the dependency destdir (string): An optional folder into which to unpack the dependency Returns: string: a string indicating the outcome. Possible values are: "already installed" "installed" "updated" "not found" """ if destdir is None: destdir = os.path.join(tile.folder, 'build', 'deps', depinfo['unique_id']) has_version = False had_version = False if os.path.exists(destdir): has_version = True had_version = True for priority, rule in self.rules: if not self._check_rule(rule, depinfo): continue resolver = self._find_resolver(rule) if has_version: deptile = IOTile(destdir) # If the dependency is not up to date, don't do anything depstatus = self._check_dep(depinfo, deptile, resolver) if depstatus is False: shutil.rmtree(destdir) has_version = False else: continue # Now try to resolve this dependency with the latest version result = resolver.resolve(depinfo, destdir) if not result['found'] and result.get('stop', False): return 'not found' if not result['found']: continue settings = { 'resolver': resolver.__class__.__name__, 'factory_args': rule[2] } if 'settings' in result: settings['settings'] = result['settings'] self._save_depsettings(destdir, settings) if had_version: return "updated" return "installed" if has_version: return "already installed" return "not found"
python
def update_dependency(self, tile, depinfo, destdir=None): """Attempt to install or update a dependency to the latest version. Args: tile (IOTile): An IOTile object describing the tile that has the dependency depinfo (dict): a dictionary from tile.dependencies specifying the dependency destdir (string): An optional folder into which to unpack the dependency Returns: string: a string indicating the outcome. Possible values are: "already installed" "installed" "updated" "not found" """ if destdir is None: destdir = os.path.join(tile.folder, 'build', 'deps', depinfo['unique_id']) has_version = False had_version = False if os.path.exists(destdir): has_version = True had_version = True for priority, rule in self.rules: if not self._check_rule(rule, depinfo): continue resolver = self._find_resolver(rule) if has_version: deptile = IOTile(destdir) # If the dependency is not up to date, don't do anything depstatus = self._check_dep(depinfo, deptile, resolver) if depstatus is False: shutil.rmtree(destdir) has_version = False else: continue # Now try to resolve this dependency with the latest version result = resolver.resolve(depinfo, destdir) if not result['found'] and result.get('stop', False): return 'not found' if not result['found']: continue settings = { 'resolver': resolver.__class__.__name__, 'factory_args': rule[2] } if 'settings' in result: settings['settings'] = result['settings'] self._save_depsettings(destdir, settings) if had_version: return "updated" return "installed" if has_version: return "already installed" return "not found"
[ "def", "update_dependency", "(", "self", ",", "tile", ",", "depinfo", ",", "destdir", "=", "None", ")", ":", "if", "destdir", "is", "None", ":", "destdir", "=", "os", ".", "path", ".", "join", "(", "tile", ".", "folder", ",", "'build'", ",", "'deps'", ",", "depinfo", "[", "'unique_id'", "]", ")", "has_version", "=", "False", "had_version", "=", "False", "if", "os", ".", "path", ".", "exists", "(", "destdir", ")", ":", "has_version", "=", "True", "had_version", "=", "True", "for", "priority", ",", "rule", "in", "self", ".", "rules", ":", "if", "not", "self", ".", "_check_rule", "(", "rule", ",", "depinfo", ")", ":", "continue", "resolver", "=", "self", ".", "_find_resolver", "(", "rule", ")", "if", "has_version", ":", "deptile", "=", "IOTile", "(", "destdir", ")", "# If the dependency is not up to date, don't do anything", "depstatus", "=", "self", ".", "_check_dep", "(", "depinfo", ",", "deptile", ",", "resolver", ")", "if", "depstatus", "is", "False", ":", "shutil", ".", "rmtree", "(", "destdir", ")", "has_version", "=", "False", "else", ":", "continue", "# Now try to resolve this dependency with the latest version", "result", "=", "resolver", ".", "resolve", "(", "depinfo", ",", "destdir", ")", "if", "not", "result", "[", "'found'", "]", "and", "result", ".", "get", "(", "'stop'", ",", "False", ")", ":", "return", "'not found'", "if", "not", "result", "[", "'found'", "]", ":", "continue", "settings", "=", "{", "'resolver'", ":", "resolver", ".", "__class__", ".", "__name__", ",", "'factory_args'", ":", "rule", "[", "2", "]", "}", "if", "'settings'", "in", "result", ":", "settings", "[", "'settings'", "]", "=", "result", "[", "'settings'", "]", "self", ".", "_save_depsettings", "(", "destdir", ",", "settings", ")", "if", "had_version", ":", "return", "\"updated\"", "return", "\"installed\"", "if", "has_version", ":", "return", "\"already installed\"", "return", "\"not found\"" ]
Attempt to install or update a dependency to the latest version. Args: tile (IOTile): An IOTile object describing the tile that has the dependency depinfo (dict): a dictionary from tile.dependencies specifying the dependency destdir (string): An optional folder into which to unpack the dependency Returns: string: a string indicating the outcome. Possible values are: "already installed" "installed" "updated" "not found"
[ "Attempt", "to", "install", "or", "update", "a", "dependency", "to", "the", "latest", "version", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/dev/resolverchain.py#L108-L176
train
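Because update_dependency reports its outcome as a string, callers typically branch on the returned value. The loop below is a sketch; it assumes tile is an IOTile whose dependencies attribute is the list of dependency dictionaries described in the docstring.

for depinfo in tile.dependencies:  # each entry is a dict with at least 'name' and 'unique_id'
    outcome = chain.update_dependency(tile, depinfo)
    if outcome == 'not found':
        print('could not resolve dependency %s' % depinfo['name'])
    elif outcome in ('installed', 'updated'):
        print('%s dependency %s' % (outcome, depinfo['name']))
    # 'already installed' requires no action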
iotile/coretools
iotilebuild/iotile/build/dev/resolverchain.py
DependencyResolverChain._check_dep
def _check_dep(self, depinfo, deptile, resolver): """Check if a dependency tile is up to date Returns: bool: True if it is up to date, False if it not and None if this resolver cannot assess whether or not it is up to date. """ try: settings = self._load_depsettings(deptile) except IOError: return False # If this dependency was initially resolved with a different resolver, then # we cannot check if it is up to date if settings['resolver'] != resolver.__class__.__name__: return None resolver_settings = {} if 'settings' in settings: resolver_settings = settings['settings'] return resolver.check(depinfo, deptile, resolver_settings)
python
def _check_dep(self, depinfo, deptile, resolver): """Check if a dependency tile is up to date Returns: bool: True if it is up to date, False if it not and None if this resolver cannot assess whether or not it is up to date. """ try: settings = self._load_depsettings(deptile) except IOError: return False # If this dependency was initially resolved with a different resolver, then # we cannot check if it is up to date if settings['resolver'] != resolver.__class__.__name__: return None resolver_settings = {} if 'settings' in settings: resolver_settings = settings['settings'] return resolver.check(depinfo, deptile, resolver_settings)
[ "def", "_check_dep", "(", "self", ",", "depinfo", ",", "deptile", ",", "resolver", ")", ":", "try", ":", "settings", "=", "self", ".", "_load_depsettings", "(", "deptile", ")", "except", "IOError", ":", "return", "False", "# If this dependency was initially resolved with a different resolver, then", "# we cannot check if it is up to date", "if", "settings", "[", "'resolver'", "]", "!=", "resolver", ".", "__class__", ".", "__name__", ":", "return", "None", "resolver_settings", "=", "{", "}", "if", "'settings'", "in", "settings", ":", "resolver_settings", "=", "settings", "[", "'settings'", "]", "return", "resolver", ".", "check", "(", "depinfo", ",", "deptile", ",", "resolver_settings", ")" ]
Check if a dependency tile is up to date Returns: bool: True if it is up to date, False if it not and None if this resolver cannot assess whether or not it is up to date.
[ "Check", "if", "a", "dependency", "tile", "is", "up", "to", "date" ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/dev/resolverchain.py#L192-L214
train
iotile/coretools
iotilecore/iotile/core/utilities/async_tools/event_loop.py
_log_future_exception
def _log_future_exception(future, logger): """Log any exception raised by future.""" if not future.done(): return try: future.result() except: #pylint:disable=bare-except;This is a background logging helper logger.warning("Exception in ignored future: %s", future, exc_info=True)
python
def _log_future_exception(future, logger): """Log any exception raised by future.""" if not future.done(): return try: future.result() except: #pylint:disable=bare-except;This is a background logging helper logger.warning("Exception in ignored future: %s", future, exc_info=True)
[ "def", "_log_future_exception", "(", "future", ",", "logger", ")", ":", "if", "not", "future", ".", "done", "(", ")", ":", "return", "try", ":", "future", ".", "result", "(", ")", "except", ":", "#pylint:disable=bare-except;This is a background logging helper", "logger", ".", "warning", "(", "\"Exception in ignored future: %s\"", ",", "future", ",", "exc_info", "=", "True", ")" ]
Log any exception raised by future.
[ "Log", "any", "exception", "raised", "by", "future", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/async_tools/event_loop.py#L647-L656
train
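_log_future_exception takes the future first and the logger second, so it cannot be handed to add_done_callback as-is; binding the logger with functools.partial is one plausible wiring. This is an assumption for illustration, since the actual call sites are not part of this excerpt.

import functools
import logging

logger = logging.getLogger(__name__)
# `future` is assumed to be an asyncio.Future or Task created elsewhere on the loop.
future.add_done_callback(functools.partial(_log_future_exception, logger=logger))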
iotile/coretools
iotilecore/iotile/core/utilities/async_tools/event_loop.py
BackgroundTask.create_subtask
def create_subtask(self, cor, name=None, stop_timeout=1.0): """Create and add a subtask from a coroutine. This function will create a BackgroundTask and then call self.add_subtask() on it. Args: cor (coroutine): The coroutine that should be wrapped in a background task. name (str): An optional name for the task. stop_timeout (float): The maximum time to wait for this subtask to die after stopping it. Returns: Backgroundtask: The created subtask. """ if self.stopped: raise InternalError("Cannot add a subtask to a parent that is already stopped") subtask = BackgroundTask(cor, name, loop=self._loop, stop_timeout=stop_timeout) self.add_subtask(subtask) return subtask
python
def create_subtask(self, cor, name=None, stop_timeout=1.0): """Create and add a subtask from a coroutine. This function will create a BackgroundTask and then call self.add_subtask() on it. Args: cor (coroutine): The coroutine that should be wrapped in a background task. name (str): An optional name for the task. stop_timeout (float): The maximum time to wait for this subtask to die after stopping it. Returns: Backgroundtask: The created subtask. """ if self.stopped: raise InternalError("Cannot add a subtask to a parent that is already stopped") subtask = BackgroundTask(cor, name, loop=self._loop, stop_timeout=stop_timeout) self.add_subtask(subtask) return subtask
[ "def", "create_subtask", "(", "self", ",", "cor", ",", "name", "=", "None", ",", "stop_timeout", "=", "1.0", ")", ":", "if", "self", ".", "stopped", ":", "raise", "InternalError", "(", "\"Cannot add a subtask to a parent that is already stopped\"", ")", "subtask", "=", "BackgroundTask", "(", "cor", ",", "name", ",", "loop", "=", "self", ".", "_loop", ",", "stop_timeout", "=", "stop_timeout", ")", "self", ".", "add_subtask", "(", "subtask", ")", "return", "subtask" ]
Create and add a subtask from a coroutine. This function will create a BackgroundTask and then call self.add_subtask() on it. Args: cor (coroutine): The coroutine that should be wrapped in a background task. name (str): An optional name for the task. stop_timeout (float): The maximum time to wait for this subtask to die after stopping it. Returns: Backgroundtask: The created subtask.
[ "Create", "and", "add", "a", "subtask", "from", "a", "coroutine", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/async_tools/event_loop.py#L113-L135
train
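A sketch of create_subtask in use. The coroutine is hypothetical, and parent_task is assumed to be an existing BackgroundTask that has not yet been stopped.

import asyncio

async def _poll_hardware():
    # Hypothetical worker coroutine used only for illustration.
    while True:
        await asyncio.sleep(1.0)

subtask = parent_task.create_subtask(_poll_hardware(), name='poller', stop_timeout=2.0)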
iotile/coretools
iotilecore/iotile/core/utilities/async_tools/event_loop.py
BackgroundTask.add_subtask
def add_subtask(self, subtask): """Link a subtask to this parent task. This will cause stop() to block until the subtask has also finished. Calling stop will not directly cancel the subtask. It is expected that your finalizer for this parent task will cancel or otherwise stop the subtask. Args: subtask (BackgroundTask): Another task that will be stopped when this task is stopped. """ if self.stopped: raise InternalError("Cannot add a subtask to a parent that is already stopped") if not isinstance(subtask, BackgroundTask): raise ArgumentError("Subtasks must inherit from BackgroundTask, task={}".format(subtask)) #pylint:disable=protected-access;It is the same class as us so is equivalent to self access. if subtask._loop != self._loop: raise ArgumentError("Subtasks must run in the same BackgroundEventLoop as their parent", subtask=subtask, parent=self) self.subtasks.append(subtask)
python
def add_subtask(self, subtask): """Link a subtask to this parent task. This will cause stop() to block until the subtask has also finished. Calling stop will not directly cancel the subtask. It is expected that your finalizer for this parent task will cancel or otherwise stop the subtask. Args: subtask (BackgroundTask): Another task that will be stopped when this task is stopped. """ if self.stopped: raise InternalError("Cannot add a subtask to a parent that is already stopped") if not isinstance(subtask, BackgroundTask): raise ArgumentError("Subtasks must inherit from BackgroundTask, task={}".format(subtask)) #pylint:disable=protected-access;It is the same class as us so is equivalent to self access. if subtask._loop != self._loop: raise ArgumentError("Subtasks must run in the same BackgroundEventLoop as their parent", subtask=subtask, parent=self) self.subtasks.append(subtask)
[ "def", "add_subtask", "(", "self", ",", "subtask", ")", ":", "if", "self", ".", "stopped", ":", "raise", "InternalError", "(", "\"Cannot add a subtask to a parent that is already stopped\"", ")", "if", "not", "isinstance", "(", "subtask", ",", "BackgroundTask", ")", ":", "raise", "ArgumentError", "(", "\"Subtasks must inherit from BackgroundTask, task={}\"", ".", "format", "(", "subtask", ")", ")", "#pylint:disable=protected-access;It is the same class as us so is equivalent to self access.", "if", "subtask", ".", "_loop", "!=", "self", ".", "_loop", ":", "raise", "ArgumentError", "(", "\"Subtasks must run in the same BackgroundEventLoop as their parent\"", ",", "subtask", "=", "subtask", ",", "parent", "=", "self", ")", "self", ".", "subtasks", ".", "append", "(", "subtask", ")" ]
Link a subtask to this parent task. This will cause stop() to block until the subtask has also finished. Calling stop will not directly cancel the subtask. It is expected that your finalizer for this parent task will cancel or otherwise stop the subtask. Args: subtask (BackgroundTask): Another task that will be stopped when this task is stopped.
[ "Link", "a", "subtask", "to", "this", "parent", "task", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/async_tools/event_loop.py#L137-L161
train
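add_subtask is the lower-level path for a child task you constructed yourself; both tasks must share the same BackgroundEventLoop or ArgumentError is raised. In the sketch, shared_loop stands in for that common loop and the coroutine is the hypothetical one from the previous sketch; the BackgroundTask constructor arguments mirror those used by create_subtask above.

child = BackgroundTask(_poll_hardware(), 'poller', loop=shared_loop, stop_timeout=1.0)
parent_task.add_subtask(child)
# parent_task.stop() will now also wait (up to the timeouts) for `child` to finish,
# but the parent's finalizer is still expected to actually cancel or stop it.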
iotile/coretools
iotilecore/iotile/core/utilities/async_tools/event_loop.py
BackgroundTask.stop
async def stop(self): """Stop this task and wait until it and all its subtasks end. This function will finalize this task either by using the finalizer function passed during creation or by calling task.cancel() if no finalizer was passed. It will then call join() on this task and any registered subtasks with the given maximum timeout, raising asyncio.TimeoutError if the tasks did not exit within the given timeout. This method should only be called once. After this method returns, the task is finished and no more subtasks can be added. If this task is being tracked inside of the BackgroundEventLoop that it is part of, it will automatically be removed from the event loop's list of tasks. """ if self.stopped: return self._logger.debug("Stopping task %s", self.name) if self._finalizer is not None: try: result = self._finalizer(self) if inspect.isawaitable(result): await result except: #pylint:disable=bare-except;We need to make sure we always wait for the task self._logger.exception("Error running finalizer for task %s", self.name) elif self.task is not None: self.task.cancel() tasks = [] if self.task is not None: tasks.append(self.task) tasks.extend(x.task for x in self.subtasks) finished = asyncio.gather(*tasks, return_exceptions=True) outcomes = [] try: outcomes = await asyncio.wait_for(finished, timeout=self._stop_timeout) except asyncio.TimeoutError as err: # See discussion here: https://github.com/python/asyncio/issues/253#issuecomment-120138132 # This prevents a nuisance log error message, finished is guaranteed # to be cancelled but not awaited when wait_for() has a timeout. try: outcomes = await finished except asyncio.CancelledError: pass # See https://mail.python.org/pipermail/python-3000/2008-May/013740.html # for why we need to explictly name the error here raise err finally: self.stopped = True for outcome in outcomes: if isinstance(outcome, Exception) and not isinstance(outcome, asyncio.CancelledError): self._logger.error(outcome) if self in self._loop.tasks: self._loop.tasks.remove(self)
python
async def stop(self): """Stop this task and wait until it and all its subtasks end. This function will finalize this task either by using the finalizer function passed during creation or by calling task.cancel() if no finalizer was passed. It will then call join() on this task and any registered subtasks with the given maximum timeout, raising asyncio.TimeoutError if the tasks did not exit within the given timeout. This method should only be called once. After this method returns, the task is finished and no more subtasks can be added. If this task is being tracked inside of the BackgroundEventLoop that it is part of, it will automatically be removed from the event loop's list of tasks. """ if self.stopped: return self._logger.debug("Stopping task %s", self.name) if self._finalizer is not None: try: result = self._finalizer(self) if inspect.isawaitable(result): await result except: #pylint:disable=bare-except;We need to make sure we always wait for the task self._logger.exception("Error running finalizer for task %s", self.name) elif self.task is not None: self.task.cancel() tasks = [] if self.task is not None: tasks.append(self.task) tasks.extend(x.task for x in self.subtasks) finished = asyncio.gather(*tasks, return_exceptions=True) outcomes = [] try: outcomes = await asyncio.wait_for(finished, timeout=self._stop_timeout) except asyncio.TimeoutError as err: # See discussion here: https://github.com/python/asyncio/issues/253#issuecomment-120138132 # This prevents a nuisance log error message, finished is guaranteed # to be cancelled but not awaited when wait_for() has a timeout. try: outcomes = await finished except asyncio.CancelledError: pass # See https://mail.python.org/pipermail/python-3000/2008-May/013740.html # for why we need to explictly name the error here raise err finally: self.stopped = True for outcome in outcomes: if isinstance(outcome, Exception) and not isinstance(outcome, asyncio.CancelledError): self._logger.error(outcome) if self in self._loop.tasks: self._loop.tasks.remove(self)
[ "async", "def", "stop", "(", "self", ")", ":", "if", "self", ".", "stopped", ":", "return", "self", ".", "_logger", ".", "debug", "(", "\"Stopping task %s\"", ",", "self", ".", "name", ")", "if", "self", ".", "_finalizer", "is", "not", "None", ":", "try", ":", "result", "=", "self", ".", "_finalizer", "(", "self", ")", "if", "inspect", ".", "isawaitable", "(", "result", ")", ":", "await", "result", "except", ":", "#pylint:disable=bare-except;We need to make sure we always wait for the task", "self", ".", "_logger", ".", "exception", "(", "\"Error running finalizer for task %s\"", ",", "self", ".", "name", ")", "elif", "self", ".", "task", "is", "not", "None", ":", "self", ".", "task", ".", "cancel", "(", ")", "tasks", "=", "[", "]", "if", "self", ".", "task", "is", "not", "None", ":", "tasks", ".", "append", "(", "self", ".", "task", ")", "tasks", ".", "extend", "(", "x", ".", "task", "for", "x", "in", "self", ".", "subtasks", ")", "finished", "=", "asyncio", ".", "gather", "(", "*", "tasks", ",", "return_exceptions", "=", "True", ")", "outcomes", "=", "[", "]", "try", ":", "outcomes", "=", "await", "asyncio", ".", "wait_for", "(", "finished", ",", "timeout", "=", "self", ".", "_stop_timeout", ")", "except", "asyncio", ".", "TimeoutError", "as", "err", ":", "# See discussion here: https://github.com/python/asyncio/issues/253#issuecomment-120138132", "# This prevents a nuisance log error message, finished is guaranteed", "# to be cancelled but not awaited when wait_for() has a timeout.", "try", ":", "outcomes", "=", "await", "finished", "except", "asyncio", ".", "CancelledError", ":", "pass", "# See https://mail.python.org/pipermail/python-3000/2008-May/013740.html", "# for why we need to explictly name the error here", "raise", "err", "finally", ":", "self", ".", "stopped", "=", "True", "for", "outcome", "in", "outcomes", ":", "if", "isinstance", "(", "outcome", ",", "Exception", ")", "and", "not", "isinstance", "(", "outcome", ",", "asyncio", ".", "CancelledError", ")", ":", "self", ".", "_logger", ".", "error", "(", "outcome", ")", "if", "self", "in", "self", ".", "_loop", ".", "tasks", ":", "self", ".", "_loop", ".", "tasks", ".", "remove", "(", "self", ")" ]
Stop this task and wait until it and all its subtasks end. This function will finalize this task either by using the finalizer function passed during creation or by calling task.cancel() if no finalizer was passed. It will then call join() on this task and any registered subtasks with the given maximum timeout, raising asyncio.TimeoutError if the tasks did not exit within the given timeout. This method should only be called once. After this method returns, the task is finished and no more subtasks can be added. If this task is being tracked inside of the BackgroundEventLoop that it is part of, it will automatically be removed from the event loop's list of tasks.
[ "Stop", "this", "task", "and", "wait", "until", "it", "and", "all", "its", "subtasks", "end", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/async_tools/event_loop.py#L163-L227
train
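Because stop() is a coroutine, it must be awaited from inside the BackgroundEventLoop. The snippet below sketches that, assuming parent_task is a running BackgroundTask.

async def _shutdown(parent_task):
    # Raises asyncio.TimeoutError if the task tree does not exit within its stop_timeout.
    await parent_task.stop()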
iotile/coretools
iotilecore/iotile/core/utilities/async_tools/event_loop.py
BackgroundTask.stop_threadsafe
def stop_threadsafe(self): """Stop this task from another thread and wait for it to finish. This method must not be called from within the BackgroundEventLoop but will inject self.stop() into the event loop and block until it returns. Raises: TimeoutExpiredError: If the task does not stop in the given timeout specified in __init__() """ if self.stopped: return try: self._loop.run_coroutine(self.stop()) except asyncio.TimeoutError: raise TimeoutExpiredError("Timeout stopping task {} with {} subtasks".format(self.name, len(self.subtasks)))
python
def stop_threadsafe(self): """Stop this task from another thread and wait for it to finish. This method must not be called from within the BackgroundEventLoop but will inject self.stop() into the event loop and block until it returns. Raises: TimeoutExpiredError: If the task does not stop in the given timeout specified in __init__() """ if self.stopped: return try: self._loop.run_coroutine(self.stop()) except asyncio.TimeoutError: raise TimeoutExpiredError("Timeout stopping task {} with {} subtasks".format(self.name, len(self.subtasks)))
[ "def", "stop_threadsafe", "(", "self", ")", ":", "if", "self", ".", "stopped", ":", "return", "try", ":", "self", ".", "_loop", ".", "run_coroutine", "(", "self", ".", "stop", "(", ")", ")", "except", "asyncio", ".", "TimeoutError", ":", "raise", "TimeoutExpiredError", "(", "\"Timeout stopping task {} with {} subtasks\"", ".", "format", "(", "self", ".", "name", ",", "len", "(", "self", ".", "subtasks", ")", ")", ")" ]
Stop this task from another thread and wait for it to finish. This method must not be called from within the BackgroundEventLoop but will inject self.stop() into the event loop and block until it returns. Raises: TimeoutExpiredError: If the task does not stop in the given timeout specified in __init__()
[ "Stop", "this", "task", "from", "another", "thread", "and", "wait", "for", "it", "to", "finish", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/async_tools/event_loop.py#L229-L247
train
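From any thread other than the loop thread itself, stop_threadsafe is the blocking counterpart. A sketch follows; the import path for TimeoutExpiredError is not shown in this excerpt.

try:
    parent_task.stop_threadsafe()
except TimeoutExpiredError:  # exception import path not part of this excerpt
    # The task (or one of its subtasks) did not stop within its stop_timeout.
    pass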
iotile/coretools
iotilecore/iotile/core/utilities/async_tools/event_loop.py
BackgroundEventLoop.start
def start(self, aug='EventLoopThread'): """Ensure the background loop is running. This method is safe to call multiple times. If the loop is already running, it will not do anything. """ if self.stopping: raise LoopStoppingError("Cannot perform action while loop is stopping.") if not self.loop: self._logger.debug("Starting event loop") self.loop = asyncio.new_event_loop() self.thread = threading.Thread(target=self._loop_thread_main, name=aug, daemon=True) self.thread.start()
python
def start(self, aug='EventLoopThread'): """Ensure the background loop is running. This method is safe to call multiple times. If the loop is already running, it will not do anything. """ if self.stopping: raise LoopStoppingError("Cannot perform action while loop is stopping.") if not self.loop: self._logger.debug("Starting event loop") self.loop = asyncio.new_event_loop() self.thread = threading.Thread(target=self._loop_thread_main, name=aug, daemon=True) self.thread.start()
[ "def", "start", "(", "self", ",", "aug", "=", "'EventLoopThread'", ")", ":", "if", "self", ".", "stopping", ":", "raise", "LoopStoppingError", "(", "\"Cannot perform action while loop is stopping.\"", ")", "if", "not", "self", ".", "loop", ":", "self", ".", "_logger", ".", "debug", "(", "\"Starting event loop\"", ")", "self", ".", "loop", "=", "asyncio", ".", "new_event_loop", "(", ")", "self", ".", "thread", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "_loop_thread_main", ",", "name", "=", "aug", ",", "daemon", "=", "True", ")", "self", ".", "thread", ".", "start", "(", ")" ]
Ensure the background loop is running. This method is safe to call multiple times. If the loop is already running, it will not do anything.
[ "Ensure", "the", "background", "loop", "is", "running", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/async_tools/event_loop.py#L281-L295
train
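A lifecycle sketch for BackgroundEventLoop. Constructing it with no arguments is an assumption (the constructor is not shown), and whether run_coroutine returns the coroutine's result is likewise assumed; the start/stop calls themselves match the methods in this excerpt.

import asyncio

loop = BackgroundEventLoop()          # assumed no-argument construction
loop.start()                          # spawns the daemon loop thread
loop.start()                          # safe: the loop is already running, so this is a no-op
loop.run_coroutine(asyncio.sleep(0))  # schedule work on the loop and block until done (assumed behaviour)
loop.stop()                           # must be called from outside the loop thread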
iotile/coretools
iotilecore/iotile/core/utilities/async_tools/event_loop.py
BackgroundEventLoop.wait_for_interrupt
def wait_for_interrupt(self, check_interval=1.0, max_time=None): """Run the event loop until we receive a ctrl-c interrupt or max_time passes. This method will wake up every 1 second by default to check for any interrupt signals or if the maximum runtime has expired. This can be set lower for testing purpose to reduce latency but in production settings, this can cause increased CPU usage so 1 second is an appropriate value. Args: check_interval (float): How often to wake up and check for a SIGTERM. Defaults to 1s. Setting this faster is useful for unit testing. Cannot be < 0.01 s. max_time (float): Stop the event loop after max_time seconds. This is useful for testing purposes. Defaults to None, which means run forever until interrupt. """ self.start() wait = max(check_interval, 0.01) accum = 0 try: while max_time is None or accum < max_time: try: time.sleep(wait) except IOError: pass # IOError comes when this call is interrupted in a signal handler accum += wait except KeyboardInterrupt: pass
python
def wait_for_interrupt(self, check_interval=1.0, max_time=None): """Run the event loop until we receive a ctrl-c interrupt or max_time passes. This method will wake up every 1 second by default to check for any interrupt signals or if the maximum runtime has expired. This can be set lower for testing purpose to reduce latency but in production settings, this can cause increased CPU usage so 1 second is an appropriate value. Args: check_interval (float): How often to wake up and check for a SIGTERM. Defaults to 1s. Setting this faster is useful for unit testing. Cannot be < 0.01 s. max_time (float): Stop the event loop after max_time seconds. This is useful for testing purposes. Defaults to None, which means run forever until interrupt. """ self.start() wait = max(check_interval, 0.01) accum = 0 try: while max_time is None or accum < max_time: try: time.sleep(wait) except IOError: pass # IOError comes when this call is interrupted in a signal handler accum += wait except KeyboardInterrupt: pass
[ "def", "wait_for_interrupt", "(", "self", ",", "check_interval", "=", "1.0", ",", "max_time", "=", "None", ")", ":", "self", ".", "start", "(", ")", "wait", "=", "max", "(", "check_interval", ",", "0.01", ")", "accum", "=", "0", "try", ":", "while", "max_time", "is", "None", "or", "accum", "<", "max_time", ":", "try", ":", "time", ".", "sleep", "(", "wait", ")", "except", "IOError", ":", "pass", "# IOError comes when this call is interrupted in a signal handler", "accum", "+=", "wait", "except", "KeyboardInterrupt", ":", "pass" ]
Run the event loop until we receive a ctrl-c interrupt or max_time passes. This method will wake up every 1 second by default to check for any interrupt signals or if the maximum runtime has expired. This can be set lower for testing purpose to reduce latency but in production settings, this can cause increased CPU usage so 1 second is an appropriate value. Args: check_interval (float): How often to wake up and check for a SIGTERM. Defaults to 1s. Setting this faster is useful for unit testing. Cannot be < 0.01 s. max_time (float): Stop the event loop after max_time seconds. This is useful for testing purposes. Defaults to None, which means run forever until interrupt.
[ "Run", "the", "event", "loop", "until", "we", "receive", "a", "ctrl", "-", "c", "interrupt", "or", "max_time", "passes", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/async_tools/event_loop.py#L297-L329
train
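The record above documents the public blocking entry point, so a small usage sketch may help. It assumes the iotile-core package is installed, that BackgroundEventLoop is importable from the module path shown in this record, and that its constructor needs no arguments; all three are assumptions rather than documented API.

```python
# Minimal sketch: run a background loop until Ctrl-C, a 5 s cap, or stop().
from iotile.core.utilities.async_tools.event_loop import BackgroundEventLoop

loop = BackgroundEventLoop()          # assumed no-argument constructor

# wait_for_interrupt() calls start() internally, so no explicit start() is
# needed.  Wake every 0.1 s (handy in tests) and give up after 5 s.
loop.wait_for_interrupt(check_interval=0.1, max_time=5.0)

loop.stop()                           # synchronous shutdown from this thread
```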
iotile/coretools
iotilecore/iotile/core/utilities/async_tools/event_loop.py
BackgroundEventLoop.stop
def stop(self): """Synchronously stop the background loop from outside. This method will block until the background loop is completely stopped so it cannot be called from inside the loop itself. This method is safe to call multiple times. If the loop is not currently running it will return without doing anything. """ if not self.loop: return if self.inside_loop(): raise InternalError("BackgroundEventLoop.stop() called from inside event loop; " "would have deadlocked.") try: self.run_coroutine(self._stop_internal()) self.thread.join() except: self._logger.exception("Error stopping BackgroundEventLoop") raise finally: self.thread = None self.loop = None self.tasks = set()
python
def stop(self): """Synchronously stop the background loop from outside. This method will block until the background loop is completely stopped so it cannot be called from inside the loop itself. This method is safe to call multiple times. If the loop is not currently running it will return without doing anything. """ if not self.loop: return if self.inside_loop(): raise InternalError("BackgroundEventLoop.stop() called from inside event loop; " "would have deadlocked.") try: self.run_coroutine(self._stop_internal()) self.thread.join() except: self._logger.exception("Error stopping BackgroundEventLoop") raise finally: self.thread = None self.loop = None self.tasks = set()
[ "def", "stop", "(", "self", ")", ":", "if", "not", "self", ".", "loop", ":", "return", "if", "self", ".", "inside_loop", "(", ")", ":", "raise", "InternalError", "(", "\"BackgroundEventLoop.stop() called from inside event loop; \"", "\"would have deadlocked.\"", ")", "try", ":", "self", ".", "run_coroutine", "(", "self", ".", "_stop_internal", "(", ")", ")", "self", ".", "thread", ".", "join", "(", ")", "except", ":", "self", ".", "_logger", ".", "exception", "(", "\"Error stopping BackgroundEventLoop\"", ")", "raise", "finally", ":", "self", ".", "thread", "=", "None", "self", ".", "loop", "=", "None", "self", ".", "tasks", "=", "set", "(", ")" ]
Synchronously stop the background loop from outside. This method will block until the background loop is completely stopped so it cannot be called from inside the loop itself. This method is safe to call multiple times. If the loop is not currently running it will return without doing anything.
[ "Synchronously", "stop", "the", "background", "loop", "from", "outside", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/async_tools/event_loop.py#L331-L357
train
iotile/coretools
iotilecore/iotile/core/utilities/async_tools/event_loop.py
BackgroundEventLoop._stop_internal
async def _stop_internal(self): """Cleanly stop the event loop after shutting down all tasks.""" # Make sure we only try to stop once if self.stopping is True: return self.stopping = True awaitables = [task.stop() for task in self.tasks] results = await asyncio.gather(*awaitables, return_exceptions=True) for task, result in zip(self.tasks, results): if isinstance(result, Exception): self._logger.error("Error stopping task %s: %s", task.name, repr(result)) # It is important to defer this call by one loop cycle so # that this coroutine is finalized and anyone blocking on it # resumes execution. self.loop.call_soon(self.loop.stop)
python
async def _stop_internal(self): """Cleanly stop the event loop after shutting down all tasks.""" # Make sure we only try to stop once if self.stopping is True: return self.stopping = True awaitables = [task.stop() for task in self.tasks] results = await asyncio.gather(*awaitables, return_exceptions=True) for task, result in zip(self.tasks, results): if isinstance(result, Exception): self._logger.error("Error stopping task %s: %s", task.name, repr(result)) # It is important to defer this call by one loop cycle so # that this coroutine is finalized and anyone blocking on it # resumes execution. self.loop.call_soon(self.loop.stop)
[ "async", "def", "_stop_internal", "(", "self", ")", ":", "# Make sure we only try to stop once", "if", "self", ".", "stopping", "is", "True", ":", "return", "self", ".", "stopping", "=", "True", "awaitables", "=", "[", "task", ".", "stop", "(", ")", "for", "task", "in", "self", ".", "tasks", "]", "results", "=", "await", "asyncio", ".", "gather", "(", "*", "awaitables", ",", "return_exceptions", "=", "True", ")", "for", "task", ",", "result", "in", "zip", "(", "self", ".", "tasks", ",", "results", ")", ":", "if", "isinstance", "(", "result", ",", "Exception", ")", ":", "self", ".", "_logger", ".", "error", "(", "\"Error stopping task %s: %s\"", ",", "task", ".", "name", ",", "repr", "(", "result", ")", ")", "# It is important to defer this call by one loop cycle so", "# that this coroutine is finalized and anyone blocking on it", "# resumes execution.", "self", ".", "loop", ".", "call_soon", "(", "self", ".", "loop", ".", "stop", ")" ]
Cleanly stop the event loop after shutting down all tasks.
[ "Cleanly", "stop", "the", "event", "loop", "after", "shutting", "down", "all", "tasks", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/async_tools/event_loop.py#L388-L406
train
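The shutdown helper above relies on asyncio.gather(..., return_exceptions=True) so that one failing task cannot abort the cleanup of the others. Below is a generic, standalone sketch of that pattern; the task objects and their stop() coroutines are placeholders, not part of this codebase.

```python
import asyncio

async def stop_all(tasks, logger=None):
    """Run every task's stop() concurrently and report, not raise, failures."""
    results = await asyncio.gather(*(task.stop() for task in tasks),
                                   return_exceptions=True)

    for task, result in zip(tasks, results):
        if isinstance(result, Exception) and logger is not None:
            logger.error("Error stopping task %s: %r", task, result)
```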
iotile/coretools
iotilecore/iotile/core/utilities/async_tools/event_loop.py
BackgroundEventLoop._loop_thread_main
def _loop_thread_main(self): """Main background thread running the event loop.""" asyncio.set_event_loop(self.loop) self._loop_check.inside_loop = True try: self._logger.debug("Starting loop in background thread") self.loop.run_forever() self._logger.debug("Finished loop in background thread") except: # pylint:disable=bare-except;This is a background worker thread. self._logger.exception("Exception raised from event loop thread") finally: self.loop.close()
python
def _loop_thread_main(self): """Main background thread running the event loop.""" asyncio.set_event_loop(self.loop) self._loop_check.inside_loop = True try: self._logger.debug("Starting loop in background thread") self.loop.run_forever() self._logger.debug("Finished loop in background thread") except: # pylint:disable=bare-except;This is a background worker thread. self._logger.exception("Exception raised from event loop thread") finally: self.loop.close()
[ "def", "_loop_thread_main", "(", "self", ")", ":", "asyncio", ".", "set_event_loop", "(", "self", ".", "loop", ")", "self", ".", "_loop_check", ".", "inside_loop", "=", "True", "try", ":", "self", ".", "_logger", ".", "debug", "(", "\"Starting loop in background thread\"", ")", "self", ".", "loop", ".", "run_forever", "(", ")", "self", ".", "_logger", ".", "debug", "(", "\"Finished loop in background thread\"", ")", "except", ":", "# pylint:disable=bare-except;This is a background worker thread.", "self", ".", "_logger", ".", "exception", "(", "\"Exception raised from event loop thread\"", ")", "finally", ":", "self", ".", "loop", ".", "close", "(", ")" ]
Main background thread running the event loop.
[ "Main", "background", "thread", "running", "the", "event", "loop", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/async_tools/event_loop.py#L408-L421
train
iotile/coretools
iotilecore/iotile/core/utilities/async_tools/event_loop.py
BackgroundEventLoop.add_task
def add_task(self, cor, name=None, finalizer=None, stop_timeout=1.0, parent=None): """Schedule a task to run on the background event loop. This method will start the given coroutine as a task and keep track of it so that it can be properly shutdown which the event loop is stopped. If parent is None, the task will be stopped by calling finalizer() inside the event loop and then awaiting the task. If finalizer is None then task.cancel() will be called to stop the task. If finalizer is specified, it is called with a single argument (self, this BackgroundTask). Finalizer can be a simple function, or any awaitable. If it is an awaitable it will be awaited. If parent is not None, it must be a BackgroundTask object previously created by a call to BackgroundEventLoop.add_task() and this task will be registered as a subtask of that task. It is that task's job then to cancel this task or otherwise stop it when it is stopped. This method is safe to call either from inside the event loop itself or from any other thread without fear of deadlock or race. Args: cor (coroutine or asyncio.Task): An asyncio Task or the coroutine that we should execute as a task. If a coroutine is given it is scheduled as a task in threadsafe manner automatically. name (str): The name of the task for pretty printing and debug purposes. If not specified, it defaults to the underlying asyncio task object instance name. finalizer (callable): An optional callable that should be invoked to cancel the task. If not specified, calling stop() will result in cancel() being called on the underlying task. stop_timeout (float): The maximum amount of time to wait for this task to stop when stop() is called in seconds. None indicates an unlimited amount of time. Default is 1. This is ignored if parent is not None. parent (BackgroundTask): A previously created task that will take responsibility for stopping this task when it is stopped. Returns: BackgroundTask: The BackgroundTask representing this task. """ if self.stopping: raise LoopStoppingError("Cannot add task because loop is stopping") # Ensure the loop exists and is started self.start() if parent is not None and parent not in self.tasks: raise ArgumentError("Designated parent task {} is not registered".format(parent)) task = BackgroundTask(cor, name, finalizer, stop_timeout, loop=self) if parent is None: self.tasks.add(task) self._logger.debug("Added primary task %s", task.name) else: parent.add_subtask(task) self._logger.debug("Added subtask %s to parent %s", task.name, parent.name) return task
python
def add_task(self, cor, name=None, finalizer=None, stop_timeout=1.0, parent=None): """Schedule a task to run on the background event loop. This method will start the given coroutine as a task and keep track of it so that it can be properly shutdown which the event loop is stopped. If parent is None, the task will be stopped by calling finalizer() inside the event loop and then awaiting the task. If finalizer is None then task.cancel() will be called to stop the task. If finalizer is specified, it is called with a single argument (self, this BackgroundTask). Finalizer can be a simple function, or any awaitable. If it is an awaitable it will be awaited. If parent is not None, it must be a BackgroundTask object previously created by a call to BackgroundEventLoop.add_task() and this task will be registered as a subtask of that task. It is that task's job then to cancel this task or otherwise stop it when it is stopped. This method is safe to call either from inside the event loop itself or from any other thread without fear of deadlock or race. Args: cor (coroutine or asyncio.Task): An asyncio Task or the coroutine that we should execute as a task. If a coroutine is given it is scheduled as a task in threadsafe manner automatically. name (str): The name of the task for pretty printing and debug purposes. If not specified, it defaults to the underlying asyncio task object instance name. finalizer (callable): An optional callable that should be invoked to cancel the task. If not specified, calling stop() will result in cancel() being called on the underlying task. stop_timeout (float): The maximum amount of time to wait for this task to stop when stop() is called in seconds. None indicates an unlimited amount of time. Default is 1. This is ignored if parent is not None. parent (BackgroundTask): A previously created task that will take responsibility for stopping this task when it is stopped. Returns: BackgroundTask: The BackgroundTask representing this task. """ if self.stopping: raise LoopStoppingError("Cannot add task because loop is stopping") # Ensure the loop exists and is started self.start() if parent is not None and parent not in self.tasks: raise ArgumentError("Designated parent task {} is not registered".format(parent)) task = BackgroundTask(cor, name, finalizer, stop_timeout, loop=self) if parent is None: self.tasks.add(task) self._logger.debug("Added primary task %s", task.name) else: parent.add_subtask(task) self._logger.debug("Added subtask %s to parent %s", task.name, parent.name) return task
[ "def", "add_task", "(", "self", ",", "cor", ",", "name", "=", "None", ",", "finalizer", "=", "None", ",", "stop_timeout", "=", "1.0", ",", "parent", "=", "None", ")", ":", "if", "self", ".", "stopping", ":", "raise", "LoopStoppingError", "(", "\"Cannot add task because loop is stopping\"", ")", "# Ensure the loop exists and is started", "self", ".", "start", "(", ")", "if", "parent", "is", "not", "None", "and", "parent", "not", "in", "self", ".", "tasks", ":", "raise", "ArgumentError", "(", "\"Designated parent task {} is not registered\"", ".", "format", "(", "parent", ")", ")", "task", "=", "BackgroundTask", "(", "cor", ",", "name", ",", "finalizer", ",", "stop_timeout", ",", "loop", "=", "self", ")", "if", "parent", "is", "None", ":", "self", ".", "tasks", ".", "add", "(", "task", ")", "self", ".", "_logger", ".", "debug", "(", "\"Added primary task %s\"", ",", "task", ".", "name", ")", "else", ":", "parent", ".", "add_subtask", "(", "task", ")", "self", ".", "_logger", ".", "debug", "(", "\"Added subtask %s to parent %s\"", ",", "task", ".", "name", ",", "parent", ".", "name", ")", "return", "task" ]
Schedule a task to run on the background event loop. This method will start the given coroutine as a task and keep track of it so that it can be properly shut down when the event loop is stopped. If parent is None, the task will be stopped by calling finalizer() inside the event loop and then awaiting the task. If finalizer is None then task.cancel() will be called to stop the task. If finalizer is specified, it is called with a single argument (self, this BackgroundTask). Finalizer can be a simple function, or any awaitable. If it is an awaitable it will be awaited. If parent is not None, it must be a BackgroundTask object previously created by a call to BackgroundEventLoop.add_task() and this task will be registered as a subtask of that task. It is then that task's job to cancel this task or otherwise stop it when it is stopped. This method is safe to call either from inside the event loop itself or from any other thread without fear of deadlock or race. Args: cor (coroutine or asyncio.Task): An asyncio Task or the coroutine that we should execute as a task. If a coroutine is given it is scheduled as a task in a threadsafe manner automatically. name (str): The name of the task for pretty printing and debug purposes. If not specified, it defaults to the underlying asyncio task object instance name. finalizer (callable): An optional callable that should be invoked to cancel the task. If not specified, calling stop() will result in cancel() being called on the underlying task. stop_timeout (float): The maximum amount of time to wait for this task to stop when stop() is called, in seconds. None indicates an unlimited amount of time. Default is 1. This is ignored if parent is not None. parent (BackgroundTask): A previously created task that will take responsibility for stopping this task when it is stopped. Returns: BackgroundTask: The BackgroundTask representing this task.
[ "Schedule", "a", "task", "to", "run", "on", "the", "background", "event", "loop", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/async_tools/event_loop.py#L424-L487
train
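Since add_task() is the main way work gets registered with the loop, here is a hedged sketch of the finalizer flow. The import path and no-argument constructor are the same assumptions as in the earlier sketch; heartbeat() and finalizer() are invented names used purely for illustration.

```python
import asyncio
import time
from iotile.core.utilities.async_tools.event_loop import BackgroundEventLoop

loop = BackgroundEventLoop()
stop_requested = asyncio.Event()

async def heartbeat():
    # Illustrative long-running task that exits cleanly when asked to.
    while not stop_requested.is_set():
        await asyncio.sleep(0.5)

async def finalizer(task):
    # Per the docstring above: called with the BackgroundTask as its single
    # argument and awaited because it is a coroutine.
    stop_requested.set()

task = loop.add_task(heartbeat(), name="heartbeat",
                     finalizer=finalizer, stop_timeout=2.0)

time.sleep(1.5)
loop.stop()   # runs the finalizer and awaits heartbeat() during shutdown
```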
iotile/coretools
iotilecore/iotile/core/utilities/async_tools/event_loop.py
BackgroundEventLoop.run_coroutine
def run_coroutine(self, cor, *args, **kwargs): """Run a coroutine to completion and return its result. This method may only be called outside of the event loop. Attempting to call it from inside the event loop would deadlock and will raise InternalError instead. Args: cor (coroutine): The coroutine that we wish to run in the background and wait until it finishes. Returns: object: Whatever the coroutine cor returns. """ if self.stopping: raise LoopStoppingError("Could not launch coroutine because loop is shutting down: %s" % cor) self.start() cor = _instaniate_coroutine(cor, args, kwargs) if self.inside_loop(): raise InternalError("BackgroundEventLoop.run_coroutine called from inside event loop, " "would have deadlocked.") future = self.launch_coroutine(cor) return future.result()
python
def run_coroutine(self, cor, *args, **kwargs): """Run a coroutine to completion and return its result. This method may only be called outside of the event loop. Attempting to call it from inside the event loop would deadlock and will raise InternalError instead. Args: cor (coroutine): The coroutine that we wish to run in the background and wait until it finishes. Returns: object: Whatever the coroutine cor returns. """ if self.stopping: raise LoopStoppingError("Could not launch coroutine because loop is shutting down: %s" % cor) self.start() cor = _instaniate_coroutine(cor, args, kwargs) if self.inside_loop(): raise InternalError("BackgroundEventLoop.run_coroutine called from inside event loop, " "would have deadlocked.") future = self.launch_coroutine(cor) return future.result()
[ "def", "run_coroutine", "(", "self", ",", "cor", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "stopping", ":", "raise", "LoopStoppingError", "(", "\"Could not launch coroutine because loop is shutting down: %s\"", "%", "cor", ")", "self", ".", "start", "(", ")", "cor", "=", "_instaniate_coroutine", "(", "cor", ",", "args", ",", "kwargs", ")", "if", "self", ".", "inside_loop", "(", ")", ":", "raise", "InternalError", "(", "\"BackgroundEventLoop.run_coroutine called from inside event loop, \"", "\"would have deadlocked.\"", ")", "future", "=", "self", ".", "launch_coroutine", "(", "cor", ")", "return", "future", ".", "result", "(", ")" ]
Run a coroutine to completion and return its result. This method may only be called outside of the event loop. Attempting to call it from inside the event loop would deadlock and will raise InternalError instead. Args: cor (coroutine): The coroutine that we wish to run in the background and wait until it finishes. Returns: object: Whatever the coroutine cor returns.
[ "Run", "a", "coroutine", "to", "completion", "and", "return", "its", "result", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/async_tools/event_loop.py#L489-L516
train
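A short sketch of the blocking call pattern run_coroutine() describes: submit a coroutine from an ordinary thread and wait for its result. Import path and constructor are the same assumptions as before.

```python
import asyncio
from iotile.core.utilities.async_tools.event_loop import BackgroundEventLoop

loop = BackgroundEventLoop()

async def add(a, b):
    await asyncio.sleep(0.1)
    return a + b

result = loop.run_coroutine(add(1, 2))   # blocks this thread until done
assert result == 3
loop.stop()
```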
iotile/coretools
iotilecore/iotile/core/utilities/async_tools/event_loop.py
BackgroundEventLoop.log_coroutine
def log_coroutine(self, cor, *args, **kwargs): """Run a coroutine logging any exception raised. This routine will not block until the coroutine is finished nor will it return any result. It will just log if any exception is raised by the coroutine during operation. It is safe to call from both inside and outside the event loop. There is no guarantee on how soon the coroutine will be scheduled. Args: cor (coroutine): The coroutine that we wish to run in the background and wait until it finishes. """ if self.stopping: raise LoopStoppingError("Could not launch coroutine because loop is shutting down: %s" % cor) self.start() cor = _instaniate_coroutine(cor, args, kwargs) def _run_and_log(): task = self.loop.create_task(cor) task.add_done_callback(lambda x: _log_future_exception(x, self._logger)) if self.inside_loop(): _run_and_log() else: self.loop.call_soon_threadsafe(_run_and_log)
python
def log_coroutine(self, cor, *args, **kwargs): """Run a coroutine logging any exception raised. This routine will not block until the coroutine is finished nor will it return any result. It will just log if any exception is raised by the coroutine during operation. It is safe to call from both inside and outside the event loop. There is no guarantee on how soon the coroutine will be scheduled. Args: cor (coroutine): The coroutine that we wish to run in the background and wait until it finishes. """ if self.stopping: raise LoopStoppingError("Could not launch coroutine because loop is shutting down: %s" % cor) self.start() cor = _instaniate_coroutine(cor, args, kwargs) def _run_and_log(): task = self.loop.create_task(cor) task.add_done_callback(lambda x: _log_future_exception(x, self._logger)) if self.inside_loop(): _run_and_log() else: self.loop.call_soon_threadsafe(_run_and_log)
[ "def", "log_coroutine", "(", "self", ",", "cor", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "stopping", ":", "raise", "LoopStoppingError", "(", "\"Could not launch coroutine because loop is shutting down: %s\"", "%", "cor", ")", "self", ".", "start", "(", ")", "cor", "=", "_instaniate_coroutine", "(", "cor", ",", "args", ",", "kwargs", ")", "def", "_run_and_log", "(", ")", ":", "task", "=", "self", ".", "loop", ".", "create_task", "(", "cor", ")", "task", ".", "add_done_callback", "(", "lambda", "x", ":", "_log_future_exception", "(", "x", ",", "self", ".", "_logger", ")", ")", "if", "self", ".", "inside_loop", "(", ")", ":", "_run_and_log", "(", ")", "else", ":", "self", ".", "loop", ".", "call_soon_threadsafe", "(", "_run_and_log", ")" ]
Run a coroutine logging any exception raised. This routine will not block until the coroutine is finished nor will it return any result. It will just log if any exception is raised by the coroutine during operation. It is safe to call from both inside and outside the event loop. There is no guarantee on how soon the coroutine will be scheduled. Args: cor (coroutine): The coroutine that we wish to run in the background and wait until it finishes.
[ "Run", "a", "coroutine", "logging", "any", "exception", "raised", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/async_tools/event_loop.py#L552-L582
train
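For contrast with run_coroutine(), here is a fire-and-forget sketch of log_coroutine(): nothing is returned, and an exception shows up in the loop's logger rather than in the caller. Same import and constructor assumptions as above.

```python
import logging
import time
from iotile.core.utilities.async_tools.event_loop import BackgroundEventLoop

logging.basicConfig(level=logging.DEBUG)
loop = BackgroundEventLoop()

async def flaky():
    raise RuntimeError("logged by the loop, never raised to the caller")

loop.log_coroutine(flaky())   # returns immediately, no result is surfaced
time.sleep(0.5)               # give the loop a moment to run and log it
loop.stop()
```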
iotile/coretools
iotile_ext_cloud/iotile/cloud/config.py
link_cloud
def link_cloud(self, username=None, password=None, device_id=None): """Create and store a token for interacting with the IOTile Cloud API. You will need to call link_cloud once for each virtualenv that you create and want to use with any api calls that touch iotile cloud. Note that this method is called on a ConfigManager instance If you do not pass your username or password it will be prompted from you securely on stdin. If you are logging in for a user, the token will expire periodically and you will have to relogin. If you pass a device_id, you can obtain a limited token for that device that will never expire, assuming you have access to that device. Args: username (string): Your iotile.cloud username. This is prompted from stdin if not provided. password (string): Your iotile.cloud password. This is prompted from stdin if not provided. device_id (int): Optional device id to obtain permanent credentials for a device. """ reg = ComponentRegistry() domain = self.get('cloud:server') if username is None: prompt_str = "Please enter your IOTile.cloud email: " username = input(prompt_str) if password is None: prompt_str = "Please enter your IOTile.cloud password: " password = getpass.getpass(prompt_str) cloud = Api(domain=domain) ok_resp = cloud.login(email=username, password=password) if not ok_resp: raise ArgumentError("Could not login to iotile.cloud as user %s" % username) reg.set_config('arch:cloud_user', cloud.username) reg.set_config('arch:cloud_token', cloud.token) reg.set_config('arch:cloud_token_type', cloud.token_type) if device_id is not None: cloud = IOTileCloud() cloud.impersonate_device(device_id)
python
def link_cloud(self, username=None, password=None, device_id=None): """Create and store a token for interacting with the IOTile Cloud API. You will need to call link_cloud once for each virtualenv that you create and want to use with any api calls that touch iotile cloud. Note that this method is called on a ConfigManager instance If you do not pass your username or password it will be prompted from you securely on stdin. If you are logging in for a user, the token will expire periodically and you will have to relogin. If you pass a device_id, you can obtain a limited token for that device that will never expire, assuming you have access to that device. Args: username (string): Your iotile.cloud username. This is prompted from stdin if not provided. password (string): Your iotile.cloud password. This is prompted from stdin if not provided. device_id (int): Optional device id to obtain permanent credentials for a device. """ reg = ComponentRegistry() domain = self.get('cloud:server') if username is None: prompt_str = "Please enter your IOTile.cloud email: " username = input(prompt_str) if password is None: prompt_str = "Please enter your IOTile.cloud password: " password = getpass.getpass(prompt_str) cloud = Api(domain=domain) ok_resp = cloud.login(email=username, password=password) if not ok_resp: raise ArgumentError("Could not login to iotile.cloud as user %s" % username) reg.set_config('arch:cloud_user', cloud.username) reg.set_config('arch:cloud_token', cloud.token) reg.set_config('arch:cloud_token_type', cloud.token_type) if device_id is not None: cloud = IOTileCloud() cloud.impersonate_device(device_id)
[ "def", "link_cloud", "(", "self", ",", "username", "=", "None", ",", "password", "=", "None", ",", "device_id", "=", "None", ")", ":", "reg", "=", "ComponentRegistry", "(", ")", "domain", "=", "self", ".", "get", "(", "'cloud:server'", ")", "if", "username", "is", "None", ":", "prompt_str", "=", "\"Please enter your IOTile.cloud email: \"", "username", "=", "input", "(", "prompt_str", ")", "if", "password", "is", "None", ":", "prompt_str", "=", "\"Please enter your IOTile.cloud password: \"", "password", "=", "getpass", ".", "getpass", "(", "prompt_str", ")", "cloud", "=", "Api", "(", "domain", "=", "domain", ")", "ok_resp", "=", "cloud", ".", "login", "(", "email", "=", "username", ",", "password", "=", "password", ")", "if", "not", "ok_resp", ":", "raise", "ArgumentError", "(", "\"Could not login to iotile.cloud as user %s\"", "%", "username", ")", "reg", ".", "set_config", "(", "'arch:cloud_user'", ",", "cloud", ".", "username", ")", "reg", ".", "set_config", "(", "'arch:cloud_token'", ",", "cloud", ".", "token", ")", "reg", ".", "set_config", "(", "'arch:cloud_token_type'", ",", "cloud", ".", "token_type", ")", "if", "device_id", "is", "not", "None", ":", "cloud", "=", "IOTileCloud", "(", ")", "cloud", ".", "impersonate_device", "(", "device_id", ")" ]
Create and store a token for interacting with the IOTile Cloud API. You will need to call link_cloud once for each virtualenv that you create and want to use with any api calls that touch iotile cloud. Note that this method is called on a ConfigManager instance. If you do not pass your username or password, you will be prompted for them securely on stdin. If you are logging in for a user, the token will expire periodically and you will have to log in again. If you pass a device_id, you can obtain a limited token for that device that will never expire, assuming you have access to that device. Args: username (string): Your iotile.cloud username. This is prompted from stdin if not provided. password (string): Your iotile.cloud password. This is prompted from stdin if not provided. device_id (int): Optional device id to obtain permanent credentials for a device.
[ "Create", "and", "store", "a", "token", "for", "interacting", "with", "the", "IOTile", "Cloud", "API", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotile_ext_cloud/iotile/cloud/config.py#L14-L65
train
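Because the docstring notes that link_cloud is called on a ConfigManager instance, a hedged sketch of performing the link from Python rather than the command line follows. The ConfigManager import path is a guess based on CoreTools' usual layout, and the email address is a placeholder.

```python
from iotile.core.dev.config import ConfigManager   # assumed import path

manager = ConfigManager()
# Password is omitted, so it will be prompted securely on stdin; passing a
# device_id would additionally fetch a non-expiring device token.
manager.link_cloud(username="user@example.com")
```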
iotile/coretools
iotilecore/iotile/core/utilities/kvstore_json.py
JSONKVStore._load_file
def _load_file(self): """Load all entries from json backing file """ if not os.path.exists(self.file): return {} with open(self.file, "r") as infile: data = json.load(infile) return data
python
def _load_file(self): """Load all entries from json backing file """ if not os.path.exists(self.file): return {} with open(self.file, "r") as infile: data = json.load(infile) return data
[ "def", "_load_file", "(", "self", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "file", ")", ":", "return", "{", "}", "with", "open", "(", "self", ".", "file", ",", "\"r\"", ")", "as", "infile", ":", "data", "=", "json", ".", "load", "(", "infile", ")", "return", "data" ]
Load all entries from json backing file
[ "Load", "all", "entries", "from", "json", "backing", "file" ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/kvstore_json.py#L48-L58
train
iotile/coretools
iotilecore/iotile/core/utilities/kvstore_json.py
JSONKVStore._save_file
def _save_file(self, data): """Attempt to atomically save file by saving and then moving into position The goal is to make it difficult for a crash to corrupt our data file since the move operation can be made atomic if needed on mission critical filesystems. """ if platform.system() == 'Windows': with open(self.file, "w") as outfile: json.dump(data, outfile) else: newpath = self.file + '.new' with open(newpath, "w") as outfile: json.dump(data, outfile) os.rename( os.path.realpath(newpath), os.path.realpath(self.file) )
python
def _save_file(self, data): """Attempt to atomically save file by saving and then moving into position The goal is to make it difficult for a crash to corrupt our data file since the move operation can be made atomic if needed on mission critical filesystems. """ if platform.system() == 'Windows': with open(self.file, "w") as outfile: json.dump(data, outfile) else: newpath = self.file + '.new' with open(newpath, "w") as outfile: json.dump(data, outfile) os.rename( os.path.realpath(newpath), os.path.realpath(self.file) )
[ "def", "_save_file", "(", "self", ",", "data", ")", ":", "if", "platform", ".", "system", "(", ")", "==", "'Windows'", ":", "with", "open", "(", "self", ".", "file", ",", "\"w\"", ")", "as", "outfile", ":", "json", ".", "dump", "(", "data", ",", "outfile", ")", "else", ":", "newpath", "=", "self", ".", "file", "+", "'.new'", "with", "open", "(", "newpath", ",", "\"w\"", ")", "as", "outfile", ":", "json", ".", "dump", "(", "data", ",", "outfile", ")", "os", ".", "rename", "(", "os", ".", "path", ".", "realpath", "(", "newpath", ")", ",", "os", ".", "path", ".", "realpath", "(", "self", ".", "file", ")", ")" ]
Attempt to atomically save the file by writing it out and then moving it into position. The goal is to make it difficult for a crash to corrupt our data file, since the move operation can be made atomic if needed on mission-critical filesystems.
[ "Attempt", "to", "atomically", "save", "file", "by", "saving", "and", "then", "moving", "into", "position" ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/kvstore_json.py#L60-L79
train
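The method above captures a useful general pattern: write the new contents to a sibling file, then rename over the original so readers never see a half-written file. A standalone sketch using os.replace, which overwrites atomically on POSIX and also works on Windows under Python 3, avoiding the platform branch used above:

```python
import json
import os

def atomic_json_dump(path, data):
    """Write data as JSON to path while minimizing the corruption window."""
    tmp_path = path + ".new"
    with open(tmp_path, "w") as outfile:
        json.dump(data, outfile)
    os.replace(tmp_path, path)   # atomic rename-over on POSIX, works on Windows

atomic_json_dump("settings.json", {"key": "value"})
```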
iotile/coretools
iotilecore/iotile/core/utilities/kvstore_json.py
JSONKVStore.remove
def remove(self, key): """Remove a key from the data store Args: key (string): The key to remove Raises: KeyError: if the key was not found """ data = self._load_file() del data[key] self._save_file(data)
python
def remove(self, key): """Remove a key from the data store Args: key (string): The key to remove Raises: KeyError: if the key was not found """ data = self._load_file() del data[key] self._save_file(data)
[ "def", "remove", "(", "self", ",", "key", ")", ":", "data", "=", "self", ".", "_load_file", "(", ")", "del", "data", "[", "key", "]", "self", ".", "_save_file", "(", "data", ")" ]
Remove a key from the data store Args: key (string): The key to remove Raises: KeyError: if the key was not found
[ "Remove", "a", "key", "from", "the", "data", "store" ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/kvstore_json.py#L107-L119
train
iotile/coretools
iotilecore/iotile/core/utilities/kvstore_json.py
JSONKVStore.set
def set(self, key, value): """Set the value of a key Args: key (string): The key used to store this value value (string): The value to store """ data = self._load_file() data[key] = value self._save_file(data)
python
def set(self, key, value): """Set the value of a key Args: key (string): The key used to store this value value (string): The value to store """ data = self._load_file() data[key] = value self._save_file(data)
[ "def", "set", "(", "self", ",", "key", ",", "value", ")", ":", "data", "=", "self", ".", "_load_file", "(", ")", "data", "[", "key", "]", "=", "value", "self", ".", "_save_file", "(", "data", ")" ]
Set the value of a key Args: key (string): The key used to store this value value (string): The value to store
[ "Set", "the", "value", "of", "a", "key" ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/kvstore_json.py#L134-L144
train
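Taken together, the JSONKVStore records in this section describe a small persistent key/value API. A hedged usage sketch follows; the constructor argument is a guess (only self.file is visible in these records), so treat it as illustrative rather than the real signature.

```python
from iotile.core.utilities.kvstore_json import JSONKVStore

store = JSONKVStore("registry.json")               # constructor argument is assumed
store.set("arch:cloud_user", "user@example.com")   # persists on every call
store.remove("arch:cloud_user")                    # raises KeyError if missing
```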
iotile/coretools
iotilesensorgraph/iotile/sg/parser/scopes/trigger_scope.py
TriggerScope.trigger_chain
def trigger_chain(self): """Return a NodeInput tuple for creating a node. Returns: (StreamIdentifier, InputTrigger) """ trigger_stream = self.allocator.attach_stream(self.trigger_stream) return (trigger_stream, self.trigger_cond)
python
def trigger_chain(self): """Return a NodeInput tuple for creating a node. Returns: (StreamIdentifier, InputTrigger) """ trigger_stream = self.allocator.attach_stream(self.trigger_stream) return (trigger_stream, self.trigger_cond)
[ "def", "trigger_chain", "(", "self", ")", ":", "trigger_stream", "=", "self", ".", "allocator", ".", "attach_stream", "(", "self", ".", "trigger_stream", ")", "return", "(", "trigger_stream", ",", "self", ".", "trigger_cond", ")" ]
Return a NodeInput tuple for creating a node. Returns: (StreamIdentifier, InputTrigger)
[ "Return", "a", "NodeInput", "tuple", "for", "creating", "a", "node", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/parser/scopes/trigger_scope.py#L36-L44
train
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/cc.py
generate
def generate(env): """ Add Builders and construction variables for C compilers to an Environment. """ static_obj, shared_obj = SCons.Tool.createObjBuilders(env) for suffix in CSuffixes: static_obj.add_action(suffix, SCons.Defaults.CAction) shared_obj.add_action(suffix, SCons.Defaults.ShCAction) static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter) add_common_cc_variables(env) if 'CC' not in env: env['CC'] = env.Detect(compilers) or compilers[0] env['CFLAGS'] = SCons.Util.CLVar('') env['CCCOM'] = '$CC -o $TARGET -c $CFLAGS $CCFLAGS $_CCCOMCOM $SOURCES' env['SHCC'] = '$CC' env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS') env['SHCCCOM'] = '$SHCC -o $TARGET -c $SHCFLAGS $SHCCFLAGS $_CCCOMCOM $SOURCES' env['CPPDEFPREFIX'] = '-D' env['CPPDEFSUFFIX'] = '' env['INCPREFIX'] = '-I' env['INCSUFFIX'] = '' env['SHOBJSUFFIX'] = '.os' env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0 env['CFILESUFFIX'] = '.c'
python
def generate(env): """ Add Builders and construction variables for C compilers to an Environment. """ static_obj, shared_obj = SCons.Tool.createObjBuilders(env) for suffix in CSuffixes: static_obj.add_action(suffix, SCons.Defaults.CAction) shared_obj.add_action(suffix, SCons.Defaults.ShCAction) static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter) add_common_cc_variables(env) if 'CC' not in env: env['CC'] = env.Detect(compilers) or compilers[0] env['CFLAGS'] = SCons.Util.CLVar('') env['CCCOM'] = '$CC -o $TARGET -c $CFLAGS $CCFLAGS $_CCCOMCOM $SOURCES' env['SHCC'] = '$CC' env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS') env['SHCCCOM'] = '$SHCC -o $TARGET -c $SHCFLAGS $SHCCFLAGS $_CCCOMCOM $SOURCES' env['CPPDEFPREFIX'] = '-D' env['CPPDEFSUFFIX'] = '' env['INCPREFIX'] = '-I' env['INCSUFFIX'] = '' env['SHOBJSUFFIX'] = '.os' env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0 env['CFILESUFFIX'] = '.c'
[ "def", "generate", "(", "env", ")", ":", "static_obj", ",", "shared_obj", "=", "SCons", ".", "Tool", ".", "createObjBuilders", "(", "env", ")", "for", "suffix", "in", "CSuffixes", ":", "static_obj", ".", "add_action", "(", "suffix", ",", "SCons", ".", "Defaults", ".", "CAction", ")", "shared_obj", ".", "add_action", "(", "suffix", ",", "SCons", ".", "Defaults", ".", "ShCAction", ")", "static_obj", ".", "add_emitter", "(", "suffix", ",", "SCons", ".", "Defaults", ".", "StaticObjectEmitter", ")", "shared_obj", ".", "add_emitter", "(", "suffix", ",", "SCons", ".", "Defaults", ".", "SharedObjectEmitter", ")", "add_common_cc_variables", "(", "env", ")", "if", "'CC'", "not", "in", "env", ":", "env", "[", "'CC'", "]", "=", "env", ".", "Detect", "(", "compilers", ")", "or", "compilers", "[", "0", "]", "env", "[", "'CFLAGS'", "]", "=", "SCons", ".", "Util", ".", "CLVar", "(", "''", ")", "env", "[", "'CCCOM'", "]", "=", "'$CC -o $TARGET -c $CFLAGS $CCFLAGS $_CCCOMCOM $SOURCES'", "env", "[", "'SHCC'", "]", "=", "'$CC'", "env", "[", "'SHCFLAGS'", "]", "=", "SCons", ".", "Util", ".", "CLVar", "(", "'$CFLAGS'", ")", "env", "[", "'SHCCCOM'", "]", "=", "'$SHCC -o $TARGET -c $SHCFLAGS $SHCCFLAGS $_CCCOMCOM $SOURCES'", "env", "[", "'CPPDEFPREFIX'", "]", "=", "'-D'", "env", "[", "'CPPDEFSUFFIX'", "]", "=", "''", "env", "[", "'INCPREFIX'", "]", "=", "'-I'", "env", "[", "'INCSUFFIX'", "]", "=", "''", "env", "[", "'SHOBJSUFFIX'", "]", "=", "'.os'", "env", "[", "'STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'", "]", "=", "0", "env", "[", "'CFILESUFFIX'", "]", "=", "'.c'" ]
Add Builders and construction variables for C compilers to an Environment.
[ "Add", "Builders", "and", "construction", "variables", "for", "C", "compilers", "to", "an", "Environment", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/cc.py#L67-L96
train
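generate() is only ever invoked by SCons itself when the 'cc' tool is loaded into an Environment, so a brief sketch of that entry point may help. It assumes SCons is installed and simply inspects a few of the construction variables set above.

```python
from SCons.Environment import Environment

env = Environment(tools=['cc'])   # SCons calls cc.generate(env) here
print(env['CC'])                  # detected compiler, e.g. 'gcc'
print(env['CCCOM'])               # '$CC -o $TARGET -c $CFLAGS $CCFLAGS $_CCCOMCOM $SOURCES'
print(env['INCPREFIX'])           # '-I'
```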
iotile/coretools
iotilesensorgraph/iotile/sg/scripts/iotile_sgrun.py
process_mock_rpc
def process_mock_rpc(input_string): """Process a mock RPC argument. Args: input_string (str): The input string that should be in the format <slot id>:<rpc id> = value """ spec, equals, value = input_string.partition(u'=') if len(equals) == 0: print("Could not parse mock RPC argument: {}".format(input_string)) sys.exit(1) try: value = int(value.strip(), 0) except ValueError as exc: print("Could not parse mock RPC value: {}".format(str(exc))) sys.exit(1) slot, part, rpc_id = spec.partition(u":") if len(part) == 0: print("Could not parse mock RPC slot/rpc definition: {}".format(spec)) sys.exit(1) try: slot = SlotIdentifier.FromString(slot) except ArgumentError as exc: print("Could not parse slot id in mock RPC definition: {}".format(exc.msg)) sys.exit(1) try: rpc_id = int(rpc_id, 0) except ValueError as exc: print("Could not parse mock RPC number: {}".format(str(exc))) sys.exit(1) return slot, rpc_id, value
python
def process_mock_rpc(input_string): """Process a mock RPC argument. Args: input_string (str): The input string that should be in the format <slot id>:<rpc id> = value """ spec, equals, value = input_string.partition(u'=') if len(equals) == 0: print("Could not parse mock RPC argument: {}".format(input_string)) sys.exit(1) try: value = int(value.strip(), 0) except ValueError as exc: print("Could not parse mock RPC value: {}".format(str(exc))) sys.exit(1) slot, part, rpc_id = spec.partition(u":") if len(part) == 0: print("Could not parse mock RPC slot/rpc definition: {}".format(spec)) sys.exit(1) try: slot = SlotIdentifier.FromString(slot) except ArgumentError as exc: print("Could not parse slot id in mock RPC definition: {}".format(exc.msg)) sys.exit(1) try: rpc_id = int(rpc_id, 0) except ValueError as exc: print("Could not parse mock RPC number: {}".format(str(exc))) sys.exit(1) return slot, rpc_id, value
[ "def", "process_mock_rpc", "(", "input_string", ")", ":", "spec", ",", "equals", ",", "value", "=", "input_string", ".", "partition", "(", "u'='", ")", "if", "len", "(", "equals", ")", "==", "0", ":", "print", "(", "\"Could not parse mock RPC argument: {}\"", ".", "format", "(", "input_string", ")", ")", "sys", ".", "exit", "(", "1", ")", "try", ":", "value", "=", "int", "(", "value", ".", "strip", "(", ")", ",", "0", ")", "except", "ValueError", "as", "exc", ":", "print", "(", "\"Could not parse mock RPC value: {}\"", ".", "format", "(", "str", "(", "exc", ")", ")", ")", "sys", ".", "exit", "(", "1", ")", "slot", ",", "part", ",", "rpc_id", "=", "spec", ".", "partition", "(", "u\":\"", ")", "if", "len", "(", "part", ")", "==", "0", ":", "print", "(", "\"Could not parse mock RPC slot/rpc definition: {}\"", ".", "format", "(", "spec", ")", ")", "sys", ".", "exit", "(", "1", ")", "try", ":", "slot", "=", "SlotIdentifier", ".", "FromString", "(", "slot", ")", "except", "ArgumentError", "as", "exc", ":", "print", "(", "\"Could not parse slot id in mock RPC definition: {}\"", ".", "format", "(", "exc", ".", "msg", ")", ")", "sys", ".", "exit", "(", "1", ")", "try", ":", "rpc_id", "=", "int", "(", "rpc_id", ",", "0", ")", "except", "ValueError", "as", "exc", ":", "print", "(", "\"Could not parse mock RPC number: {}\"", ".", "format", "(", "str", "(", "exc", ")", ")", ")", "sys", ".", "exit", "(", "1", ")", "return", "slot", ",", "rpc_id", ",", "value" ]
Process a mock RPC argument. Args: input_string (str): The input string that should be in the format <slot id>:<rpc id> = value
[ "Process", "a", "mock", "RPC", "argument", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/scripts/iotile_sgrun.py#L77-L114
train
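Since process_mock_rpc() is all about a specific argument grammar, one concrete call is worth showing. The slot string must be accepted by SlotIdentifier.FromString; "slot 1" is used here on the assumption that it is a valid slot spelling, and the RPC id and value are arbitrary.

```python
from iotile.sg.scripts.iotile_sgrun import process_mock_rpc

slot, rpc_id, value = process_mock_rpc("slot 1:0x8000 = 100")
print(slot, hex(rpc_id), value)   # parsed SlotIdentifier, 0x8000, 100
```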
iotile/coretools
iotilesensorgraph/iotile/sg/scripts/iotile_sgrun.py
watch_printer
def watch_printer(watch, value): """Print a watched value. Args: watch (DataStream): The stream that was watched value (IOTileReading): The value to was seen """ print("({: 8} s) {}: {}".format(value.raw_time, watch, value.value))
python
def watch_printer(watch, value): """Print a watched value. Args: watch (DataStream): The stream that was watched value (IOTileReading): The value to was seen """ print("({: 8} s) {}: {}".format(value.raw_time, watch, value.value))
[ "def", "watch_printer", "(", "watch", ",", "value", ")", ":", "print", "(", "\"({: 8} s) {}: {}\"", ".", "format", "(", "value", ".", "raw_time", ",", "watch", ",", "value", ".", "value", ")", ")" ]
Print a watched value. Args: watch (DataStream): The stream that was watched value (IOTileReading): The value that was seen
[ "Print", "a", "watched", "value", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/scripts/iotile_sgrun.py#L116-L124
train
iotile/coretools
iotilesensorgraph/iotile/sg/scripts/iotile_sgrun.py
main
def main(argv=None): """Main entry point for iotile sensorgraph simulator. This is the iotile-sgrun command line program. It takes an optional set of command line parameters to allow for testing. Args: argv (list of str): An optional set of command line parameters. If not passed, these are taken from sys.argv. """ if argv is None: argv = sys.argv[1:] try: executor = None parser = build_args() args = parser.parse_args(args=argv) model = DeviceModel() parser = SensorGraphFileParser() parser.parse_file(args.sensor_graph) parser.compile(model) if not args.disable_optimizer: opt = SensorGraphOptimizer() opt.optimize(parser.sensor_graph, model=model) graph = parser.sensor_graph sim = SensorGraphSimulator(graph) for stop in args.stop: sim.stop_condition(stop) for watch in args.watch: watch_sel = DataStreamSelector.FromString(watch) graph.sensor_log.watch(watch_sel, watch_printer) # If we are semihosting, create the appropriate executor connected to the device if args.semihost_device is not None: executor = SemihostedRPCExecutor(args.port, args.semihost_device) sim.rpc_executor = executor for mock in args.mock_rpc: slot, rpc_id, value = process_mock_rpc(mock) sim.rpc_executor.mock(slot, rpc_id, value) for stim in args.stimulus: sim.stimulus(stim) graph.load_constants() if args.trace is not None: sim.record_trace() try: if args.connected: sim.step(user_connected, 8) sim.run(accelerated=not args.realtime) except KeyboardInterrupt: pass if args.trace is not None: sim.trace.save(args.trace) finally: if executor is not None: executor.hw.close() return 0
python
def main(argv=None): """Main entry point for iotile sensorgraph simulator. This is the iotile-sgrun command line program. It takes an optional set of command line parameters to allow for testing. Args: argv (list of str): An optional set of command line parameters. If not passed, these are taken from sys.argv. """ if argv is None: argv = sys.argv[1:] try: executor = None parser = build_args() args = parser.parse_args(args=argv) model = DeviceModel() parser = SensorGraphFileParser() parser.parse_file(args.sensor_graph) parser.compile(model) if not args.disable_optimizer: opt = SensorGraphOptimizer() opt.optimize(parser.sensor_graph, model=model) graph = parser.sensor_graph sim = SensorGraphSimulator(graph) for stop in args.stop: sim.stop_condition(stop) for watch in args.watch: watch_sel = DataStreamSelector.FromString(watch) graph.sensor_log.watch(watch_sel, watch_printer) # If we are semihosting, create the appropriate executor connected to the device if args.semihost_device is not None: executor = SemihostedRPCExecutor(args.port, args.semihost_device) sim.rpc_executor = executor for mock in args.mock_rpc: slot, rpc_id, value = process_mock_rpc(mock) sim.rpc_executor.mock(slot, rpc_id, value) for stim in args.stimulus: sim.stimulus(stim) graph.load_constants() if args.trace is not None: sim.record_trace() try: if args.connected: sim.step(user_connected, 8) sim.run(accelerated=not args.realtime) except KeyboardInterrupt: pass if args.trace is not None: sim.trace.save(args.trace) finally: if executor is not None: executor.hw.close() return 0
[ "def", "main", "(", "argv", "=", "None", ")", ":", "if", "argv", "is", "None", ":", "argv", "=", "sys", ".", "argv", "[", "1", ":", "]", "try", ":", "executor", "=", "None", "parser", "=", "build_args", "(", ")", "args", "=", "parser", ".", "parse_args", "(", "args", "=", "argv", ")", "model", "=", "DeviceModel", "(", ")", "parser", "=", "SensorGraphFileParser", "(", ")", "parser", ".", "parse_file", "(", "args", ".", "sensor_graph", ")", "parser", ".", "compile", "(", "model", ")", "if", "not", "args", ".", "disable_optimizer", ":", "opt", "=", "SensorGraphOptimizer", "(", ")", "opt", ".", "optimize", "(", "parser", ".", "sensor_graph", ",", "model", "=", "model", ")", "graph", "=", "parser", ".", "sensor_graph", "sim", "=", "SensorGraphSimulator", "(", "graph", ")", "for", "stop", "in", "args", ".", "stop", ":", "sim", ".", "stop_condition", "(", "stop", ")", "for", "watch", "in", "args", ".", "watch", ":", "watch_sel", "=", "DataStreamSelector", ".", "FromString", "(", "watch", ")", "graph", ".", "sensor_log", ".", "watch", "(", "watch_sel", ",", "watch_printer", ")", "# If we are semihosting, create the appropriate executor connected to the device", "if", "args", ".", "semihost_device", "is", "not", "None", ":", "executor", "=", "SemihostedRPCExecutor", "(", "args", ".", "port", ",", "args", ".", "semihost_device", ")", "sim", ".", "rpc_executor", "=", "executor", "for", "mock", "in", "args", ".", "mock_rpc", ":", "slot", ",", "rpc_id", ",", "value", "=", "process_mock_rpc", "(", "mock", ")", "sim", ".", "rpc_executor", ".", "mock", "(", "slot", ",", "rpc_id", ",", "value", ")", "for", "stim", "in", "args", ".", "stimulus", ":", "sim", ".", "stimulus", "(", "stim", ")", "graph", ".", "load_constants", "(", ")", "if", "args", ".", "trace", "is", "not", "None", ":", "sim", ".", "record_trace", "(", ")", "try", ":", "if", "args", ".", "connected", ":", "sim", ".", "step", "(", "user_connected", ",", "8", ")", "sim", ".", "run", "(", "accelerated", "=", "not", "args", ".", "realtime", ")", "except", "KeyboardInterrupt", ":", "pass", "if", "args", ".", "trace", "is", "not", "None", ":", "sim", ".", "trace", ".", "save", "(", "args", ".", "trace", ")", "finally", ":", "if", "executor", "is", "not", "None", ":", "executor", ".", "hw", ".", "close", "(", ")", "return", "0" ]
Main entry point for iotile sensorgraph simulator. This is the iotile-sgrun command line program. It takes an optional set of command line parameters to allow for testing. Args: argv (list of str): An optional set of command line parameters. If not passed, these are taken from sys.argv.
[ "Main", "entry", "point", "for", "iotile", "sensorgraph", "simulator", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/scripts/iotile_sgrun.py#L127-L199
train
iotile/coretools
iotileship/iotile/ship/actions/verify_device_step.py
VerifyDeviceStep._verify_tile_versions
def _verify_tile_versions(self, hw): """Verify that the tiles have the correct versions """ for tile, expected_tile_version in self._tile_versions.items(): actual_tile_version = str(hw.get(tile).tile_version()) if expected_tile_version != actual_tile_version: raise ArgumentError("Tile has incorrect firmware", tile=tile, \ expected_version=expected_tile_version, actual_version=actual_tile_version)
python
def _verify_tile_versions(self, hw): """Verify that the tiles have the correct versions """ for tile, expected_tile_version in self._tile_versions.items(): actual_tile_version = str(hw.get(tile).tile_version()) if expected_tile_version != actual_tile_version: raise ArgumentError("Tile has incorrect firmware", tile=tile, \ expected_version=expected_tile_version, actual_version=actual_tile_version)
[ "def", "_verify_tile_versions", "(", "self", ",", "hw", ")", ":", "for", "tile", ",", "expected_tile_version", "in", "self", ".", "_tile_versions", ".", "items", "(", ")", ":", "actual_tile_version", "=", "str", "(", "hw", ".", "get", "(", "tile", ")", ".", "tile_version", "(", ")", ")", "if", "expected_tile_version", "!=", "actual_tile_version", ":", "raise", "ArgumentError", "(", "\"Tile has incorrect firmware\"", ",", "tile", "=", "tile", ",", "expected_version", "=", "expected_tile_version", ",", "actual_version", "=", "actual_tile_version", ")" ]
Verify that the tiles have the correct versions
[ "Verify", "that", "the", "tiles", "have", "the", "correct", "versions" ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileship/iotile/ship/actions/verify_device_step.py#L33-L40
train
iotile/coretools
iotileship/iotile/ship/actions/verify_device_step.py
VerifyDeviceStep._verify_realtime_streams
def _verify_realtime_streams(self, hw): """Check that the realtime streams are being produced """ print("--> Testing realtime data (takes 2 seconds)") time.sleep(2.1) reports = [x for x in hw.iter_reports()] reports_seen = {key: 0 for key in self._realtime_streams} for report in reports: stream_value = report.visible_readings[0].stream if reports_seen.get(stream_value) is not None: reports_seen[stream_value] += 1 for stream in reports_seen.keys(): if reports_seen[stream] < 2: raise ArgumentError("Realtime Stream not pushing any reports", stream=hex(stream), \ reports_seen=reports_seen[stream])
python
def _verify_realtime_streams(self, hw): """Check that the realtime streams are being produced """ print("--> Testing realtime data (takes 2 seconds)") time.sleep(2.1) reports = [x for x in hw.iter_reports()] reports_seen = {key: 0 for key in self._realtime_streams} for report in reports: stream_value = report.visible_readings[0].stream if reports_seen.get(stream_value) is not None: reports_seen[stream_value] += 1 for stream in reports_seen.keys(): if reports_seen[stream] < 2: raise ArgumentError("Realtime Stream not pushing any reports", stream=hex(stream), \ reports_seen=reports_seen[stream])
[ "def", "_verify_realtime_streams", "(", "self", ",", "hw", ")", ":", "print", "(", "\"--> Testing realtime data (takes 2 seconds)\"", ")", "time", ".", "sleep", "(", "2.1", ")", "reports", "=", "[", "x", "for", "x", "in", "hw", ".", "iter_reports", "(", ")", "]", "reports_seen", "=", "{", "key", ":", "0", "for", "key", "in", "self", ".", "_realtime_streams", "}", "for", "report", "in", "reports", ":", "stream_value", "=", "report", ".", "visible_readings", "[", "0", "]", ".", "stream", "if", "reports_seen", ".", "get", "(", "stream_value", ")", "is", "not", "None", ":", "reports_seen", "[", "stream_value", "]", "+=", "1", "for", "stream", "in", "reports_seen", ".", "keys", "(", ")", ":", "if", "reports_seen", "[", "stream", "]", "<", "2", ":", "raise", "ArgumentError", "(", "\"Realtime Stream not pushing any reports\"", ",", "stream", "=", "hex", "(", "stream", ")", ",", "reports_seen", "=", "reports_seen", "[", "stream", "]", ")" ]
Check that the realtime streams are being produced
[ "Check", "that", "the", "realtime", "streams", "are", "being", "produced" ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileship/iotile/ship/actions/verify_device_step.py#L64-L80
train
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/xgettext.py
_update_pot_file
def _update_pot_file(target, source, env): """ Action function for `POTUpdate` builder """ import re import os import SCons.Action nop = lambda target, source, env: 0 # Save scons cwd and os cwd (NOTE: they may be different. After the job, we # revert each one to its original state). save_cwd = env.fs.getcwd() save_os_cwd = os.getcwd() chdir = target[0].dir chdir_str = repr(chdir.get_abspath()) # Print chdir message (employ SCons.Action.Action for that. It knows better # than me how to to this correctly). env.Execute(SCons.Action.Action(nop, "Entering " + chdir_str)) # Go to target's directory and do our job env.fs.chdir(chdir, 1) # Go into target's directory try: cmd = _CmdRunner('$XGETTEXTCOM', '$XGETTEXTCOMSTR') action = SCons.Action.Action(cmd, strfunction=cmd.strfunction) status = action([target[0]], source, env) except: # Something went wrong. env.Execute(SCons.Action.Action(nop, "Leaving " + chdir_str)) # Revert working dirs to previous state and re-throw exception. env.fs.chdir(save_cwd, 0) os.chdir(save_os_cwd) raise # Print chdir message. env.Execute(SCons.Action.Action(nop, "Leaving " + chdir_str)) # Revert working dirs to previous state. env.fs.chdir(save_cwd, 0) os.chdir(save_os_cwd) # If the command was not successfull, return error code. if status: return status new_content = cmd.out if not new_content: # When xgettext finds no internationalized messages, no *.pot is created # (because we don't want to bother translators with empty POT files). needs_update = False explain = "no internationalized messages encountered" else: if target[0].exists(): # If the file already exists, it's left unaltered unless its messages # are outdated (w.r.t. to these recovered by xgettext from sources). old_content = target[0].get_text_contents() re_cdate = re.compile(r'^"POT-Creation-Date: .*"$[\r\n]?', re.M) old_content_nocdate = re.sub(re_cdate, "", old_content) new_content_nocdate = re.sub(re_cdate, "", new_content) if (old_content_nocdate == new_content_nocdate): # Messages are up-to-date needs_update = False explain = "messages in file found to be up-to-date" else: # Messages are outdated needs_update = True explain = "messages in file were outdated" else: # No POT file found, create new one needs_update = True explain = "new file" if needs_update: # Print message employing SCons.Action.Action for that. msg = "Writing " + repr(str(target[0])) + " (" + explain + ")" env.Execute(SCons.Action.Action(nop, msg)) f = open(str(target[0]), "w") f.write(new_content) f.close() return 0 else: # Print message employing SCons.Action.Action for that. msg = "Not writing " + repr(str(target[0])) + " (" + explain + ")" env.Execute(SCons.Action.Action(nop, msg)) return 0
python
def _update_pot_file(target, source, env): """ Action function for `POTUpdate` builder """ import re import os import SCons.Action nop = lambda target, source, env: 0 # Save scons cwd and os cwd (NOTE: they may be different. After the job, we # revert each one to its original state). save_cwd = env.fs.getcwd() save_os_cwd = os.getcwd() chdir = target[0].dir chdir_str = repr(chdir.get_abspath()) # Print chdir message (employ SCons.Action.Action for that. It knows better # than me how to to this correctly). env.Execute(SCons.Action.Action(nop, "Entering " + chdir_str)) # Go to target's directory and do our job env.fs.chdir(chdir, 1) # Go into target's directory try: cmd = _CmdRunner('$XGETTEXTCOM', '$XGETTEXTCOMSTR') action = SCons.Action.Action(cmd, strfunction=cmd.strfunction) status = action([target[0]], source, env) except: # Something went wrong. env.Execute(SCons.Action.Action(nop, "Leaving " + chdir_str)) # Revert working dirs to previous state and re-throw exception. env.fs.chdir(save_cwd, 0) os.chdir(save_os_cwd) raise # Print chdir message. env.Execute(SCons.Action.Action(nop, "Leaving " + chdir_str)) # Revert working dirs to previous state. env.fs.chdir(save_cwd, 0) os.chdir(save_os_cwd) # If the command was not successfull, return error code. if status: return status new_content = cmd.out if not new_content: # When xgettext finds no internationalized messages, no *.pot is created # (because we don't want to bother translators with empty POT files). needs_update = False explain = "no internationalized messages encountered" else: if target[0].exists(): # If the file already exists, it's left unaltered unless its messages # are outdated (w.r.t. to these recovered by xgettext from sources). old_content = target[0].get_text_contents() re_cdate = re.compile(r'^"POT-Creation-Date: .*"$[\r\n]?', re.M) old_content_nocdate = re.sub(re_cdate, "", old_content) new_content_nocdate = re.sub(re_cdate, "", new_content) if (old_content_nocdate == new_content_nocdate): # Messages are up-to-date needs_update = False explain = "messages in file found to be up-to-date" else: # Messages are outdated needs_update = True explain = "messages in file were outdated" else: # No POT file found, create new one needs_update = True explain = "new file" if needs_update: # Print message employing SCons.Action.Action for that. msg = "Writing " + repr(str(target[0])) + " (" + explain + ")" env.Execute(SCons.Action.Action(nop, msg)) f = open(str(target[0]), "w") f.write(new_content) f.close() return 0 else: # Print message employing SCons.Action.Action for that. msg = "Not writing " + repr(str(target[0])) + " (" + explain + ")" env.Execute(SCons.Action.Action(nop, msg)) return 0
[ "def", "_update_pot_file", "(", "target", ",", "source", ",", "env", ")", ":", "import", "re", "import", "os", "import", "SCons", ".", "Action", "nop", "=", "lambda", "target", ",", "source", ",", "env", ":", "0", "# Save scons cwd and os cwd (NOTE: they may be different. After the job, we", "# revert each one to its original state).", "save_cwd", "=", "env", ".", "fs", ".", "getcwd", "(", ")", "save_os_cwd", "=", "os", ".", "getcwd", "(", ")", "chdir", "=", "target", "[", "0", "]", ".", "dir", "chdir_str", "=", "repr", "(", "chdir", ".", "get_abspath", "(", ")", ")", "# Print chdir message (employ SCons.Action.Action for that. It knows better", "# than me how to to this correctly).", "env", ".", "Execute", "(", "SCons", ".", "Action", ".", "Action", "(", "nop", ",", "\"Entering \"", "+", "chdir_str", ")", ")", "# Go to target's directory and do our job", "env", ".", "fs", ".", "chdir", "(", "chdir", ",", "1", ")", "# Go into target's directory", "try", ":", "cmd", "=", "_CmdRunner", "(", "'$XGETTEXTCOM'", ",", "'$XGETTEXTCOMSTR'", ")", "action", "=", "SCons", ".", "Action", ".", "Action", "(", "cmd", ",", "strfunction", "=", "cmd", ".", "strfunction", ")", "status", "=", "action", "(", "[", "target", "[", "0", "]", "]", ",", "source", ",", "env", ")", "except", ":", "# Something went wrong.", "env", ".", "Execute", "(", "SCons", ".", "Action", ".", "Action", "(", "nop", ",", "\"Leaving \"", "+", "chdir_str", ")", ")", "# Revert working dirs to previous state and re-throw exception.", "env", ".", "fs", ".", "chdir", "(", "save_cwd", ",", "0", ")", "os", ".", "chdir", "(", "save_os_cwd", ")", "raise", "# Print chdir message.", "env", ".", "Execute", "(", "SCons", ".", "Action", ".", "Action", "(", "nop", ",", "\"Leaving \"", "+", "chdir_str", ")", ")", "# Revert working dirs to previous state.", "env", ".", "fs", ".", "chdir", "(", "save_cwd", ",", "0", ")", "os", ".", "chdir", "(", "save_os_cwd", ")", "# If the command was not successfull, return error code.", "if", "status", ":", "return", "status", "new_content", "=", "cmd", ".", "out", "if", "not", "new_content", ":", "# When xgettext finds no internationalized messages, no *.pot is created", "# (because we don't want to bother translators with empty POT files).", "needs_update", "=", "False", "explain", "=", "\"no internationalized messages encountered\"", "else", ":", "if", "target", "[", "0", "]", ".", "exists", "(", ")", ":", "# If the file already exists, it's left unaltered unless its messages", "# are outdated (w.r.t. 
to these recovered by xgettext from sources).", "old_content", "=", "target", "[", "0", "]", ".", "get_text_contents", "(", ")", "re_cdate", "=", "re", ".", "compile", "(", "r'^\"POT-Creation-Date: .*\"$[\\r\\n]?'", ",", "re", ".", "M", ")", "old_content_nocdate", "=", "re", ".", "sub", "(", "re_cdate", ",", "\"\"", ",", "old_content", ")", "new_content_nocdate", "=", "re", ".", "sub", "(", "re_cdate", ",", "\"\"", ",", "new_content", ")", "if", "(", "old_content_nocdate", "==", "new_content_nocdate", ")", ":", "# Messages are up-to-date", "needs_update", "=", "False", "explain", "=", "\"messages in file found to be up-to-date\"", "else", ":", "# Messages are outdated", "needs_update", "=", "True", "explain", "=", "\"messages in file were outdated\"", "else", ":", "# No POT file found, create new one", "needs_update", "=", "True", "explain", "=", "\"new file\"", "if", "needs_update", ":", "# Print message employing SCons.Action.Action for that.", "msg", "=", "\"Writing \"", "+", "repr", "(", "str", "(", "target", "[", "0", "]", ")", ")", "+", "\" (\"", "+", "explain", "+", "\")\"", "env", ".", "Execute", "(", "SCons", ".", "Action", ".", "Action", "(", "nop", ",", "msg", ")", ")", "f", "=", "open", "(", "str", "(", "target", "[", "0", "]", ")", ",", "\"w\"", ")", "f", ".", "write", "(", "new_content", ")", "f", ".", "close", "(", ")", "return", "0", "else", ":", "# Print message employing SCons.Action.Action for that.", "msg", "=", "\"Not writing \"", "+", "repr", "(", "str", "(", "target", "[", "0", "]", ")", ")", "+", "\" (\"", "+", "explain", "+", "\")\"", "env", ".", "Execute", "(", "SCons", ".", "Action", ".", "Action", "(", "nop", ",", "msg", ")", ")", "return", "0" ]
Action function for `POTUpdate` builder
[ "Action", "function", "for", "POTUpdate", "builder" ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/xgettext.py#L75-L151
train
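Supplementary sketch for the record above: the decision logic in _update_pot_file hinges on comparing the old and new POT content while ignoring the POT-Creation-Date header that xgettext rewrites on every run. Extracted from its SCons surroundings, that check looks roughly like this (the function name is mine, not part of the tool):

import re

_RE_CDATE = re.compile(r'^"POT-Creation-Date: .*"$[\r\n]?', re.M)

def pot_messages_changed(old_content, new_content):
    # Strip the creation-date header from both versions before comparing, so a
    # mere timestamp difference does not force the POT file to be rewritten.
    return _RE_CDATE.sub("", old_content) != _RE_CDATE.sub("", new_content)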
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/xgettext.py
_scan_xgettext_from_files
def _scan_xgettext_from_files(target, source, env, files=None, path=None): """ Parses `POTFILES.in`-like file and returns list of extracted file names. """ import re import SCons.Util import SCons.Node.FS if files is None: return 0 if not SCons.Util.is_List(files): files = [files] if path is None: if 'XGETTEXTPATH' in env: path = env['XGETTEXTPATH'] else: path = [] if not SCons.Util.is_List(path): path = [path] path = SCons.Util.flatten(path) dirs = () for p in path: if not isinstance(p, SCons.Node.FS.Base): if SCons.Util.is_String(p): p = env.subst(p, source=source, target=target) p = env.arg2nodes(p, env.fs.Dir) dirs += tuple(p) # cwd is the default search path (when no path is defined by user) if not dirs: dirs = (env.fs.getcwd(),) # Parse 'POTFILE.in' files. re_comment = re.compile(r'^#[^\n\r]*$\r?\n?', re.M) re_emptyln = re.compile(r'^[ \t\r]*$\r?\n?', re.M) re_trailws = re.compile(r'[ \t\r]+$') for f in files: # Find files in search path $XGETTEXTPATH if isinstance(f, SCons.Node.FS.Base) and f.rexists(): contents = f.get_text_contents() contents = re_comment.sub("", contents) contents = re_emptyln.sub("", contents) contents = re_trailws.sub("", contents) depnames = contents.splitlines() for depname in depnames: depfile = SCons.Node.FS.find_file(depname, dirs) if not depfile: depfile = env.arg2nodes(depname, dirs[0].File) env.Depends(target, depfile) return 0
python
def _scan_xgettext_from_files(target, source, env, files=None, path=None): """ Parses `POTFILES.in`-like file and returns list of extracted file names. """ import re import SCons.Util import SCons.Node.FS if files is None: return 0 if not SCons.Util.is_List(files): files = [files] if path is None: if 'XGETTEXTPATH' in env: path = env['XGETTEXTPATH'] else: path = [] if not SCons.Util.is_List(path): path = [path] path = SCons.Util.flatten(path) dirs = () for p in path: if not isinstance(p, SCons.Node.FS.Base): if SCons.Util.is_String(p): p = env.subst(p, source=source, target=target) p = env.arg2nodes(p, env.fs.Dir) dirs += tuple(p) # cwd is the default search path (when no path is defined by user) if not dirs: dirs = (env.fs.getcwd(),) # Parse 'POTFILE.in' files. re_comment = re.compile(r'^#[^\n\r]*$\r?\n?', re.M) re_emptyln = re.compile(r'^[ \t\r]*$\r?\n?', re.M) re_trailws = re.compile(r'[ \t\r]+$') for f in files: # Find files in search path $XGETTEXTPATH if isinstance(f, SCons.Node.FS.Base) and f.rexists(): contents = f.get_text_contents() contents = re_comment.sub("", contents) contents = re_emptyln.sub("", contents) contents = re_trailws.sub("", contents) depnames = contents.splitlines() for depname in depnames: depfile = SCons.Node.FS.find_file(depname, dirs) if not depfile: depfile = env.arg2nodes(depname, dirs[0].File) env.Depends(target, depfile) return 0
[ "def", "_scan_xgettext_from_files", "(", "target", ",", "source", ",", "env", ",", "files", "=", "None", ",", "path", "=", "None", ")", ":", "import", "re", "import", "SCons", ".", "Util", "import", "SCons", ".", "Node", ".", "FS", "if", "files", "is", "None", ":", "return", "0", "if", "not", "SCons", ".", "Util", ".", "is_List", "(", "files", ")", ":", "files", "=", "[", "files", "]", "if", "path", "is", "None", ":", "if", "'XGETTEXTPATH'", "in", "env", ":", "path", "=", "env", "[", "'XGETTEXTPATH'", "]", "else", ":", "path", "=", "[", "]", "if", "not", "SCons", ".", "Util", ".", "is_List", "(", "path", ")", ":", "path", "=", "[", "path", "]", "path", "=", "SCons", ".", "Util", ".", "flatten", "(", "path", ")", "dirs", "=", "(", ")", "for", "p", "in", "path", ":", "if", "not", "isinstance", "(", "p", ",", "SCons", ".", "Node", ".", "FS", ".", "Base", ")", ":", "if", "SCons", ".", "Util", ".", "is_String", "(", "p", ")", ":", "p", "=", "env", ".", "subst", "(", "p", ",", "source", "=", "source", ",", "target", "=", "target", ")", "p", "=", "env", ".", "arg2nodes", "(", "p", ",", "env", ".", "fs", ".", "Dir", ")", "dirs", "+=", "tuple", "(", "p", ")", "# cwd is the default search path (when no path is defined by user)", "if", "not", "dirs", ":", "dirs", "=", "(", "env", ".", "fs", ".", "getcwd", "(", ")", ",", ")", "# Parse 'POTFILE.in' files.", "re_comment", "=", "re", ".", "compile", "(", "r'^#[^\\n\\r]*$\\r?\\n?'", ",", "re", ".", "M", ")", "re_emptyln", "=", "re", ".", "compile", "(", "r'^[ \\t\\r]*$\\r?\\n?'", ",", "re", ".", "M", ")", "re_trailws", "=", "re", ".", "compile", "(", "r'[ \\t\\r]+$'", ")", "for", "f", "in", "files", ":", "# Find files in search path $XGETTEXTPATH", "if", "isinstance", "(", "f", ",", "SCons", ".", "Node", ".", "FS", ".", "Base", ")", "and", "f", ".", "rexists", "(", ")", ":", "contents", "=", "f", ".", "get_text_contents", "(", ")", "contents", "=", "re_comment", ".", "sub", "(", "\"\"", ",", "contents", ")", "contents", "=", "re_emptyln", ".", "sub", "(", "\"\"", ",", "contents", ")", "contents", "=", "re_trailws", ".", "sub", "(", "\"\"", ",", "contents", ")", "depnames", "=", "contents", ".", "splitlines", "(", ")", "for", "depname", "in", "depnames", ":", "depfile", "=", "SCons", ".", "Node", ".", "FS", ".", "find_file", "(", "depname", ",", "dirs", ")", "if", "not", "depfile", ":", "depfile", "=", "env", ".", "arg2nodes", "(", "depname", ",", "dirs", "[", "0", "]", ".", "File", ")", "env", ".", "Depends", "(", "target", ",", "depfile", ")", "return", "0" ]
Parses a `POTFILES.in`-like file and adds the source files listed there as dependencies of the target.
[ "Parses", "POTFILES", ".", "in", "-", "like", "file", "and", "returns", "list", "of", "extracted", "file", "names", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/xgettext.py#L175-L225
train
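Supplementary sketch for the record above: _scan_xgettext_from_files reads a POTFILES.in-style list by stripping comments, blank lines and trailing whitespace. A standalone version of just that parsing step, reusing the same regular expressions, could look like this (the helper name and the sample input are illustrative):

import re

RE_COMMENT = re.compile(r'^#[^\n\r]*$\r?\n?', re.M)
RE_EMPTYLN = re.compile(r'^[ \t\r]*$\r?\n?', re.M)
RE_TRAILWS = re.compile(r'[ \t\r]+$')

def parse_potfiles(contents):
    # Remove comment lines, blank lines and trailing whitespace, then treat
    # every remaining line as one source file name.
    contents = RE_COMMENT.sub("", contents)
    contents = RE_EMPTYLN.sub("", contents)
    contents = RE_TRAILWS.sub("", contents)
    return contents.splitlines()

print(parse_potfiles("# translated sources\nsrc/main.c\nsrc/util.c\n"))
# -> ['src/main.c', 'src/util.c']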
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/xgettext.py
_pot_update_emitter
def _pot_update_emitter(target, source, env): """ Emitter function for `POTUpdate` builder """ from SCons.Tool.GettextCommon import _POTargetFactory import SCons.Util import SCons.Node.FS if 'XGETTEXTFROM' in env: xfrom = env['XGETTEXTFROM'] else: return target, source if not SCons.Util.is_List(xfrom): xfrom = [xfrom] xfrom = SCons.Util.flatten(xfrom) # Prepare list of 'POTFILE.in' files. files = [] for xf in xfrom: if not isinstance(xf, SCons.Node.FS.Base): if SCons.Util.is_String(xf): # Interpolate variables in strings xf = env.subst(xf, source=source, target=target) xf = env.arg2nodes(xf) files.extend(xf) if files: env.Depends(target, files) _scan_xgettext_from_files(target, source, env, files) return target, source
python
def _pot_update_emitter(target, source, env): """ Emitter function for `POTUpdate` builder """ from SCons.Tool.GettextCommon import _POTargetFactory import SCons.Util import SCons.Node.FS if 'XGETTEXTFROM' in env: xfrom = env['XGETTEXTFROM'] else: return target, source if not SCons.Util.is_List(xfrom): xfrom = [xfrom] xfrom = SCons.Util.flatten(xfrom) # Prepare list of 'POTFILE.in' files. files = [] for xf in xfrom: if not isinstance(xf, SCons.Node.FS.Base): if SCons.Util.is_String(xf): # Interpolate variables in strings xf = env.subst(xf, source=source, target=target) xf = env.arg2nodes(xf) files.extend(xf) if files: env.Depends(target, files) _scan_xgettext_from_files(target, source, env, files) return target, source
[ "def", "_pot_update_emitter", "(", "target", ",", "source", ",", "env", ")", ":", "from", "SCons", ".", "Tool", ".", "GettextCommon", "import", "_POTargetFactory", "import", "SCons", ".", "Util", "import", "SCons", ".", "Node", ".", "FS", "if", "'XGETTEXTFROM'", "in", "env", ":", "xfrom", "=", "env", "[", "'XGETTEXTFROM'", "]", "else", ":", "return", "target", ",", "source", "if", "not", "SCons", ".", "Util", ".", "is_List", "(", "xfrom", ")", ":", "xfrom", "=", "[", "xfrom", "]", "xfrom", "=", "SCons", ".", "Util", ".", "flatten", "(", "xfrom", ")", "# Prepare list of 'POTFILE.in' files.", "files", "=", "[", "]", "for", "xf", "in", "xfrom", ":", "if", "not", "isinstance", "(", "xf", ",", "SCons", ".", "Node", ".", "FS", ".", "Base", ")", ":", "if", "SCons", ".", "Util", ".", "is_String", "(", "xf", ")", ":", "# Interpolate variables in strings", "xf", "=", "env", ".", "subst", "(", "xf", ",", "source", "=", "source", ",", "target", "=", "target", ")", "xf", "=", "env", ".", "arg2nodes", "(", "xf", ")", "files", ".", "extend", "(", "xf", ")", "if", "files", ":", "env", ".", "Depends", "(", "target", ",", "files", ")", "_scan_xgettext_from_files", "(", "target", ",", "source", ",", "env", ",", "files", ")", "return", "target", ",", "source" ]
Emitter function for `POTUpdate` builder
[ "Emitter", "function", "for", "POTUpdate", "builder" ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/xgettext.py#L231-L258
train
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/xgettext.py
_POTUpdateBuilder
def _POTUpdateBuilder(env, **kw): """ Creates `POTUpdate` builder object """ import SCons.Action from SCons.Tool.GettextCommon import _POTargetFactory kw['action'] = SCons.Action.Action(_update_pot_file, None) kw['suffix'] = '$POTSUFFIX' kw['target_factory'] = _POTargetFactory(env, alias='$POTUPDATE_ALIAS').File kw['emitter'] = _pot_update_emitter return _POTBuilder(**kw)
python
def _POTUpdateBuilder(env, **kw): """ Creates `POTUpdate` builder object """ import SCons.Action from SCons.Tool.GettextCommon import _POTargetFactory kw['action'] = SCons.Action.Action(_update_pot_file, None) kw['suffix'] = '$POTSUFFIX' kw['target_factory'] = _POTargetFactory(env, alias='$POTUPDATE_ALIAS').File kw['emitter'] = _pot_update_emitter return _POTBuilder(**kw)
[ "def", "_POTUpdateBuilder", "(", "env", ",", "*", "*", "kw", ")", ":", "import", "SCons", ".", "Action", "from", "SCons", ".", "Tool", ".", "GettextCommon", "import", "_POTargetFactory", "kw", "[", "'action'", "]", "=", "SCons", ".", "Action", ".", "Action", "(", "_update_pot_file", ",", "None", ")", "kw", "[", "'suffix'", "]", "=", "'$POTSUFFIX'", "kw", "[", "'target_factory'", "]", "=", "_POTargetFactory", "(", "env", ",", "alias", "=", "'$POTUPDATE_ALIAS'", ")", ".", "File", "kw", "[", "'emitter'", "]", "=", "_pot_update_emitter", "return", "_POTBuilder", "(", "*", "*", "kw", ")" ]
Creates `POTUpdate` builder object
[ "Creates", "POTUpdate", "builder", "object" ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/xgettext.py#L275-L283
train
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/xgettext.py
generate
def generate(env, **kw): """ Generate `xgettext` tool """ import SCons.Util from SCons.Tool.GettextCommon import RPaths, _detect_xgettext try: env['XGETTEXT'] = _detect_xgettext(env) except: env['XGETTEXT'] = 'xgettext' # NOTE: sources="$SOURCES" would work as well. However, we use following # construction to convert absolute paths provided by scons onto paths # relative to current working dir. Note, that scons expands $SOURCE(S) to # absolute paths for sources $SOURCE(s) outside of current subtree (e.g. in # "../"). With source=$SOURCE these absolute paths would be written to the # resultant *.pot file (and its derived *.po files) as references to lines in # source code (e.g. referring lines in *.c files). Such references would be # correct (e.g. in poedit) only on machine on which *.pot was generated and # would be of no use on other hosts (having a copy of source code located # in different place in filesystem). sources = '$( ${_concat( "", SOURCES, "", __env__, XgettextRPaths, TARGET' \ + ', SOURCES)} $)' # NOTE: the output from $XGETTEXTCOM command must go to stdout, not to a file. # This is required by the POTUpdate builder's action. xgettextcom = '$XGETTEXT $XGETTEXTFLAGS $_XGETTEXTPATHFLAGS' \ + ' $_XGETTEXTFROMFLAGS -o - ' + sources xgettextpathflags = '$( ${_concat( XGETTEXTPATHPREFIX, XGETTEXTPATH' \ + ', XGETTEXTPATHSUFFIX, __env__, RDirs, TARGET, SOURCES)} $)' xgettextfromflags = '$( ${_concat( XGETTEXTFROMPREFIX, XGETTEXTFROM' \ + ', XGETTEXTFROMSUFFIX, __env__, target=TARGET, source=SOURCES)} $)' env.SetDefault( _XGETTEXTDOMAIN='${TARGET.filebase}', XGETTEXTFLAGS=[], XGETTEXTCOM=xgettextcom, XGETTEXTCOMSTR='', XGETTEXTPATH=[], XGETTEXTPATHPREFIX='-D', XGETTEXTPATHSUFFIX='', XGETTEXTFROM=None, XGETTEXTFROMPREFIX='-f', XGETTEXTFROMSUFFIX='', _XGETTEXTPATHFLAGS=xgettextpathflags, _XGETTEXTFROMFLAGS=xgettextfromflags, POTSUFFIX=['.pot'], POTUPDATE_ALIAS='pot-update', XgettextRPaths=RPaths(env) ) env.Append(BUILDERS={ '_POTUpdateBuilder': _POTUpdateBuilder(env) }) env.AddMethod(_POTUpdateBuilderWrapper, 'POTUpdate') env.AlwaysBuild(env.Alias('$POTUPDATE_ALIAS'))
python
def generate(env, **kw): """ Generate `xgettext` tool """ import SCons.Util from SCons.Tool.GettextCommon import RPaths, _detect_xgettext try: env['XGETTEXT'] = _detect_xgettext(env) except: env['XGETTEXT'] = 'xgettext' # NOTE: sources="$SOURCES" would work as well. However, we use following # construction to convert absolute paths provided by scons onto paths # relative to current working dir. Note, that scons expands $SOURCE(S) to # absolute paths for sources $SOURCE(s) outside of current subtree (e.g. in # "../"). With source=$SOURCE these absolute paths would be written to the # resultant *.pot file (and its derived *.po files) as references to lines in # source code (e.g. referring lines in *.c files). Such references would be # correct (e.g. in poedit) only on machine on which *.pot was generated and # would be of no use on other hosts (having a copy of source code located # in different place in filesystem). sources = '$( ${_concat( "", SOURCES, "", __env__, XgettextRPaths, TARGET' \ + ', SOURCES)} $)' # NOTE: the output from $XGETTEXTCOM command must go to stdout, not to a file. # This is required by the POTUpdate builder's action. xgettextcom = '$XGETTEXT $XGETTEXTFLAGS $_XGETTEXTPATHFLAGS' \ + ' $_XGETTEXTFROMFLAGS -o - ' + sources xgettextpathflags = '$( ${_concat( XGETTEXTPATHPREFIX, XGETTEXTPATH' \ + ', XGETTEXTPATHSUFFIX, __env__, RDirs, TARGET, SOURCES)} $)' xgettextfromflags = '$( ${_concat( XGETTEXTFROMPREFIX, XGETTEXTFROM' \ + ', XGETTEXTFROMSUFFIX, __env__, target=TARGET, source=SOURCES)} $)' env.SetDefault( _XGETTEXTDOMAIN='${TARGET.filebase}', XGETTEXTFLAGS=[], XGETTEXTCOM=xgettextcom, XGETTEXTCOMSTR='', XGETTEXTPATH=[], XGETTEXTPATHPREFIX='-D', XGETTEXTPATHSUFFIX='', XGETTEXTFROM=None, XGETTEXTFROMPREFIX='-f', XGETTEXTFROMSUFFIX='', _XGETTEXTPATHFLAGS=xgettextpathflags, _XGETTEXTFROMFLAGS=xgettextfromflags, POTSUFFIX=['.pot'], POTUPDATE_ALIAS='pot-update', XgettextRPaths=RPaths(env) ) env.Append(BUILDERS={ '_POTUpdateBuilder': _POTUpdateBuilder(env) }) env.AddMethod(_POTUpdateBuilderWrapper, 'POTUpdate') env.AlwaysBuild(env.Alias('$POTUPDATE_ALIAS'))
[ "def", "generate", "(", "env", ",", "*", "*", "kw", ")", ":", "import", "SCons", ".", "Util", "from", "SCons", ".", "Tool", ".", "GettextCommon", "import", "RPaths", ",", "_detect_xgettext", "try", ":", "env", "[", "'XGETTEXT'", "]", "=", "_detect_xgettext", "(", "env", ")", "except", ":", "env", "[", "'XGETTEXT'", "]", "=", "'xgettext'", "# NOTE: sources=\"$SOURCES\" would work as well. However, we use following", "# construction to convert absolute paths provided by scons onto paths", "# relative to current working dir. Note, that scons expands $SOURCE(S) to", "# absolute paths for sources $SOURCE(s) outside of current subtree (e.g. in", "# \"../\"). With source=$SOURCE these absolute paths would be written to the", "# resultant *.pot file (and its derived *.po files) as references to lines in", "# source code (e.g. referring lines in *.c files). Such references would be", "# correct (e.g. in poedit) only on machine on which *.pot was generated and", "# would be of no use on other hosts (having a copy of source code located", "# in different place in filesystem).", "sources", "=", "'$( ${_concat( \"\", SOURCES, \"\", __env__, XgettextRPaths, TARGET'", "+", "', SOURCES)} $)'", "# NOTE: the output from $XGETTEXTCOM command must go to stdout, not to a file.", "# This is required by the POTUpdate builder's action.", "xgettextcom", "=", "'$XGETTEXT $XGETTEXTFLAGS $_XGETTEXTPATHFLAGS'", "+", "' $_XGETTEXTFROMFLAGS -o - '", "+", "sources", "xgettextpathflags", "=", "'$( ${_concat( XGETTEXTPATHPREFIX, XGETTEXTPATH'", "+", "', XGETTEXTPATHSUFFIX, __env__, RDirs, TARGET, SOURCES)} $)'", "xgettextfromflags", "=", "'$( ${_concat( XGETTEXTFROMPREFIX, XGETTEXTFROM'", "+", "', XGETTEXTFROMSUFFIX, __env__, target=TARGET, source=SOURCES)} $)'", "env", ".", "SetDefault", "(", "_XGETTEXTDOMAIN", "=", "'${TARGET.filebase}'", ",", "XGETTEXTFLAGS", "=", "[", "]", ",", "XGETTEXTCOM", "=", "xgettextcom", ",", "XGETTEXTCOMSTR", "=", "''", ",", "XGETTEXTPATH", "=", "[", "]", ",", "XGETTEXTPATHPREFIX", "=", "'-D'", ",", "XGETTEXTPATHSUFFIX", "=", "''", ",", "XGETTEXTFROM", "=", "None", ",", "XGETTEXTFROMPREFIX", "=", "'-f'", ",", "XGETTEXTFROMSUFFIX", "=", "''", ",", "_XGETTEXTPATHFLAGS", "=", "xgettextpathflags", ",", "_XGETTEXTFROMFLAGS", "=", "xgettextfromflags", ",", "POTSUFFIX", "=", "[", "'.pot'", "]", ",", "POTUPDATE_ALIAS", "=", "'pot-update'", ",", "XgettextRPaths", "=", "RPaths", "(", "env", ")", ")", "env", ".", "Append", "(", "BUILDERS", "=", "{", "'_POTUpdateBuilder'", ":", "_POTUpdateBuilder", "(", "env", ")", "}", ")", "env", ".", "AddMethod", "(", "_POTUpdateBuilderWrapper", ",", "'POTUpdate'", ")", "env", ".", "AlwaysBuild", "(", "env", ".", "Alias", "(", "'$POTUPDATE_ALIAS'", ")", ")" ]
Generate `xgettext` tool
[ "Generate", "xgettext", "tool" ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/xgettext.py#L289-L342
train
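For context on how the tool set up in this record is typically consumed, an SConstruct fragment along these lines would regenerate a POT file via the registered POTUpdate method and the pot-update alias (a sketch based on the construction variables defined above, not code from this repository; the file names are made up):

# SConstruct (sketch)
env = Environment(tools=['default', 'xgettext'])
# Rebuild messages.pot from the sources listed in POTFILES.in, searching src/.
env.POTUpdate(['messages'], XGETTEXTFROM='POTFILES.in', XGETTEXTPATH=['src/'])
# `scons pot-update` then refreshes the POT file only when its messages changed.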
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/gcc.py
generate
def generate(env): """Add Builders and construction variables for gcc to an Environment.""" if 'CC' not in env: env['CC'] = env.Detect(compilers) or compilers[0] cc.generate(env) if env['PLATFORM'] in ['cygwin', 'win32']: env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS') else: env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS -fPIC') # determine compiler version version = detect_version(env, env['CC']) if version: env['CCVERSION'] = version
python
def generate(env): """Add Builders and construction variables for gcc to an Environment.""" if 'CC' not in env: env['CC'] = env.Detect(compilers) or compilers[0] cc.generate(env) if env['PLATFORM'] in ['cygwin', 'win32']: env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS') else: env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS -fPIC') # determine compiler version version = detect_version(env, env['CC']) if version: env['CCVERSION'] = version
[ "def", "generate", "(", "env", ")", ":", "if", "'CC'", "not", "in", "env", ":", "env", "[", "'CC'", "]", "=", "env", ".", "Detect", "(", "compilers", ")", "or", "compilers", "[", "0", "]", "cc", ".", "generate", "(", "env", ")", "if", "env", "[", "'PLATFORM'", "]", "in", "[", "'cygwin'", ",", "'win32'", "]", ":", "env", "[", "'SHCCFLAGS'", "]", "=", "SCons", ".", "Util", ".", "CLVar", "(", "'$CCFLAGS'", ")", "else", ":", "env", "[", "'SHCCFLAGS'", "]", "=", "SCons", ".", "Util", ".", "CLVar", "(", "'$CCFLAGS -fPIC'", ")", "# determine compiler version", "version", "=", "detect_version", "(", "env", ",", "env", "[", "'CC'", "]", ")", "if", "version", ":", "env", "[", "'CCVERSION'", "]", "=", "version" ]
Add Builders and construction variables for gcc to an Environment.
[ "Add", "Builders", "and", "construction", "variables", "for", "gcc", "to", "an", "Environment", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/gcc.py#L45-L60
train
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/gcc.py
detect_version
def detect_version(env, cc): """Return the version of the GNU compiler, or None if it is not a GNU compiler.""" cc = env.subst(cc) if not cc: return None version = None #pipe = SCons.Action._subproc(env, SCons.Util.CLVar(cc) + ['-dumpversion'], pipe = SCons.Action._subproc(env, SCons.Util.CLVar(cc) + ['--version'], stdin = 'devnull', stderr = 'devnull', stdout = subprocess.PIPE) # -dumpversion was added in GCC 3.0. As long as we're supporting # GCC versions older than that, we should use --version and a # regular expression. #line = pipe.stdout.read().strip() #if line: # version = line line = SCons.Util.to_str(pipe.stdout.readline()) match = re.search(r'[0-9]+(\.[0-9]+)+', line) if match: version = match.group(0) # Non-GNU compiler's output (like AIX xlc's) may exceed the stdout buffer: # So continue with reading to let the child process actually terminate. while SCons.Util.to_str(pipe.stdout.readline()): pass ret = pipe.wait() if ret != 0: return None return version
python
def detect_version(env, cc): """Return the version of the GNU compiler, or None if it is not a GNU compiler.""" cc = env.subst(cc) if not cc: return None version = None #pipe = SCons.Action._subproc(env, SCons.Util.CLVar(cc) + ['-dumpversion'], pipe = SCons.Action._subproc(env, SCons.Util.CLVar(cc) + ['--version'], stdin = 'devnull', stderr = 'devnull', stdout = subprocess.PIPE) # -dumpversion was added in GCC 3.0. As long as we're supporting # GCC versions older than that, we should use --version and a # regular expression. #line = pipe.stdout.read().strip() #if line: # version = line line = SCons.Util.to_str(pipe.stdout.readline()) match = re.search(r'[0-9]+(\.[0-9]+)+', line) if match: version = match.group(0) # Non-GNU compiler's output (like AIX xlc's) may exceed the stdout buffer: # So continue with reading to let the child process actually terminate. while SCons.Util.to_str(pipe.stdout.readline()): pass ret = pipe.wait() if ret != 0: return None return version
[ "def", "detect_version", "(", "env", ",", "cc", ")", ":", "cc", "=", "env", ".", "subst", "(", "cc", ")", "if", "not", "cc", ":", "return", "None", "version", "=", "None", "#pipe = SCons.Action._subproc(env, SCons.Util.CLVar(cc) + ['-dumpversion'],", "pipe", "=", "SCons", ".", "Action", ".", "_subproc", "(", "env", ",", "SCons", ".", "Util", ".", "CLVar", "(", "cc", ")", "+", "[", "'--version'", "]", ",", "stdin", "=", "'devnull'", ",", "stderr", "=", "'devnull'", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "# -dumpversion was added in GCC 3.0. As long as we're supporting", "# GCC versions older than that, we should use --version and a", "# regular expression.", "#line = pipe.stdout.read().strip()", "#if line:", "# version = line", "line", "=", "SCons", ".", "Util", ".", "to_str", "(", "pipe", ".", "stdout", ".", "readline", "(", ")", ")", "match", "=", "re", ".", "search", "(", "r'[0-9]+(\\.[0-9]+)+'", ",", "line", ")", "if", "match", ":", "version", "=", "match", ".", "group", "(", "0", ")", "# Non-GNU compiler's output (like AIX xlc's) may exceed the stdout buffer:", "# So continue with reading to let the child process actually terminate.", "while", "SCons", ".", "Util", ".", "to_str", "(", "pipe", ".", "stdout", ".", "readline", "(", ")", ")", ":", "pass", "ret", "=", "pipe", ".", "wait", "(", ")", "if", "ret", "!=", "0", ":", "return", "None", "return", "version" ]
Return the version of the GNU compiler, or None if it is not a GNU compiler.
[ "Return", "the", "version", "of", "the", "GNU", "compiler", "or", "None", "if", "it", "is", "not", "a", "GNU", "compiler", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/gcc.py#L66-L94
train
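Supplementary sketch for the record above: outside of SCons, the same version probe boils down to running the compiler with --version and extracting the first dotted number from the first output line. A minimal standalone variant, assuming Python 3.7+ for subprocess.run's capture_output and text arguments (the function name is mine):

import re
import subprocess

def compiler_version(cc='gcc'):
    # Ask the compiler for its version banner; bail out quietly if it is missing
    # or exits with an error, mirroring the None return of detect_version.
    try:
        proc = subprocess.run([cc, '--version'], capture_output=True, text=True, check=True)
    except (OSError, subprocess.CalledProcessError):
        return None
    first_line = proc.stdout.splitlines()[0] if proc.stdout else ''
    match = re.search(r'[0-9]+(\.[0-9]+)+', first_line)
    return match.group(0) if match else None

print(compiler_version())  # e.g. '9.4.0', or None when gcc is not installed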
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/packaging/msi.py
is_dos_short_file_name
def is_dos_short_file_name(file): """ Examine if the given file is in the 8.3 form. """ fname, ext = os.path.splitext(file) proper_ext = len(ext) == 0 or (2 <= len(ext) <= 4) # the ext contains the dot proper_fname = file.isupper() and len(fname) <= 8 return proper_ext and proper_fname
python
def is_dos_short_file_name(file): """ Examine if the given file is in the 8.3 form. """ fname, ext = os.path.splitext(file) proper_ext = len(ext) == 0 or (2 <= len(ext) <= 4) # the ext contains the dot proper_fname = file.isupper() and len(fname) <= 8 return proper_ext and proper_fname
[ "def", "is_dos_short_file_name", "(", "file", ")", ":", "fname", ",", "ext", "=", "os", ".", "path", ".", "splitext", "(", "file", ")", "proper_ext", "=", "len", "(", "ext", ")", "==", "0", "or", "(", "2", "<=", "len", "(", "ext", ")", "<=", "4", ")", "# the ext contains the dot", "proper_fname", "=", "file", ".", "isupper", "(", ")", "and", "len", "(", "fname", ")", "<=", "8", "return", "proper_ext", "and", "proper_fname" ]
Examine if the given file is in the 8.3 form.
[ "Examine", "if", "the", "given", "file", "is", "in", "the", "8", ".", "3", "form", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/packaging/msi.py#L81-L88
train
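A few concrete checks make the 8.3 rule in the record above easier to see; the function is restated verbatim so the asserts run standalone:

import os

def is_dos_short_file_name(file):
    fname, ext = os.path.splitext(file)
    proper_ext = len(ext) == 0 or (2 <= len(ext) <= 4)  # the ext contains the dot
    proper_fname = file.isupper() and len(fname) <= 8
    return proper_ext and proper_fname

assert is_dos_short_file_name('README.TXT')            # upper case, stem <= 8, ext '.TXT'
assert is_dos_short_file_name('LICENSE')               # a missing extension is accepted
assert not is_dos_short_file_name('readme.txt')        # lower case fails the isupper() test
assert not is_dos_short_file_name('VERYLONGNAME.TXT')  # stem longer than eight characters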
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/packaging/msi.py
create_feature_dict
def create_feature_dict(files): """ X_MSI_FEATURE and doc FileTag's can be used to collect files in a hierarchy. This function collects the files into this hierarchy. """ dict = {} def add_to_dict( feature, file ): if not SCons.Util.is_List( feature ): feature = [ feature ] for f in feature: if f not in dict: dict[ f ] = [ file ] else: dict[ f ].append( file ) for file in files: if hasattr( file, 'PACKAGING_X_MSI_FEATURE' ): add_to_dict(file.PACKAGING_X_MSI_FEATURE, file) elif hasattr( file, 'PACKAGING_DOC' ): add_to_dict( 'PACKAGING_DOC', file ) else: add_to_dict( 'default', file ) return dict
python
def create_feature_dict(files): """ X_MSI_FEATURE and doc FileTag's can be used to collect files in a hierarchy. This function collects the files into this hierarchy. """ dict = {} def add_to_dict( feature, file ): if not SCons.Util.is_List( feature ): feature = [ feature ] for f in feature: if f not in dict: dict[ f ] = [ file ] else: dict[ f ].append( file ) for file in files: if hasattr( file, 'PACKAGING_X_MSI_FEATURE' ): add_to_dict(file.PACKAGING_X_MSI_FEATURE, file) elif hasattr( file, 'PACKAGING_DOC' ): add_to_dict( 'PACKAGING_DOC', file ) else: add_to_dict( 'default', file ) return dict
[ "def", "create_feature_dict", "(", "files", ")", ":", "dict", "=", "{", "}", "def", "add_to_dict", "(", "feature", ",", "file", ")", ":", "if", "not", "SCons", ".", "Util", ".", "is_List", "(", "feature", ")", ":", "feature", "=", "[", "feature", "]", "for", "f", "in", "feature", ":", "if", "f", "not", "in", "dict", ":", "dict", "[", "f", "]", "=", "[", "file", "]", "else", ":", "dict", "[", "f", "]", ".", "append", "(", "file", ")", "for", "file", "in", "files", ":", "if", "hasattr", "(", "file", ",", "'PACKAGING_X_MSI_FEATURE'", ")", ":", "add_to_dict", "(", "file", ".", "PACKAGING_X_MSI_FEATURE", ",", "file", ")", "elif", "hasattr", "(", "file", ",", "'PACKAGING_DOC'", ")", ":", "add_to_dict", "(", "'PACKAGING_DOC'", ",", "file", ")", "else", ":", "add_to_dict", "(", "'default'", ",", "file", ")", "return", "dict" ]
X_MSI_FEATURE and doc FileTags can be used to collect files in a hierarchy. This function collects the files into this hierarchy.
[ "X_MSI_FEATURE", "and", "doc", "FileTag", "s", "can", "be", "used", "to", "collect", "files", "in", "a", "hierarchy", ".", "This", "function", "collects", "the", "files", "into", "this", "hierarchy", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/packaging/msi.py#L128-L152
train
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/packaging/msi.py
generate_guids
def generate_guids(root): """ generates globally unique identifiers for parts of the xml which need them. Component tags have a special requirement. Their UUID is only allowed to change if the list of their contained resources has changed. This allows for clean removal and proper updates. To handle this requirement, the uuid is generated with an md5 hashing the whole subtree of a xml node. """ from hashlib import md5 # specify which tags need a guid and in which attribute this should be stored. needs_id = { 'Product' : 'Id', 'Package' : 'Id', 'Component' : 'Guid', } # find all XMl nodes matching the key, retrieve their attribute, hash their # subtree, convert hash to string and add as a attribute to the xml node. for (key,value) in needs_id.items(): node_list = root.getElementsByTagName(key) attribute = value for node in node_list: hash = md5(node.toxml()).hexdigest() hash_str = '%s-%s-%s-%s-%s' % ( hash[:8], hash[8:12], hash[12:16], hash[16:20], hash[20:] ) node.attributes[attribute] = hash_str
python
def generate_guids(root): """ generates globally unique identifiers for parts of the xml which need them. Component tags have a special requirement. Their UUID is only allowed to change if the list of their contained resources has changed. This allows for clean removal and proper updates. To handle this requirement, the uuid is generated with an md5 hashing the whole subtree of a xml node. """ from hashlib import md5 # specify which tags need a guid and in which attribute this should be stored. needs_id = { 'Product' : 'Id', 'Package' : 'Id', 'Component' : 'Guid', } # find all XMl nodes matching the key, retrieve their attribute, hash their # subtree, convert hash to string and add as a attribute to the xml node. for (key,value) in needs_id.items(): node_list = root.getElementsByTagName(key) attribute = value for node in node_list: hash = md5(node.toxml()).hexdigest() hash_str = '%s-%s-%s-%s-%s' % ( hash[:8], hash[8:12], hash[12:16], hash[16:20], hash[20:] ) node.attributes[attribute] = hash_str
[ "def", "generate_guids", "(", "root", ")", ":", "from", "hashlib", "import", "md5", "# specify which tags need a guid and in which attribute this should be stored.", "needs_id", "=", "{", "'Product'", ":", "'Id'", ",", "'Package'", ":", "'Id'", ",", "'Component'", ":", "'Guid'", ",", "}", "# find all XMl nodes matching the key, retrieve their attribute, hash their", "# subtree, convert hash to string and add as a attribute to the xml node.", "for", "(", "key", ",", "value", ")", "in", "needs_id", ".", "items", "(", ")", ":", "node_list", "=", "root", ".", "getElementsByTagName", "(", "key", ")", "attribute", "=", "value", "for", "node", "in", "node_list", ":", "hash", "=", "md5", "(", "node", ".", "toxml", "(", ")", ")", ".", "hexdigest", "(", ")", "hash_str", "=", "'%s-%s-%s-%s-%s'", "%", "(", "hash", "[", ":", "8", "]", ",", "hash", "[", "8", ":", "12", "]", ",", "hash", "[", "12", ":", "16", "]", ",", "hash", "[", "16", ":", "20", "]", ",", "hash", "[", "20", ":", "]", ")", "node", ".", "attributes", "[", "attribute", "]", "=", "hash_str" ]
generates globally unique identifiers for parts of the xml which need them. Component tags have a special requirement: their UUID is only allowed to change if the list of their contained resources has changed, which allows for clean removal and proper updates. To handle this requirement, the uuid is generated as an md5 hash of the whole subtree of an XML node.
[ "generates", "globally", "unique", "identifiers", "for", "parts", "of", "the", "xml", "which", "need", "them", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/packaging/msi.py#L154-L181
train
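Supplementary sketch for the record above: the GUID derivation is just an md5 of a node's serialized subtree, reformatted with dashes. Note that under Python 3 the XML string has to be encoded before hashing, which the standalone sketch below does explicitly (the sample XML is made up):

from hashlib import md5
from xml.dom.minidom import parseString

node = parseString('<Component><File Name="app.exe"/></Component>').documentElement
digest = md5(node.toxml().encode('utf-8')).hexdigest()
guid = '-'.join([digest[:8], digest[8:12], digest[12:16], digest[16:20], digest[20:]])
print(guid)  # stable for as long as the Component subtree stays unchanged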
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/packaging/msi.py
create_default_directory_layout
def create_default_directory_layout(root, NAME, VERSION, VENDOR, filename_set): """ Create the wix default target directory layout and return the innermost directory. We assume that the XML tree delivered in the root argument already contains the Product tag. Everything is put under the PFiles directory property defined by WiX. After that a directory with the 'VENDOR' tag is placed and then a directory with the name of the project and its VERSION. This leads to the following TARGET Directory Layout: C:\<PFiles>\<Vendor>\<Projectname-Version>\ Example: C:\Programme\Company\Product-1.2\ """ doc = Document() d1 = doc.createElement( 'Directory' ) d1.attributes['Id'] = 'TARGETDIR' d1.attributes['Name'] = 'SourceDir' d2 = doc.createElement( 'Directory' ) d2.attributes['Id'] = 'ProgramFilesFolder' d2.attributes['Name'] = 'PFiles' d3 = doc.createElement( 'Directory' ) d3.attributes['Id'] = 'VENDOR_folder' d3.attributes['Name'] = escape( gen_dos_short_file_name( VENDOR, filename_set ) ) d3.attributes['LongName'] = escape( VENDOR ) d4 = doc.createElement( 'Directory' ) project_folder = "%s-%s" % ( NAME, VERSION ) d4.attributes['Id'] = 'MY_DEFAULT_FOLDER' d4.attributes['Name'] = escape( gen_dos_short_file_name( project_folder, filename_set ) ) d4.attributes['LongName'] = escape( project_folder ) d1.childNodes.append( d2 ) d2.childNodes.append( d3 ) d3.childNodes.append( d4 ) root.getElementsByTagName('Product')[0].childNodes.append( d1 ) return d4
python
def create_default_directory_layout(root, NAME, VERSION, VENDOR, filename_set): """ Create the wix default target directory layout and return the innermost directory. We assume that the XML tree delivered in the root argument already contains the Product tag. Everything is put under the PFiles directory property defined by WiX. After that a directory with the 'VENDOR' tag is placed and then a directory with the name of the project and its VERSION. This leads to the following TARGET Directory Layout: C:\<PFiles>\<Vendor>\<Projectname-Version>\ Example: C:\Programme\Company\Product-1.2\ """ doc = Document() d1 = doc.createElement( 'Directory' ) d1.attributes['Id'] = 'TARGETDIR' d1.attributes['Name'] = 'SourceDir' d2 = doc.createElement( 'Directory' ) d2.attributes['Id'] = 'ProgramFilesFolder' d2.attributes['Name'] = 'PFiles' d3 = doc.createElement( 'Directory' ) d3.attributes['Id'] = 'VENDOR_folder' d3.attributes['Name'] = escape( gen_dos_short_file_name( VENDOR, filename_set ) ) d3.attributes['LongName'] = escape( VENDOR ) d4 = doc.createElement( 'Directory' ) project_folder = "%s-%s" % ( NAME, VERSION ) d4.attributes['Id'] = 'MY_DEFAULT_FOLDER' d4.attributes['Name'] = escape( gen_dos_short_file_name( project_folder, filename_set ) ) d4.attributes['LongName'] = escape( project_folder ) d1.childNodes.append( d2 ) d2.childNodes.append( d3 ) d3.childNodes.append( d4 ) root.getElementsByTagName('Product')[0].childNodes.append( d1 ) return d4
[ "def", "create_default_directory_layout", "(", "root", ",", "NAME", ",", "VERSION", ",", "VENDOR", ",", "filename_set", ")", ":", "doc", "=", "Document", "(", ")", "d1", "=", "doc", ".", "createElement", "(", "'Directory'", ")", "d1", ".", "attributes", "[", "'Id'", "]", "=", "'TARGETDIR'", "d1", ".", "attributes", "[", "'Name'", "]", "=", "'SourceDir'", "d2", "=", "doc", ".", "createElement", "(", "'Directory'", ")", "d2", ".", "attributes", "[", "'Id'", "]", "=", "'ProgramFilesFolder'", "d2", ".", "attributes", "[", "'Name'", "]", "=", "'PFiles'", "d3", "=", "doc", ".", "createElement", "(", "'Directory'", ")", "d3", ".", "attributes", "[", "'Id'", "]", "=", "'VENDOR_folder'", "d3", ".", "attributes", "[", "'Name'", "]", "=", "escape", "(", "gen_dos_short_file_name", "(", "VENDOR", ",", "filename_set", ")", ")", "d3", ".", "attributes", "[", "'LongName'", "]", "=", "escape", "(", "VENDOR", ")", "d4", "=", "doc", ".", "createElement", "(", "'Directory'", ")", "project_folder", "=", "\"%s-%s\"", "%", "(", "NAME", ",", "VERSION", ")", "d4", ".", "attributes", "[", "'Id'", "]", "=", "'MY_DEFAULT_FOLDER'", "d4", ".", "attributes", "[", "'Name'", "]", "=", "escape", "(", "gen_dos_short_file_name", "(", "project_folder", ",", "filename_set", ")", ")", "d4", ".", "attributes", "[", "'LongName'", "]", "=", "escape", "(", "project_folder", ")", "d1", ".", "childNodes", ".", "append", "(", "d2", ")", "d2", ".", "childNodes", ".", "append", "(", "d3", ")", "d3", ".", "childNodes", ".", "append", "(", "d4", ")", "root", ".", "getElementsByTagName", "(", "'Product'", ")", "[", "0", "]", ".", "childNodes", ".", "append", "(", "d1", ")", "return", "d4" ]
Create the wix default target directory layout and return the innermost directory. We assume that the XML tree delivered in the root argument already contains the Product tag. Everything is put under the PFiles directory property defined by WiX. After that a directory with the 'VENDOR' tag is placed and then a directory with the name of the project and its VERSION. This leads to the following TARGET Directory Layout: C:\<PFiles>\<Vendor>\<Projectname-Version>\ Example: C:\Programme\Company\Product-1.2\
[ "Create", "the", "wix", "default", "target", "directory", "layout", "and", "return", "the", "innermost", "directory", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/packaging/msi.py#L225-L265
train
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/packaging/msi.py
build_wxsfile_file_section
def build_wxsfile_file_section(root, files, NAME, VERSION, VENDOR, filename_set, id_set): """ Builds the Component sections of the wxs file with their included files. Files need to be specified in 8.3 format and in the long name format, long filenames will be converted automatically. Features are specficied with the 'X_MSI_FEATURE' or 'DOC' FileTag. """ root = create_default_directory_layout( root, NAME, VERSION, VENDOR, filename_set ) components = create_feature_dict( files ) factory = Document() def get_directory( node, dir ): """ Returns the node under the given node representing the directory. Returns the component node if dir is None or empty. """ if dir == '' or not dir: return node Directory = node dir_parts = dir.split(os.path.sep) # to make sure that our directory ids are unique, the parent folders are # consecutively added to upper_dir upper_dir = '' # walk down the xml tree finding parts of the directory dir_parts = [d for d in dir_parts if d != ''] for d in dir_parts[:]: already_created = [c for c in Directory.childNodes if c.nodeName == 'Directory' and c.attributes['LongName'].value == escape(d)] if already_created != []: Directory = already_created[0] dir_parts.remove(d) upper_dir += d else: break for d in dir_parts: nDirectory = factory.createElement( 'Directory' ) nDirectory.attributes['LongName'] = escape( d ) nDirectory.attributes['Name'] = escape( gen_dos_short_file_name( d, filename_set ) ) upper_dir += d nDirectory.attributes['Id'] = convert_to_id( upper_dir, id_set ) Directory.childNodes.append( nDirectory ) Directory = nDirectory return Directory for file in files: drive, path = os.path.splitdrive( file.PACKAGING_INSTALL_LOCATION ) filename = os.path.basename( path ) dirname = os.path.dirname( path ) h = { # tagname : default value 'PACKAGING_X_MSI_VITAL' : 'yes', 'PACKAGING_X_MSI_FILEID' : convert_to_id(filename, id_set), 'PACKAGING_X_MSI_LONGNAME' : filename, 'PACKAGING_X_MSI_SHORTNAME' : gen_dos_short_file_name(filename, filename_set), 'PACKAGING_X_MSI_SOURCE' : file.get_path(), } # fill in the default tags given above. for k,v in [ (k, v) for (k,v) in h.items() if not hasattr(file, k) ]: setattr( file, k, v ) File = factory.createElement( 'File' ) File.attributes['LongName'] = escape( file.PACKAGING_X_MSI_LONGNAME ) File.attributes['Name'] = escape( file.PACKAGING_X_MSI_SHORTNAME ) File.attributes['Source'] = escape( file.PACKAGING_X_MSI_SOURCE ) File.attributes['Id'] = escape( file.PACKAGING_X_MSI_FILEID ) File.attributes['Vital'] = escape( file.PACKAGING_X_MSI_VITAL ) # create the <Component> Tag under which this file should appear Component = factory.createElement('Component') Component.attributes['DiskId'] = '1' Component.attributes['Id'] = convert_to_id( filename, id_set ) # hang the component node under the root node and the file node # under the component node. Directory = get_directory( root, dirname ) Directory.childNodes.append( Component ) Component.childNodes.append( File )
python
def build_wxsfile_file_section(root, files, NAME, VERSION, VENDOR, filename_set, id_set): """ Builds the Component sections of the wxs file with their included files. Files need to be specified in 8.3 format and in the long name format, long filenames will be converted automatically. Features are specficied with the 'X_MSI_FEATURE' or 'DOC' FileTag. """ root = create_default_directory_layout( root, NAME, VERSION, VENDOR, filename_set ) components = create_feature_dict( files ) factory = Document() def get_directory( node, dir ): """ Returns the node under the given node representing the directory. Returns the component node if dir is None or empty. """ if dir == '' or not dir: return node Directory = node dir_parts = dir.split(os.path.sep) # to make sure that our directory ids are unique, the parent folders are # consecutively added to upper_dir upper_dir = '' # walk down the xml tree finding parts of the directory dir_parts = [d for d in dir_parts if d != ''] for d in dir_parts[:]: already_created = [c for c in Directory.childNodes if c.nodeName == 'Directory' and c.attributes['LongName'].value == escape(d)] if already_created != []: Directory = already_created[0] dir_parts.remove(d) upper_dir += d else: break for d in dir_parts: nDirectory = factory.createElement( 'Directory' ) nDirectory.attributes['LongName'] = escape( d ) nDirectory.attributes['Name'] = escape( gen_dos_short_file_name( d, filename_set ) ) upper_dir += d nDirectory.attributes['Id'] = convert_to_id( upper_dir, id_set ) Directory.childNodes.append( nDirectory ) Directory = nDirectory return Directory for file in files: drive, path = os.path.splitdrive( file.PACKAGING_INSTALL_LOCATION ) filename = os.path.basename( path ) dirname = os.path.dirname( path ) h = { # tagname : default value 'PACKAGING_X_MSI_VITAL' : 'yes', 'PACKAGING_X_MSI_FILEID' : convert_to_id(filename, id_set), 'PACKAGING_X_MSI_LONGNAME' : filename, 'PACKAGING_X_MSI_SHORTNAME' : gen_dos_short_file_name(filename, filename_set), 'PACKAGING_X_MSI_SOURCE' : file.get_path(), } # fill in the default tags given above. for k,v in [ (k, v) for (k,v) in h.items() if not hasattr(file, k) ]: setattr( file, k, v ) File = factory.createElement( 'File' ) File.attributes['LongName'] = escape( file.PACKAGING_X_MSI_LONGNAME ) File.attributes['Name'] = escape( file.PACKAGING_X_MSI_SHORTNAME ) File.attributes['Source'] = escape( file.PACKAGING_X_MSI_SOURCE ) File.attributes['Id'] = escape( file.PACKAGING_X_MSI_FILEID ) File.attributes['Vital'] = escape( file.PACKAGING_X_MSI_VITAL ) # create the <Component> Tag under which this file should appear Component = factory.createElement('Component') Component.attributes['DiskId'] = '1' Component.attributes['Id'] = convert_to_id( filename, id_set ) # hang the component node under the root node and the file node # under the component node. Directory = get_directory( root, dirname ) Directory.childNodes.append( Component ) Component.childNodes.append( File )
[ "def", "build_wxsfile_file_section", "(", "root", ",", "files", ",", "NAME", ",", "VERSION", ",", "VENDOR", ",", "filename_set", ",", "id_set", ")", ":", "root", "=", "create_default_directory_layout", "(", "root", ",", "NAME", ",", "VERSION", ",", "VENDOR", ",", "filename_set", ")", "components", "=", "create_feature_dict", "(", "files", ")", "factory", "=", "Document", "(", ")", "def", "get_directory", "(", "node", ",", "dir", ")", ":", "\"\"\" Returns the node under the given node representing the directory.\n\n Returns the component node if dir is None or empty.\n \"\"\"", "if", "dir", "==", "''", "or", "not", "dir", ":", "return", "node", "Directory", "=", "node", "dir_parts", "=", "dir", ".", "split", "(", "os", ".", "path", ".", "sep", ")", "# to make sure that our directory ids are unique, the parent folders are", "# consecutively added to upper_dir", "upper_dir", "=", "''", "# walk down the xml tree finding parts of the directory", "dir_parts", "=", "[", "d", "for", "d", "in", "dir_parts", "if", "d", "!=", "''", "]", "for", "d", "in", "dir_parts", "[", ":", "]", ":", "already_created", "=", "[", "c", "for", "c", "in", "Directory", ".", "childNodes", "if", "c", ".", "nodeName", "==", "'Directory'", "and", "c", ".", "attributes", "[", "'LongName'", "]", ".", "value", "==", "escape", "(", "d", ")", "]", "if", "already_created", "!=", "[", "]", ":", "Directory", "=", "already_created", "[", "0", "]", "dir_parts", ".", "remove", "(", "d", ")", "upper_dir", "+=", "d", "else", ":", "break", "for", "d", "in", "dir_parts", ":", "nDirectory", "=", "factory", ".", "createElement", "(", "'Directory'", ")", "nDirectory", ".", "attributes", "[", "'LongName'", "]", "=", "escape", "(", "d", ")", "nDirectory", ".", "attributes", "[", "'Name'", "]", "=", "escape", "(", "gen_dos_short_file_name", "(", "d", ",", "filename_set", ")", ")", "upper_dir", "+=", "d", "nDirectory", ".", "attributes", "[", "'Id'", "]", "=", "convert_to_id", "(", "upper_dir", ",", "id_set", ")", "Directory", ".", "childNodes", ".", "append", "(", "nDirectory", ")", "Directory", "=", "nDirectory", "return", "Directory", "for", "file", "in", "files", ":", "drive", ",", "path", "=", "os", ".", "path", ".", "splitdrive", "(", "file", ".", "PACKAGING_INSTALL_LOCATION", ")", "filename", "=", "os", ".", "path", ".", "basename", "(", "path", ")", "dirname", "=", "os", ".", "path", ".", "dirname", "(", "path", ")", "h", "=", "{", "# tagname : default value", "'PACKAGING_X_MSI_VITAL'", ":", "'yes'", ",", "'PACKAGING_X_MSI_FILEID'", ":", "convert_to_id", "(", "filename", ",", "id_set", ")", ",", "'PACKAGING_X_MSI_LONGNAME'", ":", "filename", ",", "'PACKAGING_X_MSI_SHORTNAME'", ":", "gen_dos_short_file_name", "(", "filename", ",", "filename_set", ")", ",", "'PACKAGING_X_MSI_SOURCE'", ":", "file", ".", "get_path", "(", ")", ",", "}", "# fill in the default tags given above.", "for", "k", ",", "v", "in", "[", "(", "k", ",", "v", ")", "for", "(", "k", ",", "v", ")", "in", "h", ".", "items", "(", ")", "if", "not", "hasattr", "(", "file", ",", "k", ")", "]", ":", "setattr", "(", "file", ",", "k", ",", "v", ")", "File", "=", "factory", ".", "createElement", "(", "'File'", ")", "File", ".", "attributes", "[", "'LongName'", "]", "=", "escape", "(", "file", ".", "PACKAGING_X_MSI_LONGNAME", ")", "File", ".", "attributes", "[", "'Name'", "]", "=", "escape", "(", "file", ".", "PACKAGING_X_MSI_SHORTNAME", ")", "File", ".", "attributes", "[", "'Source'", "]", "=", "escape", "(", "file", ".", "PACKAGING_X_MSI_SOURCE", ")", "File", ".", "attributes", 
"[", "'Id'", "]", "=", "escape", "(", "file", ".", "PACKAGING_X_MSI_FILEID", ")", "File", ".", "attributes", "[", "'Vital'", "]", "=", "escape", "(", "file", ".", "PACKAGING_X_MSI_VITAL", ")", "# create the <Component> Tag under which this file should appear", "Component", "=", "factory", ".", "createElement", "(", "'Component'", ")", "Component", ".", "attributes", "[", "'DiskId'", "]", "=", "'1'", "Component", ".", "attributes", "[", "'Id'", "]", "=", "convert_to_id", "(", "filename", ",", "id_set", ")", "# hang the component node under the root node and the file node", "# under the component node.", "Directory", "=", "get_directory", "(", "root", ",", "dirname", ")", "Directory", ".", "childNodes", ".", "append", "(", "Component", ")", "Component", ".", "childNodes", ".", "append", "(", "File", ")" ]
Builds the Component sections of the wxs file with their included files. Files need to be specified in 8.3 format and in the long name format; long filenames will be converted automatically. Features are specified with the 'X_MSI_FEATURE' or 'DOC' FileTag.
[ "Builds", "the", "Component", "sections", "of", "the", "wxs", "file", "with", "their", "included", "files", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/packaging/msi.py#L270-L357
train
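Supplementary sketch for the record above: the inner get_directory helper keeps Directory Ids unique by concatenating every parent folder name into upper_dir as it walks down the path. Reduced to plain strings (and using '/' instead of os.path.sep, with the convert_to_id step omitted), the idea is:

def directory_id_chain(path):
    # Each level's Id is the concatenation of all folder names above it, so
    # 'bin' and 'doc/bin' end up with distinct identifiers.
    ids, upper = [], ''
    for part in [p for p in path.split('/') if p]:
        upper += part
        ids.append(upper)
    return ids

print(directory_id_chain('usr/local/bin'))  # -> ['usr', 'usrlocal', 'usrlocalbin']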
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/packaging/msi.py
build_wxsfile_default_gui
def build_wxsfile_default_gui(root): """ This function adds a default GUI to the wxs file """ factory = Document() Product = root.getElementsByTagName('Product')[0] UIRef = factory.createElement('UIRef') UIRef.attributes['Id'] = 'WixUI_Mondo' Product.childNodes.append(UIRef) UIRef = factory.createElement('UIRef') UIRef.attributes['Id'] = 'WixUI_ErrorProgressText' Product.childNodes.append(UIRef)
python
def build_wxsfile_default_gui(root): """ This function adds a default GUI to the wxs file """ factory = Document() Product = root.getElementsByTagName('Product')[0] UIRef = factory.createElement('UIRef') UIRef.attributes['Id'] = 'WixUI_Mondo' Product.childNodes.append(UIRef) UIRef = factory.createElement('UIRef') UIRef.attributes['Id'] = 'WixUI_ErrorProgressText' Product.childNodes.append(UIRef)
[ "def", "build_wxsfile_default_gui", "(", "root", ")", ":", "factory", "=", "Document", "(", ")", "Product", "=", "root", ".", "getElementsByTagName", "(", "'Product'", ")", "[", "0", "]", "UIRef", "=", "factory", ".", "createElement", "(", "'UIRef'", ")", "UIRef", ".", "attributes", "[", "'Id'", "]", "=", "'WixUI_Mondo'", "Product", ".", "childNodes", ".", "append", "(", "UIRef", ")", "UIRef", "=", "factory", ".", "createElement", "(", "'UIRef'", ")", "UIRef", ".", "attributes", "[", "'Id'", "]", "=", "'WixUI_ErrorProgressText'", "Product", ".", "childNodes", ".", "append", "(", "UIRef", ")" ]
This function adds a default GUI to the wxs file
[ "This", "function", "adds", "a", "default", "GUI", "to", "the", "wxs", "file" ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/packaging/msi.py#L417-L429
train
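For readers unfamiliar with WiX, the function above simply hangs two UIRef elements under Product to pull in the stock installer UI. Below is a minimal standalone sketch of the resulting fragment, written with plain xml.dom.minidom rather than the SCons packaging helpers, so it is an illustration of the output shape and not the SCons code itself.

from xml.dom.minidom import Document

# Build the same two <UIRef> children the helper appends under <Product>.
doc = Document()
product = doc.createElement('Product')
doc.appendChild(product)
for ref_id in ('WixUI_Mondo', 'WixUI_ErrorProgressText'):
    uiref = doc.createElement('UIRef')
    uiref.setAttribute('Id', ref_id)
    product.appendChild(uiref)

# toprettyxml() prints the XML declaration followed by:
#   <Product>
#     <UIRef Id="WixUI_Mondo"/>
#     <UIRef Id="WixUI_ErrorProgressText"/>
#   </Product>
print(doc.toprettyxml(indent='  '))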
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/packaging/msi.py
build_license_file
def build_license_file(directory, spec): """ Creates a License.rtf file with the content of "X_MSI_LICENSE_TEXT" in the given directory """ name, text = '', '' try: name = spec['LICENSE'] text = spec['X_MSI_LICENSE_TEXT'] except KeyError: pass # ignore this as X_MSI_LICENSE_TEXT is optional if name!='' or text!='': file = open( os.path.join(directory.get_path(), 'License.rtf'), 'w' ) file.write('{\\rtf') if text!='': file.write(text.replace('\n', '\\par ')) else: file.write(name+'\\par\\par') file.write('}') file.close()
python
def build_license_file(directory, spec): """ Creates a License.rtf file with the content of "X_MSI_LICENSE_TEXT" in the given directory """ name, text = '', '' try: name = spec['LICENSE'] text = spec['X_MSI_LICENSE_TEXT'] except KeyError: pass # ignore this as X_MSI_LICENSE_TEXT is optional if name!='' or text!='': file = open( os.path.join(directory.get_path(), 'License.rtf'), 'w' ) file.write('{\\rtf') if text!='': file.write(text.replace('\n', '\\par ')) else: file.write(name+'\\par\\par') file.write('}') file.close()
[ "def", "build_license_file", "(", "directory", ",", "spec", ")", ":", "name", ",", "text", "=", "''", ",", "''", "try", ":", "name", "=", "spec", "[", "'LICENSE'", "]", "text", "=", "spec", "[", "'X_MSI_LICENSE_TEXT'", "]", "except", "KeyError", ":", "pass", "# ignore this as X_MSI_LICENSE_TEXT is optional", "if", "name", "!=", "''", "or", "text", "!=", "''", ":", "file", "=", "open", "(", "os", ".", "path", ".", "join", "(", "directory", ".", "get_path", "(", ")", ",", "'License.rtf'", ")", ",", "'w'", ")", "file", ".", "write", "(", "'{\\\\rtf'", ")", "if", "text", "!=", "''", ":", "file", ".", "write", "(", "text", ".", "replace", "(", "'\\n'", ",", "'\\\\par '", ")", ")", "else", ":", "file", ".", "write", "(", "name", "+", "'\\\\par\\\\par'", ")", "file", ".", "write", "(", "'}'", ")", "file", ".", "close", "(", ")" ]
Creates a License.rtf file with the content of "X_MSI_LICENSE_TEXT" in the given directory
[ "Creates", "a", "License", ".", "rtf", "file", "with", "the", "content", "of", "X_MSI_LICENSE_TEXT", "in", "the", "given", "directory" ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/packaging/msi.py#L431-L451
train
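The RTF written by build_license_file is easy to misread from the string concatenation alone, so here is a small self-contained sketch of the same transformation. The FakeDir class and the sample spec are hypothetical stand-ins for the SCons directory node and packaging spec; only the newline-to-paragraph rewrite mirrors the function above.

import os
import tempfile

class FakeDir:
    # Hypothetical stand-in exposing the one method the helper calls.
    def __init__(self, path):
        self._path = path
    def get_path(self):
        return self._path

spec = {'LICENSE': 'MIT', 'X_MSI_LICENSE_TEXT': 'Permission is hereby granted\nfree of charge.'}
directory = FakeDir(tempfile.mkdtemp())

# Same transformation as above: newlines in the license text become RTF paragraph breaks.
with open(os.path.join(directory.get_path(), 'License.rtf'), 'w') as rtf:
    rtf.write('{\\rtf')
    rtf.write(spec['X_MSI_LICENSE_TEXT'].replace('\n', '\\par '))
    rtf.write('}')

with open(os.path.join(directory.get_path(), 'License.rtf')) as rtf:
    print(rtf.read())   # {\rtfPermission is hereby granted\par free of charge.}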
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/packaging/msi.py
build_wxsfile_header_section
def build_wxsfile_header_section(root, spec): """ Adds the xml file node which define the package meta-data. """ # Create the needed DOM nodes and add them at the correct position in the tree. factory = Document() Product = factory.createElement( 'Product' ) Package = factory.createElement( 'Package' ) root.childNodes.append( Product ) Product.childNodes.append( Package ) # set "mandatory" default values if 'X_MSI_LANGUAGE' not in spec: spec['X_MSI_LANGUAGE'] = '1033' # select english # mandatory sections, will throw a KeyError if the tag is not available Product.attributes['Name'] = escape( spec['NAME'] ) Product.attributes['Version'] = escape( spec['VERSION'] ) Product.attributes['Manufacturer'] = escape( spec['VENDOR'] ) Product.attributes['Language'] = escape( spec['X_MSI_LANGUAGE'] ) Package.attributes['Description'] = escape( spec['SUMMARY'] ) # now the optional tags, for which we avoid the KeyErrror exception if 'DESCRIPTION' in spec: Package.attributes['Comments'] = escape( spec['DESCRIPTION'] ) if 'X_MSI_UPGRADE_CODE' in spec: Package.attributes['X_MSI_UPGRADE_CODE'] = escape( spec['X_MSI_UPGRADE_CODE'] ) # We hardcode the media tag as our current model cannot handle it. Media = factory.createElement('Media') Media.attributes['Id'] = '1' Media.attributes['Cabinet'] = 'default.cab' Media.attributes['EmbedCab'] = 'yes' root.getElementsByTagName('Product')[0].childNodes.append(Media)
python
def build_wxsfile_header_section(root, spec): """ Adds the xml file node which define the package meta-data. """ # Create the needed DOM nodes and add them at the correct position in the tree. factory = Document() Product = factory.createElement( 'Product' ) Package = factory.createElement( 'Package' ) root.childNodes.append( Product ) Product.childNodes.append( Package ) # set "mandatory" default values if 'X_MSI_LANGUAGE' not in spec: spec['X_MSI_LANGUAGE'] = '1033' # select english # mandatory sections, will throw a KeyError if the tag is not available Product.attributes['Name'] = escape( spec['NAME'] ) Product.attributes['Version'] = escape( spec['VERSION'] ) Product.attributes['Manufacturer'] = escape( spec['VENDOR'] ) Product.attributes['Language'] = escape( spec['X_MSI_LANGUAGE'] ) Package.attributes['Description'] = escape( spec['SUMMARY'] ) # now the optional tags, for which we avoid the KeyErrror exception if 'DESCRIPTION' in spec: Package.attributes['Comments'] = escape( spec['DESCRIPTION'] ) if 'X_MSI_UPGRADE_CODE' in spec: Package.attributes['X_MSI_UPGRADE_CODE'] = escape( spec['X_MSI_UPGRADE_CODE'] ) # We hardcode the media tag as our current model cannot handle it. Media = factory.createElement('Media') Media.attributes['Id'] = '1' Media.attributes['Cabinet'] = 'default.cab' Media.attributes['EmbedCab'] = 'yes' root.getElementsByTagName('Product')[0].childNodes.append(Media)
[ "def", "build_wxsfile_header_section", "(", "root", ",", "spec", ")", ":", "# Create the needed DOM nodes and add them at the correct position in the tree.", "factory", "=", "Document", "(", ")", "Product", "=", "factory", ".", "createElement", "(", "'Product'", ")", "Package", "=", "factory", ".", "createElement", "(", "'Package'", ")", "root", ".", "childNodes", ".", "append", "(", "Product", ")", "Product", ".", "childNodes", ".", "append", "(", "Package", ")", "# set \"mandatory\" default values", "if", "'X_MSI_LANGUAGE'", "not", "in", "spec", ":", "spec", "[", "'X_MSI_LANGUAGE'", "]", "=", "'1033'", "# select english", "# mandatory sections, will throw a KeyError if the tag is not available", "Product", ".", "attributes", "[", "'Name'", "]", "=", "escape", "(", "spec", "[", "'NAME'", "]", ")", "Product", ".", "attributes", "[", "'Version'", "]", "=", "escape", "(", "spec", "[", "'VERSION'", "]", ")", "Product", ".", "attributes", "[", "'Manufacturer'", "]", "=", "escape", "(", "spec", "[", "'VENDOR'", "]", ")", "Product", ".", "attributes", "[", "'Language'", "]", "=", "escape", "(", "spec", "[", "'X_MSI_LANGUAGE'", "]", ")", "Package", ".", "attributes", "[", "'Description'", "]", "=", "escape", "(", "spec", "[", "'SUMMARY'", "]", ")", "# now the optional tags, for which we avoid the KeyErrror exception", "if", "'DESCRIPTION'", "in", "spec", ":", "Package", ".", "attributes", "[", "'Comments'", "]", "=", "escape", "(", "spec", "[", "'DESCRIPTION'", "]", ")", "if", "'X_MSI_UPGRADE_CODE'", "in", "spec", ":", "Package", ".", "attributes", "[", "'X_MSI_UPGRADE_CODE'", "]", "=", "escape", "(", "spec", "[", "'X_MSI_UPGRADE_CODE'", "]", ")", "# We hardcode the media tag as our current model cannot handle it.", "Media", "=", "factory", ".", "createElement", "(", "'Media'", ")", "Media", ".", "attributes", "[", "'Id'", "]", "=", "'1'", "Media", ".", "attributes", "[", "'Cabinet'", "]", "=", "'default.cab'", "Media", ".", "attributes", "[", "'EmbedCab'", "]", "=", "'yes'", "root", ".", "getElementsByTagName", "(", "'Product'", ")", "[", "0", "]", ".", "childNodes", ".", "append", "(", "Media", ")" ]
Adds the xml file nodes which define the package meta-data.
[ "Adds", "the", "xml", "file", "node", "which", "define", "the", "package", "meta", "-", "data", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/packaging/msi.py#L456-L490
train
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/suncxx.py
generate
def generate(env): """Add Builders and construction variables for SunPRO C++.""" path, cxx, shcxx, version = get_cppc(env) if path: cxx = os.path.join(path, cxx) shcxx = os.path.join(path, shcxx) cplusplus.generate(env) env['CXX'] = cxx env['SHCXX'] = shcxx env['CXXVERSION'] = version env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS -KPIC') env['SHOBJPREFIX'] = 'so_' env['SHOBJSUFFIX'] = '.o'
python
def generate(env): """Add Builders and construction variables for SunPRO C++.""" path, cxx, shcxx, version = get_cppc(env) if path: cxx = os.path.join(path, cxx) shcxx = os.path.join(path, shcxx) cplusplus.generate(env) env['CXX'] = cxx env['SHCXX'] = shcxx env['CXXVERSION'] = version env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS -KPIC') env['SHOBJPREFIX'] = 'so_' env['SHOBJSUFFIX'] = '.o'
[ "def", "generate", "(", "env", ")", ":", "path", ",", "cxx", ",", "shcxx", ",", "version", "=", "get_cppc", "(", "env", ")", "if", "path", ":", "cxx", "=", "os", ".", "path", ".", "join", "(", "path", ",", "cxx", ")", "shcxx", "=", "os", ".", "path", ".", "join", "(", "path", ",", "shcxx", ")", "cplusplus", ".", "generate", "(", "env", ")", "env", "[", "'CXX'", "]", "=", "cxx", "env", "[", "'SHCXX'", "]", "=", "shcxx", "env", "[", "'CXXVERSION'", "]", "=", "version", "env", "[", "'SHCXXFLAGS'", "]", "=", "SCons", ".", "Util", ".", "CLVar", "(", "'$CXXFLAGS -KPIC'", ")", "env", "[", "'SHOBJPREFIX'", "]", "=", "'so_'", "env", "[", "'SHOBJSUFFIX'", "]", "=", "'.o'" ]
Add Builders and construction variables for SunPRO C++.
[ "Add", "Builders", "and", "construction", "variables", "for", "SunPRO", "C", "++", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/suncxx.py#L116-L130
train
iotile/coretools
iotilecore/iotile/core/hw/reports/flexible_dictionary.py
FlexibleDictionaryReport.FromReadings
def FromReadings(cls, uuid, readings, events, report_id=IOTileReading.InvalidReadingID, selector=0xFFFF, streamer=0x100, sent_timestamp=0, received_time=None): """Create a flexible dictionary report from a list of readings and events. Args: uuid (int): The uuid of the device that this report came from readings (list of IOTileReading): A list of IOTileReading objects containing the data in the report events (list of IOTileEvent): A list of the events contained in the report. report_id (int): The id of the report. If not provided it defaults to IOTileReading.InvalidReadingID. Note that you can specify anything you want for the report id but for actual IOTile devices the report id will always be greater than the id of all of the readings contained in the report since devices generate ids sequentially. selector (int): The streamer selector of this report. This can be anything but if the report came from a device, it would correspond with the query the device used to pick readings to go into the report. streamer (int): The streamer id that this reading was sent from. sent_timestamp (int): The device's uptime that sent this report. received_time(datetime): The UTC time when this report was received from an IOTile device. If it is being created now, received_time defaults to datetime.utcnow(). Returns: FlexibleDictionaryReport: A report containing the readings and events passed in. """ lowest_id = IOTileReading.InvalidReadingID highest_id = IOTileReading.InvalidReadingID for item in itertools.chain(iter(readings), iter(events)): if item.reading_id == IOTileReading.InvalidReadingID: continue if lowest_id == IOTileReading.InvalidReadingID or item.reading_id < lowest_id: lowest_id = item.reading_id if highest_id == IOTileReading.InvalidReadingID or item.reading_id > highest_id: highest_id = item.reading_id reading_list = [x.asdict() for x in readings] event_list = [x.asdict() for x in events] report_dict = { "format": cls.FORMAT_TAG, "device": uuid, "streamer_index": streamer, "streamer_selector": selector, "incremental_id": report_id, "lowest_id": lowest_id, "highest_id": highest_id, "device_sent_timestamp": sent_timestamp, "events": event_list, "data": reading_list } encoded = msgpack.packb(report_dict, default=_encode_datetime, use_bin_type=True) return FlexibleDictionaryReport(encoded, signed=False, encrypted=False, received_time=received_time)
python
def FromReadings(cls, uuid, readings, events, report_id=IOTileReading.InvalidReadingID, selector=0xFFFF, streamer=0x100, sent_timestamp=0, received_time=None): """Create a flexible dictionary report from a list of readings and events. Args: uuid (int): The uuid of the device that this report came from readings (list of IOTileReading): A list of IOTileReading objects containing the data in the report events (list of IOTileEvent): A list of the events contained in the report. report_id (int): The id of the report. If not provided it defaults to IOTileReading.InvalidReadingID. Note that you can specify anything you want for the report id but for actual IOTile devices the report id will always be greater than the id of all of the readings contained in the report since devices generate ids sequentially. selector (int): The streamer selector of this report. This can be anything but if the report came from a device, it would correspond with the query the device used to pick readings to go into the report. streamer (int): The streamer id that this reading was sent from. sent_timestamp (int): The device's uptime that sent this report. received_time(datetime): The UTC time when this report was received from an IOTile device. If it is being created now, received_time defaults to datetime.utcnow(). Returns: FlexibleDictionaryReport: A report containing the readings and events passed in. """ lowest_id = IOTileReading.InvalidReadingID highest_id = IOTileReading.InvalidReadingID for item in itertools.chain(iter(readings), iter(events)): if item.reading_id == IOTileReading.InvalidReadingID: continue if lowest_id == IOTileReading.InvalidReadingID or item.reading_id < lowest_id: lowest_id = item.reading_id if highest_id == IOTileReading.InvalidReadingID or item.reading_id > highest_id: highest_id = item.reading_id reading_list = [x.asdict() for x in readings] event_list = [x.asdict() for x in events] report_dict = { "format": cls.FORMAT_TAG, "device": uuid, "streamer_index": streamer, "streamer_selector": selector, "incremental_id": report_id, "lowest_id": lowest_id, "highest_id": highest_id, "device_sent_timestamp": sent_timestamp, "events": event_list, "data": reading_list } encoded = msgpack.packb(report_dict, default=_encode_datetime, use_bin_type=True) return FlexibleDictionaryReport(encoded, signed=False, encrypted=False, received_time=received_time)
[ "def", "FromReadings", "(", "cls", ",", "uuid", ",", "readings", ",", "events", ",", "report_id", "=", "IOTileReading", ".", "InvalidReadingID", ",", "selector", "=", "0xFFFF", ",", "streamer", "=", "0x100", ",", "sent_timestamp", "=", "0", ",", "received_time", "=", "None", ")", ":", "lowest_id", "=", "IOTileReading", ".", "InvalidReadingID", "highest_id", "=", "IOTileReading", ".", "InvalidReadingID", "for", "item", "in", "itertools", ".", "chain", "(", "iter", "(", "readings", ")", ",", "iter", "(", "events", ")", ")", ":", "if", "item", ".", "reading_id", "==", "IOTileReading", ".", "InvalidReadingID", ":", "continue", "if", "lowest_id", "==", "IOTileReading", ".", "InvalidReadingID", "or", "item", ".", "reading_id", "<", "lowest_id", ":", "lowest_id", "=", "item", ".", "reading_id", "if", "highest_id", "==", "IOTileReading", ".", "InvalidReadingID", "or", "item", ".", "reading_id", ">", "highest_id", ":", "highest_id", "=", "item", ".", "reading_id", "reading_list", "=", "[", "x", ".", "asdict", "(", ")", "for", "x", "in", "readings", "]", "event_list", "=", "[", "x", ".", "asdict", "(", ")", "for", "x", "in", "events", "]", "report_dict", "=", "{", "\"format\"", ":", "cls", ".", "FORMAT_TAG", ",", "\"device\"", ":", "uuid", ",", "\"streamer_index\"", ":", "streamer", ",", "\"streamer_selector\"", ":", "selector", ",", "\"incremental_id\"", ":", "report_id", ",", "\"lowest_id\"", ":", "lowest_id", ",", "\"highest_id\"", ":", "highest_id", ",", "\"device_sent_timestamp\"", ":", "sent_timestamp", ",", "\"events\"", ":", "event_list", ",", "\"data\"", ":", "reading_list", "}", "encoded", "=", "msgpack", ".", "packb", "(", "report_dict", ",", "default", "=", "_encode_datetime", ",", "use_bin_type", "=", "True", ")", "return", "FlexibleDictionaryReport", "(", "encoded", ",", "signed", "=", "False", ",", "encrypted", "=", "False", ",", "received_time", "=", "received_time", ")" ]
Create a flexible dictionary report from a list of readings and events. Args: uuid (int): The uuid of the device that this report came from readings (list of IOTileReading): A list of IOTileReading objects containing the data in the report events (list of IOTileEvent): A list of the events contained in the report. report_id (int): The id of the report. If not provided it defaults to IOTileReading.InvalidReadingID. Note that you can specify anything you want for the report id but for actual IOTile devices the report id will always be greater than the id of all of the readings contained in the report since devices generate ids sequentially. selector (int): The streamer selector of this report. This can be anything but if the report came from a device, it would correspond with the query the device used to pick readings to go into the report. streamer (int): The streamer id that this reading was sent from. sent_timestamp (int): The device's uptime that sent this report. received_time(datetime): The UTC time when this report was received from an IOTile device. If it is being created now, received_time defaults to datetime.utcnow(). Returns: FlexibleDictionaryReport: A report containing the readings and events passed in.
[ "Create", "a", "flexible", "dictionary", "report", "from", "a", "list", "of", "readings", "and", "events", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/reports/flexible_dictionary.py#L28-L80
train
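A minimal usage sketch for FromReadings follows. The import path is inferred from the file path recorded above, and the empty readings/events lists are used only to keep the example self-contained; real callers would pass populated IOTileReading and IOTileEvent lists.

from iotile.core.hw.reports.flexible_dictionary import FlexibleDictionaryReport

# Build a (content-free) report using only the keyword arguments documented above.
report = FlexibleDictionaryReport.FromReadings(
    uuid=0x1234,        # device uuid
    readings=[],        # list of IOTileReading objects
    events=[],          # list of IOTileEvent objects
    streamer=0x100,
    selector=0xFFFF,
    sent_timestamp=0,
)

print(type(report).__name__)   # FlexibleDictionaryReport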
iotile/coretools
iotilecore/iotile/core/hw/reports/flexible_dictionary.py
FlexibleDictionaryReport.decode
def decode(self): """Decode this report from a msgpack encoded binary blob.""" report_dict = msgpack.unpackb(self.raw_report, raw=False) events = [IOTileEvent.FromDict(x) for x in report_dict.get('events', [])] readings = [IOTileReading.FromDict(x) for x in report_dict.get('data', [])] if 'device' not in report_dict: raise DataError("Invalid encoded FlexibleDictionaryReport that did not " "have a device key set with the device uuid") self.origin = report_dict['device'] self.report_id = report_dict.get("incremental_id", IOTileReading.InvalidReadingID) self.sent_timestamp = report_dict.get("device_sent_timestamp", 0) self.origin_streamer = report_dict.get("streamer_index") self.streamer_selector = report_dict.get("streamer_selector") self.lowest_id = report_dict.get('lowest_id') self.highest_id = report_dict.get('highest_id') return readings, events
python
def decode(self): """Decode this report from a msgpack encoded binary blob.""" report_dict = msgpack.unpackb(self.raw_report, raw=False) events = [IOTileEvent.FromDict(x) for x in report_dict.get('events', [])] readings = [IOTileReading.FromDict(x) for x in report_dict.get('data', [])] if 'device' not in report_dict: raise DataError("Invalid encoded FlexibleDictionaryReport that did not " "have a device key set with the device uuid") self.origin = report_dict['device'] self.report_id = report_dict.get("incremental_id", IOTileReading.InvalidReadingID) self.sent_timestamp = report_dict.get("device_sent_timestamp", 0) self.origin_streamer = report_dict.get("streamer_index") self.streamer_selector = report_dict.get("streamer_selector") self.lowest_id = report_dict.get('lowest_id') self.highest_id = report_dict.get('highest_id') return readings, events
[ "def", "decode", "(", "self", ")", ":", "report_dict", "=", "msgpack", ".", "unpackb", "(", "self", ".", "raw_report", ",", "raw", "=", "False", ")", "events", "=", "[", "IOTileEvent", ".", "FromDict", "(", "x", ")", "for", "x", "in", "report_dict", ".", "get", "(", "'events'", ",", "[", "]", ")", "]", "readings", "=", "[", "IOTileReading", ".", "FromDict", "(", "x", ")", "for", "x", "in", "report_dict", ".", "get", "(", "'data'", ",", "[", "]", ")", "]", "if", "'device'", "not", "in", "report_dict", ":", "raise", "DataError", "(", "\"Invalid encoded FlexibleDictionaryReport that did not \"", "\"have a device key set with the device uuid\"", ")", "self", ".", "origin", "=", "report_dict", "[", "'device'", "]", "self", ".", "report_id", "=", "report_dict", ".", "get", "(", "\"incremental_id\"", ",", "IOTileReading", ".", "InvalidReadingID", ")", "self", ".", "sent_timestamp", "=", "report_dict", ".", "get", "(", "\"device_sent_timestamp\"", ",", "0", ")", "self", ".", "origin_streamer", "=", "report_dict", ".", "get", "(", "\"streamer_index\"", ")", "self", ".", "streamer_selector", "=", "report_dict", ".", "get", "(", "\"streamer_selector\"", ")", "self", ".", "lowest_id", "=", "report_dict", ".", "get", "(", "'lowest_id'", ")", "self", ".", "highest_id", "=", "report_dict", ".", "get", "(", "'highest_id'", ")", "return", "readings", ",", "events" ]
Decode this report from a msgpack encoded binary blob.
[ "Decode", "this", "report", "from", "a", "msgpack", "encoded", "binary", "blob", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/reports/flexible_dictionary.py#L82-L102
train
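Continuing the sketch above, decode() can be invoked to recover the readings and events and to populate the report metadata; calling it explicitly like this is an assumption made for illustration, since the base report class normally drives decoding itself.

# Parse the msgpack blob held in report.raw_report.
readings, events = report.decode()

print(report.origin)               # 4660 (0x1234), the device uuid
print(report.origin_streamer)      # 256 (0x100)
print(report.streamer_selector)    # 65535 (0xFFFF)
print(len(readings), len(events))  # 0 0 for the empty report built above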
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Action.py
_callable_contents
def _callable_contents(obj): """Return the signature contents of a callable Python object. """ try: # Test if obj is a method. return _function_contents(obj.__func__) except AttributeError: try: # Test if obj is a callable object. return _function_contents(obj.__call__.__func__) except AttributeError: try: # Test if obj is a code object. return _code_contents(obj) except AttributeError: # Test if obj is a function object. return _function_contents(obj)
python
def _callable_contents(obj): """Return the signature contents of a callable Python object. """ try: # Test if obj is a method. return _function_contents(obj.__func__) except AttributeError: try: # Test if obj is a callable object. return _function_contents(obj.__call__.__func__) except AttributeError: try: # Test if obj is a code object. return _code_contents(obj) except AttributeError: # Test if obj is a function object. return _function_contents(obj)
[ "def", "_callable_contents", "(", "obj", ")", ":", "try", ":", "# Test if obj is a method.", "return", "_function_contents", "(", "obj", ".", "__func__", ")", "except", "AttributeError", ":", "try", ":", "# Test if obj is a callable object.", "return", "_function_contents", "(", "obj", ".", "__call__", ".", "__func__", ")", "except", "AttributeError", ":", "try", ":", "# Test if obj is a code object.", "return", "_code_contents", "(", "obj", ")", "except", "AttributeError", ":", "# Test if obj is a function object.", "return", "_function_contents", "(", "obj", ")" ]
Return the signature contents of a callable Python object.
[ "Return", "the", "signature", "contents", "of", "a", "callable", "Python", "object", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Action.py#L150-L169
train
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Action.py
_object_contents
def _object_contents(obj): """Return the signature contents of any Python object. We have to handle the case where object contains a code object since it can be pickled directly. """ try: # Test if obj is a method. return _function_contents(obj.__func__) except AttributeError: try: # Test if obj is a callable object. return _function_contents(obj.__call__.__func__) except AttributeError: try: # Test if obj is a code object. return _code_contents(obj) except AttributeError: try: # Test if obj is a function object. return _function_contents(obj) except AttributeError as ae: # Should be a pickle-able Python object. try: return _object_instance_content(obj) # pickling an Action instance or object doesn't yield a stable # content as instance property may be dumped in different orders # return pickle.dumps(obj, ACTION_SIGNATURE_PICKLE_PROTOCOL) except (pickle.PicklingError, TypeError, AttributeError) as ex: # This is weird, but it seems that nested classes # are unpickable. The Python docs say it should # always be a PicklingError, but some Python # versions seem to return TypeError. Just do # the best we can. return bytearray(repr(obj), 'utf-8')
python
def _object_contents(obj): """Return the signature contents of any Python object. We have to handle the case where object contains a code object since it can be pickled directly. """ try: # Test if obj is a method. return _function_contents(obj.__func__) except AttributeError: try: # Test if obj is a callable object. return _function_contents(obj.__call__.__func__) except AttributeError: try: # Test if obj is a code object. return _code_contents(obj) except AttributeError: try: # Test if obj is a function object. return _function_contents(obj) except AttributeError as ae: # Should be a pickle-able Python object. try: return _object_instance_content(obj) # pickling an Action instance or object doesn't yield a stable # content as instance property may be dumped in different orders # return pickle.dumps(obj, ACTION_SIGNATURE_PICKLE_PROTOCOL) except (pickle.PicklingError, TypeError, AttributeError) as ex: # This is weird, but it seems that nested classes # are unpickable. The Python docs say it should # always be a PicklingError, but some Python # versions seem to return TypeError. Just do # the best we can. return bytearray(repr(obj), 'utf-8')
[ "def", "_object_contents", "(", "obj", ")", ":", "try", ":", "# Test if obj is a method.", "return", "_function_contents", "(", "obj", ".", "__func__", ")", "except", "AttributeError", ":", "try", ":", "# Test if obj is a callable object.", "return", "_function_contents", "(", "obj", ".", "__call__", ".", "__func__", ")", "except", "AttributeError", ":", "try", ":", "# Test if obj is a code object.", "return", "_code_contents", "(", "obj", ")", "except", "AttributeError", ":", "try", ":", "# Test if obj is a function object.", "return", "_function_contents", "(", "obj", ")", "except", "AttributeError", "as", "ae", ":", "# Should be a pickle-able Python object.", "try", ":", "return", "_object_instance_content", "(", "obj", ")", "# pickling an Action instance or object doesn't yield a stable", "# content as instance property may be dumped in different orders", "# return pickle.dumps(obj, ACTION_SIGNATURE_PICKLE_PROTOCOL)", "except", "(", "pickle", ".", "PicklingError", ",", "TypeError", ",", "AttributeError", ")", "as", "ex", ":", "# This is weird, but it seems that nested classes", "# are unpickable. The Python docs say it should", "# always be a PicklingError, but some Python", "# versions seem to return TypeError. Just do", "# the best we can.", "return", "bytearray", "(", "repr", "(", "obj", ")", ",", "'utf-8'", ")" ]
Return the signature contents of any Python object. We have to handle the case where object contains a code object since it can be pickled directly.
[ "Return", "the", "signature", "contents", "of", "any", "Python", "object", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Action.py#L172-L210
train
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Action.py
_code_contents
def _code_contents(code, docstring=None): """Return the signature contents of a code object. By providing direct access to the code object of the function, Python makes this extremely easy. Hooray! Unfortunately, older versions of Python include line number indications in the compiled byte code. Boo! So we remove the line number byte codes to prevent recompilations from moving a Python function. See: - https://docs.python.org/2/library/inspect.html - http://python-reference.readthedocs.io/en/latest/docs/code/index.html For info on what each co\_ variable provides The signature is as follows (should be byte/chars): co_argcount, len(co_varnames), len(co_cellvars), len(co_freevars), ( comma separated signature for each object in co_consts ), ( comma separated signature for each object in co_names ), ( The bytecode with line number bytecodes removed from co_code ) co_argcount - Returns the number of positional arguments (including arguments with default values). co_varnames - Returns a tuple containing the names of the local variables (starting with the argument names). co_cellvars - Returns a tuple containing the names of local variables that are referenced by nested functions. co_freevars - Returns a tuple containing the names of free variables. (?) co_consts - Returns a tuple containing the literals used by the bytecode. co_names - Returns a tuple containing the names used by the bytecode. co_code - Returns a string representing the sequence of bytecode instructions. """ # contents = [] # The code contents depends on the number of local variables # but not their actual names. contents = bytearray("{}, {}".format(code.co_argcount, len(code.co_varnames)), 'utf-8') contents.extend(b", ") contents.extend(bytearray(str(len(code.co_cellvars)), 'utf-8')) contents.extend(b", ") contents.extend(bytearray(str(len(code.co_freevars)), 'utf-8')) # The code contents depends on any constants accessed by the # function. Note that we have to call _object_contents on each # constants because the code object of nested functions can # show-up among the constants. z = [_object_contents(cc) for cc in code.co_consts[1:]] contents.extend(b',(') contents.extend(bytearray(',', 'utf-8').join(z)) contents.extend(b')') # The code contents depends on the variable names used to # accessed global variable, as changing the variable name changes # the variable actually accessed and therefore changes the # function result. z= [bytearray(_object_contents(cc)) for cc in code.co_names] contents.extend(b',(') contents.extend(bytearray(',','utf-8').join(z)) contents.extend(b')') # The code contents depends on its actual code!!! contents.extend(b',(') contents.extend(code.co_code) contents.extend(b')') return contents
python
def _code_contents(code, docstring=None): """Return the signature contents of a code object. By providing direct access to the code object of the function, Python makes this extremely easy. Hooray! Unfortunately, older versions of Python include line number indications in the compiled byte code. Boo! So we remove the line number byte codes to prevent recompilations from moving a Python function. See: - https://docs.python.org/2/library/inspect.html - http://python-reference.readthedocs.io/en/latest/docs/code/index.html For info on what each co\_ variable provides The signature is as follows (should be byte/chars): co_argcount, len(co_varnames), len(co_cellvars), len(co_freevars), ( comma separated signature for each object in co_consts ), ( comma separated signature for each object in co_names ), ( The bytecode with line number bytecodes removed from co_code ) co_argcount - Returns the number of positional arguments (including arguments with default values). co_varnames - Returns a tuple containing the names of the local variables (starting with the argument names). co_cellvars - Returns a tuple containing the names of local variables that are referenced by nested functions. co_freevars - Returns a tuple containing the names of free variables. (?) co_consts - Returns a tuple containing the literals used by the bytecode. co_names - Returns a tuple containing the names used by the bytecode. co_code - Returns a string representing the sequence of bytecode instructions. """ # contents = [] # The code contents depends on the number of local variables # but not their actual names. contents = bytearray("{}, {}".format(code.co_argcount, len(code.co_varnames)), 'utf-8') contents.extend(b", ") contents.extend(bytearray(str(len(code.co_cellvars)), 'utf-8')) contents.extend(b", ") contents.extend(bytearray(str(len(code.co_freevars)), 'utf-8')) # The code contents depends on any constants accessed by the # function. Note that we have to call _object_contents on each # constants because the code object of nested functions can # show-up among the constants. z = [_object_contents(cc) for cc in code.co_consts[1:]] contents.extend(b',(') contents.extend(bytearray(',', 'utf-8').join(z)) contents.extend(b')') # The code contents depends on the variable names used to # accessed global variable, as changing the variable name changes # the variable actually accessed and therefore changes the # function result. z= [bytearray(_object_contents(cc)) for cc in code.co_names] contents.extend(b',(') contents.extend(bytearray(',','utf-8').join(z)) contents.extend(b')') # The code contents depends on its actual code!!! contents.extend(b',(') contents.extend(code.co_code) contents.extend(b')') return contents
[ "def", "_code_contents", "(", "code", ",", "docstring", "=", "None", ")", ":", "# contents = []", "# The code contents depends on the number of local variables", "# but not their actual names.", "contents", "=", "bytearray", "(", "\"{}, {}\"", ".", "format", "(", "code", ".", "co_argcount", ",", "len", "(", "code", ".", "co_varnames", ")", ")", ",", "'utf-8'", ")", "contents", ".", "extend", "(", "b\", \"", ")", "contents", ".", "extend", "(", "bytearray", "(", "str", "(", "len", "(", "code", ".", "co_cellvars", ")", ")", ",", "'utf-8'", ")", ")", "contents", ".", "extend", "(", "b\", \"", ")", "contents", ".", "extend", "(", "bytearray", "(", "str", "(", "len", "(", "code", ".", "co_freevars", ")", ")", ",", "'utf-8'", ")", ")", "# The code contents depends on any constants accessed by the", "# function. Note that we have to call _object_contents on each", "# constants because the code object of nested functions can", "# show-up among the constants.", "z", "=", "[", "_object_contents", "(", "cc", ")", "for", "cc", "in", "code", ".", "co_consts", "[", "1", ":", "]", "]", "contents", ".", "extend", "(", "b',('", ")", "contents", ".", "extend", "(", "bytearray", "(", "','", ",", "'utf-8'", ")", ".", "join", "(", "z", ")", ")", "contents", ".", "extend", "(", "b')'", ")", "# The code contents depends on the variable names used to", "# accessed global variable, as changing the variable name changes", "# the variable actually accessed and therefore changes the", "# function result.", "z", "=", "[", "bytearray", "(", "_object_contents", "(", "cc", ")", ")", "for", "cc", "in", "code", ".", "co_names", "]", "contents", ".", "extend", "(", "b',('", ")", "contents", ".", "extend", "(", "bytearray", "(", "','", ",", "'utf-8'", ")", ".", "join", "(", "z", ")", ")", "contents", ".", "extend", "(", "b')'", ")", "# The code contents depends on its actual code!!!", "contents", ".", "extend", "(", "b',('", ")", "contents", ".", "extend", "(", "code", ".", "co_code", ")", "contents", ".", "extend", "(", "b')'", ")", "return", "contents" ]
Return the signature contents of a code object. By providing direct access to the code object of the function, Python makes this extremely easy. Hooray! Unfortunately, older versions of Python include line number indications in the compiled byte code. Boo! So we remove the line number byte codes to prevent recompilations from moving a Python function. See: - https://docs.python.org/2/library/inspect.html - http://python-reference.readthedocs.io/en/latest/docs/code/index.html For info on what each co\_ variable provides The signature is as follows (should be byte/chars): co_argcount, len(co_varnames), len(co_cellvars), len(co_freevars), ( comma separated signature for each object in co_consts ), ( comma separated signature for each object in co_names ), ( The bytecode with line number bytecodes removed from co_code ) co_argcount - Returns the number of positional arguments (including arguments with default values). co_varnames - Returns a tuple containing the names of the local variables (starting with the argument names). co_cellvars - Returns a tuple containing the names of local variables that are referenced by nested functions. co_freevars - Returns a tuple containing the names of free variables. (?) co_consts - Returns a tuple containing the literals used by the bytecode. co_names - Returns a tuple containing the names used by the bytecode. co_code - Returns a string representing the sequence of bytecode instructions.
[ "Return", "the", "signature", "contents", "of", "a", "code", "object", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Action.py#L213-L281
train
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Action.py
_object_instance_content
def _object_instance_content(obj): """ Returns consistant content for a action class or an instance thereof :Parameters: - `obj` Should be either and action class or an instance thereof :Returns: bytearray or bytes representing the obj suitable for generating a signature from. """ retval = bytearray() if obj is None: return b'N.' if isinstance(obj, SCons.Util.BaseStringTypes): return SCons.Util.to_bytes(obj) inst_class = obj.__class__ inst_class_name = bytearray(obj.__class__.__name__,'utf-8') inst_class_module = bytearray(obj.__class__.__module__,'utf-8') inst_class_hierarchy = bytearray(repr(inspect.getclasstree([obj.__class__,])),'utf-8') # print("ICH:%s : %s"%(inst_class_hierarchy, repr(obj))) properties = [(p, getattr(obj, p, "None")) for p in dir(obj) if not (p[:2] == '__' or inspect.ismethod(getattr(obj, p)) or inspect.isbuiltin(getattr(obj,p))) ] properties.sort() properties_str = ','.join(["%s=%s"%(p[0],p[1]) for p in properties]) properties_bytes = bytearray(properties_str,'utf-8') methods = [p for p in dir(obj) if inspect.ismethod(getattr(obj, p))] methods.sort() method_contents = [] for m in methods: # print("Method:%s"%m) v = _function_contents(getattr(obj, m)) # print("[%s->]V:%s [%s]"%(m,v,type(v))) method_contents.append(v) retval = bytearray(b'{') retval.extend(inst_class_name) retval.extend(b":") retval.extend(inst_class_module) retval.extend(b'}[[') retval.extend(inst_class_hierarchy) retval.extend(b']]{{') retval.extend(bytearray(b",").join(method_contents)) retval.extend(b"}}{{{") retval.extend(properties_bytes) retval.extend(b'}}}') return retval
python
def _object_instance_content(obj): """ Returns consistant content for a action class or an instance thereof :Parameters: - `obj` Should be either and action class or an instance thereof :Returns: bytearray or bytes representing the obj suitable for generating a signature from. """ retval = bytearray() if obj is None: return b'N.' if isinstance(obj, SCons.Util.BaseStringTypes): return SCons.Util.to_bytes(obj) inst_class = obj.__class__ inst_class_name = bytearray(obj.__class__.__name__,'utf-8') inst_class_module = bytearray(obj.__class__.__module__,'utf-8') inst_class_hierarchy = bytearray(repr(inspect.getclasstree([obj.__class__,])),'utf-8') # print("ICH:%s : %s"%(inst_class_hierarchy, repr(obj))) properties = [(p, getattr(obj, p, "None")) for p in dir(obj) if not (p[:2] == '__' or inspect.ismethod(getattr(obj, p)) or inspect.isbuiltin(getattr(obj,p))) ] properties.sort() properties_str = ','.join(["%s=%s"%(p[0],p[1]) for p in properties]) properties_bytes = bytearray(properties_str,'utf-8') methods = [p for p in dir(obj) if inspect.ismethod(getattr(obj, p))] methods.sort() method_contents = [] for m in methods: # print("Method:%s"%m) v = _function_contents(getattr(obj, m)) # print("[%s->]V:%s [%s]"%(m,v,type(v))) method_contents.append(v) retval = bytearray(b'{') retval.extend(inst_class_name) retval.extend(b":") retval.extend(inst_class_module) retval.extend(b'}[[') retval.extend(inst_class_hierarchy) retval.extend(b']]{{') retval.extend(bytearray(b",").join(method_contents)) retval.extend(b"}}{{{") retval.extend(properties_bytes) retval.extend(b'}}}') return retval
[ "def", "_object_instance_content", "(", "obj", ")", ":", "retval", "=", "bytearray", "(", ")", "if", "obj", "is", "None", ":", "return", "b'N.'", "if", "isinstance", "(", "obj", ",", "SCons", ".", "Util", ".", "BaseStringTypes", ")", ":", "return", "SCons", ".", "Util", ".", "to_bytes", "(", "obj", ")", "inst_class", "=", "obj", ".", "__class__", "inst_class_name", "=", "bytearray", "(", "obj", ".", "__class__", ".", "__name__", ",", "'utf-8'", ")", "inst_class_module", "=", "bytearray", "(", "obj", ".", "__class__", ".", "__module__", ",", "'utf-8'", ")", "inst_class_hierarchy", "=", "bytearray", "(", "repr", "(", "inspect", ".", "getclasstree", "(", "[", "obj", ".", "__class__", ",", "]", ")", ")", ",", "'utf-8'", ")", "# print(\"ICH:%s : %s\"%(inst_class_hierarchy, repr(obj)))", "properties", "=", "[", "(", "p", ",", "getattr", "(", "obj", ",", "p", ",", "\"None\"", ")", ")", "for", "p", "in", "dir", "(", "obj", ")", "if", "not", "(", "p", "[", ":", "2", "]", "==", "'__'", "or", "inspect", ".", "ismethod", "(", "getattr", "(", "obj", ",", "p", ")", ")", "or", "inspect", ".", "isbuiltin", "(", "getattr", "(", "obj", ",", "p", ")", ")", ")", "]", "properties", ".", "sort", "(", ")", "properties_str", "=", "','", ".", "join", "(", "[", "\"%s=%s\"", "%", "(", "p", "[", "0", "]", ",", "p", "[", "1", "]", ")", "for", "p", "in", "properties", "]", ")", "properties_bytes", "=", "bytearray", "(", "properties_str", ",", "'utf-8'", ")", "methods", "=", "[", "p", "for", "p", "in", "dir", "(", "obj", ")", "if", "inspect", ".", "ismethod", "(", "getattr", "(", "obj", ",", "p", ")", ")", "]", "methods", ".", "sort", "(", ")", "method_contents", "=", "[", "]", "for", "m", "in", "methods", ":", "# print(\"Method:%s\"%m)", "v", "=", "_function_contents", "(", "getattr", "(", "obj", ",", "m", ")", ")", "# print(\"[%s->]V:%s [%s]\"%(m,v,type(v)))", "method_contents", ".", "append", "(", "v", ")", "retval", "=", "bytearray", "(", "b'{'", ")", "retval", ".", "extend", "(", "inst_class_name", ")", "retval", ".", "extend", "(", "b\":\"", ")", "retval", ".", "extend", "(", "inst_class_module", ")", "retval", ".", "extend", "(", "b'}[['", ")", "retval", ".", "extend", "(", "inst_class_hierarchy", ")", "retval", ".", "extend", "(", "b']]{{'", ")", "retval", ".", "extend", "(", "bytearray", "(", "b\",\"", ")", ".", "join", "(", "method_contents", ")", ")", "retval", ".", "extend", "(", "b\"}}{{{\"", ")", "retval", ".", "extend", "(", "properties_bytes", ")", "retval", ".", "extend", "(", "b'}}}'", ")", "return", "retval" ]
Returns consistent content for an action class or an instance thereof :Parameters: - `obj` Should be either an action class or an instance thereof :Returns: bytearray or bytes representing the obj, suitable for generating a signature from.
[ "Returns", "consistant", "content", "for", "a", "action", "class", "or", "an", "instance", "thereof" ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Action.py#L332-L382
train
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Action.py
_do_create_keywords
def _do_create_keywords(args, kw): """This converts any arguments after the action argument into their equivalent keywords and adds them to the kw argument. """ v = kw.get('varlist', ()) # prevent varlist="FOO" from being interpreted as ['F', 'O', 'O'] if is_String(v): v = (v,) kw['varlist'] = tuple(v) if args: # turn positional args into equivalent keywords cmdstrfunc = args[0] if cmdstrfunc is None or is_String(cmdstrfunc): kw['cmdstr'] = cmdstrfunc elif callable(cmdstrfunc): kw['strfunction'] = cmdstrfunc else: raise SCons.Errors.UserError( 'Invalid command display variable type. ' 'You must either pass a string or a callback which ' 'accepts (target, source, env) as parameters.') if len(args) > 1: kw['varlist'] = tuple(SCons.Util.flatten(args[1:])) + kw['varlist'] if kw.get('strfunction', _null) is not _null \ and kw.get('cmdstr', _null) is not _null: raise SCons.Errors.UserError( 'Cannot have both strfunction and cmdstr args to Action()')
python
def _do_create_keywords(args, kw): """This converts any arguments after the action argument into their equivalent keywords and adds them to the kw argument. """ v = kw.get('varlist', ()) # prevent varlist="FOO" from being interpreted as ['F', 'O', 'O'] if is_String(v): v = (v,) kw['varlist'] = tuple(v) if args: # turn positional args into equivalent keywords cmdstrfunc = args[0] if cmdstrfunc is None or is_String(cmdstrfunc): kw['cmdstr'] = cmdstrfunc elif callable(cmdstrfunc): kw['strfunction'] = cmdstrfunc else: raise SCons.Errors.UserError( 'Invalid command display variable type. ' 'You must either pass a string or a callback which ' 'accepts (target, source, env) as parameters.') if len(args) > 1: kw['varlist'] = tuple(SCons.Util.flatten(args[1:])) + kw['varlist'] if kw.get('strfunction', _null) is not _null \ and kw.get('cmdstr', _null) is not _null: raise SCons.Errors.UserError( 'Cannot have both strfunction and cmdstr args to Action()')
[ "def", "_do_create_keywords", "(", "args", ",", "kw", ")", ":", "v", "=", "kw", ".", "get", "(", "'varlist'", ",", "(", ")", ")", "# prevent varlist=\"FOO\" from being interpreted as ['F', 'O', 'O']", "if", "is_String", "(", "v", ")", ":", "v", "=", "(", "v", ",", ")", "kw", "[", "'varlist'", "]", "=", "tuple", "(", "v", ")", "if", "args", ":", "# turn positional args into equivalent keywords", "cmdstrfunc", "=", "args", "[", "0", "]", "if", "cmdstrfunc", "is", "None", "or", "is_String", "(", "cmdstrfunc", ")", ":", "kw", "[", "'cmdstr'", "]", "=", "cmdstrfunc", "elif", "callable", "(", "cmdstrfunc", ")", ":", "kw", "[", "'strfunction'", "]", "=", "cmdstrfunc", "else", ":", "raise", "SCons", ".", "Errors", ".", "UserError", "(", "'Invalid command display variable type. '", "'You must either pass a string or a callback which '", "'accepts (target, source, env) as parameters.'", ")", "if", "len", "(", "args", ")", ">", "1", ":", "kw", "[", "'varlist'", "]", "=", "tuple", "(", "SCons", ".", "Util", ".", "flatten", "(", "args", "[", "1", ":", "]", ")", ")", "+", "kw", "[", "'varlist'", "]", "if", "kw", ".", "get", "(", "'strfunction'", ",", "_null", ")", "is", "not", "_null", "and", "kw", ".", "get", "(", "'cmdstr'", ",", "_null", ")", "is", "not", "_null", ":", "raise", "SCons", ".", "Errors", ".", "UserError", "(", "'Cannot have both strfunction and cmdstr args to Action()'", ")" ]
This converts any arguments after the action argument into their equivalent keywords and adds them to the kw argument.
[ "This", "converts", "any", "arguments", "after", "the", "action", "argument", "into", "their", "equivalent", "keywords", "and", "adds", "them", "to", "the", "kw", "argument", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Action.py#L413-L438
train
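The argument normalisation above is easiest to see with concrete values. The helper below is a standalone re-statement of the same rules (not the SCons code itself), so it can run without an SCons checkout.

def normalise_action_keywords(args, kw):
    # A bare string varlist is wrapped so "FOO" does not become ('F', 'O', 'O').
    v = kw.get('varlist', ())
    if isinstance(v, str):
        v = (v,)
    kw['varlist'] = tuple(v)
    if args:
        first = args[0]
        if first is None or isinstance(first, str):
            kw['cmdstr'] = first        # positional display string
        elif callable(first):
            kw['strfunction'] = first   # positional display callback
    return kw

print(normalise_action_keywords(('building $TARGET',), {'varlist': 'FOO'}))
# {'varlist': ('FOO',), 'cmdstr': 'building $TARGET'}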
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Action.py
_do_create_list_action
def _do_create_list_action(act, kw): """A factory for list actions. Convert the input list into Actions and then wrap them in a ListAction.""" acts = [] for a in act: aa = _do_create_action(a, kw) if aa is not None: acts.append(aa) if not acts: return ListAction([]) elif len(acts) == 1: return acts[0] else: return ListAction(acts)
python
def _do_create_list_action(act, kw): """A factory for list actions. Convert the input list into Actions and then wrap them in a ListAction.""" acts = [] for a in act: aa = _do_create_action(a, kw) if aa is not None: acts.append(aa) if not acts: return ListAction([]) elif len(acts) == 1: return acts[0] else: return ListAction(acts)
[ "def", "_do_create_list_action", "(", "act", ",", "kw", ")", ":", "acts", "=", "[", "]", "for", "a", "in", "act", ":", "aa", "=", "_do_create_action", "(", "a", ",", "kw", ")", "if", "aa", "is", "not", "None", ":", "acts", ".", "append", "(", "aa", ")", "if", "not", "acts", ":", "return", "ListAction", "(", "[", "]", ")", "elif", "len", "(", "acts", ")", "==", "1", ":", "return", "acts", "[", "0", "]", "else", ":", "return", "ListAction", "(", "acts", ")" ]
A factory for list actions. Convert the input list into Actions and then wrap them in a ListAction.
[ "A", "factory", "for", "list", "actions", ".", "Convert", "the", "input", "list", "into", "Actions", "and", "then", "wrap", "them", "in", "a", "ListAction", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Action.py#L494-L506
train
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Action.py
Action
def Action(act, *args, **kw): """A factory for action objects.""" # Really simple: the _do_create_* routines do the heavy lifting. _do_create_keywords(args, kw) if is_List(act): return _do_create_list_action(act, kw) return _do_create_action(act, kw)
python
def Action(act, *args, **kw): """A factory for action objects.""" # Really simple: the _do_create_* routines do the heavy lifting. _do_create_keywords(args, kw) if is_List(act): return _do_create_list_action(act, kw) return _do_create_action(act, kw)
[ "def", "Action", "(", "act", ",", "*", "args", ",", "*", "*", "kw", ")", ":", "# Really simple: the _do_create_* routines do the heavy lifting.", "_do_create_keywords", "(", "args", ",", "kw", ")", "if", "is_List", "(", "act", ")", ":", "return", "_do_create_list_action", "(", "act", ",", "kw", ")", "return", "_do_create_action", "(", "act", ",", "kw", ")" ]
A factory for action objects.
[ "A", "factory", "for", "action", "objects", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Action.py#L509-L515
train
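The Action() factory above is part of SCons' public API, so a short hedged usage example is worth showing. The construction-variable names are the conventional ones, and the example assumes it runs somewhere the SCons package is importable (for instance inside an SConstruct).

from SCons.Action import Action

# A command string becomes a CommandAction; the optional second positional
# argument is the display string printed while the command runs.
compile_act = Action('$CC $CCFLAGS -c -o $TARGET $SOURCE', 'Compiling $TARGET')

# A Python callable becomes a FunctionAction.
def touch(target, source, env):
    open(str(target[0]), 'w').close()
    return 0   # zero signals success to SCons

touch_act = Action(touch, varlist=['EXTRA_DEP_VAR'])

# A list of actions becomes a ListAction that runs each step in order.
both = Action([compile_act, touch_act])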
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Action.py
_string_from_cmd_list
def _string_from_cmd_list(cmd_list): """Takes a list of command line arguments and returns a pretty representation for printing.""" cl = [] for arg in map(str, cmd_list): if ' ' in arg or '\t' in arg: arg = '"' + arg + '"' cl.append(arg) return ' '.join(cl)
python
def _string_from_cmd_list(cmd_list): """Takes a list of command line arguments and returns a pretty representation for printing.""" cl = [] for arg in map(str, cmd_list): if ' ' in arg or '\t' in arg: arg = '"' + arg + '"' cl.append(arg) return ' '.join(cl)
[ "def", "_string_from_cmd_list", "(", "cmd_list", ")", ":", "cl", "=", "[", "]", "for", "arg", "in", "map", "(", "str", ",", "cmd_list", ")", ":", "if", "' '", "in", "arg", "or", "'\\t'", "in", "arg", ":", "arg", "=", "'\"'", "+", "arg", "+", "'\"'", "cl", ".", "append", "(", "arg", ")", "return", "' '", ".", "join", "(", "cl", ")" ]
Takes a list of command line arguments and returns a pretty representation for printing.
[ "Takes", "a", "list", "of", "command", "line", "arguments", "and", "returns", "a", "pretty", "representation", "for", "printing", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Action.py#L727-L735
train
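Because the quoting rule above only matters for display, a quick standalone check is shown here; the function is re-stated locally so the snippet runs outside SCons.

def _string_from_cmd_list(cmd_list):
    # Arguments containing whitespace are wrapped in double quotes for printing only.
    cl = []
    for arg in map(str, cmd_list):
        if ' ' in arg or '\t' in arg:
            arg = '"' + arg + '"'
        cl.append(arg)
    return ' '.join(cl)

print(_string_from_cmd_list(['gcc', '-o', 'my prog', 'main.c']))
# gcc -o "my prog" main.c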
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Action.py
get_default_ENV
def get_default_ENV(env): """ A fiddlin' little function that has an 'import SCons.Environment' which can't be moved to the top level without creating an import loop. Since this import creates a local variable named 'SCons', it blocks access to the global variable, so we move it here to prevent complaints about local variables being used uninitialized. """ global default_ENV try: return env['ENV'] except KeyError: if not default_ENV: import SCons.Environment # This is a hideously expensive way to get a default shell # environment. What it really should do is run the platform # setup to get the default ENV. Fortunately, it's incredibly # rare for an Environment not to have a shell environment, so # we're not going to worry about it overmuch. default_ENV = SCons.Environment.Environment()['ENV'] return default_ENV
python
def get_default_ENV(env): """ A fiddlin' little function that has an 'import SCons.Environment' which can't be moved to the top level without creating an import loop. Since this import creates a local variable named 'SCons', it blocks access to the global variable, so we move it here to prevent complaints about local variables being used uninitialized. """ global default_ENV try: return env['ENV'] except KeyError: if not default_ENV: import SCons.Environment # This is a hideously expensive way to get a default shell # environment. What it really should do is run the platform # setup to get the default ENV. Fortunately, it's incredibly # rare for an Environment not to have a shell environment, so # we're not going to worry about it overmuch. default_ENV = SCons.Environment.Environment()['ENV'] return default_ENV
[ "def", "get_default_ENV", "(", "env", ")", ":", "global", "default_ENV", "try", ":", "return", "env", "[", "'ENV'", "]", "except", "KeyError", ":", "if", "not", "default_ENV", ":", "import", "SCons", ".", "Environment", "# This is a hideously expensive way to get a default shell", "# environment. What it really should do is run the platform", "# setup to get the default ENV. Fortunately, it's incredibly", "# rare for an Environment not to have a shell environment, so", "# we're not going to worry about it overmuch.", "default_ENV", "=", "SCons", ".", "Environment", ".", "Environment", "(", ")", "[", "'ENV'", "]", "return", "default_ENV" ]
A fiddlin' little function that has an 'import SCons.Environment' which can't be moved to the top level without creating an import loop. Since this import creates a local variable named 'SCons', it blocks access to the global variable, so we move it here to prevent complaints about local variables being used uninitialized.
[ "A", "fiddlin", "little", "function", "that", "has", "an", "import", "SCons", ".", "Environment", "which", "can", "t", "be", "moved", "to", "the", "top", "level", "without", "creating", "an", "import", "loop", ".", "Since", "this", "import", "creates", "a", "local", "variable", "named", "SCons", "it", "blocks", "access", "to", "the", "global", "variable", "so", "we", "move", "it", "here", "to", "prevent", "complaints", "about", "local", "variables", "being", "used", "uninitialized", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Action.py#L740-L760
train
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Action.py
CommandAction.execute
def execute(self, target, source, env, executor=None): """Execute a command action. This will handle lists of commands as well as individual commands, because construction variable substitution may turn a single "command" into a list. This means that this class can actually handle lists of commands, even though that's not how we use it externally. """ escape_list = SCons.Subst.escape_list flatten_sequence = SCons.Util.flatten_sequence try: shell = env['SHELL'] except KeyError: raise SCons.Errors.UserError('Missing SHELL construction variable.') try: spawn = env['SPAWN'] except KeyError: raise SCons.Errors.UserError('Missing SPAWN construction variable.') else: if is_String(spawn): spawn = env.subst(spawn, raw=1, conv=lambda x: x) escape = env.get('ESCAPE', lambda x: x) ENV = get_default_ENV(env) # Ensure that the ENV values are all strings: for key, value in ENV.items(): if not is_String(value): if is_List(value): # If the value is a list, then we assume it is a # path list, because that's a pretty common list-like # value to stick in an environment variable: value = flatten_sequence(value) ENV[key] = os.pathsep.join(map(str, value)) else: # If it isn't a string or a list, then we just coerce # it to a string, which is the proper way to handle # Dir and File instances and will produce something # reasonable for just about everything else: ENV[key] = str(value) if executor: target = executor.get_all_targets() source = executor.get_all_sources() cmd_list, ignore, silent = self.process(target, list(map(rfile, source)), env, executor) # Use len() to filter out any "command" that's zero-length. for cmd_line in filter(len, cmd_list): # Escape the command line for the interpreter we are using. cmd_line = escape_list(cmd_line, escape) result = spawn(shell, escape, cmd_line[0], cmd_line, ENV) if not ignore and result: msg = "Error %s" % result return SCons.Errors.BuildError(errstr=msg, status=result, action=self, command=cmd_line) return 0
python
def execute(self, target, source, env, executor=None): """Execute a command action. This will handle lists of commands as well as individual commands, because construction variable substitution may turn a single "command" into a list. This means that this class can actually handle lists of commands, even though that's not how we use it externally. """ escape_list = SCons.Subst.escape_list flatten_sequence = SCons.Util.flatten_sequence try: shell = env['SHELL'] except KeyError: raise SCons.Errors.UserError('Missing SHELL construction variable.') try: spawn = env['SPAWN'] except KeyError: raise SCons.Errors.UserError('Missing SPAWN construction variable.') else: if is_String(spawn): spawn = env.subst(spawn, raw=1, conv=lambda x: x) escape = env.get('ESCAPE', lambda x: x) ENV = get_default_ENV(env) # Ensure that the ENV values are all strings: for key, value in ENV.items(): if not is_String(value): if is_List(value): # If the value is a list, then we assume it is a # path list, because that's a pretty common list-like # value to stick in an environment variable: value = flatten_sequence(value) ENV[key] = os.pathsep.join(map(str, value)) else: # If it isn't a string or a list, then we just coerce # it to a string, which is the proper way to handle # Dir and File instances and will produce something # reasonable for just about everything else: ENV[key] = str(value) if executor: target = executor.get_all_targets() source = executor.get_all_sources() cmd_list, ignore, silent = self.process(target, list(map(rfile, source)), env, executor) # Use len() to filter out any "command" that's zero-length. for cmd_line in filter(len, cmd_list): # Escape the command line for the interpreter we are using. cmd_line = escape_list(cmd_line, escape) result = spawn(shell, escape, cmd_line[0], cmd_line, ENV) if not ignore and result: msg = "Error %s" % result return SCons.Errors.BuildError(errstr=msg, status=result, action=self, command=cmd_line) return 0
[ "def", "execute", "(", "self", ",", "target", ",", "source", ",", "env", ",", "executor", "=", "None", ")", ":", "escape_list", "=", "SCons", ".", "Subst", ".", "escape_list", "flatten_sequence", "=", "SCons", ".", "Util", ".", "flatten_sequence", "try", ":", "shell", "=", "env", "[", "'SHELL'", "]", "except", "KeyError", ":", "raise", "SCons", ".", "Errors", ".", "UserError", "(", "'Missing SHELL construction variable.'", ")", "try", ":", "spawn", "=", "env", "[", "'SPAWN'", "]", "except", "KeyError", ":", "raise", "SCons", ".", "Errors", ".", "UserError", "(", "'Missing SPAWN construction variable.'", ")", "else", ":", "if", "is_String", "(", "spawn", ")", ":", "spawn", "=", "env", ".", "subst", "(", "spawn", ",", "raw", "=", "1", ",", "conv", "=", "lambda", "x", ":", "x", ")", "escape", "=", "env", ".", "get", "(", "'ESCAPE'", ",", "lambda", "x", ":", "x", ")", "ENV", "=", "get_default_ENV", "(", "env", ")", "# Ensure that the ENV values are all strings:", "for", "key", ",", "value", "in", "ENV", ".", "items", "(", ")", ":", "if", "not", "is_String", "(", "value", ")", ":", "if", "is_List", "(", "value", ")", ":", "# If the value is a list, then we assume it is a", "# path list, because that's a pretty common list-like", "# value to stick in an environment variable:", "value", "=", "flatten_sequence", "(", "value", ")", "ENV", "[", "key", "]", "=", "os", ".", "pathsep", ".", "join", "(", "map", "(", "str", ",", "value", ")", ")", "else", ":", "# If it isn't a string or a list, then we just coerce", "# it to a string, which is the proper way to handle", "# Dir and File instances and will produce something", "# reasonable for just about everything else:", "ENV", "[", "key", "]", "=", "str", "(", "value", ")", "if", "executor", ":", "target", "=", "executor", ".", "get_all_targets", "(", ")", "source", "=", "executor", ".", "get_all_sources", "(", ")", "cmd_list", ",", "ignore", ",", "silent", "=", "self", ".", "process", "(", "target", ",", "list", "(", "map", "(", "rfile", ",", "source", ")", ")", ",", "env", ",", "executor", ")", "# Use len() to filter out any \"command\" that's zero-length.", "for", "cmd_line", "in", "filter", "(", "len", ",", "cmd_list", ")", ":", "# Escape the command line for the interpreter we are using.", "cmd_line", "=", "escape_list", "(", "cmd_line", ",", "escape", ")", "result", "=", "spawn", "(", "shell", ",", "escape", ",", "cmd_line", "[", "0", "]", ",", "cmd_line", ",", "ENV", ")", "if", "not", "ignore", "and", "result", ":", "msg", "=", "\"Error %s\"", "%", "result", "return", "SCons", ".", "Errors", ".", "BuildError", "(", "errstr", "=", "msg", ",", "status", "=", "result", ",", "action", "=", "self", ",", "command", "=", "cmd_line", ")", "return", "0" ]
Execute a command action. This will handle lists of commands as well as individual commands, because construction variable substitution may turn a single "command" into a list. This means that this class can actually handle lists of commands, even though that's not how we use it externally.
[ "Execute", "a", "command", "action", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Action.py#L886-L947
train
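The middle of execute() above normalizes ENV values before spawning: list-like values are treated as path lists and joined with os.pathsep, everything else is coerced with str(). A self-contained sketch of just that coercion step, with SCons's is_String/is_List/flatten_sequence helpers replaced by plain isinstance checks:

import os

def stringify_env(env_dict):
    out = {}
    for key, value in env_dict.items():
        if isinstance(value, str):
            out[key] = value
        elif isinstance(value, (list, tuple)):
            # Assume list-like values are path lists, as execute() does.
            out[key] = os.pathsep.join(str(item) for item in value)
        else:
            # Dir/File nodes and other objects become their string form.
            out[key] = str(value)
    return out

print(stringify_env({'PATH': ['/usr/bin', '/bin'], 'JOBS': 4}))
# On POSIX this prints {'PATH': '/usr/bin:/bin', 'JOBS': '4'}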
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Action.py
FunctionAction.get_presig
def get_presig(self, target, source, env): """Return the signature contents of this callable action.""" try: return self.gc(target, source, env) except AttributeError: return self.funccontents
python
def get_presig(self, target, source, env): """Return the signature contents of this callable action.""" try: return self.gc(target, source, env) except AttributeError: return self.funccontents
[ "def", "get_presig", "(", "self", ",", "target", ",", "source", ",", "env", ")", ":", "try", ":", "return", "self", ".", "gc", "(", "target", ",", "source", ",", "env", ")", "except", "AttributeError", ":", "return", "self", ".", "funccontents" ]
Return the signature contents of this callable action.
[ "Return", "the", "signature", "contents", "of", "this", "callable", "action", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Action.py#L1232-L1237
train
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Action.py
ListAction.get_presig
def get_presig(self, target, source, env): """Return the signature contents of this action list. Simple concatenation of the signatures of the elements. """ return b"".join([bytes(x.get_contents(target, source, env)) for x in self.list])
python
def get_presig(self, target, source, env): """Return the signature contents of this action list. Simple concatenation of the signatures of the elements. """ return b"".join([bytes(x.get_contents(target, source, env)) for x in self.list])
[ "def", "get_presig", "(", "self", ",", "target", ",", "source", ",", "env", ")", ":", "return", "b\"\"", ".", "join", "(", "[", "bytes", "(", "x", ".", "get_contents", "(", "target", ",", "source", ",", "env", ")", ")", "for", "x", "in", "self", ".", "list", "]", ")" ]
Return the signature contents of this action list. Simple concatenation of the signatures of the elements.
[ "Return", "the", "signature", "contents", "of", "this", "action", "list", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Action.py#L1266-L1271
train
iotile/coretools
iotilegateway/iotilegateway/supervisor/service_manager.py
ServiceManager._notify_update
async def _notify_update(self, name, change_type, change_info=None, directed_client=None): """Notify updates on a service to anyone who cares.""" for monitor in self._monitors: try: result = monitor(name, change_type, change_info, directed_client=directed_client) if inspect.isawaitable(result): await result except Exception: # We can't allow any exceptions in a monitor routine to break the server. self._logger.warning("Error calling monitor with update %s", name, exc_info=True)
python
async def _notify_update(self, name, change_type, change_info=None, directed_client=None): """Notify updates on a service to anyone who cares.""" for monitor in self._monitors: try: result = monitor(name, change_type, change_info, directed_client=directed_client) if inspect.isawaitable(result): await result except Exception: # We can't allow any exceptions in a monitor routine to break the server. self._logger.warning("Error calling monitor with update %s", name, exc_info=True)
[ "async", "def", "_notify_update", "(", "self", ",", "name", ",", "change_type", ",", "change_info", "=", "None", ",", "directed_client", "=", "None", ")", ":", "for", "monitor", "in", "self", ".", "_monitors", ":", "try", ":", "result", "=", "monitor", "(", "name", ",", "change_type", ",", "change_info", ",", "directed_client", "=", "directed_client", ")", "if", "inspect", ".", "isawaitable", "(", "result", ")", ":", "await", "result", "except", "Exception", ":", "# We can't allow any exceptions in a monitor routine to break the server.", "self", ".", "_logger", ".", "warning", "(", "\"Error calling monitor with update %s\"", ",", "name", ",", "exc_info", "=", "True", ")" ]
Notify updates on a service to anyone who cares.
[ "Notify", "updates", "on", "a", "service", "to", "anyone", "who", "cares", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/service_manager.py#L61-L71
train
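_notify_update() shows a useful dispatch idiom: monitors may be plain callables or coroutine functions, so the result is awaited only when inspect.isawaitable() says so, and monitor failures are logged rather than propagated. A generic, runnable sketch of the same idiom follows; the monitor signature here is simplified and omits the directed_client keyword.

import asyncio
import inspect
import logging

logger = logging.getLogger("monitor_demo")

async def notify_all(monitors, name, change_type, change_info=None):
    for monitor in monitors:
        try:
            result = monitor(name, change_type, change_info)
            if inspect.isawaitable(result):
                await result
        except Exception:
            # Never let one bad monitor break the notification loop.
            logger.warning("Error calling monitor with update %s", name, exc_info=True)

def sync_monitor(name, change_type, change_info):
    print("sync monitor saw", name, change_type)

async def async_monitor(name, change_type, change_info):
    print("async monitor saw", name, change_type)

asyncio.run(notify_all([sync_monitor, async_monitor], "svc1", "heartbeat"))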
iotile/coretools
iotilegateway/iotilegateway/supervisor/service_manager.py
ServiceManager.update_state
async def update_state(self, short_name, state): """Set the current state of a service. If the state is unchanged from a previous attempt, this routine does nothing. Args: short_name (string): The short name of the service state (int): The new stae of the service """ if short_name not in self.services: raise ArgumentError("Service name is unknown", short_name=short_name) if state not in states.KNOWN_STATES: raise ArgumentError("Invalid service state", state=state) serv = self.services[short_name]['state'] if serv.state == state: return update = {} update['old_status'] = serv.state update['new_status'] = state update['new_status_string'] = states.KNOWN_STATES[state] serv.state = state await self._notify_update(short_name, 'state_change', update)
python
async def update_state(self, short_name, state): """Set the current state of a service. If the state is unchanged from a previous attempt, this routine does nothing. Args: short_name (string): The short name of the service state (int): The new stae of the service """ if short_name not in self.services: raise ArgumentError("Service name is unknown", short_name=short_name) if state not in states.KNOWN_STATES: raise ArgumentError("Invalid service state", state=state) serv = self.services[short_name]['state'] if serv.state == state: return update = {} update['old_status'] = serv.state update['new_status'] = state update['new_status_string'] = states.KNOWN_STATES[state] serv.state = state await self._notify_update(short_name, 'state_change', update)
[ "async", "def", "update_state", "(", "self", ",", "short_name", ",", "state", ")", ":", "if", "short_name", "not", "in", "self", ".", "services", ":", "raise", "ArgumentError", "(", "\"Service name is unknown\"", ",", "short_name", "=", "short_name", ")", "if", "state", "not", "in", "states", ".", "KNOWN_STATES", ":", "raise", "ArgumentError", "(", "\"Invalid service state\"", ",", "state", "=", "state", ")", "serv", "=", "self", ".", "services", "[", "short_name", "]", "[", "'state'", "]", "if", "serv", ".", "state", "==", "state", ":", "return", "update", "=", "{", "}", "update", "[", "'old_status'", "]", "=", "serv", ".", "state", "update", "[", "'new_status'", "]", "=", "state", "update", "[", "'new_status_string'", "]", "=", "states", ".", "KNOWN_STATES", "[", "state", "]", "serv", ".", "state", "=", "state", "await", "self", ".", "_notify_update", "(", "short_name", ",", "'state_change'", ",", "update", ")" ]
Set the current state of a service. If the state is unchanged from a previous attempt, this routine does nothing. Args: short_name (string): The short name of the service state (int): The new state of the service
[ "Set", "the", "current", "state", "of", "a", "service", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/service_manager.py#L73-L101
train
iotile/coretools
iotilegateway/iotilegateway/supervisor/service_manager.py
ServiceManager.add_service
def add_service(self, name, long_name, preregistered=False, notify=True): """Add a service to the list of tracked services. Args: name (string): A unique short service name for the service long_name (string): A longer, user friendly name for the service preregistered (bool): Whether this service is an expected preregistered service. notify (bool): Send notifications about this service to all clients Returns: awaitable: If notify is True, an awaitable for the notifications. Otherwise None. """ if name in self.services: raise ArgumentError("Could not add service because the long_name is taken", long_name=long_name) serv_state = states.ServiceState(name, long_name, preregistered) service = { 'state': serv_state, 'heartbeat_threshold': 600 } self.services[name] = service if notify: return self._notify_update(name, 'new_service', self.service_info(name)) return None
python
def add_service(self, name, long_name, preregistered=False, notify=True): """Add a service to the list of tracked services. Args: name (string): A unique short service name for the service long_name (string): A longer, user friendly name for the service preregistered (bool): Whether this service is an expected preregistered service. notify (bool): Send notifications about this service to all clients Returns: awaitable: If notify is True, an awaitable for the notifications. Otherwise None. """ if name in self.services: raise ArgumentError("Could not add service because the long_name is taken", long_name=long_name) serv_state = states.ServiceState(name, long_name, preregistered) service = { 'state': serv_state, 'heartbeat_threshold': 600 } self.services[name] = service if notify: return self._notify_update(name, 'new_service', self.service_info(name)) return None
[ "def", "add_service", "(", "self", ",", "name", ",", "long_name", ",", "preregistered", "=", "False", ",", "notify", "=", "True", ")", ":", "if", "name", "in", "self", ".", "services", ":", "raise", "ArgumentError", "(", "\"Could not add service because the long_name is taken\"", ",", "long_name", "=", "long_name", ")", "serv_state", "=", "states", ".", "ServiceState", "(", "name", ",", "long_name", ",", "preregistered", ")", "service", "=", "{", "'state'", ":", "serv_state", ",", "'heartbeat_threshold'", ":", "600", "}", "self", ".", "services", "[", "name", "]", "=", "service", "if", "notify", ":", "return", "self", ".", "_notify_update", "(", "name", ",", "'new_service'", ",", "self", ".", "service_info", "(", "name", ")", ")", "return", "None" ]
Add a service to the list of tracked services. Args: name (string): A unique short service name for the service long_name (string): A longer, user friendly name for the service preregistered (bool): Whether this service is an expected preregistered service. notify (bool): Send notifications about this service to all clients Returns: awaitable: If notify is True, an awaitable for the notifications. Otherwise None.
[ "Add", "a", "service", "to", "the", "list", "of", "tracked", "services", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/service_manager.py#L103-L134
train
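A hypothetical caller of the add_service()/update_state() pair above might look like the sketch below. Only the two method calls mirror the API shown in these records; how the ServiceManager instance is constructed, and the states.RUNNING constant, are assumptions for illustration (any key of states.KNOWN_STATES would do).

async def register_and_mark_running(manager, states):
    # add_service() returns an awaitable when notify=True, None otherwise.
    notify = manager.add_service("report_forwarder",
                                 "Report forwarding service",
                                 preregistered=True)
    if notify is not None:
        await notify
    # Transition the freshly registered service to a known state.
    await manager.update_state("report_forwarder", states.RUNNING)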
iotile/coretools
iotilegateway/iotilegateway/supervisor/service_manager.py
ServiceManager.service_info
def service_info(self, short_name): """Get static information about a service. Args: short_name (string): The short name of the service to query Returns: dict: A dictionary with the long_name and preregistered info on this service. """ if short_name not in self.services: raise ArgumentError("Unknown service name", short_name=short_name) info = {} info['short_name'] = short_name info['long_name'] = self.services[short_name]['state'].long_name info['preregistered'] = self.services[short_name]['state'].preregistered return info
python
def service_info(self, short_name): """Get static information about a service. Args: short_name (string): The short name of the service to query Returns: dict: A dictionary with the long_name and preregistered info on this service. """ if short_name not in self.services: raise ArgumentError("Unknown service name", short_name=short_name) info = {} info['short_name'] = short_name info['long_name'] = self.services[short_name]['state'].long_name info['preregistered'] = self.services[short_name]['state'].preregistered return info
[ "def", "service_info", "(", "self", ",", "short_name", ")", ":", "if", "short_name", "not", "in", "self", ".", "services", ":", "raise", "ArgumentError", "(", "\"Unknown service name\"", ",", "short_name", "=", "short_name", ")", "info", "=", "{", "}", "info", "[", "'short_name'", "]", "=", "short_name", "info", "[", "'long_name'", "]", "=", "self", ".", "services", "[", "short_name", "]", "[", "'state'", "]", ".", "long_name", "info", "[", "'preregistered'", "]", "=", "self", ".", "services", "[", "short_name", "]", "[", "'state'", "]", ".", "preregistered", "return", "info" ]
Get static information about a service. Args: short_name (string): The short name of the service to query Returns: dict: A dictionary with the long_name and preregistered info on this service.
[ "Get", "static", "information", "about", "a", "service", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/service_manager.py#L136-L155
train
iotile/coretools
iotilegateway/iotilegateway/supervisor/service_manager.py
ServiceManager.service_messages
def service_messages(self, short_name): """Get the messages stored for a service. Args: short_name (string): The short name of the service to get messages for Returns: list(ServiceMessage): A list of the ServiceMessages stored for this service """ if short_name not in self.services: raise ArgumentError("Unknown service name", short_name=short_name) return list(self.services[short_name]['state'].messages)
python
def service_messages(self, short_name): """Get the messages stored for a service. Args: short_name (string): The short name of the service to get messages for Returns: list(ServiceMessage): A list of the ServiceMessages stored for this service """ if short_name not in self.services: raise ArgumentError("Unknown service name", short_name=short_name) return list(self.services[short_name]['state'].messages)
[ "def", "service_messages", "(", "self", ",", "short_name", ")", ":", "if", "short_name", "not", "in", "self", ".", "services", ":", "raise", "ArgumentError", "(", "\"Unknown service name\"", ",", "short_name", "=", "short_name", ")", "return", "list", "(", "self", ".", "services", "[", "short_name", "]", "[", "'state'", "]", ".", "messages", ")" ]
Get the messages stored for a service. Args: short_name (string): The short name of the service to get messages for Returns: list(ServiceMessage): A list of the ServiceMessages stored for this service
[ "Get", "the", "messages", "stored", "for", "a", "service", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/service_manager.py#L157-L170
train
iotile/coretools
iotilegateway/iotilegateway/supervisor/service_manager.py
ServiceManager.service_headline
def service_headline(self, short_name): """Get the headline stored for a service. Args: short_name (string): The short name of the service to get messages for Returns: ServiceMessage: the headline or None if there is no headline """ if short_name not in self.services: raise ArgumentError("Unknown service name", short_name=short_name) return self.services[short_name]['state'].headline
python
def service_headline(self, short_name): """Get the headline stored for a service. Args: short_name (string): The short name of the service to get messages for Returns: ServiceMessage: the headline or None if there is no headline """ if short_name not in self.services: raise ArgumentError("Unknown service name", short_name=short_name) return self.services[short_name]['state'].headline
[ "def", "service_headline", "(", "self", ",", "short_name", ")", ":", "if", "short_name", "not", "in", "self", ".", "services", ":", "raise", "ArgumentError", "(", "\"Unknown service name\"", ",", "short_name", "=", "short_name", ")", "return", "self", ".", "services", "[", "short_name", "]", "[", "'state'", "]", ".", "headline" ]
Get the headline stored for a service. Args: short_name (string): The short name of the service to get the headline for Returns: ServiceMessage: the headline or None if there is no headline
[ "Get", "the", "headline", "stored", "for", "a", "service", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/service_manager.py#L172-L185
train
iotile/coretools
iotilegateway/iotilegateway/supervisor/service_manager.py
ServiceManager.service_status
def service_status(self, short_name): """Get the current status of a service. Returns information about the service such as the length since the last heartbeat, any status messages that have been posted about the service and whether the heartbeat should be considered out of the ordinary. Args: short_name (string): The short name of the service to query Returns: dict: A dictionary with the status of the service """ if short_name not in self.services: raise ArgumentError("Unknown service name", short_name=short_name) info = {} service = self.services[short_name]['state'] info['heartbeat_age'] = monotonic() - service.last_heartbeat info['numeric_status'] = service.state info['string_status'] = service.string_state return info
python
def service_status(self, short_name): """Get the current status of a service. Returns information about the service such as the length since the last heartbeat, any status messages that have been posted about the service and whether the heartbeat should be considered out of the ordinary. Args: short_name (string): The short name of the service to query Returns: dict: A dictionary with the status of the service """ if short_name not in self.services: raise ArgumentError("Unknown service name", short_name=short_name) info = {} service = self.services[short_name]['state'] info['heartbeat_age'] = monotonic() - service.last_heartbeat info['numeric_status'] = service.state info['string_status'] = service.string_state return info
[ "def", "service_status", "(", "self", ",", "short_name", ")", ":", "if", "short_name", "not", "in", "self", ".", "services", ":", "raise", "ArgumentError", "(", "\"Unknown service name\"", ",", "short_name", "=", "short_name", ")", "info", "=", "{", "}", "service", "=", "self", ".", "services", "[", "short_name", "]", "[", "'state'", "]", "info", "[", "'heartbeat_age'", "]", "=", "monotonic", "(", ")", "-", "service", ".", "last_heartbeat", "info", "[", "'numeric_status'", "]", "=", "service", ".", "state", "info", "[", "'string_status'", "]", "=", "service", ".", "string_state", "return", "info" ]
Get the current status of a service. Returns information about the service such as the time since the last heartbeat, any status messages that have been posted about the service and whether the heartbeat should be considered out of the ordinary. Args: short_name (string): The short name of the service to query Returns: dict: A dictionary with the status of the service
[ "Get", "the", "current", "status", "of", "a", "service", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/service_manager.py#L187-L212
train
iotile/coretools
iotilegateway/iotilegateway/supervisor/service_manager.py
ServiceManager.send_message
async def send_message(self, name, level, message): """Post a message for a service. Args: name (string): The short name of the service to query level (int): The level of the message (info, warning, error) message (string): The message contents """ if name not in self.services: raise ArgumentError("Unknown service name", short_name=name) msg = self.services[name]['state'].post_message(level, message) await self._notify_update(name, 'new_message', msg.to_dict())
python
async def send_message(self, name, level, message): """Post a message for a service. Args: name (string): The short name of the service to query level (int): The level of the message (info, warning, error) message (string): The message contents """ if name not in self.services: raise ArgumentError("Unknown service name", short_name=name) msg = self.services[name]['state'].post_message(level, message) await self._notify_update(name, 'new_message', msg.to_dict())
[ "async", "def", "send_message", "(", "self", ",", "name", ",", "level", ",", "message", ")", ":", "if", "name", "not", "in", "self", ".", "services", ":", "raise", "ArgumentError", "(", "\"Unknown service name\"", ",", "short_name", "=", "name", ")", "msg", "=", "self", ".", "services", "[", "name", "]", "[", "'state'", "]", ".", "post_message", "(", "level", ",", "message", ")", "await", "self", ".", "_notify_update", "(", "name", ",", "'new_message'", ",", "msg", ".", "to_dict", "(", ")", ")" ]
Post a message for a service. Args: name (string): The short name of the service to post the message for level (int): The level of the message (info, warning, error) message (string): The message contents
[ "Post", "a", "message", "for", "a", "service", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/service_manager.py#L214-L227
train
iotile/coretools
iotilegateway/iotilegateway/supervisor/service_manager.py
ServiceManager.set_headline
async def set_headline(self, name, level, message): """Set the sticky headline for a service. Args: name (string): The short name of the service to query level (int): The level of the message (info, warning, error) message (string): The message contents """ if name not in self.services: raise ArgumentError("Unknown service name", short_name=name) self.services[name]['state'].set_headline(level, message) headline = self.services[name]['state'].headline.to_dict() await self._notify_update(name, 'new_headline', headline)
python
async def set_headline(self, name, level, message): """Set the sticky headline for a service. Args: name (string): The short name of the service to query level (int): The level of the message (info, warning, error) message (string): The message contents """ if name not in self.services: raise ArgumentError("Unknown service name", short_name=name) self.services[name]['state'].set_headline(level, message) headline = self.services[name]['state'].headline.to_dict() await self._notify_update(name, 'new_headline', headline)
[ "async", "def", "set_headline", "(", "self", ",", "name", ",", "level", ",", "message", ")", ":", "if", "name", "not", "in", "self", ".", "services", ":", "raise", "ArgumentError", "(", "\"Unknown service name\"", ",", "short_name", "=", "name", ")", "self", ".", "services", "[", "name", "]", "[", "'state'", "]", ".", "set_headline", "(", "level", ",", "message", ")", "headline", "=", "self", ".", "services", "[", "name", "]", "[", "'state'", "]", ".", "headline", ".", "to_dict", "(", ")", "await", "self", ".", "_notify_update", "(", "name", ",", "'new_headline'", ",", "headline", ")" ]
Set the sticky headline for a service. Args: name (string): The short name of the service to set the headline for level (int): The level of the message (info, warning, error) message (string): The message contents
[ "Set", "the", "sticky", "headline", "for", "a", "service", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/service_manager.py#L229-L244
train
iotile/coretools
iotilegateway/iotilegateway/supervisor/service_manager.py
ServiceManager.send_heartbeat
async def send_heartbeat(self, short_name): """Post a heartbeat for a service. Args: short_name (string): The short name of the service to query """ if short_name not in self.services: raise ArgumentError("Unknown service name", short_name=short_name) self.services[short_name]['state'].heartbeat() await self._notify_update(short_name, 'heartbeat')
python
async def send_heartbeat(self, short_name): """Post a heartbeat for a service. Args: short_name (string): The short name of the service to query """ if short_name not in self.services: raise ArgumentError("Unknown service name", short_name=short_name) self.services[short_name]['state'].heartbeat() await self._notify_update(short_name, 'heartbeat')
[ "async", "def", "send_heartbeat", "(", "self", ",", "short_name", ")", ":", "if", "short_name", "not", "in", "self", ".", "services", ":", "raise", "ArgumentError", "(", "\"Unknown service name\"", ",", "short_name", "=", "short_name", ")", "self", ".", "services", "[", "short_name", "]", "[", "'state'", "]", ".", "heartbeat", "(", ")", "await", "self", ".", "_notify_update", "(", "short_name", ",", "'heartbeat'", ")" ]
Post a heartbeat for a service. Args: short_name (string): The short name of the service to post the heartbeat for
[ "Post", "a", "heartbeat", "for", "a", "service", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/service_manager.py#L246-L257
train
iotile/coretools
iotilegateway/iotilegateway/supervisor/service_manager.py
ServiceManager.set_agent
def set_agent(self, short_name, client_id): """Register a client id that handlers commands for a service. Args: short_name (str): The name of the service to set an agent for. client_id (str): A globally unique id for the client that should receive commands for this service. """ if short_name not in self.services: raise ArgumentError("Unknown service name", short_name=short_name) self.agents[short_name] = client_id
python
def set_agent(self, short_name, client_id): """Register a client id that handlers commands for a service. Args: short_name (str): The name of the service to set an agent for. client_id (str): A globally unique id for the client that should receive commands for this service. """ if short_name not in self.services: raise ArgumentError("Unknown service name", short_name=short_name) self.agents[short_name] = client_id
[ "def", "set_agent", "(", "self", ",", "short_name", ",", "client_id", ")", ":", "if", "short_name", "not", "in", "self", ".", "services", ":", "raise", "ArgumentError", "(", "\"Unknown service name\"", ",", "short_name", "=", "short_name", ")", "self", ".", "agents", "[", "short_name", "]", "=", "client_id" ]
Register a client id that handles commands for a service. Args: short_name (str): The name of the service to set an agent for. client_id (str): A globally unique id for the client that should receive commands for this service.
[ "Register", "a", "client", "id", "that", "handlers", "commands", "for", "a", "service", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/service_manager.py#L268-L281
train
iotile/coretools
iotilegateway/iotilegateway/supervisor/service_manager.py
ServiceManager.clear_agent
def clear_agent(self, short_name, client_id): """Remove a client id from being the command handler for a service. Args: short_name (str): The name of the service to set an agent for. client_id (str): A globally unique id for the client that should no longer receive commands for this service. """ if short_name not in self.services: raise ArgumentError("Unknown service name", short_name=short_name) if short_name not in self.agents: raise ArgumentError("No agent registered for service", short_name=short_name) if client_id != self.agents[short_name]: raise ArgumentError("Client was not registered for service", short_name=short_name, client_id=client_id, current_client=self.agents[short_name]) del self.agents[short_name]
python
def clear_agent(self, short_name, client_id): """Remove a client id from being the command handler for a service. Args: short_name (str): The name of the service to set an agent for. client_id (str): A globally unique id for the client that should no longer receive commands for this service. """ if short_name not in self.services: raise ArgumentError("Unknown service name", short_name=short_name) if short_name not in self.agents: raise ArgumentError("No agent registered for service", short_name=short_name) if client_id != self.agents[short_name]: raise ArgumentError("Client was not registered for service", short_name=short_name, client_id=client_id, current_client=self.agents[short_name]) del self.agents[short_name]
[ "def", "clear_agent", "(", "self", ",", "short_name", ",", "client_id", ")", ":", "if", "short_name", "not", "in", "self", ".", "services", ":", "raise", "ArgumentError", "(", "\"Unknown service name\"", ",", "short_name", "=", "short_name", ")", "if", "short_name", "not", "in", "self", ".", "agents", ":", "raise", "ArgumentError", "(", "\"No agent registered for service\"", ",", "short_name", "=", "short_name", ")", "if", "client_id", "!=", "self", ".", "agents", "[", "short_name", "]", ":", "raise", "ArgumentError", "(", "\"Client was not registered for service\"", ",", "short_name", "=", "short_name", ",", "client_id", "=", "client_id", ",", "current_client", "=", "self", ".", "agents", "[", "short_name", "]", ")", "del", "self", ".", "agents", "[", "short_name", "]" ]
Remove a client id from being the command handler for a service. Args: short_name (str): The name of the service to clear the agent for. client_id (str): A globally unique id for the client that should no longer receive commands for this service.
[ "Remove", "a", "client", "id", "from", "being", "the", "command", "handler", "for", "a", "service", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/service_manager.py#L283-L303
train
iotile/coretools
iotilegateway/iotilegateway/supervisor/service_manager.py
ServiceManager.send_rpc_command
async def send_rpc_command(self, short_name, rpc_id, payload, sender_client, timeout=1.0): """Send an RPC to a service using its registered agent. Args: short_name (str): The name of the service we would like to send and RPC to rpc_id (int): The rpc id that we would like to call payload (bytes): The raw bytes that we would like to send as an argument sender_client (str): The uuid of the sending client timeout (float): The maximum number of seconds before we signal a timeout of the RPC Returns: str: A unique id that can used to identify the notified response of this RPC. """ rpc_tag = str(uuid.uuid4()) self.rpc_results.declare(rpc_tag) if short_name in self.services and short_name in self.agents: agent_tag = self.agents[short_name] rpc_message = { 'rpc_id': rpc_id, 'payload': payload, 'response_uuid': rpc_tag } self.in_flight_rpcs[rpc_tag] = InFlightRPC(sender_client, short_name, monotonic(), timeout) await self._notify_update(short_name, 'rpc_command', rpc_message, directed_client=agent_tag) else: response = dict(result='service_not_found', response=b'') self.rpc_results.set(rpc_tag, response) return rpc_tag
python
async def send_rpc_command(self, short_name, rpc_id, payload, sender_client, timeout=1.0): """Send an RPC to a service using its registered agent. Args: short_name (str): The name of the service we would like to send and RPC to rpc_id (int): The rpc id that we would like to call payload (bytes): The raw bytes that we would like to send as an argument sender_client (str): The uuid of the sending client timeout (float): The maximum number of seconds before we signal a timeout of the RPC Returns: str: A unique id that can used to identify the notified response of this RPC. """ rpc_tag = str(uuid.uuid4()) self.rpc_results.declare(rpc_tag) if short_name in self.services and short_name in self.agents: agent_tag = self.agents[short_name] rpc_message = { 'rpc_id': rpc_id, 'payload': payload, 'response_uuid': rpc_tag } self.in_flight_rpcs[rpc_tag] = InFlightRPC(sender_client, short_name, monotonic(), timeout) await self._notify_update(short_name, 'rpc_command', rpc_message, directed_client=agent_tag) else: response = dict(result='service_not_found', response=b'') self.rpc_results.set(rpc_tag, response) return rpc_tag
[ "async", "def", "send_rpc_command", "(", "self", ",", "short_name", ",", "rpc_id", ",", "payload", ",", "sender_client", ",", "timeout", "=", "1.0", ")", ":", "rpc_tag", "=", "str", "(", "uuid", ".", "uuid4", "(", ")", ")", "self", ".", "rpc_results", ".", "declare", "(", "rpc_tag", ")", "if", "short_name", "in", "self", ".", "services", "and", "short_name", "in", "self", ".", "agents", ":", "agent_tag", "=", "self", ".", "agents", "[", "short_name", "]", "rpc_message", "=", "{", "'rpc_id'", ":", "rpc_id", ",", "'payload'", ":", "payload", ",", "'response_uuid'", ":", "rpc_tag", "}", "self", ".", "in_flight_rpcs", "[", "rpc_tag", "]", "=", "InFlightRPC", "(", "sender_client", ",", "short_name", ",", "monotonic", "(", ")", ",", "timeout", ")", "await", "self", ".", "_notify_update", "(", "short_name", ",", "'rpc_command'", ",", "rpc_message", ",", "directed_client", "=", "agent_tag", ")", "else", ":", "response", "=", "dict", "(", "result", "=", "'service_not_found'", ",", "response", "=", "b''", ")", "self", ".", "rpc_results", ".", "set", "(", "rpc_tag", ",", "response", ")", "return", "rpc_tag" ]
Send an RPC to a service using its registered agent. Args: short_name (str): The name of the service we would like to send an RPC to rpc_id (int): The rpc id that we would like to call payload (bytes): The raw bytes that we would like to send as an argument sender_client (str): The uuid of the sending client timeout (float): The maximum number of seconds before we signal a timeout of the RPC Returns: str: A unique id that can be used to identify the notified response of this RPC.
[ "Send", "an", "RPC", "to", "a", "service", "using", "its", "registered", "agent", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/service_manager.py#L305-L343
train
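Taken together, send_rpc_command() and the agent machinery above implement a tagged request/response flow: the caller receives an rpc_tag, the registered agent gets an 'rpc_command' notification carrying that tag as response_uuid, and it answers through send_rpc_response() with the same tag. A hedged sketch of the caller side follows; the manager and sender_client_id come from elsewhere, and how the response is later read back out of manager.rpc_results is not shown because that helper's interface is not part of these records.

async def forward_rpc(manager, sender_client_id):
    # Ask the supervisor to route RPC 0x8000 with a two-byte payload to the
    # service's registered agent; the returned tag identifies the response.
    rpc_tag = await manager.send_rpc_command("report_forwarder", 0x8000,
                                             b"\x00\x01", sender_client_id,
                                             timeout=1.0)
    return rpc_tag

# The agent, after handling the 'rpc_command' notification, eventually replies:
#     manager.send_rpc_response(rpc_tag, 'success', b"\x00\x00")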
iotile/coretools
iotilegateway/iotilegateway/supervisor/service_manager.py
ServiceManager.send_rpc_response
def send_rpc_response(self, rpc_tag, result, response): """Send a response to an RPC. Args: rpc_tag (str): The exact string given in a previous call to send_rpc_command result (str): The result of the operation. The possible values of response are: service_not_found, rpc_not_found, timeout, success, invalid_response, invalid_arguments, execution_exception response (bytes): The raw bytes that we should send back as a response. """ if rpc_tag not in self.in_flight_rpcs: raise ArgumentError("In flight RPC could not be found, it may have timed out", rpc_tag=rpc_tag) del self.in_flight_rpcs[rpc_tag] response_message = { 'response': response, 'result': result } try: self.rpc_results.set(rpc_tag, response_message) except KeyError: self._logger.warning("RPC response came but no one was waiting: response=%s", response)
python
def send_rpc_response(self, rpc_tag, result, response): """Send a response to an RPC. Args: rpc_tag (str): The exact string given in a previous call to send_rpc_command result (str): The result of the operation. The possible values of response are: service_not_found, rpc_not_found, timeout, success, invalid_response, invalid_arguments, execution_exception response (bytes): The raw bytes that we should send back as a response. """ if rpc_tag not in self.in_flight_rpcs: raise ArgumentError("In flight RPC could not be found, it may have timed out", rpc_tag=rpc_tag) del self.in_flight_rpcs[rpc_tag] response_message = { 'response': response, 'result': result } try: self.rpc_results.set(rpc_tag, response_message) except KeyError: self._logger.warning("RPC response came but no one was waiting: response=%s", response)
[ "def", "send_rpc_response", "(", "self", ",", "rpc_tag", ",", "result", ",", "response", ")", ":", "if", "rpc_tag", "not", "in", "self", ".", "in_flight_rpcs", ":", "raise", "ArgumentError", "(", "\"In flight RPC could not be found, it may have timed out\"", ",", "rpc_tag", "=", "rpc_tag", ")", "del", "self", ".", "in_flight_rpcs", "[", "rpc_tag", "]", "response_message", "=", "{", "'response'", ":", "response", ",", "'result'", ":", "result", "}", "try", ":", "self", ".", "rpc_results", ".", "set", "(", "rpc_tag", ",", "response_message", ")", "except", "KeyError", ":", "self", ".", "_logger", ".", "warning", "(", "\"RPC response came but no one was waiting: response=%s\"", ",", "response", ")" ]
Send a response to an RPC. Args: rpc_tag (str): The exact string given in a previous call to send_rpc_command result (str): The result of the operation. The possible values of result are: service_not_found, rpc_not_found, timeout, success, invalid_response, invalid_arguments, execution_exception response (bytes): The raw bytes that we should send back as a response.
[ "Send", "a", "response", "to", "an", "RPC", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/service_manager.py#L345-L369
train
iotile/coretools
iotilegateway/iotilegateway/supervisor/service_manager.py
ServiceManager.periodic_service_rpcs
def periodic_service_rpcs(self): """Check if any RPC has expired and remove it from the in flight list. This function should be called periodically to expire any RPCs that never complete. """ to_remove = [] now = monotonic() for rpc_tag, rpc in self.in_flight_rpcs.items(): expiry = rpc.sent_timestamp + rpc.timeout if now > expiry: to_remove.append(rpc_tag) for tag in to_remove: del self.in_flight_rpcs[tag]
python
def periodic_service_rpcs(self): """Check if any RPC has expired and remove it from the in flight list. This function should be called periodically to expire any RPCs that never complete. """ to_remove = [] now = monotonic() for rpc_tag, rpc in self.in_flight_rpcs.items(): expiry = rpc.sent_timestamp + rpc.timeout if now > expiry: to_remove.append(rpc_tag) for tag in to_remove: del self.in_flight_rpcs[tag]
[ "def", "periodic_service_rpcs", "(", "self", ")", ":", "to_remove", "=", "[", "]", "now", "=", "monotonic", "(", ")", "for", "rpc_tag", ",", "rpc", "in", "self", ".", "in_flight_rpcs", ".", "items", "(", ")", ":", "expiry", "=", "rpc", ".", "sent_timestamp", "+", "rpc", ".", "timeout", "if", "now", ">", "expiry", ":", "to_remove", ".", "append", "(", "rpc_tag", ")", "for", "tag", "in", "to_remove", ":", "del", "self", ".", "in_flight_rpcs", "[", "tag", "]" ]
Check if any RPC has expired and remove it from the in flight list. This function should be called periodically to expire any RPCs that never complete.
[ "Check", "if", "any", "RPC", "has", "expired", "and", "remove", "it", "from", "the", "in", "flight", "list", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/service_manager.py#L371-L386
train
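periodic_service_rpcs() uses a two-pass sweep so the in-flight dict is not mutated while it is being iterated: collect the expired tags first, then delete them. The same pattern in a standalone, runnable form (the tuple layout of the in-flight entries here is simplified from the InFlightRPC records used above):

from time import monotonic

def sweep_expired(in_flight):
    """in_flight maps tag -> (sent_timestamp, timeout); returns the expired tags."""
    now = monotonic()
    expired = [tag for tag, (sent, timeout) in in_flight.items()
               if now > sent + timeout]
    for tag in expired:
        del in_flight[tag]
    return expired

pending = {"old": (monotonic() - 5.0, 1.0), "fresh": (monotonic(), 10.0)}
print(sweep_expired(pending))   # ['old']; 'fresh' stays in flight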
iotile/coretools
iotilecore/iotile/core/utilities/paths.py
settings_directory
def settings_directory(): """Find a per user settings directory that is appropriate for each type of system that we are installed on. """ system = platform.system() basedir = None if system == 'Windows': if 'APPDATA' in os.environ: basedir = os.environ['APPDATA'] # If we're not on Windows assume we're on some # kind of posix system or Mac, where the appropriate place would be # ~/.config if basedir is None: basedir = os.path.expanduser('~') basedir = os.path.join(basedir, '.config') settings_dir = os.path.abspath(os.path.join(basedir, 'IOTile-Core')) return settings_dir
python
def settings_directory(): """Find a per user settings directory that is appropriate for each type of system that we are installed on. """ system = platform.system() basedir = None if system == 'Windows': if 'APPDATA' in os.environ: basedir = os.environ['APPDATA'] # If we're not on Windows assume we're on some # kind of posix system or Mac, where the appropriate place would be # ~/.config if basedir is None: basedir = os.path.expanduser('~') basedir = os.path.join(basedir, '.config') settings_dir = os.path.abspath(os.path.join(basedir, 'IOTile-Core')) return settings_dir
[ "def", "settings_directory", "(", ")", ":", "system", "=", "platform", ".", "system", "(", ")", "basedir", "=", "None", "if", "system", "==", "'Windows'", ":", "if", "'APPDATA'", "in", "os", ".", "environ", ":", "basedir", "=", "os", ".", "environ", "[", "'APPDATA'", "]", "# If we're not on Windows assume we're on some", "# kind of posix system or Mac, where the appropriate place would be", "# ~/.config", "if", "basedir", "is", "None", ":", "basedir", "=", "os", ".", "path", ".", "expanduser", "(", "'~'", ")", "basedir", "=", "os", ".", "path", ".", "join", "(", "basedir", ",", "'.config'", ")", "settings_dir", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "basedir", ",", "'IOTile-Core'", ")", ")", "return", "settings_dir" ]
Find a per user settings directory that is appropriate for each type of system that we are installed on.
[ "Find", "a", "per", "user", "settings", "directory", "that", "is", "appropriate", "for", "each", "type", "of", "system", "that", "we", "are", "installed", "on", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/paths.py#L14-L35
train
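A small usage sketch for settings_directory(); the function only computes the path (APPDATA on Windows, ~/.config elsewhere), so creating the folder is left to the caller, as shown here.

import os
from iotile.core.utilities.paths import settings_directory

cfg_dir = settings_directory()
os.makedirs(cfg_dir, exist_ok=True)   # ensure the per-user folder exists
print(cfg_dir)                        # e.g. ~/.config/IOTile-Core on POSIX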
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/gs.py
generate
def generate(env): """Add Builders and construction variables for Ghostscript to an Environment.""" global GhostscriptAction # The following try-except block enables us to use the Tool # in standalone mode (without the accompanying pdf.py), # whenever we need an explicit call of gs via the Gs() # Builder ... try: if GhostscriptAction is None: GhostscriptAction = SCons.Action.Action('$GSCOM', '$GSCOMSTR') from SCons.Tool import pdf pdf.generate(env) bld = env['BUILDERS']['PDF'] bld.add_action('.ps', GhostscriptAction) except ImportError as e: pass gsbuilder = SCons.Builder.Builder(action = SCons.Action.Action('$GSCOM', '$GSCOMSTR')) env['BUILDERS']['Gs'] = gsbuilder env['GS'] = gs env['GSFLAGS'] = SCons.Util.CLVar('-dNOPAUSE -dBATCH -sDEVICE=pdfwrite') env['GSCOM'] = '$GS $GSFLAGS -sOutputFile=$TARGET $SOURCES'
python
def generate(env): """Add Builders and construction variables for Ghostscript to an Environment.""" global GhostscriptAction # The following try-except block enables us to use the Tool # in standalone mode (without the accompanying pdf.py), # whenever we need an explicit call of gs via the Gs() # Builder ... try: if GhostscriptAction is None: GhostscriptAction = SCons.Action.Action('$GSCOM', '$GSCOMSTR') from SCons.Tool import pdf pdf.generate(env) bld = env['BUILDERS']['PDF'] bld.add_action('.ps', GhostscriptAction) except ImportError as e: pass gsbuilder = SCons.Builder.Builder(action = SCons.Action.Action('$GSCOM', '$GSCOMSTR')) env['BUILDERS']['Gs'] = gsbuilder env['GS'] = gs env['GSFLAGS'] = SCons.Util.CLVar('-dNOPAUSE -dBATCH -sDEVICE=pdfwrite') env['GSCOM'] = '$GS $GSFLAGS -sOutputFile=$TARGET $SOURCES'
[ "def", "generate", "(", "env", ")", ":", "global", "GhostscriptAction", "# The following try-except block enables us to use the Tool", "# in standalone mode (without the accompanying pdf.py),", "# whenever we need an explicit call of gs via the Gs()", "# Builder ...", "try", ":", "if", "GhostscriptAction", "is", "None", ":", "GhostscriptAction", "=", "SCons", ".", "Action", ".", "Action", "(", "'$GSCOM'", ",", "'$GSCOMSTR'", ")", "from", "SCons", ".", "Tool", "import", "pdf", "pdf", ".", "generate", "(", "env", ")", "bld", "=", "env", "[", "'BUILDERS'", "]", "[", "'PDF'", "]", "bld", ".", "add_action", "(", "'.ps'", ",", "GhostscriptAction", ")", "except", "ImportError", "as", "e", ":", "pass", "gsbuilder", "=", "SCons", ".", "Builder", ".", "Builder", "(", "action", "=", "SCons", ".", "Action", ".", "Action", "(", "'$GSCOM'", ",", "'$GSCOMSTR'", ")", ")", "env", "[", "'BUILDERS'", "]", "[", "'Gs'", "]", "=", "gsbuilder", "env", "[", "'GS'", "]", "=", "gs", "env", "[", "'GSFLAGS'", "]", "=", "SCons", ".", "Util", ".", "CLVar", "(", "'-dNOPAUSE -dBATCH -sDEVICE=pdfwrite'", ")", "env", "[", "'GSCOM'", "]", "=", "'$GS $GSFLAGS -sOutputFile=$TARGET $SOURCES'" ]
Add Builders and construction variables for Ghostscript to an Environment.
[ "Add", "Builders", "and", "construction", "variables", "for", "Ghostscript", "to", "an", "Environment", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/gs.py#L53-L78
train
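Once generate() above has run (normally because the tool is loaded into an Environment), the Gs builder invokes $GSCOM. A hedged SConstruct fragment showing how that builder might be used; the file names are made up, and this assumes Ghostscript is installed and discoverable.

# SConstruct (sketch)
env = Environment(tools=['default', 'gs'])

# Runs: $GS $GSFLAGS -sOutputFile=report.pdf report.ps
env.Gs(target='report.pdf', source='report.ps')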
iotile/coretools
iotilebuild/iotile/build/utilities/bundled_data.py
resource_path
def resource_path(relative_path=None, expect=None): """Return the absolute path to a resource in iotile-build. This method finds the path to the `config` folder inside iotile-build, appends `relative_path` to it and then checks to make sure the desired file or directory exists. You can specify expect=(None, 'file', or 'folder') for what you expect to find at the given path. Args: relative_path (str): The relative_path from the config folder to the resource in question. This path can be specified using / characters on all operating systems since it will be normalized before usage. If None is passed, the based config folder will be returned. expect (str): What the path should resolve to, which is checked before returning, raising a DataError if the check fails. You can pass None for no checking, file for checking `os.path.isfile`, or folder for checking `os.path.isdir`. Default: None Returns: str: The normalized absolute path to the resource. """ if expect not in (None, 'file', 'folder'): raise ArgumentError("Invalid expect parameter, must be None, 'file' or 'folder'", expect=expect) this_dir = os.path.dirname(__file__) _resource_path = os.path.join(this_dir, '..', 'config') if relative_path is not None: path = os.path.normpath(relative_path) _resource_path = os.path.join(_resource_path, path) if expect == 'file' and not os.path.isfile(_resource_path): raise DataError("Expected resource %s to be a file and it wasn't" % _resource_path) elif expect == 'folder' and not os.path.isdir(_resource_path): raise DataError("Expected resource %s to be a folder and it wasn't" % _resource_path) return os.path.abspath(_resource_path)
python
def resource_path(relative_path=None, expect=None): """Return the absolute path to a resource in iotile-build. This method finds the path to the `config` folder inside iotile-build, appends `relative_path` to it and then checks to make sure the desired file or directory exists. You can specify expect=(None, 'file', or 'folder') for what you expect to find at the given path. Args: relative_path (str): The relative_path from the config folder to the resource in question. This path can be specified using / characters on all operating systems since it will be normalized before usage. If None is passed, the based config folder will be returned. expect (str): What the path should resolve to, which is checked before returning, raising a DataError if the check fails. You can pass None for no checking, file for checking `os.path.isfile`, or folder for checking `os.path.isdir`. Default: None Returns: str: The normalized absolute path to the resource. """ if expect not in (None, 'file', 'folder'): raise ArgumentError("Invalid expect parameter, must be None, 'file' or 'folder'", expect=expect) this_dir = os.path.dirname(__file__) _resource_path = os.path.join(this_dir, '..', 'config') if relative_path is not None: path = os.path.normpath(relative_path) _resource_path = os.path.join(_resource_path, path) if expect == 'file' and not os.path.isfile(_resource_path): raise DataError("Expected resource %s to be a file and it wasn't" % _resource_path) elif expect == 'folder' and not os.path.isdir(_resource_path): raise DataError("Expected resource %s to be a folder and it wasn't" % _resource_path) return os.path.abspath(_resource_path)
[ "def", "resource_path", "(", "relative_path", "=", "None", ",", "expect", "=", "None", ")", ":", "if", "expect", "not", "in", "(", "None", ",", "'file'", ",", "'folder'", ")", ":", "raise", "ArgumentError", "(", "\"Invalid expect parameter, must be None, 'file' or 'folder'\"", ",", "expect", "=", "expect", ")", "this_dir", "=", "os", ".", "path", ".", "dirname", "(", "__file__", ")", "_resource_path", "=", "os", ".", "path", ".", "join", "(", "this_dir", ",", "'..'", ",", "'config'", ")", "if", "relative_path", "is", "not", "None", ":", "path", "=", "os", ".", "path", ".", "normpath", "(", "relative_path", ")", "_resource_path", "=", "os", ".", "path", ".", "join", "(", "_resource_path", ",", "path", ")", "if", "expect", "==", "'file'", "and", "not", "os", ".", "path", ".", "isfile", "(", "_resource_path", ")", ":", "raise", "DataError", "(", "\"Expected resource %s to be a file and it wasn't\"", "%", "_resource_path", ")", "elif", "expect", "==", "'folder'", "and", "not", "os", ".", "path", ".", "isdir", "(", "_resource_path", ")", ":", "raise", "DataError", "(", "\"Expected resource %s to be a folder and it wasn't\"", "%", "_resource_path", ")", "return", "os", ".", "path", ".", "abspath", "(", "_resource_path", ")" ]
Return the absolute path to a resource in iotile-build. This method finds the path to the `config` folder inside iotile-build, appends `relative_path` to it and then checks to make sure the desired file or directory exists. You can specify expect=(None, 'file', or 'folder') for what you expect to find at the given path. Args: relative_path (str): The relative_path from the config folder to the resource in question. This path can be specified using / characters on all operating systems since it will be normalized before usage. If None is passed, the base config folder will be returned. expect (str): What the path should resolve to, which is checked before returning, raising a DataError if the check fails. You can pass None for no checking, file for checking `os.path.isfile`, or folder for checking `os.path.isdir`. Default: None Returns: str: The normalized absolute path to the resource.
[ "Return", "the", "absolute", "path", "to", "a", "resource", "in", "iotile", "-", "build", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/utilities/bundled_data.py#L22-L65
train
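A short usage sketch for resource_path(); the 'templates' folder name below is an illustrative assumption rather than a directory that necessarily ships inside iotile-build's config folder.

from iotile.build.utilities.bundled_data import resource_path

config_root = resource_path()                              # the bundled config folder
templates = resource_path('templates', expect='folder')    # raises DataError if absent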
iotile/coretools
iotilecore/iotile/core/utilities/packed.py
unpack
def unpack(fmt, arg): """A shim around struct.unpack to allow it to work on python 2.7.3.""" if isinstance(arg, bytearray) and not (sys.version_info >= (2, 7, 5)): return struct.unpack(fmt, str(arg)) return struct.unpack(fmt, arg)
python
def unpack(fmt, arg): """A shim around struct.unpack to allow it to work on python 2.7.3.""" if isinstance(arg, bytearray) and not (sys.version_info >= (2, 7, 5)): return struct.unpack(fmt, str(arg)) return struct.unpack(fmt, arg)
[ "def", "unpack", "(", "fmt", ",", "arg", ")", ":", "if", "isinstance", "(", "arg", ",", "bytearray", ")", "and", "not", "(", "sys", ".", "version_info", ">=", "(", "2", ",", "7", ",", "5", ")", ")", ":", "return", "struct", ".", "unpack", "(", "fmt", ",", "str", "(", "arg", ")", ")", "return", "struct", ".", "unpack", "(", "fmt", ",", "arg", ")" ]
A shim around struct.unpack to allow it to work on python 2.7.3.
[ "A", "shim", "around", "struct", ".", "unpack", "to", "allow", "it", "to", "work", "on", "python", "2", ".", "7", ".", "3", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/packed.py#L5-L11
train
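A quick usage sketch of the unpack shim above, assuming iotile-core is installed; the byte values and format string are illustrative.

```python
# Assumes iotile-core is installed; the module path is taken from the record above.
from iotile.core.utilities.packed import unpack

# "<HB" = little-endian uint16 followed by uint8. The shim accepts a bytearray
# even on Python 2.7.3, where struct.unpack could not.
value, flag = unpack("<HB", bytearray(b"\x34\x12\x01"))
print(value, flag)  # 4660 1
```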
iotile/coretools
iotileemulate/iotile/emulate/reference/controller_features/controller_system.py
ControllerSubsystemBase.initialize
async def initialize(self, timeout=2.0): """Launch any background tasks associated with this subsystem. This method will synchronously await self.initialized() which makes sure that the background tasks start up correctly. """ if self.initialized.is_set(): raise InternalError("initialize called when already initialized") self._emulator.add_task(8, self._reset_vector()) await asyncio.wait_for(self.initialized.wait(), timeout=timeout)
python
async def initialize(self, timeout=2.0): """Launch any background tasks associated with this subsystem. This method will synchronously await self.initialized() which makes sure that the background tasks start up correctly. """ if self.initialized.is_set(): raise InternalError("initialize called when already initialized") self._emulator.add_task(8, self._reset_vector()) await asyncio.wait_for(self.initialized.wait(), timeout=timeout)
[ "async", "def", "initialize", "(", "self", ",", "timeout", "=", "2.0", ")", ":", "if", "self", ".", "initialized", ".", "is_set", "(", ")", ":", "raise", "InternalError", "(", "\"initialize called when already initialized\"", ")", "self", ".", "_emulator", ".", "add_task", "(", "8", ",", "self", ".", "_reset_vector", "(", ")", ")", "await", "asyncio", ".", "wait_for", "(", "self", ".", "initialized", ".", "wait", "(", ")", ",", "timeout", "=", "timeout", ")" ]
Launch any background tasks associated with this subsystem. This method will synchronously await self.initialized() which makes sure that the background tasks start up correctly.
[ "Launch", "any", "background", "tasks", "associated", "with", "this", "subsystem", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/reference/controller_features/controller_system.py#L45-L57
train
iotile/coretools
iotilecore/iotile/core/dev/registry.py
_check_registry_type
def _check_registry_type(folder=None): """Check if the user has placed a registry_type.txt file to choose the registry type If a default registry type file is found, the DefaultBackingType and DefaultBackingFile class parameters in ComponentRegistry are updated accordingly. Args: folder (string): The folder that we should check for a default registry type """ folder = _registry_folder(folder) default_file = os.path.join(folder, 'registry_type.txt') try: with open(default_file, "r") as infile: data = infile.read() data = data.strip() ComponentRegistry.SetBackingStore(data) except IOError: pass
python
def _check_registry_type(folder=None): """Check if the user has placed a registry_type.txt file to choose the registry type If a default registry type file is found, the DefaultBackingType and DefaultBackingFile class parameters in ComponentRegistry are updated accordingly. Args: folder (string): The folder that we should check for a default registry type """ folder = _registry_folder(folder) default_file = os.path.join(folder, 'registry_type.txt') try: with open(default_file, "r") as infile: data = infile.read() data = data.strip() ComponentRegistry.SetBackingStore(data) except IOError: pass
[ "def", "_check_registry_type", "(", "folder", "=", "None", ")", ":", "folder", "=", "_registry_folder", "(", "folder", ")", "default_file", "=", "os", ".", "path", ".", "join", "(", "folder", ",", "'registry_type.txt'", ")", "try", ":", "with", "open", "(", "default_file", ",", "\"r\"", ")", "as", "infile", ":", "data", "=", "infile", ".", "read", "(", ")", "data", "=", "data", ".", "strip", "(", ")", "ComponentRegistry", ".", "SetBackingStore", "(", "data", ")", "except", "IOError", ":", "pass" ]
Check if the user has placed a registry_type.txt file to choose the registry type. If a default registry type file is found, the DefaultBackingType and DefaultBackingFile class parameters in ComponentRegistry are updated accordingly. Args: folder (string): The folder that we should check for a default registry type.
[ "Check", "if", "the", "user", "has", "placed", "a", "registry_type", ".", "txt", "file", "to", "choose", "the", "registry", "type" ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/dev/registry.py#L582-L603
train
iotile/coretools
iotilecore/iotile/core/dev/registry.py
_ensure_package_loaded
def _ensure_package_loaded(path, component): """Ensure that the given module is loaded as a submodule. Returns: str: The name that the module should be imported as. """ logger = logging.getLogger(__name__) packages = component.find_products('support_package') if len(packages) == 0: return None elif len(packages) > 1: raise ExternalError("Component had multiple products declared as 'support_package", products=packages) if len(path) > 2 and ':' in path[2:]: # Don't flag windows C: type paths path, _, _ = path.rpartition(":") package_base = packages[0] relative_path = os.path.normpath(os.path.relpath(path, start=package_base)) if relative_path.startswith('..'): raise ExternalError("Component had python product output of support_package", package=package_base, product=path, relative_path=relative_path) if not relative_path.endswith('.py'): raise ExternalError("Python product did not end with .py", path=path) relative_path = relative_path[:-3] if os.pathsep in relative_path: raise ExternalError("Python support wheels with multiple subpackages not yet supported", relative_path=relative_path) support_distro = component.support_distribution if support_distro not in sys.modules: logger.debug("Creating dynamic support wheel package: %s", support_distro) file, path, desc = imp.find_module(os.path.basename(package_base), [os.path.dirname(package_base)]) imp.load_module(support_distro, file, path, desc) return "{}.{}".format(support_distro, relative_path)
python
def _ensure_package_loaded(path, component): """Ensure that the given module is loaded as a submodule. Returns: str: The name that the module should be imported as. """ logger = logging.getLogger(__name__) packages = component.find_products('support_package') if len(packages) == 0: return None elif len(packages) > 1: raise ExternalError("Component had multiple products declared as 'support_package", products=packages) if len(path) > 2 and ':' in path[2:]: # Don't flag windows C: type paths path, _, _ = path.rpartition(":") package_base = packages[0] relative_path = os.path.normpath(os.path.relpath(path, start=package_base)) if relative_path.startswith('..'): raise ExternalError("Component had python product output of support_package", package=package_base, product=path, relative_path=relative_path) if not relative_path.endswith('.py'): raise ExternalError("Python product did not end with .py", path=path) relative_path = relative_path[:-3] if os.pathsep in relative_path: raise ExternalError("Python support wheels with multiple subpackages not yet supported", relative_path=relative_path) support_distro = component.support_distribution if support_distro not in sys.modules: logger.debug("Creating dynamic support wheel package: %s", support_distro) file, path, desc = imp.find_module(os.path.basename(package_base), [os.path.dirname(package_base)]) imp.load_module(support_distro, file, path, desc) return "{}.{}".format(support_distro, relative_path)
[ "def", "_ensure_package_loaded", "(", "path", ",", "component", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", "__name__", ")", "packages", "=", "component", ".", "find_products", "(", "'support_package'", ")", "if", "len", "(", "packages", ")", "==", "0", ":", "return", "None", "elif", "len", "(", "packages", ")", ">", "1", ":", "raise", "ExternalError", "(", "\"Component had multiple products declared as 'support_package\"", ",", "products", "=", "packages", ")", "if", "len", "(", "path", ")", ">", "2", "and", "':'", "in", "path", "[", "2", ":", "]", ":", "# Don't flag windows C: type paths", "path", ",", "_", ",", "_", "=", "path", ".", "rpartition", "(", "\":\"", ")", "package_base", "=", "packages", "[", "0", "]", "relative_path", "=", "os", ".", "path", ".", "normpath", "(", "os", ".", "path", ".", "relpath", "(", "path", ",", "start", "=", "package_base", ")", ")", "if", "relative_path", ".", "startswith", "(", "'..'", ")", ":", "raise", "ExternalError", "(", "\"Component had python product output of support_package\"", ",", "package", "=", "package_base", ",", "product", "=", "path", ",", "relative_path", "=", "relative_path", ")", "if", "not", "relative_path", ".", "endswith", "(", "'.py'", ")", ":", "raise", "ExternalError", "(", "\"Python product did not end with .py\"", ",", "path", "=", "path", ")", "relative_path", "=", "relative_path", "[", ":", "-", "3", "]", "if", "os", ".", "pathsep", "in", "relative_path", ":", "raise", "ExternalError", "(", "\"Python support wheels with multiple subpackages not yet supported\"", ",", "relative_path", "=", "relative_path", ")", "support_distro", "=", "component", ".", "support_distribution", "if", "support_distro", "not", "in", "sys", ".", "modules", ":", "logger", ".", "debug", "(", "\"Creating dynamic support wheel package: %s\"", ",", "support_distro", ")", "file", ",", "path", ",", "desc", "=", "imp", ".", "find_module", "(", "os", ".", "path", ".", "basename", "(", "package_base", ")", ",", "[", "os", ".", "path", ".", "dirname", "(", "package_base", ")", "]", ")", "imp", ".", "load_module", "(", "support_distro", ",", "file", ",", "path", ",", "desc", ")", "return", "\"{}.{}\"", ".", "format", "(", "support_distro", ",", "relative_path", ")" ]
Ensure that the given module is loaded as a submodule. Returns: str: The name that the module should be imported as.
[ "Ensure", "that", "the", "given", "module", "is", "loaded", "as", "a", "submodule", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/dev/registry.py#L606-L644
train
iotile/coretools
iotilecore/iotile/core/dev/registry.py
_try_load_module
def _try_load_module(path, import_name=None): """Try to programmatically load a python module by path. Path should point to a python file (optionally without the .py) at the end. If it ends in a :<name> then name must point to an object defined in the module, which is returned instead of the module itself. Args: path (str): The path of the module to load import_name (str): The explicity name that the module should be given. If not specified, this defaults to being the basename() of path. However, if the module is inside of a support package, you should pass the correct name so that relative imports proceed correctly. Returns: str, object: The basename of the module loaded and the requested object. """ logger = logging.getLogger(__name__) obj_name = None if len(path) > 2 and ':' in path[2:]: # Don't flag windows C: type paths path, _, obj_name = path.rpartition(":") folder, basename = os.path.split(path) if folder == '': folder = './' if basename == '' or not os.path.exists(path): raise ArgumentError("Could not find python module to load extension", path=path) basename, ext = os.path.splitext(basename) if ext not in (".py", ".pyc", ""): raise ArgumentError("Attempted to load module is not a python package or module (.py or .pyc)", path=path) if import_name is None: import_name = basename else: logger.debug("Importing module as subpackage: %s", import_name) try: fileobj = None fileobj, pathname, description = imp.find_module(basename, [folder]) # Don't load modules twice if basename in sys.modules: mod = sys.modules[basename] else: mod = imp.load_module(import_name, fileobj, pathname, description) if obj_name is not None: if obj_name not in mod.__dict__: raise ArgumentError("Cannot find named object '%s' inside module '%s'" % (obj_name, basename), path=path) mod = mod.__dict__[obj_name] return basename, mod finally: if fileobj is not None: fileobj.close()
python
def _try_load_module(path, import_name=None): """Try to programmatically load a python module by path. Path should point to a python file (optionally without the .py) at the end. If it ends in a :<name> then name must point to an object defined in the module, which is returned instead of the module itself. Args: path (str): The path of the module to load import_name (str): The explicity name that the module should be given. If not specified, this defaults to being the basename() of path. However, if the module is inside of a support package, you should pass the correct name so that relative imports proceed correctly. Returns: str, object: The basename of the module loaded and the requested object. """ logger = logging.getLogger(__name__) obj_name = None if len(path) > 2 and ':' in path[2:]: # Don't flag windows C: type paths path, _, obj_name = path.rpartition(":") folder, basename = os.path.split(path) if folder == '': folder = './' if basename == '' or not os.path.exists(path): raise ArgumentError("Could not find python module to load extension", path=path) basename, ext = os.path.splitext(basename) if ext not in (".py", ".pyc", ""): raise ArgumentError("Attempted to load module is not a python package or module (.py or .pyc)", path=path) if import_name is None: import_name = basename else: logger.debug("Importing module as subpackage: %s", import_name) try: fileobj = None fileobj, pathname, description = imp.find_module(basename, [folder]) # Don't load modules twice if basename in sys.modules: mod = sys.modules[basename] else: mod = imp.load_module(import_name, fileobj, pathname, description) if obj_name is not None: if obj_name not in mod.__dict__: raise ArgumentError("Cannot find named object '%s' inside module '%s'" % (obj_name, basename), path=path) mod = mod.__dict__[obj_name] return basename, mod finally: if fileobj is not None: fileobj.close()
[ "def", "_try_load_module", "(", "path", ",", "import_name", "=", "None", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", "__name__", ")", "obj_name", "=", "None", "if", "len", "(", "path", ")", ">", "2", "and", "':'", "in", "path", "[", "2", ":", "]", ":", "# Don't flag windows C: type paths", "path", ",", "_", ",", "obj_name", "=", "path", ".", "rpartition", "(", "\":\"", ")", "folder", ",", "basename", "=", "os", ".", "path", ".", "split", "(", "path", ")", "if", "folder", "==", "''", ":", "folder", "=", "'./'", "if", "basename", "==", "''", "or", "not", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "raise", "ArgumentError", "(", "\"Could not find python module to load extension\"", ",", "path", "=", "path", ")", "basename", ",", "ext", "=", "os", ".", "path", ".", "splitext", "(", "basename", ")", "if", "ext", "not", "in", "(", "\".py\"", ",", "\".pyc\"", ",", "\"\"", ")", ":", "raise", "ArgumentError", "(", "\"Attempted to load module is not a python package or module (.py or .pyc)\"", ",", "path", "=", "path", ")", "if", "import_name", "is", "None", ":", "import_name", "=", "basename", "else", ":", "logger", ".", "debug", "(", "\"Importing module as subpackage: %s\"", ",", "import_name", ")", "try", ":", "fileobj", "=", "None", "fileobj", ",", "pathname", ",", "description", "=", "imp", ".", "find_module", "(", "basename", ",", "[", "folder", "]", ")", "# Don't load modules twice", "if", "basename", "in", "sys", ".", "modules", ":", "mod", "=", "sys", ".", "modules", "[", "basename", "]", "else", ":", "mod", "=", "imp", ".", "load_module", "(", "import_name", ",", "fileobj", ",", "pathname", ",", "description", ")", "if", "obj_name", "is", "not", "None", ":", "if", "obj_name", "not", "in", "mod", ".", "__dict__", ":", "raise", "ArgumentError", "(", "\"Cannot find named object '%s' inside module '%s'\"", "%", "(", "obj_name", ",", "basename", ")", ",", "path", "=", "path", ")", "mod", "=", "mod", ".", "__dict__", "[", "obj_name", "]", "return", "basename", ",", "mod", "finally", ":", "if", "fileobj", "is", "not", "None", ":", "fileobj", ".", "close", "(", ")" ]
Try to programmatically load a python module by path. Path should point to a python file (optionally without the .py) at the end. If it ends in a :<name> then name must point to an object defined in the module, which is returned instead of the module itself. Args: path (str): The path of the module to load import_name (str): The explicit name that the module should be given. If not specified, this defaults to being the basename() of path. However, if the module is inside of a support package, you should pass the correct name so that relative imports proceed correctly. Returns: str, object: The basename of the module loaded and the requested object.
[ "Try", "to", "programmatically", "load", "a", "python", "module", "by", "path", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/dev/registry.py#L647-L707
train
iotile/coretools
iotilecore/iotile/core/dev/registry.py
ComponentRegistry.frozen
def frozen(self): """Return whether we have a cached list of all installed entry_points.""" frozen_path = os.path.join(_registry_folder(), 'frozen_extensions.json') return os.path.isfile(frozen_path)
python
def frozen(self): """Return whether we have a cached list of all installed entry_points.""" frozen_path = os.path.join(_registry_folder(), 'frozen_extensions.json') return os.path.isfile(frozen_path)
[ "def", "frozen", "(", "self", ")", ":", "frozen_path", "=", "os", ".", "path", ".", "join", "(", "_registry_folder", "(", ")", ",", "'frozen_extensions.json'", ")", "return", "os", ".", "path", ".", "isfile", "(", "frozen_path", ")" ]
Return whether we have a cached list of all installed entry_points.
[ "Return", "whether", "we", "have", "a", "cached", "list", "of", "all", "installed", "entry_points", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/dev/registry.py#L45-L49
train
iotile/coretools
iotilecore/iotile/core/dev/registry.py
ComponentRegistry.kvstore
def kvstore(self): """Lazily load the underlying key-value store backing this registry.""" if self._kvstore is None: self._kvstore = self.BackingType(self.BackingFileName, respect_venv=True) return self._kvstore
python
def kvstore(self): """Lazily load the underlying key-value store backing this registry.""" if self._kvstore is None: self._kvstore = self.BackingType(self.BackingFileName, respect_venv=True) return self._kvstore
[ "def", "kvstore", "(", "self", ")", ":", "if", "self", ".", "_kvstore", "is", "None", ":", "self", ".", "_kvstore", "=", "self", ".", "BackingType", "(", "self", ".", "BackingFileName", ",", "respect_venv", "=", "True", ")", "return", "self", ".", "_kvstore" ]
Lazily load the underlying key-value store backing this registry.
[ "Lazily", "load", "the", "underlying", "key", "-", "value", "store", "backing", "this", "registry", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/dev/registry.py#L52-L58
train
iotile/coretools
iotilecore/iotile/core/dev/registry.py
ComponentRegistry.plugins
def plugins(self): """Lazily load iotile plugins only on demand. This is a slow operation on computers with a slow FS and is rarely accessed information, so only compute it when it is actually asked for. """ if self._plugins is None: self._plugins = {} for _, plugin in self.load_extensions('iotile.plugin'): links = plugin() for name, value in links: self._plugins[name] = value return self._plugins
python
def plugins(self): """Lazily load iotile plugins only on demand. This is a slow operation on computers with a slow FS and is rarely accessed information, so only compute it when it is actually asked for. """ if self._plugins is None: self._plugins = {} for _, plugin in self.load_extensions('iotile.plugin'): links = plugin() for name, value in links: self._plugins[name] = value return self._plugins
[ "def", "plugins", "(", "self", ")", ":", "if", "self", ".", "_plugins", "is", "None", ":", "self", ".", "_plugins", "=", "{", "}", "for", "_", ",", "plugin", "in", "self", ".", "load_extensions", "(", "'iotile.plugin'", ")", ":", "links", "=", "plugin", "(", ")", "for", "name", ",", "value", "in", "links", ":", "self", ".", "_plugins", "[", "name", "]", "=", "value", "return", "self", ".", "_plugins" ]
Lazily load iotile plugins only on demand. This is a slow operation on computers with a slow FS and is rarely accessed information, so only compute it when it is actually asked for.
[ "Lazily", "load", "iotile", "plugins", "only", "on", "demand", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/dev/registry.py#L61-L77
train
iotile/coretools
iotilecore/iotile/core/dev/registry.py
ComponentRegistry.load_extensions
def load_extensions(self, group, name_filter=None, comp_filter=None, class_filter=None, product_name=None, unique=False): """Dynamically load and return extension objects of a given type. This is the centralized way for all parts of CoreTools to allow plugin behavior. Whenever a plugin is needed, this method should be called to load it. Examples of plugins are proxy modules, emulated tiles, iotile-build autobuilders, etc. Each kind of plugin will typically be a subclass of a certain base class and can be provided one of three ways: 1. It can be registered as an entry point in a pip installed package. The entry point group must map the group passed to load_extensions. 2. It can be listed as a product of an IOTile component that is stored in this ComponentRegistry. The relevant python file inside the component will be imported dynamically as needed. 3. It can be programmatically registered by calling ``register_extension()`` on this class with a string name and an object. This is equivalent to exposing that same object as an entry point with the given name. There is special behavior of this function if class_filter is passed and the object returned by one of the above three methods is a python module. The module will be search for object definitions that match the defined class. The order of the returned objects list is only partially defined. Locally installed components are searched before pip installed packages with entry points. The order of results within each group is not specified. Args: group (str): The extension type that you wish to enumerate. This will be used as the entry_point group for searching pip installed packages. name_filter (str): Only return objects with the given name comp_filter (str): When searching through installed components (not entry_points), only search through components with the given name. class_filter (type or tuple of types): An object that will be passed to instanceof() to verify that all extension objects have the correct types. If not passed, no checking will be done. product_name (str): If this extension can be provided by a registered local component, the name of the product that should be loaded. unique (bool): If True (default is False), there must be exactly one object found inside this extension that matches all of the other criteria. Returns: list of (str, object): A list of the found and loaded extension objects. The string returned with each extension is the name of the entry_point or the base name of the file in the component or the value provided by the call to register_extension depending on how the extension was found. If unique is True, then the list only contains a single entry and that entry will be directly returned. 
""" found_extensions = [] if product_name is not None: for comp in self.iter_components(): if comp_filter is not None and comp.name != comp_filter: continue products = comp.find_products(product_name) for product in products: try: entries = self.load_extension(product, name_filter=name_filter, class_filter=class_filter, component=comp) if len(entries) == 0 and name_filter is None: # Don't warn if we're filtering by name since most extensions won't match self._logger.warn("Found no valid extensions in product %s of component %s", product, comp.path) continue found_extensions.extend(entries) except: # pylint:disable=bare-except;We don't want a broken extension to take down the whole system self._logger.exception("Unable to load extension %s from local component %s at path %s", product_name, comp, product) for entry in self._iter_entrypoint_group(group): name = entry.name if name_filter is not None and name != name_filter: continue try: ext = entry.load() except: # pylint:disable=bare-except; self._logger.warn("Unable to load %s from %s", entry.name, entry.distro, exc_info=True) continue found_extensions.extend((name, x) for x in self._filter_subclasses(ext, class_filter)) for (name, ext) in self._registered_extensions.get(group, []): if name_filter is not None and name != name_filter: continue found_extensions.extend((name, x) for x in self._filter_subclasses(ext, class_filter)) found_extensions = [(name, x) for name, x in found_extensions if self._filter_nonextensions(x)] if unique is True: if len(found_extensions) > 1: raise ArgumentError("Extension %s should have had exactly one instance of class %s, found %d" % (group, class_filter.__name__, len(found_extensions)), classes=found_extensions) elif len(found_extensions) == 0: raise ArgumentError("Extension %s had no instances of class %s" % (group, class_filter.__name__)) return found_extensions[0] return found_extensions
python
def load_extensions(self, group, name_filter=None, comp_filter=None, class_filter=None, product_name=None, unique=False): """Dynamically load and return extension objects of a given type. This is the centralized way for all parts of CoreTools to allow plugin behavior. Whenever a plugin is needed, this method should be called to load it. Examples of plugins are proxy modules, emulated tiles, iotile-build autobuilders, etc. Each kind of plugin will typically be a subclass of a certain base class and can be provided one of three ways: 1. It can be registered as an entry point in a pip installed package. The entry point group must map the group passed to load_extensions. 2. It can be listed as a product of an IOTile component that is stored in this ComponentRegistry. The relevant python file inside the component will be imported dynamically as needed. 3. It can be programmatically registered by calling ``register_extension()`` on this class with a string name and an object. This is equivalent to exposing that same object as an entry point with the given name. There is special behavior of this function if class_filter is passed and the object returned by one of the above three methods is a python module. The module will be search for object definitions that match the defined class. The order of the returned objects list is only partially defined. Locally installed components are searched before pip installed packages with entry points. The order of results within each group is not specified. Args: group (str): The extension type that you wish to enumerate. This will be used as the entry_point group for searching pip installed packages. name_filter (str): Only return objects with the given name comp_filter (str): When searching through installed components (not entry_points), only search through components with the given name. class_filter (type or tuple of types): An object that will be passed to instanceof() to verify that all extension objects have the correct types. If not passed, no checking will be done. product_name (str): If this extension can be provided by a registered local component, the name of the product that should be loaded. unique (bool): If True (default is False), there must be exactly one object found inside this extension that matches all of the other criteria. Returns: list of (str, object): A list of the found and loaded extension objects. The string returned with each extension is the name of the entry_point or the base name of the file in the component or the value provided by the call to register_extension depending on how the extension was found. If unique is True, then the list only contains a single entry and that entry will be directly returned. 
""" found_extensions = [] if product_name is not None: for comp in self.iter_components(): if comp_filter is not None and comp.name != comp_filter: continue products = comp.find_products(product_name) for product in products: try: entries = self.load_extension(product, name_filter=name_filter, class_filter=class_filter, component=comp) if len(entries) == 0 and name_filter is None: # Don't warn if we're filtering by name since most extensions won't match self._logger.warn("Found no valid extensions in product %s of component %s", product, comp.path) continue found_extensions.extend(entries) except: # pylint:disable=bare-except;We don't want a broken extension to take down the whole system self._logger.exception("Unable to load extension %s from local component %s at path %s", product_name, comp, product) for entry in self._iter_entrypoint_group(group): name = entry.name if name_filter is not None and name != name_filter: continue try: ext = entry.load() except: # pylint:disable=bare-except; self._logger.warn("Unable to load %s from %s", entry.name, entry.distro, exc_info=True) continue found_extensions.extend((name, x) for x in self._filter_subclasses(ext, class_filter)) for (name, ext) in self._registered_extensions.get(group, []): if name_filter is not None and name != name_filter: continue found_extensions.extend((name, x) for x in self._filter_subclasses(ext, class_filter)) found_extensions = [(name, x) for name, x in found_extensions if self._filter_nonextensions(x)] if unique is True: if len(found_extensions) > 1: raise ArgumentError("Extension %s should have had exactly one instance of class %s, found %d" % (group, class_filter.__name__, len(found_extensions)), classes=found_extensions) elif len(found_extensions) == 0: raise ArgumentError("Extension %s had no instances of class %s" % (group, class_filter.__name__)) return found_extensions[0] return found_extensions
[ "def", "load_extensions", "(", "self", ",", "group", ",", "name_filter", "=", "None", ",", "comp_filter", "=", "None", ",", "class_filter", "=", "None", ",", "product_name", "=", "None", ",", "unique", "=", "False", ")", ":", "found_extensions", "=", "[", "]", "if", "product_name", "is", "not", "None", ":", "for", "comp", "in", "self", ".", "iter_components", "(", ")", ":", "if", "comp_filter", "is", "not", "None", "and", "comp", ".", "name", "!=", "comp_filter", ":", "continue", "products", "=", "comp", ".", "find_products", "(", "product_name", ")", "for", "product", "in", "products", ":", "try", ":", "entries", "=", "self", ".", "load_extension", "(", "product", ",", "name_filter", "=", "name_filter", ",", "class_filter", "=", "class_filter", ",", "component", "=", "comp", ")", "if", "len", "(", "entries", ")", "==", "0", "and", "name_filter", "is", "None", ":", "# Don't warn if we're filtering by name since most extensions won't match", "self", ".", "_logger", ".", "warn", "(", "\"Found no valid extensions in product %s of component %s\"", ",", "product", ",", "comp", ".", "path", ")", "continue", "found_extensions", ".", "extend", "(", "entries", ")", "except", ":", "# pylint:disable=bare-except;We don't want a broken extension to take down the whole system", "self", ".", "_logger", ".", "exception", "(", "\"Unable to load extension %s from local component %s at path %s\"", ",", "product_name", ",", "comp", ",", "product", ")", "for", "entry", "in", "self", ".", "_iter_entrypoint_group", "(", "group", ")", ":", "name", "=", "entry", ".", "name", "if", "name_filter", "is", "not", "None", "and", "name", "!=", "name_filter", ":", "continue", "try", ":", "ext", "=", "entry", ".", "load", "(", ")", "except", ":", "# pylint:disable=bare-except;", "self", ".", "_logger", ".", "warn", "(", "\"Unable to load %s from %s\"", ",", "entry", ".", "name", ",", "entry", ".", "distro", ",", "exc_info", "=", "True", ")", "continue", "found_extensions", ".", "extend", "(", "(", "name", ",", "x", ")", "for", "x", "in", "self", ".", "_filter_subclasses", "(", "ext", ",", "class_filter", ")", ")", "for", "(", "name", ",", "ext", ")", "in", "self", ".", "_registered_extensions", ".", "get", "(", "group", ",", "[", "]", ")", ":", "if", "name_filter", "is", "not", "None", "and", "name", "!=", "name_filter", ":", "continue", "found_extensions", ".", "extend", "(", "(", "name", ",", "x", ")", "for", "x", "in", "self", ".", "_filter_subclasses", "(", "ext", ",", "class_filter", ")", ")", "found_extensions", "=", "[", "(", "name", ",", "x", ")", "for", "name", ",", "x", "in", "found_extensions", "if", "self", ".", "_filter_nonextensions", "(", "x", ")", "]", "if", "unique", "is", "True", ":", "if", "len", "(", "found_extensions", ")", ">", "1", ":", "raise", "ArgumentError", "(", "\"Extension %s should have had exactly one instance of class %s, found %d\"", "%", "(", "group", ",", "class_filter", ".", "__name__", ",", "len", "(", "found_extensions", ")", ")", ",", "classes", "=", "found_extensions", ")", "elif", "len", "(", "found_extensions", ")", "==", "0", ":", "raise", "ArgumentError", "(", "\"Extension %s had no instances of class %s\"", "%", "(", "group", ",", "class_filter", ".", "__name__", ")", ")", "return", "found_extensions", "[", "0", "]", "return", "found_extensions" ]
Dynamically load and return extension objects of a given type. This is the centralized way for all parts of CoreTools to allow plugin behavior. Whenever a plugin is needed, this method should be called to load it. Examples of plugins are proxy modules, emulated tiles, iotile-build autobuilders, etc. Each kind of plugin will typically be a subclass of a certain base class and can be provided in one of three ways: 1. It can be registered as an entry point in a pip installed package. The entry point group must match the group passed to load_extensions. 2. It can be listed as a product of an IOTile component that is stored in this ComponentRegistry. The relevant python file inside the component will be imported dynamically as needed. 3. It can be programmatically registered by calling ``register_extension()`` on this class with a string name and an object. This is equivalent to exposing that same object as an entry point with the given name. There is special behavior of this function if class_filter is passed and the object returned by one of the above three methods is a python module. The module will be searched for object definitions that match the defined class. The order of the returned objects list is only partially defined. Locally installed components are searched before pip installed packages with entry points. The order of results within each group is not specified. Args: group (str): The extension type that you wish to enumerate. This will be used as the entry_point group for searching pip installed packages. name_filter (str): Only return objects with the given name comp_filter (str): When searching through installed components (not entry_points), only search through components with the given name. class_filter (type or tuple of types): An object that will be passed to isinstance() to verify that all extension objects have the correct types. If not passed, no checking will be done. product_name (str): If this extension can be provided by a registered local component, the name of the product that should be loaded. unique (bool): If True (default is False), there must be exactly one object found inside this extension that matches all of the other criteria. Returns: list of (str, object): A list of the found and loaded extension objects. The string returned with each extension is the name of the entry_point or the base name of the file in the component or the value provided by the call to register_extension depending on how the extension was found. If unique is True, then the list only contains a single entry and that entry will be directly returned.
[ "Dynamically", "load", "and", "return", "extension", "objects", "of", "a", "given", "type", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/dev/registry.py#L79-L190
train
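A minimal usage sketch of load_extensions, assuming iotile-core is installed; the entry point group name 'iotile.proxy' is only an illustration of a group string.

```python
# Hypothetical sketch: enumerate extensions registered under one entry point
# group. The group name is illustrative; pass class_filter=SomeBaseClass to
# keep only objects of a known type.
from iotile.core.dev.registry import ComponentRegistry

registry = ComponentRegistry()
for name, ext in registry.load_extensions('iotile.proxy'):
    print(name, ext)
```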
iotile/coretools
iotilecore/iotile/core/dev/registry.py
ComponentRegistry.register_extension
def register_extension(self, group, name, extension): """Register an extension. Args: group (str): The type of the extension name (str): A name for the extension extension (str or class): If this is a string, then it will be interpreted as a path to import and load. Otherwise it will be treated as the extension object itself. """ if isinstance(extension, str): name, extension = self.load_extension(extension)[0] if group not in self._registered_extensions: self._registered_extensions[group] = [] self._registered_extensions[group].append((name, extension))
python
def register_extension(self, group, name, extension): """Register an extension. Args: group (str): The type of the extension name (str): A name for the extension extension (str or class): If this is a string, then it will be interpreted as a path to import and load. Otherwise it will be treated as the extension object itself. """ if isinstance(extension, str): name, extension = self.load_extension(extension)[0] if group not in self._registered_extensions: self._registered_extensions[group] = [] self._registered_extensions[group].append((name, extension))
[ "def", "register_extension", "(", "self", ",", "group", ",", "name", ",", "extension", ")", ":", "if", "isinstance", "(", "extension", ",", "str", ")", ":", "name", ",", "extension", "=", "self", ".", "load_extension", "(", "extension", ")", "[", "0", "]", "if", "group", "not", "in", "self", ".", "_registered_extensions", ":", "self", ".", "_registered_extensions", "[", "group", "]", "=", "[", "]", "self", ".", "_registered_extensions", "[", "group", "]", ".", "append", "(", "(", "name", ",", "extension", ")", ")" ]
Register an extension. Args: group (str): The type of the extension name (str): A name for the extension extension (str or class): If this is a string, then it will be interpreted as a path to import and load. Otherwise it will be treated as the extension object itself.
[ "Register", "an", "extension", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/dev/registry.py#L192-L209
train
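A sketch of programmatic registration with register_extension, assuming iotile-core is installed; the group and class names are made up for illustration.

```python
# Hypothetical sketch: register an object so that later calls to
# load_extensions('my.group') return it alongside any installed entry points.
from iotile.core.dev.registry import ComponentRegistry


class MyPlugin:
    """Illustrative extension object."""


registry = ComponentRegistry()
registry.register_extension('my.group', 'my_plugin', MyPlugin)
```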
iotile/coretools
iotilecore/iotile/core/dev/registry.py
ComponentRegistry.clear_extensions
def clear_extensions(self, group=None): """Clear all previously registered extensions.""" if group is None: ComponentRegistry._registered_extensions = {} return if group in self._registered_extensions: self._registered_extensions[group] = []
python
def clear_extensions(self, group=None): """Clear all previously registered extensions.""" if group is None: ComponentRegistry._registered_extensions = {} return if group in self._registered_extensions: self._registered_extensions[group] = []
[ "def", "clear_extensions", "(", "self", ",", "group", "=", "None", ")", ":", "if", "group", "is", "None", ":", "ComponentRegistry", ".", "_registered_extensions", "=", "{", "}", "return", "if", "group", "in", "self", ".", "_registered_extensions", ":", "self", ".", "_registered_extensions", "[", "group", "]", "=", "[", "]" ]
Clear all previously registered extensions.
[ "Clear", "all", "previously", "registered", "extensions", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/dev/registry.py#L211-L219
train
iotile/coretools
iotilecore/iotile/core/dev/registry.py
ComponentRegistry.freeze_extensions
def freeze_extensions(self): """Freeze the set of extensions into a single file. Freezing extensions can speed up the extension loading process on machines with slow file systems since it requires only a single file to store all of the extensions. Calling this method will save a file into the current virtual environment that stores a list of all currently found extensions that have been installed as entry_points. Future calls to `load_extensions` will only search the one single file containing frozen extensions rather than enumerating all installed distributions. """ output_path = os.path.join(_registry_folder(), 'frozen_extensions.json') with open(output_path, "w") as outfile: json.dump(self._dump_extensions(), outfile)
python
def freeze_extensions(self): """Freeze the set of extensions into a single file. Freezing extensions can speed up the extension loading process on machines with slow file systems since it requires only a single file to store all of the extensions. Calling this method will save a file into the current virtual environment that stores a list of all currently found extensions that have been installed as entry_points. Future calls to `load_extensions` will only search the one single file containing frozen extensions rather than enumerating all installed distributions. """ output_path = os.path.join(_registry_folder(), 'frozen_extensions.json') with open(output_path, "w") as outfile: json.dump(self._dump_extensions(), outfile)
[ "def", "freeze_extensions", "(", "self", ")", ":", "output_path", "=", "os", ".", "path", ".", "join", "(", "_registry_folder", "(", ")", ",", "'frozen_extensions.json'", ")", "with", "open", "(", "output_path", ",", "\"w\"", ")", "as", "outfile", ":", "json", ".", "dump", "(", "self", ".", "_dump_extensions", "(", ")", ",", "outfile", ")" ]
Freeze the set of extensions into a single file. Freezing extensions can speed up the extension loading process on machines with slow file systems since it requires only a single file to store all of the extensions. Calling this method will save a file into the current virtual environment that stores a list of all currently found extensions that have been installed as entry_points. Future calls to `load_extensions` will only search the one single file containing frozen extensions rather than enumerating all installed distributions.
[ "Freeze", "the", "set", "of", "extensions", "into", "a", "single", "file", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/dev/registry.py#L221-L238
train
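A hedged sketch of freezing the extension list and later undoing it (unfreeze_extensions appears in a following record), assuming iotile-core is installed and a writable registry folder.

```python
# Hypothetical sketch: cache the installed entry points into
# frozen_extensions.json so later lookups skip the slow distribution scan,
# then return to live enumeration.
from iotile.core.dev.registry import ComponentRegistry

registry = ComponentRegistry()
registry.freeze_extensions()
# ... later, when installed packages change:
registry.unfreeze_extensions()
```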
iotile/coretools
iotilecore/iotile/core/dev/registry.py
ComponentRegistry.unfreeze_extensions
def unfreeze_extensions(self): """Remove a previously frozen list of extensions.""" output_path = os.path.join(_registry_folder(), 'frozen_extensions.json') if not os.path.isfile(output_path): raise ExternalError("There is no frozen extension list") os.remove(output_path) ComponentRegistry._frozen_extensions = None
python
def unfreeze_extensions(self): """Remove a previously frozen list of extensions.""" output_path = os.path.join(_registry_folder(), 'frozen_extensions.json') if not os.path.isfile(output_path): raise ExternalError("There is no frozen extension list") os.remove(output_path) ComponentRegistry._frozen_extensions = None
[ "def", "unfreeze_extensions", "(", "self", ")", ":", "output_path", "=", "os", ".", "path", ".", "join", "(", "_registry_folder", "(", ")", ",", "'frozen_extensions.json'", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "output_path", ")", ":", "raise", "ExternalError", "(", "\"There is no frozen extension list\"", ")", "os", ".", "remove", "(", "output_path", ")", "ComponentRegistry", ".", "_frozen_extensions", "=", "None" ]
Remove a previously frozen list of extensions.
[ "Remove", "a", "previously", "frozen", "list", "of", "extensions", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/dev/registry.py#L240-L248
train
iotile/coretools
iotilecore/iotile/core/dev/registry.py
ComponentRegistry.load_extension
def load_extension(self, path, name_filter=None, class_filter=None, unique=False, component=None): """Load a single python module extension. This function is similar to using the imp module directly to load a module and potentially inspecting the objects it declares to filter them by class. Args: path (str): The path to the python file to load name_filter (str): If passed, the basename of the module must match name or nothing is returned. class_filter (type): If passed, only instance of this class are returned. unique (bool): If True (default is False), there must be exactly one object found inside this extension that matches all of the other criteria. component (IOTile): The component that this extension comes from if it is loaded from an installed component. This is used to properly import the extension as a submodule of the component's support package. Returns: list of (name, type): A list of the objects found at the extension path. If unique is True, then the list only contains a single entry and that entry will be directly returned. """ import_name = None if component is not None: import_name = _ensure_package_loaded(path, component) name, ext = _try_load_module(path, import_name=import_name) if name_filter is not None and name != name_filter: return [] found = [(name, x) for x in self._filter_subclasses(ext, class_filter)] found = [(name, x) for name, x in found if self._filter_nonextensions(x)] if not unique: return found if len(found) > 1: raise ArgumentError("Extension %s should have had exactly one instance of class %s, found %d" % (path, class_filter.__name__, len(found)), classes=found) elif len(found) == 0: raise ArgumentError("Extension %s had no instances of class %s" % (path, class_filter.__name__)) return found[0]
python
def load_extension(self, path, name_filter=None, class_filter=None, unique=False, component=None): """Load a single python module extension. This function is similar to using the imp module directly to load a module and potentially inspecting the objects it declares to filter them by class. Args: path (str): The path to the python file to load name_filter (str): If passed, the basename of the module must match name or nothing is returned. class_filter (type): If passed, only instance of this class are returned. unique (bool): If True (default is False), there must be exactly one object found inside this extension that matches all of the other criteria. component (IOTile): The component that this extension comes from if it is loaded from an installed component. This is used to properly import the extension as a submodule of the component's support package. Returns: list of (name, type): A list of the objects found at the extension path. If unique is True, then the list only contains a single entry and that entry will be directly returned. """ import_name = None if component is not None: import_name = _ensure_package_loaded(path, component) name, ext = _try_load_module(path, import_name=import_name) if name_filter is not None and name != name_filter: return [] found = [(name, x) for x in self._filter_subclasses(ext, class_filter)] found = [(name, x) for name, x in found if self._filter_nonextensions(x)] if not unique: return found if len(found) > 1: raise ArgumentError("Extension %s should have had exactly one instance of class %s, found %d" % (path, class_filter.__name__, len(found)), classes=found) elif len(found) == 0: raise ArgumentError("Extension %s had no instances of class %s" % (path, class_filter.__name__)) return found[0]
[ "def", "load_extension", "(", "self", ",", "path", ",", "name_filter", "=", "None", ",", "class_filter", "=", "None", ",", "unique", "=", "False", ",", "component", "=", "None", ")", ":", "import_name", "=", "None", "if", "component", "is", "not", "None", ":", "import_name", "=", "_ensure_package_loaded", "(", "path", ",", "component", ")", "name", ",", "ext", "=", "_try_load_module", "(", "path", ",", "import_name", "=", "import_name", ")", "if", "name_filter", "is", "not", "None", "and", "name", "!=", "name_filter", ":", "return", "[", "]", "found", "=", "[", "(", "name", ",", "x", ")", "for", "x", "in", "self", ".", "_filter_subclasses", "(", "ext", ",", "class_filter", ")", "]", "found", "=", "[", "(", "name", ",", "x", ")", "for", "name", ",", "x", "in", "found", "if", "self", ".", "_filter_nonextensions", "(", "x", ")", "]", "if", "not", "unique", ":", "return", "found", "if", "len", "(", "found", ")", ">", "1", ":", "raise", "ArgumentError", "(", "\"Extension %s should have had exactly one instance of class %s, found %d\"", "%", "(", "path", ",", "class_filter", ".", "__name__", ",", "len", "(", "found", ")", ")", ",", "classes", "=", "found", ")", "elif", "len", "(", "found", ")", "==", "0", ":", "raise", "ArgumentError", "(", "\"Extension %s had no instances of class %s\"", "%", "(", "path", ",", "class_filter", ".", "__name__", ")", ")", "return", "found", "[", "0", "]" ]
Load a single python module extension. This function is similar to using the imp module directly to load a module and potentially inspecting the objects it declares to filter them by class. Args: path (str): The path to the python file to load name_filter (str): If passed, the basename of the module must match name or nothing is returned. class_filter (type): If passed, only instances of this class are returned. unique (bool): If True (default is False), there must be exactly one object found inside this extension that matches all of the other criteria. component (IOTile): The component that this extension comes from if it is loaded from an installed component. This is used to properly import the extension as a submodule of the component's support package. Returns: list of (name, type): A list of the objects found at the extension path. If unique is True, then the list only contains a single entry and that entry will be directly returned.
[ "Load", "a", "single", "python", "module", "extension", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/dev/registry.py#L250-L296
train
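A usage sketch of load_extension with the path:object convention described in the _try_load_module record above; the file and class names are illustrative and would need to exist on disk.

```python
# Hypothetical sketch: load one named object from a local python file.
# './my_proxy.py' and 'MyProxy' are illustrative; unique=True returns exactly
# one (name, object) pair or raises ArgumentError.
from iotile.core.dev.registry import ComponentRegistry

registry = ComponentRegistry()
name, proxy_class = registry.load_extension('./my_proxy.py:MyProxy', unique=True)
```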
iotile/coretools
iotilecore/iotile/core/dev/registry.py
ComponentRegistry._filter_nonextensions
def _filter_nonextensions(cls, obj): """Remove all classes marked as not extensions. This allows us to have a deeper hierarchy of classes than just one base class that is filtered by _filter_subclasses. Any class can define a class propery named: __NO_EXTENSION__ = True That class will never be returned as an extension. This is useful for masking out base classes for extensions that are declared in CoreTools and would be present in module imports but should not create a second entry point. """ # Not all objects have __dict__ attributes. For example, tuples don't. # and tuples are used in iotile.build for some entry points. if hasattr(obj, '__dict__') and obj.__dict__.get('__NO_EXTENSION__', False) is True: return False return True
python
def _filter_nonextensions(cls, obj): """Remove all classes marked as not extensions. This allows us to have a deeper hierarchy of classes than just one base class that is filtered by _filter_subclasses. Any class can define a class propery named: __NO_EXTENSION__ = True That class will never be returned as an extension. This is useful for masking out base classes for extensions that are declared in CoreTools and would be present in module imports but should not create a second entry point. """ # Not all objects have __dict__ attributes. For example, tuples don't. # and tuples are used in iotile.build for some entry points. if hasattr(obj, '__dict__') and obj.__dict__.get('__NO_EXTENSION__', False) is True: return False return True
[ "def", "_filter_nonextensions", "(", "cls", ",", "obj", ")", ":", "# Not all objects have __dict__ attributes. For example, tuples don't.", "# and tuples are used in iotile.build for some entry points.", "if", "hasattr", "(", "obj", ",", "'__dict__'", ")", "and", "obj", ".", "__dict__", ".", "get", "(", "'__NO_EXTENSION__'", ",", "False", ")", "is", "True", ":", "return", "False", "return", "True" ]
Remove all classes marked as not extensions. This allows us to have a deeper hierarchy of classes than just one base class that is filtered by _filter_subclasses. Any class can define a class property named: __NO_EXTENSION__ = True That class will never be returned as an extension. This is useful for masking out base classes for extensions that are declared in CoreTools and would be present in module imports but should not create a second entry point.
[ "Remove", "all", "classes", "marked", "as", "not", "extensions", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/dev/registry.py#L331-L351
train
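A small sketch of the __NO_EXTENSION__ convention that _filter_nonextensions checks; the class names are illustrative.

```python
# Sketch: a shared base class opts out of being returned as an extension.
# The filter only inspects the class's own __dict__, so subclasses that do not
# redeclare __NO_EXTENSION__ remain discoverable.
class BaseTransport:
    """Shared transport behavior; never returned as an extension."""
    __NO_EXTENSION__ = True


class BleTransport(BaseTransport):
    """Concrete transport that load_extensions() would still return."""
```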
iotile/coretools
iotilecore/iotile/core/dev/registry.py
ComponentRegistry.SetBackingStore
def SetBackingStore(cls, backing): """Set the global backing type used by the ComponentRegistry from this point forward This function must be called before any operations that use the registry are initiated otherwise they will work from different registries that will likely contain different data """ if backing not in ['json', 'sqlite', 'memory']: raise ArgumentError("Unknown backing store type that is not json or sqlite", backing=backing) if backing == 'json': cls.BackingType = JSONKVStore cls.BackingFileName = 'component_registry.json' elif backing == 'memory': cls.BackingType = InMemoryKVStore cls.BackingFileName = None else: cls.BackingType = SQLiteKVStore cls.BackingFileName = 'component_registry.db'
python
def SetBackingStore(cls, backing): """Set the global backing type used by the ComponentRegistry from this point forward This function must be called before any operations that use the registry are initiated otherwise they will work from different registries that will likely contain different data """ if backing not in ['json', 'sqlite', 'memory']: raise ArgumentError("Unknown backing store type that is not json or sqlite", backing=backing) if backing == 'json': cls.BackingType = JSONKVStore cls.BackingFileName = 'component_registry.json' elif backing == 'memory': cls.BackingType = InMemoryKVStore cls.BackingFileName = None else: cls.BackingType = SQLiteKVStore cls.BackingFileName = 'component_registry.db'
[ "def", "SetBackingStore", "(", "cls", ",", "backing", ")", ":", "if", "backing", "not", "in", "[", "'json'", ",", "'sqlite'", ",", "'memory'", "]", ":", "raise", "ArgumentError", "(", "\"Unknown backing store type that is not json or sqlite\"", ",", "backing", "=", "backing", ")", "if", "backing", "==", "'json'", ":", "cls", ".", "BackingType", "=", "JSONKVStore", "cls", ".", "BackingFileName", "=", "'component_registry.json'", "elif", "backing", "==", "'memory'", ":", "cls", ".", "BackingType", "=", "InMemoryKVStore", "cls", ".", "BackingFileName", "=", "None", "else", ":", "cls", ".", "BackingType", "=", "SQLiteKVStore", "cls", ".", "BackingFileName", "=", "'component_registry.db'" ]
Set the global backing type used by the ComponentRegistry from this point forward. This function must be called before any operations that use the registry are initiated; otherwise they will work from different registries that will likely contain different data.
[ "Set", "the", "global", "backing", "type", "used", "by", "the", "ComponentRegistry", "from", "this", "point", "forward" ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/dev/registry.py#L390-L408
train
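A short sketch of selecting the backing store before touching the registry, assuming iotile-core is installed; 'memory' is one of the three values the method accepts ('json', 'sqlite', 'memory').

```python
# Hypothetical sketch: pick the in-memory backing store (handy in tests because
# nothing is persisted) before any other registry operation runs.
from iotile.core.dev.registry import ComponentRegistry

ComponentRegistry.SetBackingStore('memory')
registry = ComponentRegistry()
```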
iotile/coretools
iotilecore/iotile/core/dev/registry.py
ComponentRegistry.add_component
def add_component(self, component, temporary=False): """Register a component with ComponentRegistry. Component must be a buildable object with a module_settings.json file that describes its name and the domain that it is part of. By default, this component is saved in the permanent registry associated with this environment and will remain registered for future CoreTools invocations. If you only want this component to be temporarily registered during this program's session, you can pass temporary=True and the component will be stored in RAM only, not persisted to the underlying key-value store. Args: component (str): The path to a component that should be registered. temporary (bool): Optional flag to only temporarily register the component for the duration of this program invocation. """ tile = IOTile(component) value = os.path.normpath(os.path.abspath(component)) if temporary is True: self._component_overlays[tile.name] = value else: self.kvstore.set(tile.name, value)
python
def add_component(self, component, temporary=False): """Register a component with ComponentRegistry. Component must be a buildable object with a module_settings.json file that describes its name and the domain that it is part of. By default, this component is saved in the permanent registry associated with this environment and will remain registered for future CoreTools invocations. If you only want this component to be temporarily registered during this program's session, you can pass temporary=True and the component will be stored in RAM only, not persisted to the underlying key-value store. Args: component (str): The path to a component that should be registered. temporary (bool): Optional flag to only temporarily register the component for the duration of this program invocation. """ tile = IOTile(component) value = os.path.normpath(os.path.abspath(component)) if temporary is True: self._component_overlays[tile.name] = value else: self.kvstore.set(tile.name, value)
[ "def", "add_component", "(", "self", ",", "component", ",", "temporary", "=", "False", ")", ":", "tile", "=", "IOTile", "(", "component", ")", "value", "=", "os", ".", "path", ".", "normpath", "(", "os", ".", "path", ".", "abspath", "(", "component", ")", ")", "if", "temporary", "is", "True", ":", "self", ".", "_component_overlays", "[", "tile", ".", "name", "]", "=", "value", "else", ":", "self", ".", "kvstore", ".", "set", "(", "tile", ".", "name", ",", "value", ")" ]
Register a component with ComponentRegistry. Component must be a buildable object with a module_settings.json file that describes its name and the domain that it is part of. By default, this component is saved in the permanent registry associated with this environment and will remain registered for future CoreTools invocations. If you only want this component to be temporarily registered during this program's session, you can pass temporary=True and the component will be stored in RAM only, not persisted to the underlying key-value store. Args: component (str): The path to a component that should be registered. temporary (bool): Optional flag to only temporarily register the component for the duration of this program invocation.
[ "Register", "a", "component", "with", "ComponentRegistry", "." ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/dev/registry.py#L410-L436
train
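A sketch of temporarily registering a local component, assuming iotile-core is installed; './my_component' is an illustrative path that would need a valid module_settings.json file.

```python
# Hypothetical sketch: register a component for this process only. With
# temporary=True the path is kept in RAM and never written to the key-value
# store backing the registry.
from iotile.core.dev.registry import ComponentRegistry

registry = ComponentRegistry()
registry.add_component('./my_component', temporary=True)
```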
iotile/coretools
iotilecore/iotile/core/dev/registry.py
ComponentRegistry.list_plugins
def list_plugins(self): """ List all of the plugins that have been registerd for the iotile program on this computer """ vals = self.plugins.items() return {x: y for x, y in vals}
python
def list_plugins(self): """ List all of the plugins that have been registerd for the iotile program on this computer """ vals = self.plugins.items() return {x: y for x, y in vals}
[ "def", "list_plugins", "(", "self", ")", ":", "vals", "=", "self", ".", "plugins", ".", "items", "(", ")", "return", "{", "x", ":", "y", "for", "x", ",", "y", "in", "vals", "}" ]
List all of the plugins that have been registered for the iotile program on this computer.
[ "List", "all", "of", "the", "plugins", "that", "have", "been", "registerd", "for", "the", "iotile", "program", "on", "this", "computer" ]
2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/dev/registry.py#L449-L456
train
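Finally, a small sketch of enumerating registered plugins via list_plugins, assuming iotile-core is installed.

```python
# Hypothetical sketch: dump the registered iotile plugins as a name -> object map.
from iotile.core.dev.registry import ComponentRegistry

registry = ComponentRegistry()
for name, plugin in registry.list_plugins().items():
    print(name, plugin)
```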