repository_name
stringlengths
5
67
func_path_in_repository
stringlengths
4
234
func_name
stringlengths
0
314
whole_func_string
stringlengths
52
3.87M
language
stringclasses
6 values
func_code_string
stringlengths
52
3.87M
func_documentation_string
stringlengths
1
47.2k
func_code_url
stringlengths
85
339
titusjan/argos
argos/repo/rtiplugins/ncdf.py
NcdfGroupRti._fetchAllChildren
def _fetchAllChildren(self): """ Fetches all sub groups and variables that this group contains. """ assert self._ncGroup is not None, "dataset undefined (file not opened?)" assert self.canFetchChildren(), "canFetchChildren must be True" childItems = [] # Add dimensions for dimName, ncDim in self._ncGroup.dimensions.items(): childItems.append(NcdfDimensionRti(ncDim, nodeName=dimName, fileName=self.fileName)) # Add groups for groupName, ncGroup in self._ncGroup.groups.items(): childItems.append(NcdfGroupRti(ncGroup, nodeName=groupName, fileName=self.fileName)) # Add variables for varName, ncVar in self._ncGroup.variables.items(): childItems.append(NcdfVariableRti(ncVar, nodeName=varName, fileName=self.fileName)) return childItems
python
def _fetchAllChildren(self): """ Fetches all sub groups and variables that this group contains. """ assert self._ncGroup is not None, "dataset undefined (file not opened?)" assert self.canFetchChildren(), "canFetchChildren must be True" childItems = [] # Add dimensions for dimName, ncDim in self._ncGroup.dimensions.items(): childItems.append(NcdfDimensionRti(ncDim, nodeName=dimName, fileName=self.fileName)) # Add groups for groupName, ncGroup in self._ncGroup.groups.items(): childItems.append(NcdfGroupRti(ncGroup, nodeName=groupName, fileName=self.fileName)) # Add variables for varName, ncVar in self._ncGroup.variables.items(): childItems.append(NcdfVariableRti(ncVar, nodeName=varName, fileName=self.fileName)) return childItems
Fetches all sub groups and variables that this group contains.
https://github.com/titusjan/argos/blob/20d0a3cae26c36ea789a5d219c02ca7df21279dd/argos/repo/rtiplugins/ncdf.py#L393-L413
titusjan/argos
argos/repo/rtiplugins/ncdf.py
NcdfFileRti._openResources
def _openResources(self): """ Opens the root Dataset. """ logger.info("Opening: {}".format(self._fileName)) self._ncGroup = Dataset(self._fileName)
python
def _openResources(self): """ Opens the root Dataset. """ logger.info("Opening: {}".format(self._fileName)) self._ncGroup = Dataset(self._fileName)
Opens the root Dataset.
https://github.com/titusjan/argos/blob/20d0a3cae26c36ea789a5d219c02ca7df21279dd/argos/repo/rtiplugins/ncdf.py#L431-L435
titusjan/argos
argos/repo/rtiplugins/ncdf.py
NcdfFileRti._closeResources
def _closeResources(self): """ Closes the root Dataset. """ logger.info("Closing: {}".format(self._fileName)) self._ncGroup.close() self._ncGroup = None
python
def _closeResources(self): """ Closes the root Dataset. """ logger.info("Closing: {}".format(self._fileName)) self._ncGroup.close() self._ncGroup = None
Closes the root Dataset.
https://github.com/titusjan/argos/blob/20d0a3cae26c36ea789a5d219c02ca7df21279dd/argos/repo/rtiplugins/ncdf.py#L437-L442
titusjan/argos
argos/repo/rtiplugins/scipyio.py
WavFileRti._openResources
def _openResources(self): """ Uses numpy.loadtxt to open the underlying file. """ try: rate, data = scipy.io.wavfile.read(self._fileName, mmap=True) except Exception as ex: logger.warning(ex) logger.warning("Unable to read wav with memmory mapping. Trying without now.") rate, data = scipy.io.wavfile.read(self._fileName, mmap=False) self._array = data self.attributes['rate'] = rate
python
def _openResources(self): """ Uses numpy.loadtxt to open the underlying file. """ try: rate, data = scipy.io.wavfile.read(self._fileName, mmap=True) except Exception as ex: logger.warning(ex) logger.warning("Unable to read wav with memmory mapping. Trying without now.") rate, data = scipy.io.wavfile.read(self._fileName, mmap=False) self._array = data self.attributes['rate'] = rate
Uses numpy.loadtxt to open the underlying file.
https://github.com/titusjan/argos/blob/20d0a3cae26c36ea789a5d219c02ca7df21279dd/argos/repo/rtiplugins/scipyio.py#L134-L145
titusjan/argos
argos/repo/rtiplugins/scipyio.py
WavFileRti._fetchAllChildren
def _fetchAllChildren(self): """ Adds an ArrayRti per column as children so that they can be inspected easily """ childItems = [] if self._array.ndim == 2: _nRows, nCols = self._array.shape if self._array is not None else (0, 0) for col in range(nCols): colItem = SliceRti(self._array[:, col], nodeName="channel-{}".format(col), fileName=self.fileName, iconColor=self.iconColor, attributes=self.attributes) childItems.append(colItem) return childItems
python
def _fetchAllChildren(self): """ Adds an ArrayRti per column as children so that they can be inspected easily """ childItems = [] if self._array.ndim == 2: _nRows, nCols = self._array.shape if self._array is not None else (0, 0) for col in range(nCols): colItem = SliceRti(self._array[:, col], nodeName="channel-{}".format(col), fileName=self.fileName, iconColor=self.iconColor, attributes=self.attributes) childItems.append(colItem) return childItems
Adds an ArrayRti per column as children so that they can be inspected easily
https://github.com/titusjan/argos/blob/20d0a3cae26c36ea789a5d219c02ca7df21279dd/argos/repo/rtiplugins/scipyio.py#L155-L166
titusjan/argos
argos/inspector/qtplugins/text.py
TextInspector._drawContents
def _drawContents(self, reason=None, initiator=None): """ Converts the (zero-dimensional) sliced array to string and puts it in the text editor. The reason and initiator parameters are ignored. See AbstractInspector.updateContents for their description. """ logger.debug("TextInspector._drawContents: {}".format(self)) self._clearContents() slicedArray = self.collector.getSlicedArray() if slicedArray is None: return # Sanity check, the slicedArray should be zero-dimensional. It can be used as a scalar. # In fact, using an index (e.g. slicedArray[0]) will raise an exception. assert slicedArray.data.ndim == 0, \ "Expected zero-dimensional array. Got: {}".format(slicedArray.ndim) # Valid data from here... maskedArr = slicedArray.asMaskedArray() # So that we call mask[()] for boolean masks slicedScalar = maskedArr[()] # Convert to Numpy scalar isMasked = maskedArr.mask[()] text = to_string(slicedScalar, masked=isMasked, maskFormat='--', decode_bytes=self.config.encodingCti.configValue) self.editor.setPlainText(text) self.editor.setWordWrapMode(self.config.wordWrapCti.configValue) # Update the editor font from the font config item (will call self.editor.setFont) self.config.updateTarget()
python
def _drawContents(self, reason=None, initiator=None): """ Converts the (zero-dimensional) sliced array to string and puts it in the text editor. The reason and initiator parameters are ignored. See AbstractInspector.updateContents for their description. """ logger.debug("TextInspector._drawContents: {}".format(self)) self._clearContents() slicedArray = self.collector.getSlicedArray() if slicedArray is None: return # Sanity check, the slicedArray should be zero-dimensional. It can be used as a scalar. # In fact, using an index (e.g. slicedArray[0]) will raise an exception. assert slicedArray.data.ndim == 0, \ "Expected zero-dimensional array. Got: {}".format(slicedArray.ndim) # Valid data from here... maskedArr = slicedArray.asMaskedArray() # So that we call mask[()] for boolean masks slicedScalar = maskedArr[()] # Convert to Numpy scalar isMasked = maskedArr.mask[()] text = to_string(slicedScalar, masked=isMasked, maskFormat='--', decode_bytes=self.config.encodingCti.configValue) self.editor.setPlainText(text) self.editor.setWordWrapMode(self.config.wordWrapCti.configValue) # Update the editor font from the font config item (will call self.editor.setFont) self.config.updateTarget()
Converts the (zero-dimensional) sliced array to string and puts it in the text editor. The reason and initiator parameters are ignored. See AbstractInspector.updateContents for their description.
https://github.com/titusjan/argos/blob/20d0a3cae26c36ea789a5d219c02ca7df21279dd/argos/inspector/qtplugins/text.py#L102-L132
titusjan/argos
argos/inspector/pgplugins/pgplotitem.py
middleMouseClickEvent
def middleMouseClickEvent(argosPgPlotItem, axisNumber, mouseClickEvent): """ Emits sigAxisReset when the middle mouse button is clicked on an axis of the the plot item. """ if mouseClickEvent.button() == QtCore.Qt.MiddleButton: mouseClickEvent.accept() argosPgPlotItem.emitResetAxisSignal(axisNumber)
python
def middleMouseClickEvent(argosPgPlotItem, axisNumber, mouseClickEvent): """ Emits sigAxisReset when the middle mouse button is clicked on an axis of the the plot item. """ if mouseClickEvent.button() == QtCore.Qt.MiddleButton: mouseClickEvent.accept() argosPgPlotItem.emitResetAxisSignal(axisNumber)
Emits sigAxisReset when the middle mouse button is clicked on an axis of the the plot item.
https://github.com/titusjan/argos/blob/20d0a3cae26c36ea789a5d219c02ca7df21279dd/argos/inspector/pgplugins/pgplotitem.py#L49-L54
titusjan/argos
argos/inspector/pgplugins/pgplotitem.py
ArgosPgPlotItem.close
def close(self): """ Is called before destruction. Can be used to clean-up resources Could be called 'finalize' but PlotItem already has a close so we reuse that. """ logger.debug("Finalizing: {}".format(self)) super(ArgosPgPlotItem, self).close()
python
def close(self): """ Is called before destruction. Can be used to clean-up resources Could be called 'finalize' but PlotItem already has a close so we reuse that. """ logger.debug("Finalizing: {}".format(self)) super(ArgosPgPlotItem, self).close()
Is called before destruction. Can be used to clean-up resources Could be called 'finalize' but PlotItem already has a close so we reuse that.
https://github.com/titusjan/argos/blob/20d0a3cae26c36ea789a5d219c02ca7df21279dd/argos/inspector/pgplugins/pgplotitem.py#L120-L125
titusjan/argos
argos/inspector/pgplugins/pgplotitem.py
ArgosPgPlotItem.contextMenuEvent
def contextMenuEvent(self, event): """ Shows the context menu at the cursor position We need to take the event-based approach because ArgosPgPlotItem does derives from QGraphicsWidget, and not from QWidget, and therefore doesn't have the customContextMenuRequested signal. """ contextMenu = QtWidgets.QMenu() for action in self.actions(): contextMenu.addAction(action) contextMenu.exec_(event.screenPos())
python
def contextMenuEvent(self, event): """ Shows the context menu at the cursor position We need to take the event-based approach because ArgosPgPlotItem does derives from QGraphicsWidget, and not from QWidget, and therefore doesn't have the customContextMenuRequested signal. """ contextMenu = QtWidgets.QMenu() for action in self.actions(): contextMenu.addAction(action) contextMenu.exec_(event.screenPos())
Shows the context menu at the cursor position We need to take the event-based approach because ArgosPgPlotItem does derives from QGraphicsWidget, and not from QWidget, and therefore doesn't have the customContextMenuRequested signal.
https://github.com/titusjan/argos/blob/20d0a3cae26c36ea789a5d219c02ca7df21279dd/argos/inspector/pgplugins/pgplotitem.py#L128-L138
titusjan/argos
argos/inspector/pgplugins/pgplotitem.py
ArgosPgPlotItem.emitResetAxisSignal
def emitResetAxisSignal(self, axisNumber): """ Emits the sigResetAxis with the axisNumber as parameter axisNumber should be 0 for X, 1 for Y, and 2 for both axes. """ assert axisNumber in (VALID_AXES_NUMBERS), \ "Axis Nr should be one of {}, got {}".format(VALID_AXES_NUMBERS, axisNumber) # Hide 'auto-scale (A)' button logger.debug("ArgosPgPlotItem.autoBtnClicked, mode:{}".format(self.autoBtn.mode)) if self.autoBtn.mode == 'auto': self.autoBtn.hide() else: # Does this occur? msg = "Unexpected autobutton mode: {}".format(self.autoBtn.mode) if DEBUGGING: raise ValueError(msg) else: logger.warn(msg) logger.debug("Emitting sigAxisReset({}) for {!r}".format(axisNumber, self)) self.sigAxisReset.emit(axisNumber)
python
def emitResetAxisSignal(self, axisNumber): """ Emits the sigResetAxis with the axisNumber as parameter axisNumber should be 0 for X, 1 for Y, and 2 for both axes. """ assert axisNumber in (VALID_AXES_NUMBERS), \ "Axis Nr should be one of {}, got {}".format(VALID_AXES_NUMBERS, axisNumber) # Hide 'auto-scale (A)' button logger.debug("ArgosPgPlotItem.autoBtnClicked, mode:{}".format(self.autoBtn.mode)) if self.autoBtn.mode == 'auto': self.autoBtn.hide() else: # Does this occur? msg = "Unexpected autobutton mode: {}".format(self.autoBtn.mode) if DEBUGGING: raise ValueError(msg) else: logger.warn(msg) logger.debug("Emitting sigAxisReset({}) for {!r}".format(axisNumber, self)) self.sigAxisReset.emit(axisNumber)
Emits the sigResetAxis with the axisNumber as parameter axisNumber should be 0 for X, 1 for Y, and 2 for both axes.
https://github.com/titusjan/argos/blob/20d0a3cae26c36ea789a5d219c02ca7df21279dd/argos/inspector/pgplugins/pgplotitem.py#L148-L168
tarmstrong/nbdiff
nbdiff/merge.py
merge
def merge(local, base, remote, check_modified=False): """Generate unmerged series of changes (including conflicts). By diffing the two diffs, we find *changes* that are on the local branch, the remote branch, or both. We arbitrarily choose the "local" branch to be the "before" and the "remote" branch to be the "after" in the diff algorithm. Therefore: If a change is "deleted", that means that it occurs only on the local branch. If a change is "added" that means it occurs only on the remote branch. If a change is "unchanged", that means it occurs in both branches. Either the same addition or same deletion occurred in both branches, or the cell was not changed in either branch. Parameters ---------- local : list A sequence representing the items on the local branch. base : dict A sequence representing the items on the base branch remote : dict A sequence representing the items on the remote branch. Returns ------- result : A diff result comparing the changes on the local and remote branches. """ base_local = diff.diff(base, local, check_modified=check_modified) base_remote = diff.diff(base, remote, check_modified=check_modified) merge = diff.diff(base_local, base_remote) return merge
python
def merge(local, base, remote, check_modified=False): """Generate unmerged series of changes (including conflicts). By diffing the two diffs, we find *changes* that are on the local branch, the remote branch, or both. We arbitrarily choose the "local" branch to be the "before" and the "remote" branch to be the "after" in the diff algorithm. Therefore: If a change is "deleted", that means that it occurs only on the local branch. If a change is "added" that means it occurs only on the remote branch. If a change is "unchanged", that means it occurs in both branches. Either the same addition or same deletion occurred in both branches, or the cell was not changed in either branch. Parameters ---------- local : list A sequence representing the items on the local branch. base : dict A sequence representing the items on the base branch remote : dict A sequence representing the items on the remote branch. Returns ------- result : A diff result comparing the changes on the local and remote branches. """ base_local = diff.diff(base, local, check_modified=check_modified) base_remote = diff.diff(base, remote, check_modified=check_modified) merge = diff.diff(base_local, base_remote) return merge
Generate unmerged series of changes (including conflicts). By diffing the two diffs, we find *changes* that are on the local branch, the remote branch, or both. We arbitrarily choose the "local" branch to be the "before" and the "remote" branch to be the "after" in the diff algorithm. Therefore: If a change is "deleted", that means that it occurs only on the local branch. If a change is "added" that means it occurs only on the remote branch. If a change is "unchanged", that means it occurs in both branches. Either the same addition or same deletion occurred in both branches, or the cell was not changed in either branch. Parameters ---------- local : list A sequence representing the items on the local branch. base : dict A sequence representing the items on the base branch remote : dict A sequence representing the items on the remote branch. Returns ------- result : A diff result comparing the changes on the local and remote branches.
https://github.com/tarmstrong/nbdiff/blob/3fdfb89f94fc0f4821bc04999ddf53b34d882ab9/nbdiff/merge.py#L11-L43
tarmstrong/nbdiff
nbdiff/merge.py
notebook_merge
def notebook_merge(local, base, remote, check_modified=False): """Unify three notebooks into a single notebook with merge metadata. The result of this function is a valid notebook that can be loaded by the IPython Notebook front-end. This function adds additional cell metadata that the front-end Javascript uses to render the merge. Parameters ---------- local : dict The local branch's version of the notebook. base : dict The last common ancestor of local and remote. remote : dict The remote branch's version of the notebook. Returns ------- nb : A valid notebook containing merge metadata. """ local_cells = get_cells(local) base_cells = get_cells(base) remote_cells = get_cells(remote) rows = [] current_row = [] empty_cell = lambda: { 'cell_type': 'code', 'language': 'python', 'outputs': [], 'prompt_number': 1, 'text': ['Placeholder'], 'metadata': {'state': 'empty'} } diff_of_diffs = merge(local_cells, base_cells, remote_cells) # For each item in the higher-order diff, create a "row" that # corresponds to a row in the NBDiff interface. A row contains: # | LOCAL | BASE | REMOTE | for item in diff_of_diffs: state = item['state'] cell = copy.deepcopy(diff_result_to_cell(item['value'])) if state == 'deleted': # This change is between base and local branches. # It can be an addition or a deletion. if cell['metadata']['state'] == 'unchanged': # This side doesn't have the change; wait # until we encounter the change to create the row. continue cell['metadata']['side'] = 'local' remote_cell = empty_cell() remote_cell['metadata']['side'] = 'remote' if cell['metadata']['state'] == 'deleted' \ or cell['metadata']['state'] == 'unchanged': base_cell = copy.deepcopy(cell) else: base_cell = empty_cell() base_cell['metadata']['side'] = 'base' # This change is on the right. current_row = [ cell, base_cell, remote_cell, ] elif state == 'added': # This change is between base and remote branches. # It can be an addition or a deletion. 
cell['metadata']['side'] = 'remote' if cell['metadata']['state'] == 'unchanged': # This side doesn't have the change; wait # until we encounter the change to create the row. continue if cell['metadata']['state'] == 'deleted': base_cell = copy.deepcopy(cell) base_cell['metadata']['state'] = 'unchanged' local_cell = copy.deepcopy(cell) local_cell['metadata']['state'] = 'unchanged' else: base_cell = empty_cell() local_cell = empty_cell() base_cell['metadata']['side'] = 'base' local_cell['metadata']['side'] = 'local' current_row = [ local_cell, base_cell, cell, ] elif state == 'unchanged': # The same item occurs between base-local and base-remote. # This happens if both branches made the same change, whether # that is an addition or deletion. If neither branches # changed a given cell, that cell shows up here too. cell1 = copy.deepcopy(cell) cell3 = copy.deepcopy(cell) if cell['metadata']['state'] == 'deleted' \ or cell['metadata']['state'] == 'unchanged': # If the change is a deletion, the cell-to-be-deleted # should in the base as 'unchanged'. The user will # choose to make it deleted. cell2 = copy.deepcopy(cell) cell2['metadata']['state'] = 'unchanged' else: # If the change is an addition, it should not # show in the base; the user must add it to the merged version. cell2 = empty_cell() cell1['metadata']['side'] = 'local' cell2['metadata']['side'] = 'base' cell3['metadata']['side'] = 'remote' current_row = [ cell1, cell2, cell3, ] rows.append(current_row) # Chain all rows together; create a flat array from the nested array. # Use the base notebook's notebook-level metadata (title, version, etc.) result_notebook = local if len(result_notebook['worksheets']) == 0: result_notebook['worksheets'] = [nbformat.new_worksheet()] new_cell_array = list(it.chain.from_iterable(rows)) result_notebook['worksheets'][0]['cells'] = new_cell_array result_notebook['metadata']['nbdiff-type'] = 'merge' return result_notebook
python
def notebook_merge(local, base, remote, check_modified=False): """Unify three notebooks into a single notebook with merge metadata. The result of this function is a valid notebook that can be loaded by the IPython Notebook front-end. This function adds additional cell metadata that the front-end Javascript uses to render the merge. Parameters ---------- local : dict The local branch's version of the notebook. base : dict The last common ancestor of local and remote. remote : dict The remote branch's version of the notebook. Returns ------- nb : A valid notebook containing merge metadata. """ local_cells = get_cells(local) base_cells = get_cells(base) remote_cells = get_cells(remote) rows = [] current_row = [] empty_cell = lambda: { 'cell_type': 'code', 'language': 'python', 'outputs': [], 'prompt_number': 1, 'text': ['Placeholder'], 'metadata': {'state': 'empty'} } diff_of_diffs = merge(local_cells, base_cells, remote_cells) # For each item in the higher-order diff, create a "row" that # corresponds to a row in the NBDiff interface. A row contains: # | LOCAL | BASE | REMOTE | for item in diff_of_diffs: state = item['state'] cell = copy.deepcopy(diff_result_to_cell(item['value'])) if state == 'deleted': # This change is between base and local branches. # It can be an addition or a deletion. if cell['metadata']['state'] == 'unchanged': # This side doesn't have the change; wait # until we encounter the change to create the row. continue cell['metadata']['side'] = 'local' remote_cell = empty_cell() remote_cell['metadata']['side'] = 'remote' if cell['metadata']['state'] == 'deleted' \ or cell['metadata']['state'] == 'unchanged': base_cell = copy.deepcopy(cell) else: base_cell = empty_cell() base_cell['metadata']['side'] = 'base' # This change is on the right. current_row = [ cell, base_cell, remote_cell, ] elif state == 'added': # This change is between base and remote branches. # It can be an addition or a deletion. 
cell['metadata']['side'] = 'remote' if cell['metadata']['state'] == 'unchanged': # This side doesn't have the change; wait # until we encounter the change to create the row. continue if cell['metadata']['state'] == 'deleted': base_cell = copy.deepcopy(cell) base_cell['metadata']['state'] = 'unchanged' local_cell = copy.deepcopy(cell) local_cell['metadata']['state'] = 'unchanged' else: base_cell = empty_cell() local_cell = empty_cell() base_cell['metadata']['side'] = 'base' local_cell['metadata']['side'] = 'local' current_row = [ local_cell, base_cell, cell, ] elif state == 'unchanged': # The same item occurs between base-local and base-remote. # This happens if both branches made the same change, whether # that is an addition or deletion. If neither branches # changed a given cell, that cell shows up here too. cell1 = copy.deepcopy(cell) cell3 = copy.deepcopy(cell) if cell['metadata']['state'] == 'deleted' \ or cell['metadata']['state'] == 'unchanged': # If the change is a deletion, the cell-to-be-deleted # should in the base as 'unchanged'. The user will # choose to make it deleted. cell2 = copy.deepcopy(cell) cell2['metadata']['state'] = 'unchanged' else: # If the change is an addition, it should not # show in the base; the user must add it to the merged version. cell2 = empty_cell() cell1['metadata']['side'] = 'local' cell2['metadata']['side'] = 'base' cell3['metadata']['side'] = 'remote' current_row = [ cell1, cell2, cell3, ] rows.append(current_row) # Chain all rows together; create a flat array from the nested array. # Use the base notebook's notebook-level metadata (title, version, etc.) result_notebook = local if len(result_notebook['worksheets']) == 0: result_notebook['worksheets'] = [nbformat.new_worksheet()] new_cell_array = list(it.chain.from_iterable(rows)) result_notebook['worksheets'][0]['cells'] = new_cell_array result_notebook['metadata']['nbdiff-type'] = 'merge' return result_notebook
Unify three notebooks into a single notebook with merge metadata. The result of this function is a valid notebook that can be loaded by the IPython Notebook front-end. This function adds additional cell metadata that the front-end Javascript uses to render the merge. Parameters ---------- local : dict The local branch's version of the notebook. base : dict The last common ancestor of local and remote. remote : dict The remote branch's version of the notebook. Returns ------- nb : A valid notebook containing merge metadata.
https://github.com/tarmstrong/nbdiff/blob/3fdfb89f94fc0f4821bc04999ddf53b34d882ab9/nbdiff/merge.py#L46-L177
tarmstrong/nbdiff
nbdiff/notebook_parser.py
NotebookParser.parse
def parse(self, json_data): """Parse a notebook .ipynb file. Parameters ---------- json_data : file A file handle for an .ipynb file. Returns ------- nb : An IPython Notebook data structure. """ data = current.read(json_data, 'ipynb') json_data.close() return data
python
def parse(self, json_data): """Parse a notebook .ipynb file. Parameters ---------- json_data : file A file handle for an .ipynb file. Returns ------- nb : An IPython Notebook data structure. """ data = current.read(json_data, 'ipynb') json_data.close() return data
Parse a notebook .ipynb file. Parameters ---------- json_data : file A file handle for an .ipynb file. Returns ------- nb : An IPython Notebook data structure.
https://github.com/tarmstrong/nbdiff/blob/3fdfb89f94fc0f4821bc04999ddf53b34d882ab9/nbdiff/notebook_parser.py#L6-L20
tarmstrong/nbdiff
nbdiff/notebook_diff.py
notebook_diff
def notebook_diff(nb1, nb2, check_modified=True): """Unify two notebooks into a single notebook with diff metadata. The result of this function is a valid notebook that can be loaded by the IPython Notebook front-end. This function adds additional cell metadata that the front-end Javascript uses to render the diffs. Parameters ---------- nb1 : dict An IPython Notebook to use as the baseline version. nb2 : dict An IPython Notebook to compare against the baseline. check_modified : bool Whether or not to detect cell modification. Returns ------- nb : A valid notebook containing diff metadata. """ nb1_cells = nb1['worksheets'][0]['cells'] nb2_cells = nb2['worksheets'][0]['cells'] diffed_nb = cells_diff(nb1_cells, nb2_cells, check_modified=check_modified) line_diffs = diff_modified_items(diffed_nb) cell_list = list() for i, item in enumerate(diffed_nb): cell = diff_result_to_cell(item) if i in line_diffs: cell['metadata']['extra-diff-data'] = line_diffs[i] cell_list.append(cell) nb1['worksheets'][0]['cells'] = cell_list nb1['metadata']['nbdiff-type'] = 'diff' return nb1
python
def notebook_diff(nb1, nb2, check_modified=True): """Unify two notebooks into a single notebook with diff metadata. The result of this function is a valid notebook that can be loaded by the IPython Notebook front-end. This function adds additional cell metadata that the front-end Javascript uses to render the diffs. Parameters ---------- nb1 : dict An IPython Notebook to use as the baseline version. nb2 : dict An IPython Notebook to compare against the baseline. check_modified : bool Whether or not to detect cell modification. Returns ------- nb : A valid notebook containing diff metadata. """ nb1_cells = nb1['worksheets'][0]['cells'] nb2_cells = nb2['worksheets'][0]['cells'] diffed_nb = cells_diff(nb1_cells, nb2_cells, check_modified=check_modified) line_diffs = diff_modified_items(diffed_nb) cell_list = list() for i, item in enumerate(diffed_nb): cell = diff_result_to_cell(item) if i in line_diffs: cell['metadata']['extra-diff-data'] = line_diffs[i] cell_list.append(cell) nb1['worksheets'][0]['cells'] = cell_list nb1['metadata']['nbdiff-type'] = 'diff' return nb1
Unify two notebooks into a single notebook with diff metadata. The result of this function is a valid notebook that can be loaded by the IPython Notebook front-end. This function adds additional cell metadata that the front-end Javascript uses to render the diffs. Parameters ---------- nb1 : dict An IPython Notebook to use as the baseline version. nb2 : dict An IPython Notebook to compare against the baseline. check_modified : bool Whether or not to detect cell modification. Returns ------- nb : A valid notebook containing diff metadata.
https://github.com/tarmstrong/nbdiff/blob/3fdfb89f94fc0f4821bc04999ddf53b34d882ab9/nbdiff/notebook_diff.py#L5-L41
tarmstrong/nbdiff
nbdiff/notebook_diff.py
diff_result_to_cell
def diff_result_to_cell(item): '''diff.diff returns a dictionary with all the information we need, but we want to extract the cell and change its metadata.''' state = item['state'] if state == 'modified': new_cell = item['modifiedvalue'].data old_cell = item['originalvalue'].data new_cell['metadata']['state'] = state new_cell['metadata']['original'] = old_cell cell = new_cell else: cell = item['value'].data cell['metadata']['state'] = state return cell
python
def diff_result_to_cell(item): '''diff.diff returns a dictionary with all the information we need, but we want to extract the cell and change its metadata.''' state = item['state'] if state == 'modified': new_cell = item['modifiedvalue'].data old_cell = item['originalvalue'].data new_cell['metadata']['state'] = state new_cell['metadata']['original'] = old_cell cell = new_cell else: cell = item['value'].data cell['metadata']['state'] = state return cell
diff.diff returns a dictionary with all the information we need, but we want to extract the cell and change its metadata.
https://github.com/tarmstrong/nbdiff/blob/3fdfb89f94fc0f4821bc04999ddf53b34d882ab9/nbdiff/notebook_diff.py#L61-L74
tarmstrong/nbdiff
nbdiff/notebook_diff.py
cells_diff
def cells_diff(before_cells, after_cells, check_modified=False): '''Diff two arrays of cells.''' before_comps = [ CellComparator(cell, check_modified=check_modified) for cell in before_cells ] after_comps = [ CellComparator(cell, check_modified=check_modified) for cell in after_cells ] diff_result = diff( before_comps, after_comps, check_modified=check_modified ) return diff_result
python
def cells_diff(before_cells, after_cells, check_modified=False): '''Diff two arrays of cells.''' before_comps = [ CellComparator(cell, check_modified=check_modified) for cell in before_cells ] after_comps = [ CellComparator(cell, check_modified=check_modified) for cell in after_cells ] diff_result = diff( before_comps, after_comps, check_modified=check_modified ) return diff_result
Diff two arrays of cells.
https://github.com/tarmstrong/nbdiff/blob/3fdfb89f94fc0f4821bc04999ddf53b34d882ab9/nbdiff/notebook_diff.py#L77-L92
tarmstrong/nbdiff
nbdiff/notebook_diff.py
words_diff
def words_diff(before_words, after_words): '''Diff the words in two strings. This is intended for use in diffing prose and other forms of text where line breaks have little semantic value. Parameters ---------- before_words : str A string to be used as the baseline version. after_words : str A string to be compared against the baseline. Returns ------- diff_result : A list of dictionaries containing diff information. ''' before_comps = before_words.split() after_comps = after_words.split() diff_result = diff( before_comps, after_comps ) return diff_result
python
def words_diff(before_words, after_words): '''Diff the words in two strings. This is intended for use in diffing prose and other forms of text where line breaks have little semantic value. Parameters ---------- before_words : str A string to be used as the baseline version. after_words : str A string to be compared against the baseline. Returns ------- diff_result : A list of dictionaries containing diff information. ''' before_comps = before_words.split() after_comps = after_words.split() diff_result = diff( before_comps, after_comps ) return diff_result
Diff the words in two strings. This is intended for use in diffing prose and other forms of text where line breaks have little semantic value. Parameters ---------- before_words : str A string to be used as the baseline version. after_words : str A string to be compared against the baseline. Returns ------- diff_result : A list of dictionaries containing diff information.
https://github.com/tarmstrong/nbdiff/blob/3fdfb89f94fc0f4821bc04999ddf53b34d882ab9/nbdiff/notebook_diff.py#L95-L119
tarmstrong/nbdiff
nbdiff/notebook_diff.py
lines_diff
def lines_diff(before_lines, after_lines, check_modified=False): '''Diff the lines in two strings. Parameters ---------- before_lines : iterable Iterable containing lines used as the baseline version. after_lines : iterable Iterable containing lines to be compared against the baseline. Returns ------- diff_result : A list of dictionaries containing diff information. ''' before_comps = [ LineComparator(line, check_modified=check_modified) for line in before_lines ] after_comps = [ LineComparator(line, check_modified=check_modified) for line in after_lines ] diff_result = diff( before_comps, after_comps, check_modified=check_modified ) return diff_result
python
def lines_diff(before_lines, after_lines, check_modified=False): '''Diff the lines in two strings. Parameters ---------- before_lines : iterable Iterable containing lines used as the baseline version. after_lines : iterable Iterable containing lines to be compared against the baseline. Returns ------- diff_result : A list of dictionaries containing diff information. ''' before_comps = [ LineComparator(line, check_modified=check_modified) for line in before_lines ] after_comps = [ LineComparator(line, check_modified=check_modified) for line in after_lines ] diff_result = diff( before_comps, after_comps, check_modified=check_modified ) return diff_result
Diff the lines in two strings. Parameters ---------- before_lines : iterable Iterable containing lines used as the baseline version. after_lines : iterable Iterable containing lines to be compared against the baseline. Returns ------- diff_result : A list of dictionaries containing diff information.
https://github.com/tarmstrong/nbdiff/blob/3fdfb89f94fc0f4821bc04999ddf53b34d882ab9/nbdiff/notebook_diff.py#L122-L149
tarmstrong/nbdiff
nbdiff/diff.py
diff
def diff(before, after, check_modified=False): """Diff two sequences of comparable objects. The result of this function is a list of dictionaries containing values in ``before`` or ``after`` with a ``state`` of either 'unchanged', 'added', 'deleted', or 'modified'. >>> import pprint >>> result = diff(['a', 'b', 'c'], ['b', 'c', 'd']) >>> pprint.pprint(result) [{'state': 'deleted', 'value': 'a'}, {'state': 'unchanged', 'value': 'b'}, {'state': 'unchanged', 'value': 'c'}, {'state': 'added', 'value': 'd'}] Parameters ---------- before : iterable An iterable containing values to be used as the baseline version. after : iterable An iterable containing values to be compared against the baseline. check_modified : bool Whether or not to check for modifiedness. Returns ------- diff_items : A list of dictionaries containing diff information. """ # The grid will be empty if `before` or `after` are # empty; this will violate the assumptions made in the rest # of this function. # If this is the case, we know what the result of the diff is # anyways: the contents of the other, non-empty input. 
if len(before) == 0: return [ {'state': 'added', 'value': v} for v in after ] elif len(after) == 0: return [ {'state': 'deleted', 'value': v} for v in before ] grid = create_grid(before, after) nrows = len(grid[0]) ncols = len(grid) dps = diff_points(grid) result = [] for kind, col, row in dps: if kind == 'unchanged': value = before[col] result.append({ 'state': kind, 'value': value, }) elif kind == 'deleted': assert col < ncols value = before[col] result.append({ 'state': kind, 'value': value, }) elif kind == 'added': assert row < nrows value = after[row] result.append({ 'state': kind, 'value': value, }) elif check_modified and kind == 'modified': result.append({ 'state': kind, 'originalvalue': before[col], 'modifiedvalue': after[row], }) elif (not check_modified) and kind == 'modified': result.append({ 'state': 'deleted', 'value': before[col], }) result.append({ 'state': 'added', 'value': after[row], }) else: raise Exception('We should not be here.') return result
python
def diff(before, after, check_modified=False): """Diff two sequences of comparable objects. The result of this function is a list of dictionaries containing values in ``before`` or ``after`` with a ``state`` of either 'unchanged', 'added', 'deleted', or 'modified'. >>> import pprint >>> result = diff(['a', 'b', 'c'], ['b', 'c', 'd']) >>> pprint.pprint(result) [{'state': 'deleted', 'value': 'a'}, {'state': 'unchanged', 'value': 'b'}, {'state': 'unchanged', 'value': 'c'}, {'state': 'added', 'value': 'd'}] Parameters ---------- before : iterable An iterable containing values to be used as the baseline version. after : iterable An iterable containing values to be compared against the baseline. check_modified : bool Whether or not to check for modifiedness. Returns ------- diff_items : A list of dictionaries containing diff information. """ # The grid will be empty if `before` or `after` are # empty; this will violate the assumptions made in the rest # of this function. # If this is the case, we know what the result of the diff is # anyways: the contents of the other, non-empty input. 
if len(before) == 0: return [ {'state': 'added', 'value': v} for v in after ] elif len(after) == 0: return [ {'state': 'deleted', 'value': v} for v in before ] grid = create_grid(before, after) nrows = len(grid[0]) ncols = len(grid) dps = diff_points(grid) result = [] for kind, col, row in dps: if kind == 'unchanged': value = before[col] result.append({ 'state': kind, 'value': value, }) elif kind == 'deleted': assert col < ncols value = before[col] result.append({ 'state': kind, 'value': value, }) elif kind == 'added': assert row < nrows value = after[row] result.append({ 'state': kind, 'value': value, }) elif check_modified and kind == 'modified': result.append({ 'state': kind, 'originalvalue': before[col], 'modifiedvalue': after[row], }) elif (not check_modified) and kind == 'modified': result.append({ 'state': 'deleted', 'value': before[col], }) result.append({ 'state': 'added', 'value': after[row], }) else: raise Exception('We should not be here.') return result
Diff two sequences of comparable objects. The result of this function is a list of dictionaries containing values in ``before`` or ``after`` with a ``state`` of either 'unchanged', 'added', 'deleted', or 'modified'. >>> import pprint >>> result = diff(['a', 'b', 'c'], ['b', 'c', 'd']) >>> pprint.pprint(result) [{'state': 'deleted', 'value': 'a'}, {'state': 'unchanged', 'value': 'b'}, {'state': 'unchanged', 'value': 'c'}, {'state': 'added', 'value': 'd'}] Parameters ---------- before : iterable An iterable containing values to be used as the baseline version. after : iterable An iterable containing values to be compared against the baseline. check_modified : bool Whether or not to check for modifiedness. Returns ------- diff_items : A list of dictionaries containing diff information.
https://github.com/tarmstrong/nbdiff/blob/3fdfb89f94fc0f4821bc04999ddf53b34d882ab9/nbdiff/diff.py#L7-L96
tarmstrong/nbdiff
nbdiff/comparable.py
LineComparator.equal
def equal(self, line1, line2): ''' return true if exactly equal or if equal but modified, otherwise return false return type: BooleanPlus ''' eqLine = line1 == line2 if eqLine: return BooleanPlus(True, False) else: unchanged_count = self.count_similar_words(line1, line2) similarity_percent = ( (2.0 * unchanged_count) / (len(line1.split()) + len(line2.split())) ) if similarity_percent >= 0.50: return BooleanPlus(True, True) return BooleanPlus(False, False)
python
def equal(self, line1, line2): ''' return true if exactly equal or if equal but modified, otherwise return false return type: BooleanPlus ''' eqLine = line1 == line2 if eqLine: return BooleanPlus(True, False) else: unchanged_count = self.count_similar_words(line1, line2) similarity_percent = ( (2.0 * unchanged_count) / (len(line1.split()) + len(line2.split())) ) if similarity_percent >= 0.50: return BooleanPlus(True, True) return BooleanPlus(False, False)
return true if exactly equal or if equal but modified, otherwise return false return type: BooleanPlus
https://github.com/tarmstrong/nbdiff/blob/3fdfb89f94fc0f4821bc04999ddf53b34d882ab9/nbdiff/comparable.py#L34-L52
tarmstrong/nbdiff
nbdiff/comparable.py
CellComparator.compare_cells
def compare_cells(self, cell1, cell2): ''' return true if exactly equal or if equal but modified, otherwise return false return type: BooleanPlus ''' eqlanguage = cell1["language"] == cell2["language"] eqinput = cell1["input"] == cell2["input"] eqoutputs = self.equaloutputs(cell1["outputs"], cell2["outputs"]) if eqlanguage and eqinput and eqoutputs: return BooleanPlus(True, False) elif not self.check_modified: return BooleanPlus(False, False) input1 = u"".join(cell1['input']) input2 = u"".join(cell2['input']) similarity_percent = Levenshtein.ratio(input1, input2) if similarity_percent >= 0.65: return BooleanPlus(True, True) return BooleanPlus(False, False)
python
def compare_cells(self, cell1, cell2): ''' return true if exactly equal or if equal but modified, otherwise return false return type: BooleanPlus ''' eqlanguage = cell1["language"] == cell2["language"] eqinput = cell1["input"] == cell2["input"] eqoutputs = self.equaloutputs(cell1["outputs"], cell2["outputs"]) if eqlanguage and eqinput and eqoutputs: return BooleanPlus(True, False) elif not self.check_modified: return BooleanPlus(False, False) input1 = u"".join(cell1['input']) input2 = u"".join(cell2['input']) similarity_percent = Levenshtein.ratio(input1, input2) if similarity_percent >= 0.65: return BooleanPlus(True, True) return BooleanPlus(False, False)
return true if exactly equal or if equal but modified, otherwise return false return type: BooleanPlus
https://github.com/tarmstrong/nbdiff/blob/3fdfb89f94fc0f4821bc04999ddf53b34d882ab9/nbdiff/comparable.py#L122-L142
asphalt-framework/asphalt
asphalt/core/runner.py
run_application
def run_application(component: Union[Component, Dict[str, Any]], *, event_loop_policy: str = None, max_threads: int = None, logging: Union[Dict[str, Any], int, None] = INFO, start_timeout: Union[int, float, None] = 10): """ Configure logging and start the given root component in the default asyncio event loop. Assuming the root component was started successfully, the event loop will continue running until the process is terminated. Initializes the logging system first based on the value of ``logging``: * If the value is a dictionary, it is passed to :func:`logging.config.dictConfig` as argument. * If the value is an integer, it is passed to :func:`logging.basicConfig` as the logging level. * If the value is ``None``, logging setup is skipped entirely. By default, the logging system is initialized using :func:`~logging.basicConfig` using the ``INFO`` logging level. The default executor in the event loop is replaced with a new :class:`~concurrent.futures.ThreadPoolExecutor` where the maximum number of threads is set to the value of ``max_threads`` or, if omitted, the default value of :class:`~concurrent.futures.ThreadPoolExecutor`. 
:param component: the root component (either a component instance or a configuration dictionary where the special ``type`` key is either a component class or a ``module:varname`` reference to one) :param event_loop_policy: entry point name (from the ``asphalt.core.event_loop_policies`` namespace) of an alternate event loop policy (or a module:varname reference to one) :param max_threads: the maximum number of worker threads in the default thread pool executor (the default value depends on the event loop implementation) :param logging: a logging configuration dictionary, :ref:`logging level <python:levels>` or ``None`` :param start_timeout: seconds to wait for the root component (and its subcomponents) to start up before giving up (``None`` = wait forever) """ assert check_argument_types() # Configure the logging system if isinstance(logging, dict): dictConfig(logging) elif isinstance(logging, int): basicConfig(level=logging) # Inform the user whether -O or PYTHONOPTIMIZE was set when Python was launched logger = getLogger(__name__) logger.info('Running in %s mode', 'development' if __debug__ else 'production') # Switch to an alternate event loop policy if one was provided if event_loop_policy: create_policy = policies.resolve(event_loop_policy) policy = create_policy() asyncio.set_event_loop_policy(policy) logger.info('Switched event loop policy to %s', qualified_name(policy)) # Assign a new default executor with the given max worker thread limit if one was provided event_loop = asyncio.get_event_loop() if max_threads is not None: event_loop.set_default_executor(ThreadPoolExecutor(max_threads)) logger.info('Installed a new thread pool executor with max_workers=%d', max_threads) # Instantiate the root component if a dict was given if isinstance(component, dict): component = cast(Component, component_types.create_object(**component)) logger.info('Starting application') context = Context() exception = None # type: Optional[BaseException] exit_code = 0 # Start the root 
component try: coro = asyncio.wait_for(component.start(context), start_timeout, loop=event_loop) event_loop.run_until_complete(coro) except asyncio.TimeoutError as e: exception = e logger.error('Timeout waiting for the root component to start') exit_code = 1 except Exception as e: exception = e logger.exception('Error during application startup') exit_code = 1 else: logger.info('Application started') # Add a signal handler to gracefully deal with SIGTERM try: event_loop.add_signal_handler(signal.SIGTERM, sigterm_handler, logger, event_loop) except NotImplementedError: pass # Windows does not support signals very well # Finally, run the event loop until the process is terminated or Ctrl+C is pressed try: event_loop.run_forever() except KeyboardInterrupt: pass except SystemExit as e: exit_code = e.code # Close the root context logger.info('Stopping application') event_loop.run_until_complete(context.close(exception)) # Shut down leftover async generators (requires Python 3.6+) try: event_loop.run_until_complete(event_loop.shutdown_asyncgens()) except (AttributeError, NotImplementedError): pass # Finally, close the event loop itself event_loop.close() logger.info('Application stopped') # Shut down the logging system shutdown() if exit_code: sys.exit(exit_code)
python
def run_application(component: Union[Component, Dict[str, Any]], *, event_loop_policy: str = None, max_threads: int = None, logging: Union[Dict[str, Any], int, None] = INFO, start_timeout: Union[int, float, None] = 10): """ Configure logging and start the given root component in the default asyncio event loop. Assuming the root component was started successfully, the event loop will continue running until the process is terminated. Initializes the logging system first based on the value of ``logging``: * If the value is a dictionary, it is passed to :func:`logging.config.dictConfig` as argument. * If the value is an integer, it is passed to :func:`logging.basicConfig` as the logging level. * If the value is ``None``, logging setup is skipped entirely. By default, the logging system is initialized using :func:`~logging.basicConfig` using the ``INFO`` logging level. The default executor in the event loop is replaced with a new :class:`~concurrent.futures.ThreadPoolExecutor` where the maximum number of threads is set to the value of ``max_threads`` or, if omitted, the default value of :class:`~concurrent.futures.ThreadPoolExecutor`. 
:param component: the root component (either a component instance or a configuration dictionary where the special ``type`` key is either a component class or a ``module:varname`` reference to one) :param event_loop_policy: entry point name (from the ``asphalt.core.event_loop_policies`` namespace) of an alternate event loop policy (or a module:varname reference to one) :param max_threads: the maximum number of worker threads in the default thread pool executor (the default value depends on the event loop implementation) :param logging: a logging configuration dictionary, :ref:`logging level <python:levels>` or ``None`` :param start_timeout: seconds to wait for the root component (and its subcomponents) to start up before giving up (``None`` = wait forever) """ assert check_argument_types() # Configure the logging system if isinstance(logging, dict): dictConfig(logging) elif isinstance(logging, int): basicConfig(level=logging) # Inform the user whether -O or PYTHONOPTIMIZE was set when Python was launched logger = getLogger(__name__) logger.info('Running in %s mode', 'development' if __debug__ else 'production') # Switch to an alternate event loop policy if one was provided if event_loop_policy: create_policy = policies.resolve(event_loop_policy) policy = create_policy() asyncio.set_event_loop_policy(policy) logger.info('Switched event loop policy to %s', qualified_name(policy)) # Assign a new default executor with the given max worker thread limit if one was provided event_loop = asyncio.get_event_loop() if max_threads is not None: event_loop.set_default_executor(ThreadPoolExecutor(max_threads)) logger.info('Installed a new thread pool executor with max_workers=%d', max_threads) # Instantiate the root component if a dict was given if isinstance(component, dict): component = cast(Component, component_types.create_object(**component)) logger.info('Starting application') context = Context() exception = None # type: Optional[BaseException] exit_code = 0 # Start the root 
component try: coro = asyncio.wait_for(component.start(context), start_timeout, loop=event_loop) event_loop.run_until_complete(coro) except asyncio.TimeoutError as e: exception = e logger.error('Timeout waiting for the root component to start') exit_code = 1 except Exception as e: exception = e logger.exception('Error during application startup') exit_code = 1 else: logger.info('Application started') # Add a signal handler to gracefully deal with SIGTERM try: event_loop.add_signal_handler(signal.SIGTERM, sigterm_handler, logger, event_loop) except NotImplementedError: pass # Windows does not support signals very well # Finally, run the event loop until the process is terminated or Ctrl+C is pressed try: event_loop.run_forever() except KeyboardInterrupt: pass except SystemExit as e: exit_code = e.code # Close the root context logger.info('Stopping application') event_loop.run_until_complete(context.close(exception)) # Shut down leftover async generators (requires Python 3.6+) try: event_loop.run_until_complete(event_loop.shutdown_asyncgens()) except (AttributeError, NotImplementedError): pass # Finally, close the event loop itself event_loop.close() logger.info('Application stopped') # Shut down the logging system shutdown() if exit_code: sys.exit(exit_code)
Configure logging and start the given root component in the default asyncio event loop. Assuming the root component was started successfully, the event loop will continue running until the process is terminated. Initializes the logging system first based on the value of ``logging``: * If the value is a dictionary, it is passed to :func:`logging.config.dictConfig` as argument. * If the value is an integer, it is passed to :func:`logging.basicConfig` as the logging level. * If the value is ``None``, logging setup is skipped entirely. By default, the logging system is initialized using :func:`~logging.basicConfig` using the ``INFO`` logging level. The default executor in the event loop is replaced with a new :class:`~concurrent.futures.ThreadPoolExecutor` where the maximum number of threads is set to the value of ``max_threads`` or, if omitted, the default value of :class:`~concurrent.futures.ThreadPoolExecutor`. :param component: the root component (either a component instance or a configuration dictionary where the special ``type`` key is either a component class or a ``module:varname`` reference to one) :param event_loop_policy: entry point name (from the ``asphalt.core.event_loop_policies`` namespace) of an alternate event loop policy (or a module:varname reference to one) :param max_threads: the maximum number of worker threads in the default thread pool executor (the default value depends on the event loop implementation) :param logging: a logging configuration dictionary, :ref:`logging level <python:levels>` or ``None`` :param start_timeout: seconds to wait for the root component (and its subcomponents) to start up before giving up (``None`` = wait forever)
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/runner.py#L27-L145
asphalt-framework/asphalt
asphalt/core/concurrent.py
executor
def executor(func_or_executor: Union[Executor, str, Callable[..., T_Retval]]) -> \ Union[WrappedCallable, Callable[..., WrappedCallable]]: """ Decorate a function to run in an executor. If no executor (or ``None``) is given, the current event loop's default executor is used. Otherwise, the argument must be a PEP 3148 compliant thread pool executor or the name of an :class:`~concurrent.futures.Executor` instance. If a decorated callable is called in a worker thread, the executor argument is ignored and the wrapped function is called directly. Callables wrapped with this decorator must be used with ``await`` when called in the event loop thread. Example use with the default executor (``None``):: @executor def this_runs_in_threadpool(ctx): return do_something_cpu_intensive() async def request_handler(ctx): result = await this_runs_in_threadpool(ctx) With a named :class:`~concurrent.futures.Executor` resource:: @executor('special_ops') def this_runs_in_threadpool(ctx): return do_something_cpu_intensive() async def request_handler(ctx): result = await this_runs_in_threadpool(ctx) :param func_or_executor: either a callable (when used as a decorator), an executor instance or the name of an :class:`~concurrent.futures.Executor` resource """ def outer(func: Callable[..., T_Retval], executor: Union[Executor, str] = None) -> Callable[..., Awaitable[T_Retval]]: def wrapper(*args, **kwargs): try: loop = get_event_loop() except RuntimeError: # Event loop not available -- we're in a worker thread return func(*args, **kwargs) # Resolve the executor resource name to an Executor instance if isinstance(executor, str): try: ctx = next(obj for obj in args[:2] if isinstance(obj, Context)) except StopIteration: raise RuntimeError('the callable needs to be called with a Context as the ' 'first or second positional argument') _executor = ctx.require_resource(Executor, executor) else: _executor = executor callback = partial(func, *args, **kwargs) return loop.run_in_executor(_executor, 
callback) assert check_argument_types() assert not inspect.iscoroutinefunction(func), \ 'Cannot wrap coroutine functions to be run in an executor' return wraps(func)(wrapper) if isinstance(func_or_executor, (str, Executor)): return partial(outer, executor=func_or_executor) else: return outer(func_or_executor)
python
def executor(func_or_executor: Union[Executor, str, Callable[..., T_Retval]]) -> \ Union[WrappedCallable, Callable[..., WrappedCallable]]: """ Decorate a function to run in an executor. If no executor (or ``None``) is given, the current event loop's default executor is used. Otherwise, the argument must be a PEP 3148 compliant thread pool executor or the name of an :class:`~concurrent.futures.Executor` instance. If a decorated callable is called in a worker thread, the executor argument is ignored and the wrapped function is called directly. Callables wrapped with this decorator must be used with ``await`` when called in the event loop thread. Example use with the default executor (``None``):: @executor def this_runs_in_threadpool(ctx): return do_something_cpu_intensive() async def request_handler(ctx): result = await this_runs_in_threadpool(ctx) With a named :class:`~concurrent.futures.Executor` resource:: @executor('special_ops') def this_runs_in_threadpool(ctx): return do_something_cpu_intensive() async def request_handler(ctx): result = await this_runs_in_threadpool(ctx) :param func_or_executor: either a callable (when used as a decorator), an executor instance or the name of an :class:`~concurrent.futures.Executor` resource """ def outer(func: Callable[..., T_Retval], executor: Union[Executor, str] = None) -> Callable[..., Awaitable[T_Retval]]: def wrapper(*args, **kwargs): try: loop = get_event_loop() except RuntimeError: # Event loop not available -- we're in a worker thread return func(*args, **kwargs) # Resolve the executor resource name to an Executor instance if isinstance(executor, str): try: ctx = next(obj for obj in args[:2] if isinstance(obj, Context)) except StopIteration: raise RuntimeError('the callable needs to be called with a Context as the ' 'first or second positional argument') _executor = ctx.require_resource(Executor, executor) else: _executor = executor callback = partial(func, *args, **kwargs) return loop.run_in_executor(_executor, 
callback) assert check_argument_types() assert not inspect.iscoroutinefunction(func), \ 'Cannot wrap coroutine functions to be run in an executor' return wraps(func)(wrapper) if isinstance(func_or_executor, (str, Executor)): return partial(outer, executor=func_or_executor) else: return outer(func_or_executor)
Decorate a function to run in an executor. If no executor (or ``None``) is given, the current event loop's default executor is used. Otherwise, the argument must be a PEP 3148 compliant thread pool executor or the name of an :class:`~concurrent.futures.Executor` instance. If a decorated callable is called in a worker thread, the executor argument is ignored and the wrapped function is called directly. Callables wrapped with this decorator must be used with ``await`` when called in the event loop thread. Example use with the default executor (``None``):: @executor def this_runs_in_threadpool(ctx): return do_something_cpu_intensive() async def request_handler(ctx): result = await this_runs_in_threadpool(ctx) With a named :class:`~concurrent.futures.Executor` resource:: @executor('special_ops') def this_runs_in_threadpool(ctx): return do_something_cpu_intensive() async def request_handler(ctx): result = await this_runs_in_threadpool(ctx) :param func_or_executor: either a callable (when used as a decorator), an executor instance or the name of an :class:`~concurrent.futures.Executor` resource
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/concurrent.py#L17-L86
asphalt-framework/asphalt
asphalt/core/utils.py
qualified_name
def qualified_name(obj) -> str: """ Return the qualified name (e.g. package.module.Type) for the given object. If ``obj`` is not a class, the returned name will match its type instead. """ if not isclass(obj): obj = type(obj) if obj.__module__ == 'builtins': return obj.__name__ else: return '{}.{}'.format(obj.__module__, obj.__qualname__)
python
def qualified_name(obj) -> str: """ Return the qualified name (e.g. package.module.Type) for the given object. If ``obj`` is not a class, the returned name will match its type instead. """ if not isclass(obj): obj = type(obj) if obj.__module__ == 'builtins': return obj.__name__ else: return '{}.{}'.format(obj.__module__, obj.__qualname__)
Return the qualified name (e.g. package.module.Type) for the given object. If ``obj`` is not a class, the returned name will match its type instead.
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/utils.py#L46-L59
asphalt-framework/asphalt
asphalt/core/utils.py
callable_name
def callable_name(func: Callable) -> str: """Return the qualified name (e.g. package.module.func) for the given callable.""" if func.__module__ == 'builtins': return func.__name__ else: return '{}.{}'.format(func.__module__, func.__qualname__)
python
def callable_name(func: Callable) -> str: """Return the qualified name (e.g. package.module.func) for the given callable.""" if func.__module__ == 'builtins': return func.__name__ else: return '{}.{}'.format(func.__module__, func.__qualname__)
Return the qualified name (e.g. package.module.func) for the given callable.
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/utils.py#L62-L67
asphalt-framework/asphalt
asphalt/core/utils.py
merge_config
def merge_config(original: Optional[Dict[str, Any]], overrides: Optional[Dict[str, Any]]) -> Dict[str, Any]: """ Return a copy of the ``original`` configuration dictionary, with overrides from ``overrides`` applied. This similar to what :meth:`dict.update` does, but when a dictionary is about to be replaced with another dictionary, it instead merges the contents. If a key in ``overrides`` is a dotted path (ie. ``foo.bar.baz: value``), it is assumed to be a shorthand for ``foo: {bar: {baz: value}}``. :param original: a configuration dictionary (or ``None``) :param overrides: a dictionary containing overriding values to the configuration (or ``None``) :return: the merge result """ assert check_argument_types() copied = original.copy() if original else {} if overrides: for key, value in overrides.items(): if '.' in key: key, rest = key.split('.', 1) value = {rest: value} orig_value = copied.get(key) if isinstance(orig_value, dict) and isinstance(value, dict): copied[key] = merge_config(orig_value, value) else: copied[key] = value return copied
python
def merge_config(original: Optional[Dict[str, Any]], overrides: Optional[Dict[str, Any]]) -> Dict[str, Any]: """ Return a copy of the ``original`` configuration dictionary, with overrides from ``overrides`` applied. This similar to what :meth:`dict.update` does, but when a dictionary is about to be replaced with another dictionary, it instead merges the contents. If a key in ``overrides`` is a dotted path (ie. ``foo.bar.baz: value``), it is assumed to be a shorthand for ``foo: {bar: {baz: value}}``. :param original: a configuration dictionary (or ``None``) :param overrides: a dictionary containing overriding values to the configuration (or ``None``) :return: the merge result """ assert check_argument_types() copied = original.copy() if original else {} if overrides: for key, value in overrides.items(): if '.' in key: key, rest = key.split('.', 1) value = {rest: value} orig_value = copied.get(key) if isinstance(orig_value, dict) and isinstance(value, dict): copied[key] = merge_config(orig_value, value) else: copied[key] = value return copied
Return a copy of the ``original`` configuration dictionary, with overrides from ``overrides`` applied. This is similar to what :meth:`dict.update` does, but when a dictionary is about to be replaced with another dictionary, it instead merges the contents. If a key in ``overrides`` is a dotted path (ie. ``foo.bar.baz: value``), it is assumed to be a shorthand for ``foo: {bar: {baz: value}}``. :param original: a configuration dictionary (or ``None``) :param overrides: a dictionary containing overriding values to the configuration (or ``None``) :return: the merge result
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/utils.py#L70-L101
asphalt-framework/asphalt
asphalt/core/utils.py
PluginContainer.resolve
def resolve(self, obj):
    """
    Resolve a reference to an entry point or a variable in a module.

    If ``obj`` is a ``module:varname`` reference to an object, :func:`resolve_reference` is
    used to resolve it. If it is a string of any other kind, the named entry point is loaded
    from this container's namespace. Otherwise, ``obj`` is returned as is.

    :param obj: an entry point identifier, an object reference or an arbitrary object
    :return: the loaded entry point, resolved object or the unchanged input value
    :raises LookupError: if ``obj`` was a string but the named entry point was not found

    """
    # Non-strings are passed through untouched
    if not isinstance(obj, str):
        return obj
    if ':' in obj:
        return resolve_reference(obj)

    entrypoint = self._entrypoints.get(obj)
    if entrypoint is None:
        raise LookupError('no such entry point in {}: {}'.format(self.namespace, obj))

    if isinstance(entrypoint, EntryPoint):
        # Load lazily and cache the loaded object so later lookups skip the load
        entrypoint = self._entrypoints[obj] = entrypoint.load()

    return entrypoint
python
def resolve(self, obj): """ Resolve a reference to an entry point or a variable in a module. If ``obj`` is a ``module:varname`` reference to an object, :func:`resolve_reference` is used to resolve it. If it is a string of any other kind, the named entry point is loaded from this container's namespace. Otherwise, ``obj`` is returned as is. :param obj: an entry point identifier, an object reference or an arbitrary object :return: the loaded entry point, resolved object or the unchanged input value :raises LookupError: if ``obj`` was a string but the named entry point was not found """ if not isinstance(obj, str): return obj if ':' in obj: return resolve_reference(obj) value = self._entrypoints.get(obj) if value is None: raise LookupError('no such entry point in {}: {}'.format(self.namespace, obj)) if isinstance(value, EntryPoint): value = self._entrypoints[obj] = value.load() return value
Resolve a reference to an entry point or a variable in a module. If ``obj`` is a ``module:varname`` reference to an object, :func:`resolve_reference` is used to resolve it. If it is a string of any other kind, the named entry point is loaded from this container's namespace. Otherwise, ``obj`` is returned as is. :param obj: an entry point identifier, an object reference or an arbitrary object :return: the loaded entry point, resolved object or the unchanged input value :raises LookupError: if ``obj`` was a string but the named entry point was not found
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/utils.py#L120-L145
asphalt-framework/asphalt
asphalt/core/utils.py
PluginContainer.create_object
def create_object(self, type: Union[type, str], **constructor_kwargs):
    """
    Instantiate a plugin.

    The entry points in this namespace must point to subclasses of the ``base_class``
    parameter passed to this container.

    :param type: an entry point identifier, a ``module:varname`` reference to a class, or an
        actual class object
    :param constructor_kwargs: keyword arguments passed to the constructor of the plugin class
    :return: the plugin instance

    """
    assert check_argument_types()
    assert self.base_class, 'base class has not been defined'
    plugin_class = self.resolve(type)
    if not issubclass(plugin_class, self.base_class):
        message = '{} is not a subclass of {}'.format(
            qualified_name(plugin_class), qualified_name(self.base_class))
        raise TypeError(message)

    return plugin_class(**constructor_kwargs)
python
def create_object(self, type: Union[type, str], **constructor_kwargs): """ Instantiate a plugin. The entry points in this namespace must point to subclasses of the ``base_class`` parameter passed to this container. :param type: an entry point identifier, a ``module:varname`` reference to a class, or an actual class object :param constructor_kwargs: keyword arguments passed to the constructor of the plugin class :return: the plugin instance """ assert check_argument_types() assert self.base_class, 'base class has not been defined' plugin_class = self.resolve(type) if not issubclass(plugin_class, self.base_class): raise TypeError('{} is not a subclass of {}'.format( qualified_name(plugin_class), qualified_name(self.base_class))) return plugin_class(**constructor_kwargs)
Instantiate a plugin. The entry points in this namespace must point to subclasses of the ``base_class`` parameter passed to this container. :param type: an entry point identifier, a ``module:varname`` reference to a class, or an actual class object :param constructor_kwargs: keyword arguments passed to the constructor of the plugin class :return: the plugin instance
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/utils.py#L147-L167
asphalt-framework/asphalt
asphalt/core/utils.py
PluginContainer.all
def all(self) -> List[Any]:
    """
    Load all entry points (if not already loaded) in this namespace and return the resulting
    objects as a list.

    """
    loaded = []
    for name in self._entrypoints:
        value = self._entrypoints[name]
        if isinstance(value, EntryPoint):
            # Replace the entry point with its loaded object so it is only loaded once
            value = self._entrypoints[name] = value.load()

        loaded.append(value)

    return loaded
python
def all(self) -> List[Any]: """ Load all entry points (if not already loaded) in this namespace and return the resulting objects as a list. """ values = [] for name, value in self._entrypoints.items(): if isinstance(value, EntryPoint): value = self._entrypoints[name] = value.load() values.append(value) return values
Load all entry points (if not already loaded) in this namespace and return the resulting objects as a list.
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/utils.py#L174-L187
asphalt-framework/asphalt
asphalt/core/component.py
ContainerComponent.add_component
def add_component(self, alias: str, type: Union[str, type] = None, **config):
    """
    Add a child component.

    This will instantiate a component class, as specified by the ``type`` argument.

    If the second argument is omitted, the value of ``alias`` is used as its value.

    The locally given configuration can be overridden by component configuration parameters
    supplied to the constructor (via the ``components`` argument).

    When configuration values are provided both as keyword arguments to this method and
    component configuration through the ``components`` constructor argument, the
    configurations are merged together using :func:`~asphalt.core.util.merge_config` in a way
    that the configuration values from the ``components`` argument override the keyword
    arguments to this method.

    :param alias: a name for the component instance, unique within this container
    :param type: entry point name or :class:`Component` subclass or a ``module:varname``
        reference to one
    :param config: keyword arguments passed to the component's constructor
    :raises TypeError: if ``alias`` is not a nonempty string
    :raises ValueError: if a child component with the same alias already exists

    """
    assert check_argument_types()
    if not isinstance(alias, str) or not alias:
        # The previous message referred to "component_alias", which is not the name of any
        # parameter of this method; report the actual parameter name instead.
        raise TypeError('alias must be a nonempty string')
    if alias in self.child_components:
        raise ValueError('there is already a child component named "{}"'.format(alias))

    config['type'] = type or alias

    # Allow the external configuration to override the constructor arguments
    override_config = self.component_configs.get(alias) or {}
    config = merge_config(config, override_config)

    component = component_types.create_object(**config)
    self.child_components[alias] = component
python
def add_component(self, alias: str, type: Union[str, type] = None, **config): """ Add a child component. This will instantiate a component class, as specified by the ``type`` argument. If the second argument is omitted, the value of ``alias`` is used as its value. The locally given configuration can be overridden by component configuration parameters supplied to the constructor (via the ``components`` argument). When configuration values are provided both as keyword arguments to this method and component configuration through the ``components`` constructor argument, the configurations are merged together using :func:`~asphalt.core.util.merge_config` in a way that the configuration values from the ``components`` argument override the keyword arguments to this method. :param alias: a name for the component instance, unique within this container :param type: entry point name or :class:`Component` subclass or a ``module:varname`` reference to one :param config: keyword arguments passed to the component's constructor """ assert check_argument_types() if not isinstance(alias, str) or not alias: raise TypeError('component_alias must be a nonempty string') if alias in self.child_components: raise ValueError('there is already a child component named "{}"'.format(alias)) config['type'] = type or alias # Allow the external configuration to override the constructor arguments override_config = self.component_configs.get(alias) or {} config = merge_config(config, override_config) component = component_types.create_object(**config) self.child_components[alias] = component
Add a child component. This will instantiate a component class, as specified by the ``type`` argument. If the second argument is omitted, the value of ``alias`` is used as its value. The locally given configuration can be overridden by component configuration parameters supplied to the constructor (via the ``components`` argument). When configuration values are provided both as keyword arguments to this method and component configuration through the ``components`` constructor argument, the configurations are merged together using :func:`~asphalt.core.util.merge_config` in a way that the configuration values from the ``components`` argument override the keyword arguments to this method. :param alias: a name for the component instance, unique within this container :param type: entry point name or :class:`Component` subclass or a ``module:varname`` reference to one :param config: keyword arguments passed to the component's constructor
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/component.py#L63-L99
asphalt-framework/asphalt
asphalt/core/component.py
ContainerComponent.start
async def start(self, ctx: Context):
    """
    Create child components that have been configured but not yet created and then calls their
    :meth:`~Component.start` methods in separate tasks and waits until they have completed.

    """
    # Instantiate any configured-but-missing children before starting anything
    for alias in self.component_configs:
        if alias not in self.child_components:
            self.add_component(alias)

    start_coros = [child.start(ctx) for child in self.child_components.values()]
    if start_coros:
        # Start every child concurrently and wait for all of them to finish
        await asyncio.gather(*start_coros)
python
async def start(self, ctx: Context): """ Create child components that have been configured but not yet created and then calls their :meth:`~Component.start` methods in separate tasks and waits until they have completed. """ for alias in self.component_configs: if alias not in self.child_components: self.add_component(alias) tasks = [component.start(ctx) for component in self.child_components.values()] if tasks: await asyncio.gather(*tasks)
Create child components that have been configured but not yet created, then call their :meth:`~Component.start` methods in separate tasks and wait until they have completed.
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/component.py#L101-L113
asphalt-framework/asphalt
asphalt/core/context.py
executor
def executor(arg: Union[Executor, str, Callable] = None):
    """
    Decorate a function so that it runs in an :class:`~concurrent.futures.Executor`.

    If a resource name is given, the first argument must be a :class:`~.Context`.

    Usage::

        @executor
        def should_run_in_executor():
            ...

    With a resource name::

        @executor('resourcename')
        def should_run_in_executor(ctx):
            ...

    :param arg: a callable to decorate, an :class:`~concurrent.futures.Executor` instance, the
        resource name of one or ``None`` to use the event loop's default executor
    :return: the wrapped function

    """
    def outer_wrapper(func: Callable):
        @wraps(func)
        def inner_wrapper(*args, **kwargs):
            # The context must be among the first two positional arguments (the first one
            # may be "self" on a bound method).
            ctx = None
            for candidate in args[:2]:
                if isinstance(candidate, Context):
                    ctx = candidate
                    break

            if ctx is None:
                raise RuntimeError('the first positional argument to {}() has to be a Context '
                                   'instance'.format(callable_name(func)))

            # Look up the named executor resource from the context at call time
            pool = ctx.require_resource(Executor, resource_name)
            return asyncio_extras.call_in_executor(func, *args, executor=pool, **kwargs)

        return inner_wrapper

    if isinstance(arg, str):
        # Called with a resource name: return a decorator that closes over it
        resource_name = arg
        return outer_wrapper

    # Called directly on a function, with an Executor instance or with None
    return asyncio_extras.threadpool(arg)
python
def executor(arg: Union[Executor, str, Callable] = None): """ Decorate a function so that it runs in an :class:`~concurrent.futures.Executor`. If a resource name is given, the first argument must be a :class:`~.Context`. Usage:: @executor def should_run_in_executor(): ... With a resource name:: @executor('resourcename') def should_run_in_executor(ctx): ... :param arg: a callable to decorate, an :class:`~concurrent.futures.Executor` instance, the resource name of one or ``None`` to use the event loop's default executor :return: the wrapped function """ def outer_wrapper(func: Callable): @wraps(func) def inner_wrapper(*args, **kwargs): try: ctx = next(arg for arg in args[:2] if isinstance(arg, Context)) except StopIteration: raise RuntimeError('the first positional argument to {}() has to be a Context ' 'instance'.format(callable_name(func))) from None executor = ctx.require_resource(Executor, resource_name) return asyncio_extras.call_in_executor(func, *args, executor=executor, **kwargs) return inner_wrapper if isinstance(arg, str): resource_name = arg return outer_wrapper return asyncio_extras.threadpool(arg)
Decorate a function so that it runs in an :class:`~concurrent.futures.Executor`. If a resource name is given, the first argument must be a :class:`~.Context`. Usage:: @executor def should_run_in_executor(): ... With a resource name:: @executor('resourcename') def should_run_in_executor(ctx): ... :param arg: a callable to decorate, an :class:`~concurrent.futures.Executor` instance, the resource name of one or ``None`` to use the event loop's default executor :return: the wrapped function
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/context.py#L539-L580
asphalt-framework/asphalt
asphalt/core/context.py
context_teardown
def context_teardown(func: Callable):
    """
    Wrap an async generator function to execute the rest of the function at context teardown.

    This function returns an async function, which, when called, starts the wrapped async
    generator. The wrapped async function is run until the first ``yield`` statement
    (``await async_generator.yield_()`` on Python 3.5). When the context is being torn down,
    the exception that ended the context, if any, is sent to the generator.

    For example::

        class SomeComponent(Component):
            @context_teardown
            async def start(self, ctx: Context):
                service = SomeService()
                ctx.add_resource(service)
                exception = yield
                service.stop()

    :param func: an async generator function
    :return: an async function

    """
    @wraps(func)
    async def wrapper(*args, **kwargs) -> None:
        async def teardown_callback(exception: Optional[Exception]):
            # Resume the generator past its yield point, feeding it the exception (or None)
            # that ended the context; always close the generator afterwards.
            try:
                await generator.asend(exception)
            except StopAsyncIteration:
                pass
            finally:
                await generator.aclose()

        # Find the Context among the first two positional arguments (the first one may be
        # "self" on a bound method).
        try:
            ctx = next(arg for arg in args[:2] if isinstance(arg, Context))
        except StopIteration:
            raise RuntimeError('the first positional argument to {}() has to be a Context '
                               'instance'.format(callable_name(func))) from None

        # NOTE: "func" here is the (possibly rebound, see below) closure variable, so a plain
        # coroutine function has already been converted to an async generator by call time.
        generator = func(*args, **kwargs)
        # Run the generator up to its first yield; everything after the yield runs at teardown.
        try:
            await generator.asend(None)
        except StopAsyncIteration:
            # The function never yielded, so there is nothing to run at teardown
            pass
        except BaseException:
            # Startup failed: close the generator and propagate the error
            await generator.aclose()
            raise
        else:
            ctx.add_teardown_callback(teardown_callback, True)

    # Rebinding "func" here is deliberate: "wrapper" closes over the name, so it calls the
    # async_generator-wrapped version on Python versions without native async generators.
    if iscoroutinefunction(func):
        func = async_generator(func)
    elif not isasyncgenfunction(func):
        raise TypeError('{} must be an async generator function'.format(callable_name(func)))

    return wrapper
python
def context_teardown(func: Callable): """ Wrap an async generator function to execute the rest of the function at context teardown. This function returns an async function, which, when called, starts the wrapped async generator. The wrapped async function is run until the first ``yield`` statement (``await async_generator.yield_()`` on Python 3.5). When the context is being torn down, the exception that ended the context, if any, is sent to the generator. For example:: class SomeComponent(Component): @context_teardown async def start(self, ctx: Context): service = SomeService() ctx.add_resource(service) exception = yield service.stop() :param func: an async generator function :return: an async function """ @wraps(func) async def wrapper(*args, **kwargs) -> None: async def teardown_callback(exception: Optional[Exception]): try: await generator.asend(exception) except StopAsyncIteration: pass finally: await generator.aclose() try: ctx = next(arg for arg in args[:2] if isinstance(arg, Context)) except StopIteration: raise RuntimeError('the first positional argument to {}() has to be a Context ' 'instance'.format(callable_name(func))) from None generator = func(*args, **kwargs) try: await generator.asend(None) except StopAsyncIteration: pass except BaseException: await generator.aclose() raise else: ctx.add_teardown_callback(teardown_callback, True) if iscoroutinefunction(func): func = async_generator(func) elif not isasyncgenfunction(func): raise TypeError('{} must be an async generator function'.format(callable_name(func))) return wrapper
Wrap an async generator function to execute the rest of the function at context teardown. This function returns an async function, which, when called, starts the wrapped async generator. The wrapped async function is run until the first ``yield`` statement (``await async_generator.yield_()`` on Python 3.5). When the context is being torn down, the exception that ended the context, if any, is sent to the generator. For example:: class SomeComponent(Component): @context_teardown async def start(self, ctx: Context): service = SomeService() ctx.add_resource(service) exception = yield service.stop() :param func: an async generator function :return: an async function
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/context.py#L583-L638
asphalt-framework/asphalt
asphalt/core/context.py
Context.context_chain
def context_chain(self) -> List['Context']:
    """Return a list of contexts starting from this one, its parent and so on."""
    chain = []  # type: List[Context]
    current = self  # type: Optional[Context]
    # Walk up the parent links until the root (whose parent is None) is reached
    while current is not None:
        chain.append(current)
        current = current.parent

    return chain
python
def context_chain(self) -> List['Context']: """Return a list of contexts starting from this one, its parent and so on.""" contexts = [] ctx = self # type: Optional[Context] while ctx is not None: contexts.append(ctx) ctx = ctx.parent return contexts
Return a list of contexts starting from this one, its parent and so on.
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/context.py#L176-L184
asphalt-framework/asphalt
asphalt/core/context.py
Context.add_teardown_callback
def add_teardown_callback(self, callback: Callable, pass_exception: bool = False) -> None:
    """
    Add a callback to be called when this context closes.

    This is intended for cleanup of resources, and the list of callbacks is processed in the
    reverse order in which they were added, so the last added callback will be called first.

    The callback may return an awaitable. If it does, the awaitable is awaited on before
    calling any further callbacks.

    :param callback: a callable that is called with either no arguments or with the exception
        that ended this context, based on the value of ``pass_exception``
    :param pass_exception: ``True`` to pass the callback the exception that ended this context
        (or ``None`` if the context ended cleanly)

    """
    assert check_argument_types()
    self._check_closed()
    # Stored as (callable, pass_exception) pairs; close() iterates them in reverse order
    entry = (callback, pass_exception)
    self._teardown_callbacks.append(entry)
python
def add_teardown_callback(self, callback: Callable, pass_exception: bool = False) -> None: """ Add a callback to be called when this context closes. This is intended for cleanup of resources, and the list of callbacks is processed in the reverse order in which they were added, so the last added callback will be called first. The callback may return an awaitable. If it does, the awaitable is awaited on before calling any further callbacks. :param callback: a callable that is called with either no arguments or with the exception that ended this context, based on the value of ``pass_exception`` :param pass_exception: ``True`` to pass the callback the exception that ended this context (or ``None`` if the context ended cleanly) """ assert check_argument_types() self._check_closed() self._teardown_callbacks.append((callback, pass_exception))
Add a callback to be called when this context closes. This is intended for cleanup of resources, and the list of callbacks is processed in the reverse order in which they were added, so the last added callback will be called first. The callback may return an awaitable. If it does, the awaitable is awaited on before calling any further callbacks. :param callback: a callable that is called with either no arguments or with the exception that ended this context, based on the value of ``pass_exception`` :param pass_exception: ``True`` to pass the callback the exception that ended this context (or ``None`` if the context ended cleanly)
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/context.py#L205-L223
asphalt-framework/asphalt
asphalt/core/context.py
Context.close
async def close(self, exception: BaseException = None) -> None:
    """
    Close this context and call any necessary resource teardown callbacks.

    If a teardown callback returns an awaitable, the return value is awaited on before calling
    any further teardown callbacks.

    All callbacks will be processed, even if some of them raise exceptions. If at least one
    callback raised an error, this method will raise a :exc:`~.TeardownError` at the end.

    After this method has been called, resources can no longer be requested or published on
    this context.

    :param exception: the exception, if any, that caused this context to be closed
    :raises .TeardownError: if one or more teardown callbacks raise an exception

    """
    self._check_closed()
    self._closed = True

    failures = []
    # Last-added callbacks run first
    for callback, pass_exception in reversed(self._teardown_callbacks):
        try:
            if pass_exception:
                retval = callback(exception)
            else:
                retval = callback()

            if isawaitable(retval):
                await retval
        except Exception as e:
            # Collect the error but keep processing the remaining callbacks
            failures.append(e)

    # The callback list is no longer needed; drop it so it cannot keep resources alive
    del self._teardown_callbacks

    if failures:
        raise TeardownError(failures)
python
async def close(self, exception: BaseException = None) -> None: """ Close this context and call any necessary resource teardown callbacks. If a teardown callback returns an awaitable, the return value is awaited on before calling any further teardown callbacks. All callbacks will be processed, even if some of them raise exceptions. If at least one callback raised an error, this method will raise a :exc:`~.TeardownError` at the end. After this method has been called, resources can no longer be requested or published on this context. :param exception: the exception, if any, that caused this context to be closed :raises .TeardownError: if one or more teardown callbacks raise an exception """ self._check_closed() self._closed = True exceptions = [] for callback, pass_exception in reversed(self._teardown_callbacks): try: retval = callback(exception) if pass_exception else callback() if isawaitable(retval): await retval except Exception as e: exceptions.append(e) del self._teardown_callbacks if exceptions: raise TeardownError(exceptions)
Close this context and call any necessary resource teardown callbacks. If a teardown callback returns an awaitable, the return value is awaited on before calling any further teardown callbacks. All callbacks will be processed, even if some of them raise exceptions. If at least one callback raised an error, this method will raise a :exc:`~.TeardownError` at the end. After this method has been called, resources can no longer be requested or published on this context. :param exception: the exception, if any, that caused this context to be closed :raises .TeardownError: if one or more teardown callbacks raise an exception
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/context.py#L225-L256
asphalt-framework/asphalt
asphalt/core/context.py
Context.add_resource
def add_resource(self, value, name: str = 'default', context_attr: str = None,
                 types: Union[type, Sequence[type]] = ()) -> None:
    """
    Add a resource to this context.

    This will cause a ``resource_added`` event to be dispatched.

    :param value: the actual resource value
    :param name: name of this resource (unique among all its registered types within a single
        context)
    :param context_attr: name of the context attribute this resource will be accessible as
    :param types: type(s) to register the resource as (omit to use the type of ``value``)
    :raises asphalt.core.context.ResourceConflict: if the resource conflicts with an existing
        one in any way

    """
    assert check_argument_types()
    self._check_closed()
    # Normalize "types": a single type becomes a 1-tuple; an empty value falls back to
    # the concrete type of the resource value itself.
    types = (types,) if isinstance(types, type) else (types or (type(value),))
    if value is None:
        raise ValueError('"value" must not be None')
    if not resource_name_re.fullmatch(name):
        raise ValueError('"name" must be a nonempty string consisting only of alphanumeric '
                         'characters and underscores')
    if context_attr and getattr_static(self, context_attr, None) is not None:
        raise ResourceConflict('this context already has an attribute {!r}'.format(
            context_attr))
    for resource_type in types:
        if (resource_type, name) in self._resources:
            raise ResourceConflict(
                'this context already contains a resource of type {} using the name {!r}'.
                format(qualified_name(resource_type), name))

    container = ResourceContainer(value, tuple(types), name, context_attr, False)
    for registered_type in container.types:
        self._resources[(registered_type, name)] = container

    if context_attr:
        setattr(self, context_attr, value)

    # Notify listeners that a new resource has been made available
    self.resource_added.dispatch(types, name, False)
python
def add_resource(self, value, name: str = 'default', context_attr: str = None, types: Union[type, Sequence[type]] = ()) -> None: """ Add a resource to this context. This will cause a ``resource_added`` event to be dispatched. :param value: the actual resource value :param name: name of this resource (unique among all its registered types within a single context) :param context_attr: name of the context attribute this resource will be accessible as :param types: type(s) to register the resource as (omit to use the type of ``value``) :raises asphalt.core.context.ResourceConflict: if the resource conflicts with an existing one in any way """ assert check_argument_types() self._check_closed() if isinstance(types, type): types = (types,) elif not types: types = (type(value),) if value is None: raise ValueError('"value" must not be None') if not resource_name_re.fullmatch(name): raise ValueError('"name" must be a nonempty string consisting only of alphanumeric ' 'characters and underscores') if context_attr and getattr_static(self, context_attr, None) is not None: raise ResourceConflict('this context already has an attribute {!r}'.format( context_attr)) for resource_type in types: if (resource_type, name) in self._resources: raise ResourceConflict( 'this context already contains a resource of type {} using the name {!r}'. format(qualified_name(resource_type), name)) resource = ResourceContainer(value, tuple(types), name, context_attr, False) for type_ in resource.types: self._resources[(type_, name)] = resource if context_attr: setattr(self, context_attr, value) # Notify listeners that a new resource has been made available self.resource_added.dispatch(types, name, False)
Add a resource to this context. This will cause a ``resource_added`` event to be dispatched. :param value: the actual resource value :param name: name of this resource (unique among all its registered types within a single context) :param context_attr: name of the context attribute this resource will be accessible as :param types: type(s) to register the resource as (omit to use the type of ``value``) :raises asphalt.core.context.ResourceConflict: if the resource conflicts with an existing one in any way
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/context.py#L272-L317
asphalt-framework/asphalt
asphalt/core/context.py
Context.add_resource_factory
def add_resource_factory(self, factory_callback: factory_callback_type, types: Union[type, Sequence[Type]], name: str = 'default', context_attr: str = None) -> None: """ Add a resource factory to this context. This will cause a ``resource_added`` event to be dispatched. A resource factory is a callable that generates a "contextual" resource when it is requested by either using any of the methods :meth:`get_resource`, :meth:`require_resource` or :meth:`request_resource` or its context attribute is accessed. When a new resource is created in this manner, it is always bound to the context through it was requested, regardless of where in the chain the factory itself was added to. :param factory_callback: a (non-coroutine) callable that takes a context instance as argument and returns the created resource object :param types: one or more types to register the generated resource as on the target context :param name: name of the resource that will be created in the target context :param context_attr: name of the context attribute the created resource will be accessible as :raises asphalt.core.context.ResourceConflict: if there is an existing resource factory for the given type/name combinations or the given context variable """ assert check_argument_types() self._check_closed() if not resource_name_re.fullmatch(name): raise ValueError('"name" must be a nonempty string consisting only of alphanumeric ' 'characters and underscores') if iscoroutinefunction(factory_callback): raise TypeError('"factory_callback" must not be a coroutine function') if not types: raise ValueError('"types" must not be empty') if isinstance(types, type): resource_types = (types,) # type: Tuple[type, ...] else: resource_types = tuple(types) # Check for a conflicting context attribute if context_attr in self._resource_factories_by_context_attr: raise ResourceConflict( 'this context already contains a resource factory for the context attribute {!r}'. 
format(context_attr)) # Check for conflicts with existing resource factories for type_ in resource_types: if (type_, name) in self._resource_factories: raise ResourceConflict('this context already contains a resource factory for the ' 'type {}'.format(qualified_name(type_))) # Add the resource factory to the appropriate lookup tables resource = ResourceContainer(factory_callback, resource_types, name, context_attr, True) for type_ in resource_types: self._resource_factories[(type_, name)] = resource if context_attr: self._resource_factories_by_context_attr[context_attr] = resource # Notify listeners that a new resource has been made available self.resource_added.dispatch(resource_types, name, True)
python
def add_resource_factory(self, factory_callback: factory_callback_type, types: Union[type, Sequence[Type]], name: str = 'default', context_attr: str = None) -> None: """ Add a resource factory to this context. This will cause a ``resource_added`` event to be dispatched. A resource factory is a callable that generates a "contextual" resource when it is requested by either using any of the methods :meth:`get_resource`, :meth:`require_resource` or :meth:`request_resource` or its context attribute is accessed. When a new resource is created in this manner, it is always bound to the context through it was requested, regardless of where in the chain the factory itself was added to. :param factory_callback: a (non-coroutine) callable that takes a context instance as argument and returns the created resource object :param types: one or more types to register the generated resource as on the target context :param name: name of the resource that will be created in the target context :param context_attr: name of the context attribute the created resource will be accessible as :raises asphalt.core.context.ResourceConflict: if there is an existing resource factory for the given type/name combinations or the given context variable """ assert check_argument_types() self._check_closed() if not resource_name_re.fullmatch(name): raise ValueError('"name" must be a nonempty string consisting only of alphanumeric ' 'characters and underscores') if iscoroutinefunction(factory_callback): raise TypeError('"factory_callback" must not be a coroutine function') if not types: raise ValueError('"types" must not be empty') if isinstance(types, type): resource_types = (types,) # type: Tuple[type, ...] else: resource_types = tuple(types) # Check for a conflicting context attribute if context_attr in self._resource_factories_by_context_attr: raise ResourceConflict( 'this context already contains a resource factory for the context attribute {!r}'. 
format(context_attr)) # Check for conflicts with existing resource factories for type_ in resource_types: if (type_, name) in self._resource_factories: raise ResourceConflict('this context already contains a resource factory for the ' 'type {}'.format(qualified_name(type_))) # Add the resource factory to the appropriate lookup tables resource = ResourceContainer(factory_callback, resource_types, name, context_attr, True) for type_ in resource_types: self._resource_factories[(type_, name)] = resource if context_attr: self._resource_factories_by_context_attr[context_attr] = resource # Notify listeners that a new resource has been made available self.resource_added.dispatch(resource_types, name, True)
Add a resource factory to this context. This will cause a ``resource_added`` event to be dispatched. A resource factory is a callable that generates a "contextual" resource when it is requested by either using any of the methods :meth:`get_resource`, :meth:`require_resource` or :meth:`request_resource` or its context attribute is accessed. When a new resource is created in this manner, it is always bound to the context through it was requested, regardless of where in the chain the factory itself was added to. :param factory_callback: a (non-coroutine) callable that takes a context instance as argument and returns the created resource object :param types: one or more types to register the generated resource as on the target context :param name: name of the resource that will be created in the target context :param context_attr: name of the context attribute the created resource will be accessible as :raises asphalt.core.context.ResourceConflict: if there is an existing resource factory for the given type/name combinations or the given context variable
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/context.py#L319-L380
asphalt-framework/asphalt
asphalt/core/context.py
Context.get_resource
def get_resource(self, type: Type[T_Resource], name: str = 'default') -> Optional[T_Resource]: """ Look up a resource in the chain of contexts. :param type: type of the requested resource :param name: name of the requested resource :return: the requested resource, or ``None`` if none was available """ assert check_argument_types() self._check_closed() key = (type, name) # First check if there's already a matching resource in this context resource = self._resources.get(key) if resource is not None: return resource.value_or_factory # Next, check if there's a resource factory available on the context chain resource = next((ctx._resource_factories[key] for ctx in self.context_chain if key in ctx._resource_factories), None) if resource is not None: return resource.generate_value(self) # Finally, check parents for a matching resource return next((ctx._resources[key].value_or_factory for ctx in self.context_chain if key in ctx._resources), None)
python
def get_resource(self, type: Type[T_Resource], name: str = 'default') -> Optional[T_Resource]: """ Look up a resource in the chain of contexts. :param type: type of the requested resource :param name: name of the requested resource :return: the requested resource, or ``None`` if none was available """ assert check_argument_types() self._check_closed() key = (type, name) # First check if there's already a matching resource in this context resource = self._resources.get(key) if resource is not None: return resource.value_or_factory # Next, check if there's a resource factory available on the context chain resource = next((ctx._resource_factories[key] for ctx in self.context_chain if key in ctx._resource_factories), None) if resource is not None: return resource.generate_value(self) # Finally, check parents for a matching resource return next((ctx._resources[key].value_or_factory for ctx in self.context_chain if key in ctx._resources), None)
Look up a resource in the chain of contexts. :param type: type of the requested resource :param name: name of the requested resource :return: the requested resource, or ``None`` if none was available
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/context.py#L382-L408
asphalt-framework/asphalt
asphalt/core/context.py
Context.get_resources
def get_resources(self, type: Type[T_Resource]) -> Set[T_Resource]: """ Retrieve all the resources of the given type in this context and its parents. Any matching resource factories are also triggered if necessary. :param type: type of the resources to get :return: a set of all found resources of the given type """ assert check_argument_types() # Collect all the matching resources from this context resources = {container.name: container.value_or_factory for container in self._resources.values() if not container.is_factory and type in container.types } # type: Dict[str, T_Resource] # Next, find all matching resource factories in the context chain and generate resources resources.update({container.name: container.generate_value(self) for ctx in self.context_chain for container in ctx._resources.values() if container.is_factory and type in container.types and container.name not in resources}) # Finally, add the resource values from the parent contexts resources.update({container.name: container.value_or_factory for ctx in self.context_chain[1:] for container in ctx._resources.values() if not container.is_factory and type in container.types and container.name not in resources}) return set(resources.values())
python
def get_resources(self, type: Type[T_Resource]) -> Set[T_Resource]: """ Retrieve all the resources of the given type in this context and its parents. Any matching resource factories are also triggered if necessary. :param type: type of the resources to get :return: a set of all found resources of the given type """ assert check_argument_types() # Collect all the matching resources from this context resources = {container.name: container.value_or_factory for container in self._resources.values() if not container.is_factory and type in container.types } # type: Dict[str, T_Resource] # Next, find all matching resource factories in the context chain and generate resources resources.update({container.name: container.generate_value(self) for ctx in self.context_chain for container in ctx._resources.values() if container.is_factory and type in container.types and container.name not in resources}) # Finally, add the resource values from the parent contexts resources.update({container.name: container.value_or_factory for ctx in self.context_chain[1:] for container in ctx._resources.values() if not container.is_factory and type in container.types and container.name not in resources}) return set(resources.values())
Retrieve all the resources of the given type in this context and its parents. Any matching resource factories are also triggered if necessary. :param type: type of the resources to get :return: a set of all found resources of the given type
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/context.py#L410-L442
asphalt-framework/asphalt
asphalt/core/context.py
Context.require_resource
def require_resource(self, type: Type[T_Resource], name: str = 'default') -> T_Resource: """ Look up a resource in the chain of contexts and raise an exception if it is not found. This is like :meth:`get_resource` except that instead of returning ``None`` when a resource is not found, it will raise :exc:`~asphalt.core.context.ResourceNotFound`. :param type: type of the requested resource :param name: name of the requested resource :return: the requested resource :raises asphalt.core.context.ResourceNotFound: if a resource of the given type and name was not found """ resource = self.get_resource(type, name) if resource is None: raise ResourceNotFound(type, name) return resource
python
def require_resource(self, type: Type[T_Resource], name: str = 'default') -> T_Resource: """ Look up a resource in the chain of contexts and raise an exception if it is not found. This is like :meth:`get_resource` except that instead of returning ``None`` when a resource is not found, it will raise :exc:`~asphalt.core.context.ResourceNotFound`. :param type: type of the requested resource :param name: name of the requested resource :return: the requested resource :raises asphalt.core.context.ResourceNotFound: if a resource of the given type and name was not found """ resource = self.get_resource(type, name) if resource is None: raise ResourceNotFound(type, name) return resource
Look up a resource in the chain of contexts and raise an exception if it is not found. This is like :meth:`get_resource` except that instead of returning ``None`` when a resource is not found, it will raise :exc:`~asphalt.core.context.ResourceNotFound`. :param type: type of the requested resource :param name: name of the requested resource :return: the requested resource :raises asphalt.core.context.ResourceNotFound: if a resource of the given type and name was not found
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/context.py#L444-L462
asphalt-framework/asphalt
asphalt/core/context.py
Context.request_resource
async def request_resource(self, type: Type[T_Resource], name: str = 'default') -> T_Resource: """ Look up a resource in the chain of contexts. This is like :meth:`get_resource` except that if the resource is not already available, it will wait for one to become available. :param type: type of the requested resource :param name: name of the requested resource :return: the requested resource """ # First try to locate an existing resource in this context and its parents value = self.get_resource(type, name) if value is not None: return value # Wait until a matching resource or resource factory is available signals = [ctx.resource_added for ctx in self.context_chain] await wait_event( signals, lambda event: event.resource_name == name and type in event.resource_types) return self.require_resource(type, name)
python
async def request_resource(self, type: Type[T_Resource], name: str = 'default') -> T_Resource: """ Look up a resource in the chain of contexts. This is like :meth:`get_resource` except that if the resource is not already available, it will wait for one to become available. :param type: type of the requested resource :param name: name of the requested resource :return: the requested resource """ # First try to locate an existing resource in this context and its parents value = self.get_resource(type, name) if value is not None: return value # Wait until a matching resource or resource factory is available signals = [ctx.resource_added for ctx in self.context_chain] await wait_event( signals, lambda event: event.resource_name == name and type in event.resource_types) return self.require_resource(type, name)
Look up a resource in the chain of contexts. This is like :meth:`get_resource` except that if the resource is not already available, it will wait for one to become available. :param type: type of the requested resource :param name: name of the requested resource :return: the requested resource
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/context.py#L464-L485
asphalt-framework/asphalt
asphalt/core/context.py
Context.call_async
def call_async(self, func: Callable, *args, **kwargs): """ Call the given callable in the event loop thread. This method lets you call asynchronous code from a worker thread. Do not use it from within the event loop thread. If the callable returns an awaitable, it is resolved before returning to the caller. :param func: a regular function or a coroutine function :param args: positional arguments to call the callable with :param kwargs: keyword arguments to call the callable with :return: the return value of the call """ return asyncio_extras.call_async(self.loop, func, *args, **kwargs)
python
def call_async(self, func: Callable, *args, **kwargs): """ Call the given callable in the event loop thread. This method lets you call asynchronous code from a worker thread. Do not use it from within the event loop thread. If the callable returns an awaitable, it is resolved before returning to the caller. :param func: a regular function or a coroutine function :param args: positional arguments to call the callable with :param kwargs: keyword arguments to call the callable with :return: the return value of the call """ return asyncio_extras.call_async(self.loop, func, *args, **kwargs)
Call the given callable in the event loop thread. This method lets you call asynchronous code from a worker thread. Do not use it from within the event loop thread. If the callable returns an awaitable, it is resolved before returning to the caller. :param func: a regular function or a coroutine function :param args: positional arguments to call the callable with :param kwargs: keyword arguments to call the callable with :return: the return value of the call
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/context.py#L487-L502
asphalt-framework/asphalt
asphalt/core/context.py
Context.call_in_executor
def call_in_executor(self, func: Callable, *args, executor: Union[Executor, str] = None, **kwargs) -> Awaitable: """ Call the given callable in an executor. :param func: the callable to call :param args: positional arguments to call the callable with :param executor: either an :class:`~concurrent.futures.Executor` instance, the resource name of one or ``None`` to use the event loop's default executor :param kwargs: keyword arguments to call the callable with :return: an awaitable that resolves to the return value of the call """ assert check_argument_types() if isinstance(executor, str): executor = self.require_resource(Executor, executor) return asyncio_extras.call_in_executor(func, *args, executor=executor, **kwargs)
python
def call_in_executor(self, func: Callable, *args, executor: Union[Executor, str] = None, **kwargs) -> Awaitable: """ Call the given callable in an executor. :param func: the callable to call :param args: positional arguments to call the callable with :param executor: either an :class:`~concurrent.futures.Executor` instance, the resource name of one or ``None`` to use the event loop's default executor :param kwargs: keyword arguments to call the callable with :return: an awaitable that resolves to the return value of the call """ assert check_argument_types() if isinstance(executor, str): executor = self.require_resource(Executor, executor) return asyncio_extras.call_in_executor(func, *args, executor=executor, **kwargs)
Call the given callable in an executor. :param func: the callable to call :param args: positional arguments to call the callable with :param executor: either an :class:`~concurrent.futures.Executor` instance, the resource name of one or ``None`` to use the event loop's default executor :param kwargs: keyword arguments to call the callable with :return: an awaitable that resolves to the return value of the call
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/context.py#L504-L521
asphalt-framework/asphalt
asphalt/core/context.py
Context.threadpool
def threadpool(self, executor: Union[Executor, str] = None): """ Return an asynchronous context manager that runs the block in a (thread pool) executor. :param executor: either an :class:`~concurrent.futures.Executor` instance, the resource name of one or ``None`` to use the event loop's default executor :return: an asynchronous context manager """ assert check_argument_types() if isinstance(executor, str): executor = self.require_resource(Executor, executor) return asyncio_extras.threadpool(executor)
python
def threadpool(self, executor: Union[Executor, str] = None): """ Return an asynchronous context manager that runs the block in a (thread pool) executor. :param executor: either an :class:`~concurrent.futures.Executor` instance, the resource name of one or ``None`` to use the event loop's default executor :return: an asynchronous context manager """ assert check_argument_types() if isinstance(executor, str): executor = self.require_resource(Executor, executor) return asyncio_extras.threadpool(executor)
Return an asynchronous context manager that runs the block in a (thread pool) executor. :param executor: either an :class:`~concurrent.futures.Executor` instance, the resource name of one or ``None`` to use the event loop's default executor :return: an asynchronous context manager
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/context.py#L523-L536
asphalt-framework/asphalt
asphalt/core/event.py
stream_events
def stream_events(signals: Sequence[Signal], filter: Callable[[T_Event], bool] = None, *, max_queue_size: int = 0) -> AsyncIterator[T_Event]: """ Return an async generator that yields events from the given signals. Only events that pass the filter callable (if one has been given) are returned. If no filter function was given, all events are yielded from the generator. :param signals: the signals to get events from :param filter: a callable that takes an event object as an argument and returns ``True`` if the event should pass, ``False`` if not :param max_queue_size: maximum size of the queue, after which it will start to drop events """ @async_generator async def streamer(): try: while True: event = await queue.get() if filter is None or filter(event): await yield_(event) finally: cleanup() def cleanup(): nonlocal queue if queue is not None: for signal in signals: signal.disconnect(queue.put_nowait) queue = None assert check_argument_types() queue = Queue(max_queue_size) # type: Queue[T_Event] for signal in signals: signal.connect(queue.put_nowait) gen = [streamer()] # this is to allow the reference count to drop to 0 weakref.finalize(gen[0], cleanup) return gen.pop()
python
def stream_events(signals: Sequence[Signal], filter: Callable[[T_Event], bool] = None, *, max_queue_size: int = 0) -> AsyncIterator[T_Event]: """ Return an async generator that yields events from the given signals. Only events that pass the filter callable (if one has been given) are returned. If no filter function was given, all events are yielded from the generator. :param signals: the signals to get events from :param filter: a callable that takes an event object as an argument and returns ``True`` if the event should pass, ``False`` if not :param max_queue_size: maximum size of the queue, after which it will start to drop events """ @async_generator async def streamer(): try: while True: event = await queue.get() if filter is None or filter(event): await yield_(event) finally: cleanup() def cleanup(): nonlocal queue if queue is not None: for signal in signals: signal.disconnect(queue.put_nowait) queue = None assert check_argument_types() queue = Queue(max_queue_size) # type: Queue[T_Event] for signal in signals: signal.connect(queue.put_nowait) gen = [streamer()] # this is to allow the reference count to drop to 0 weakref.finalize(gen[0], cleanup) return gen.pop()
Return an async generator that yields events from the given signals. Only events that pass the filter callable (if one has been given) are returned. If no filter function was given, all events are yielded from the generator. :param signals: the signals to get events from :param filter: a callable that takes an event object as an argument and returns ``True`` if the event should pass, ``False`` if not :param max_queue_size: maximum size of the queue, after which it will start to drop events
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/event.py#L228-L267
asphalt-framework/asphalt
asphalt/core/event.py
wait_event
async def wait_event(signals: Sequence['Signal[T_Event]'], filter: Callable[[T_Event], bool] = None) -> T_Event: """ Wait until any of the given signals dispatches an event that satisfies the filter (if any). If no filter has been given, the first event dispatched from the signal is returned. :param signals: the signals to get events from :param filter: a callable that takes an event object as an argument and returns ``True`` if the event should pass, ``False`` if not :return: the event that was dispatched """ if sys.version_info >= (3, 5, 3): assert check_argument_types() async with aclosing(stream_events(signals, filter)) as events: return await events.asend(None)
python
async def wait_event(signals: Sequence['Signal[T_Event]'], filter: Callable[[T_Event], bool] = None) -> T_Event: """ Wait until any of the given signals dispatches an event that satisfies the filter (if any). If no filter has been given, the first event dispatched from the signal is returned. :param signals: the signals to get events from :param filter: a callable that takes an event object as an argument and returns ``True`` if the event should pass, ``False`` if not :return: the event that was dispatched """ if sys.version_info >= (3, 5, 3): assert check_argument_types() async with aclosing(stream_events(signals, filter)) as events: return await events.asend(None)
Wait until any of the given signals dispatches an event that satisfies the filter (if any). If no filter has been given, the first event dispatched from the signal is returned. :param signals: the signals to get events from :param filter: a callable that takes an event object as an argument and returns ``True`` if the event should pass, ``False`` if not :return: the event that was dispatched
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/event.py#L270-L287
asphalt-framework/asphalt
asphalt/core/event.py
Signal.connect
def connect(self, callback: Callable[[T_Event], Any]) -> Callable[[T_Event], Any]: """ Connect a callback to this signal. Each callable can only be connected once. Duplicate registrations are ignored. If you need to pass extra arguments to the callback, you can use :func:`functools.partial` to wrap the callable. :param callback: a callable that will receive an event object as its only argument. :return: the value of ``callback`` argument """ assert check_argument_types() if self.listeners is None: self.listeners = [] if callback not in self.listeners: self.listeners.append(callback) return callback
python
def connect(self, callback: Callable[[T_Event], Any]) -> Callable[[T_Event], Any]: """ Connect a callback to this signal. Each callable can only be connected once. Duplicate registrations are ignored. If you need to pass extra arguments to the callback, you can use :func:`functools.partial` to wrap the callable. :param callback: a callable that will receive an event object as its only argument. :return: the value of ``callback`` argument """ assert check_argument_types() if self.listeners is None: self.listeners = [] if callback not in self.listeners: self.listeners.append(callback) return callback
Connect a callback to this signal. Each callable can only be connected once. Duplicate registrations are ignored. If you need to pass extra arguments to the callback, you can use :func:`functools.partial` to wrap the callable. :param callback: a callable that will receive an event object as its only argument. :return: the value of ``callback`` argument
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/event.py#L106-L125
asphalt-framework/asphalt
asphalt/core/event.py
Signal.disconnect
def disconnect(self, callback: Callable) -> None: """ Disconnects the given callback. The callback will no longer receive events from this signal. No action is taken if the callback is not on the list of listener callbacks. :param callback: the callable to remove """ assert check_argument_types() try: if self.listeners is not None: self.listeners.remove(callback) except ValueError: pass
python
def disconnect(self, callback: Callable) -> None: """ Disconnects the given callback. The callback will no longer receive events from this signal. No action is taken if the callback is not on the list of listener callbacks. :param callback: the callable to remove """ assert check_argument_types() try: if self.listeners is not None: self.listeners.remove(callback) except ValueError: pass
Disconnects the given callback. The callback will no longer receive events from this signal. No action is taken if the callback is not on the list of listener callbacks. :param callback: the callable to remove
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/event.py#L127-L143
asphalt-framework/asphalt
asphalt/core/event.py
Signal.dispatch_raw
def dispatch_raw(self, event: Event) -> Awaitable[bool]: """ Dispatch the given event object to all listeners. Creates a new task in which all listener callbacks are called with the given event as the only argument. Coroutine callbacks are converted to their own respective tasks and waited for concurrently. Before the dispatching is done, a snapshot of the listeners is taken and the event is only dispatched to those listeners, so adding a listener between the call to this method and the actual dispatching will only affect future calls to this method. :param event: the event object to dispatch :returns: an awaitable that completes when all the callbacks have been called (and any awaitables waited on) and resolves to ``True`` if there were no exceptions raised by the callbacks, ``False`` otherwise """ async def do_dispatch() -> None: awaitables = [] all_successful = True for callback in listeners: try: retval = callback(event) except Exception: logger.exception('Uncaught exception in event listener') all_successful = False else: if isawaitable(retval): awaitables.append(retval) # For any callbacks that returned awaitables, wait for their completion and log any # exceptions they raised if awaitables: done, _ = await wait(awaitables, loop=loop) for f in done: exc = f.exception() if exc is not None: all_successful = False logger.error('Uncaught exception in event listener', exc_info=exc) if not future.cancelled(): future.set_result(all_successful) if not isinstance(event, self.event_class): raise TypeError('event must be of type {}'.format(qualified_name(self.event_class))) loop = get_event_loop() future = loop.create_future() if self.listeners: listeners = list(self.listeners) loop.create_task(do_dispatch()) else: future.set_result(True) return future
python
def dispatch_raw(self, event: Event) -> Awaitable[bool]: """ Dispatch the given event object to all listeners. Creates a new task in which all listener callbacks are called with the given event as the only argument. Coroutine callbacks are converted to their own respective tasks and waited for concurrently. Before the dispatching is done, a snapshot of the listeners is taken and the event is only dispatched to those listeners, so adding a listener between the call to this method and the actual dispatching will only affect future calls to this method. :param event: the event object to dispatch :returns: an awaitable that completes when all the callbacks have been called (and any awaitables waited on) and resolves to ``True`` if there were no exceptions raised by the callbacks, ``False`` otherwise """ async def do_dispatch() -> None: awaitables = [] all_successful = True for callback in listeners: try: retval = callback(event) except Exception: logger.exception('Uncaught exception in event listener') all_successful = False else: if isawaitable(retval): awaitables.append(retval) # For any callbacks that returned awaitables, wait for their completion and log any # exceptions they raised if awaitables: done, _ = await wait(awaitables, loop=loop) for f in done: exc = f.exception() if exc is not None: all_successful = False logger.error('Uncaught exception in event listener', exc_info=exc) if not future.cancelled(): future.set_result(all_successful) if not isinstance(event, self.event_class): raise TypeError('event must be of type {}'.format(qualified_name(self.event_class))) loop = get_event_loop() future = loop.create_future() if self.listeners: listeners = list(self.listeners) loop.create_task(do_dispatch()) else: future.set_result(True) return future
Dispatch the given event object to all listeners. Creates a new task in which all listener callbacks are called with the given event as the only argument. Coroutine callbacks are converted to their own respective tasks and waited for concurrently. Before the dispatching is done, a snapshot of the listeners is taken and the event is only dispatched to those listeners, so adding a listener between the call to this method and the actual dispatching will only affect future calls to this method. :param event: the event object to dispatch :returns: an awaitable that completes when all the callbacks have been called (and any awaitables waited on) and resolves to ``True`` if there were no exceptions raised by the callbacks, ``False`` otherwise
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/event.py#L145-L200
asphalt-framework/asphalt
asphalt/core/event.py
Signal.dispatch
def dispatch(self, *args, **kwargs) -> Awaitable[bool]: """ Create and dispatch an event. This method constructs an event object and then passes it to :meth:`dispatch_event` for the actual dispatching. :param args: positional arguments to the constructor of the associated event class :param kwargs: keyword arguments to the constructor of the associated event class :returns: an awaitable that completes when all the callbacks have been called (and any awaitables waited on) and resolves to ``True`` if there were no exceptions raised by the callbacks, ``False`` otherwise """ event = self.event_class(self.source(), cast(str, self.topic), *args, **kwargs) return self.dispatch_raw(event)
python
def dispatch(self, *args, **kwargs) -> Awaitable[bool]: """ Create and dispatch an event. This method constructs an event object and then passes it to :meth:`dispatch_event` for the actual dispatching. :param args: positional arguments to the constructor of the associated event class :param kwargs: keyword arguments to the constructor of the associated event class :returns: an awaitable that completes when all the callbacks have been called (and any awaitables waited on) and resolves to ``True`` if there were no exceptions raised by the callbacks, ``False`` otherwise """ event = self.event_class(self.source(), cast(str, self.topic), *args, **kwargs) return self.dispatch_raw(event)
Create and dispatch an event. This method constructs an event object and then passes it to :meth:`dispatch_event` for the actual dispatching. :param args: positional arguments to the constructor of the associated event class :param kwargs: keyword arguments to the constructor of the associated event class :returns: an awaitable that completes when all the callbacks have been called (and any awaitables waited on) and resolves to ``True`` if there were no exceptions raised by the callbacks, ``False`` otherwise
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/event.py#L202-L217
asphalt-framework/asphalt
asphalt/core/event.py
Signal.wait_event
def wait_event(self, filter: Callable[[T_Event], bool] = None) -> Awaitable[T_Event]: """Shortcut for calling :func:`wait_event` with this signal in the first argument.""" return wait_event([self], filter)
python
def wait_event(self, filter: Callable[[T_Event], bool] = None) -> Awaitable[T_Event]: """Shortcut for calling :func:`wait_event` with this signal in the first argument.""" return wait_event([self], filter)
Shortcut for calling :func:`wait_event` with this signal in the first argument.
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/event.py#L219-L221
asphalt-framework/asphalt
asphalt/core/event.py
Signal.stream_events
def stream_events(self, filter: Callable[[Event], bool] = None, *, max_queue_size: int = 0): """Shortcut for calling :func:`stream_events` with this signal in the first argument.""" return stream_events([self], filter, max_queue_size=max_queue_size)
python
def stream_events(self, filter: Callable[[Event], bool] = None, *, max_queue_size: int = 0): """Shortcut for calling :func:`stream_events` with this signal in the first argument.""" return stream_events([self], filter, max_queue_size=max_queue_size)
Shortcut for calling :func:`stream_events` with this signal in the first argument.
https://github.com/asphalt-framework/asphalt/blob/4114b3ac9743cbd9facb374a3f53e19d3afef22d/asphalt/core/event.py#L223-L225
samfoo/vt102
vt102/__init__.py
stream._escape_sequence
def _escape_sequence(self, char): """ Handle characters seen when in an escape sequence. Most non-vt52 commands start with a left-bracket after the escape and then a stream of parameters and a command. """ num = ord(char) if char == "[": self.state = "escape-lb" elif char == "(": self.state = "charset-g0" elif char == ")": self.state = "charset-g1" elif num in self.escape: self.dispatch(self.escape[num]) self.state = "stream" elif self.fail_on_unknown_esc: raise StreamProcessError("Unexpected character '%c' == '0x%02x'" % (char, ord(char)))
python
def _escape_sequence(self, char): """ Handle characters seen when in an escape sequence. Most non-vt52 commands start with a left-bracket after the escape and then a stream of parameters and a command. """ num = ord(char) if char == "[": self.state = "escape-lb" elif char == "(": self.state = "charset-g0" elif char == ")": self.state = "charset-g1" elif num in self.escape: self.dispatch(self.escape[num]) self.state = "stream" elif self.fail_on_unknown_esc: raise StreamProcessError("Unexpected character '%c' == '0x%02x'" % (char, ord(char)))
Handle characters seen when in an escape sequence. Most non-vt52 commands start with a left-bracket after the escape and then a stream of parameters and a command.
https://github.com/samfoo/vt102/blob/ff5be883bc9a880a422b09bb87b210d7c408cf2c/vt102/__init__.py#L189-L207
samfoo/vt102
vt102/__init__.py
stream._end_escape_sequence
def _end_escape_sequence(self, char): """ Handle the end of an escape sequence. The final character in an escape sequence is the command to execute, which corresponds to the event that is dispatched here. """ num = ord(char) if num in self.sequence: self.dispatch(self.sequence[num], *self.params) self.state = "stream" self.current_param = "" self.params = []
python
def _end_escape_sequence(self, char): """ Handle the end of an escape sequence. The final character in an escape sequence is the command to execute, which corresponds to the event that is dispatched here. """ num = ord(char) if num in self.sequence: self.dispatch(self.sequence[num], *self.params) self.state = "stream" self.current_param = "" self.params = []
Handle the end of an escape sequence. The final character in an escape sequence is the command to execute, which corresponds to the event that is dispatched here.
https://github.com/samfoo/vt102/blob/ff5be883bc9a880a422b09bb87b210d7c408cf2c/vt102/__init__.py#L209-L221
samfoo/vt102
vt102/__init__.py
stream._escape_parameters
def _escape_parameters(self, char): """ Parse parameters in an escape sequence. Parameters are a list of numbers in ascii (e.g. '12', '4', '42', etc) separated by a semicolon (e.g. "12;4;42"). See the [vt102 user guide](http://vt100.net/docs/vt102-ug/) for more details on the formatting of escape parameters. """ if char == ";": self.params.append(int(self.current_param)) self.current_param = "" elif char == "?": self.state = "mode" elif not char.isdigit(): if len(self.current_param) > 0: self.params.append(int(self.current_param)) # If we're in parameter parsing mode, but we see a non-numeric # value, it must be the end of the control sequence. self._end_escape_sequence(char) else: self.current_param += char
python
def _escape_parameters(self, char): """ Parse parameters in an escape sequence. Parameters are a list of numbers in ascii (e.g. '12', '4', '42', etc) separated by a semicolon (e.g. "12;4;42"). See the [vt102 user guide](http://vt100.net/docs/vt102-ug/) for more details on the formatting of escape parameters. """ if char == ";": self.params.append(int(self.current_param)) self.current_param = "" elif char == "?": self.state = "mode" elif not char.isdigit(): if len(self.current_param) > 0: self.params.append(int(self.current_param)) # If we're in parameter parsing mode, but we see a non-numeric # value, it must be the end of the control sequence. self._end_escape_sequence(char) else: self.current_param += char
Parse parameters in an escape sequence. Parameters are a list of numbers in ascii (e.g. '12', '4', '42', etc) separated by a semicolon (e.g. "12;4;42"). See the [vt102 user guide](http://vt100.net/docs/vt102-ug/) for more details on the formatting of escape parameters.
https://github.com/samfoo/vt102/blob/ff5be883bc9a880a422b09bb87b210d7c408cf2c/vt102/__init__.py#L223-L246
samfoo/vt102
vt102/__init__.py
stream._stream
def _stream(self, char): """ Process a character when in the default 'stream' state. """ num = ord(char) if num in self.basic: self.dispatch(self.basic[num]) elif num == ctrl.ESC: self.state = "escape" elif num == 0x00: # nulls are just ignored. pass else: self.dispatch("print", char)
python
def _stream(self, char): """ Process a character when in the default 'stream' state. """ num = ord(char) if num in self.basic: self.dispatch(self.basic[num]) elif num == ctrl.ESC: self.state = "escape" elif num == 0x00: # nulls are just ignored. pass else: self.dispatch("print", char)
Process a character when in the default 'stream' state.
https://github.com/samfoo/vt102/blob/ff5be883bc9a880a422b09bb87b210d7c408cf2c/vt102/__init__.py#L263-L278
samfoo/vt102
vt102/__init__.py
stream.consume
def consume(self, char): """ Consume a single character and advance the state as necessary. """ if self.state == "stream": self._stream(char) elif self.state == "escape": self._escape_sequence(char) elif self.state == "escape-lb": self._escape_parameters(char) elif self.state == "mode": self._mode(char) elif self.state == "charset-g0": self._charset_g0(char) elif self.state == "charset-g1": self._charset_g1(char)
python
def consume(self, char): """ Consume a single character and advance the state as necessary. """ if self.state == "stream": self._stream(char) elif self.state == "escape": self._escape_sequence(char) elif self.state == "escape-lb": self._escape_parameters(char) elif self.state == "mode": self._mode(char) elif self.state == "charset-g0": self._charset_g0(char) elif self.state == "charset-g1": self._charset_g1(char)
Consume a single character and advance the state as necessary.
https://github.com/samfoo/vt102/blob/ff5be883bc9a880a422b09bb87b210d7c408cf2c/vt102/__init__.py#L280-L296
samfoo/vt102
vt102/__init__.py
stream.process
def process(self, chars): """ Consume a string of and advance the state as necessary. """ while len(chars) > 0: self.consume(chars[0]) chars = chars[1:]
python
def process(self, chars): """ Consume a string of and advance the state as necessary. """ while len(chars) > 0: self.consume(chars[0]) chars = chars[1:]
Consume a string of and advance the state as necessary.
https://github.com/samfoo/vt102/blob/ff5be883bc9a880a422b09bb87b210d7c408cf2c/vt102/__init__.py#L298-L305
samfoo/vt102
vt102/__init__.py
stream.add_event_listener
def add_event_listener(self, event, function): """ Add an event listen for a particular event. Depending on the event there may or may not be parameters passed to function. Most escape streams also allow for an empty set of parameters (with a default value). Providing these default values and accepting variable arguments is the responsibility of function. More than one listener may be added for a single event. Each listener will be called. * **event** The event to listen for. * **function** The callable to invoke. """ if event not in self.listeners: self.listeners[event] = [] self.listeners[event].append(function)
python
def add_event_listener(self, event, function): """ Add an event listen for a particular event. Depending on the event there may or may not be parameters passed to function. Most escape streams also allow for an empty set of parameters (with a default value). Providing these default values and accepting variable arguments is the responsibility of function. More than one listener may be added for a single event. Each listener will be called. * **event** The event to listen for. * **function** The callable to invoke. """ if event not in self.listeners: self.listeners[event] = [] self.listeners[event].append(function)
Add an event listen for a particular event. Depending on the event there may or may not be parameters passed to function. Most escape streams also allow for an empty set of parameters (with a default value). Providing these default values and accepting variable arguments is the responsibility of function. More than one listener may be added for a single event. Each listener will be called. * **event** The event to listen for. * **function** The callable to invoke.
https://github.com/samfoo/vt102/blob/ff5be883bc9a880a422b09bb87b210d7c408cf2c/vt102/__init__.py#L307-L325
samfoo/vt102
vt102/__init__.py
stream.dispatch
def dispatch(self, event, *args): """ Dispatch an event where `args` is a tuple of the arguments to send to any callbacks. If any callback throws an exception, the subsequent callbacks will be aborted. """ for callback in self.listeners.get(event, []): if len(args) > 0: callback(*args) else: callback()
python
def dispatch(self, event, *args): """ Dispatch an event where `args` is a tuple of the arguments to send to any callbacks. If any callback throws an exception, the subsequent callbacks will be aborted. """ for callback in self.listeners.get(event, []): if len(args) > 0: callback(*args) else: callback()
Dispatch an event where `args` is a tuple of the arguments to send to any callbacks. If any callback throws an exception, the subsequent callbacks will be aborted.
https://github.com/samfoo/vt102/blob/ff5be883bc9a880a422b09bb87b210d7c408cf2c/vt102/__init__.py#L327-L338
samfoo/vt102
vt102/__init__.py
screen.attach
def attach(self, events): """ Attach this screen to a events that processes commands and dispatches events. Sets up the appropriate event handlers so that the screen will update itself automatically as the events processes data. """ if events is not None: events.add_event_listener("print", self._print) events.add_event_listener("backspace", self._backspace) events.add_event_listener("tab", self._tab) events.add_event_listener("linefeed", self._linefeed) events.add_event_listener("reverse-linefeed", self._reverse_linefeed) events.add_event_listener("carriage-return", self._carriage_return) events.add_event_listener("index", self._index) events.add_event_listener("reverse-index", self._reverse_index) events.add_event_listener("store-cursor", self._save_cursor) events.add_event_listener("restore-cursor", self._restore_cursor) events.add_event_listener("cursor-up", self._cursor_up) events.add_event_listener("cursor-down", self._cursor_down) events.add_event_listener("cursor-right", self._cursor_forward) events.add_event_listener("cursor-left", self._cursor_back) events.add_event_listener("cursor-move", self._cursor_position) events.add_event_listener("erase-in-line", self._erase_in_line) events.add_event_listener("erase-in-display", self._erase_in_display) events.add_event_listener("delete-characters", self._delete_character) events.add_event_listener("insert-lines", self._insert_line) events.add_event_listener("delete-lines", self._delete_line) events.add_event_listener("select-graphic-rendition", self._select_graphic_rendition) events.add_event_listener("charset-g0", self._charset_g0) events.add_event_listener("charset-g1", self._charset_g1) events.add_event_listener("shift-in", self._shift_in) events.add_event_listener("shift-out", self._shift_out) events.add_event_listener("bell", self._bell)
python
def attach(self, events): """ Attach this screen to a events that processes commands and dispatches events. Sets up the appropriate event handlers so that the screen will update itself automatically as the events processes data. """ if events is not None: events.add_event_listener("print", self._print) events.add_event_listener("backspace", self._backspace) events.add_event_listener("tab", self._tab) events.add_event_listener("linefeed", self._linefeed) events.add_event_listener("reverse-linefeed", self._reverse_linefeed) events.add_event_listener("carriage-return", self._carriage_return) events.add_event_listener("index", self._index) events.add_event_listener("reverse-index", self._reverse_index) events.add_event_listener("store-cursor", self._save_cursor) events.add_event_listener("restore-cursor", self._restore_cursor) events.add_event_listener("cursor-up", self._cursor_up) events.add_event_listener("cursor-down", self._cursor_down) events.add_event_listener("cursor-right", self._cursor_forward) events.add_event_listener("cursor-left", self._cursor_back) events.add_event_listener("cursor-move", self._cursor_position) events.add_event_listener("erase-in-line", self._erase_in_line) events.add_event_listener("erase-in-display", self._erase_in_display) events.add_event_listener("delete-characters", self._delete_character) events.add_event_listener("insert-lines", self._insert_line) events.add_event_listener("delete-lines", self._delete_line) events.add_event_listener("select-graphic-rendition", self._select_graphic_rendition) events.add_event_listener("charset-g0", self._charset_g0) events.add_event_listener("charset-g1", self._charset_g1) events.add_event_listener("shift-in", self._shift_in) events.add_event_listener("shift-out", self._shift_out) events.add_event_listener("bell", self._bell)
Attach this screen to a events that processes commands and dispatches events. Sets up the appropriate event handlers so that the screen will update itself automatically as the events processes data.
https://github.com/samfoo/vt102/blob/ff5be883bc9a880a422b09bb87b210d7c408cf2c/vt102/__init__.py#L397-L434
samfoo/vt102
vt102/__init__.py
screen.resize
def resize(self, shape): """ Resize the screen. If the requested screen size has more rows than the existing screen, rows will be added at the bottom. If the requested size has less rows than the existing screen rows will be clipped at the top of the screen. Similarly if the existing screen has less columns than the requested size, columns will be added at the right, and it it has more, columns will be clipped at the right. """ rows, cols = shape # Honestly though, you can't trust anyone these days... assert(rows > 0 and cols > 0) # First resize the rows if self.size[0] < rows: # If the current display size is shorter than the requested screen # size, then add rows to the bottom. Note that the old column size # is used here so these new rows will get expanded/contracted as # necessary by the column resize when it happens next. self.display += [u" " * self.size[1]] * (rows - self.size[0]) self.attributes += [[self.default_attributes] * self.size[1]] * \ (rows - self.size[0]) elif self.size[0] > rows: # If the current display size is taller than the requested display, # then take rows off the top. self.display = self.display[self.size[0]-rows:] self.attributes = self.attributes[self.size[0]-rows:] # Next, of course, resize the columns. if self.size[1] < cols: # If the current display size is thinner than the requested size, # expand each row to be the new size. self.display = \ [row + (u" " * (cols - self.size[1])) for row in self.display] self.attributes = \ [row + ([self.default_attributes] * (cols - self.size[1])) for row in self.attributes] elif self.size[1] > cols: # If the current display size is fatter than the requested size, # then trim each row from the right to be the new size. self.display = [row[:cols-self.size[1]] for row in self.display] self.attributes = [row[:cols-self.size[1]] for row in self.attributes] self.size = (rows, cols) return self.size
python
def resize(self, shape): """ Resize the screen. If the requested screen size has more rows than the existing screen, rows will be added at the bottom. If the requested size has less rows than the existing screen rows will be clipped at the top of the screen. Similarly if the existing screen has less columns than the requested size, columns will be added at the right, and it it has more, columns will be clipped at the right. """ rows, cols = shape # Honestly though, you can't trust anyone these days... assert(rows > 0 and cols > 0) # First resize the rows if self.size[0] < rows: # If the current display size is shorter than the requested screen # size, then add rows to the bottom. Note that the old column size # is used here so these new rows will get expanded/contracted as # necessary by the column resize when it happens next. self.display += [u" " * self.size[1]] * (rows - self.size[0]) self.attributes += [[self.default_attributes] * self.size[1]] * \ (rows - self.size[0]) elif self.size[0] > rows: # If the current display size is taller than the requested display, # then take rows off the top. self.display = self.display[self.size[0]-rows:] self.attributes = self.attributes[self.size[0]-rows:] # Next, of course, resize the columns. if self.size[1] < cols: # If the current display size is thinner than the requested size, # expand each row to be the new size. self.display = \ [row + (u" " * (cols - self.size[1])) for row in self.display] self.attributes = \ [row + ([self.default_attributes] * (cols - self.size[1])) for row in self.attributes] elif self.size[1] > cols: # If the current display size is fatter than the requested size, # then trim each row from the right to be the new size. self.display = [row[:cols-self.size[1]] for row in self.display] self.attributes = [row[:cols-self.size[1]] for row in self.attributes] self.size = (rows, cols) return self.size
Resize the screen. If the requested screen size has more rows than the existing screen, rows will be added at the bottom. If the requested size has less rows than the existing screen rows will be clipped at the top of the screen. Similarly if the existing screen has less columns than the requested size, columns will be added at the right, and it it has more, columns will be clipped at the right.
https://github.com/samfoo/vt102/blob/ff5be883bc9a880a422b09bb87b210d7c408cf2c/vt102/__init__.py#L442-L488
samfoo/vt102
vt102/__init__.py
screen._print
def _print(self, char): """ Print a character at the current cursor position and advance the cursor. """ # Don't make bugs where we try to print a screen. assert len(char) == 1 try: try: # Python 3 char = self.decoder(bytes(char, self.encoding))[0] except TypeError: # Python 2.x char = self.decoder(char)[0] except UnicodeDecodeError: char = "?" if self.current_charset == "g0" and self.g0 is not None: char = char.translate(self.g0) elif self.current_charset == "g1" and self.g1 is not None: char = char.translate(self.g1) row = self.display[self.y] self.display[self.y] = row[:self.x] + char + row[self.x+1:] attrs = self.attributes[self.y] self.attributes[self.y] = attrs[:self.x] + [self.cursor_attributes] + \ attrs[self.x+1:] self.x += 1 if self.x >= self.size[1]: # If this was the last column in a row, move the cursor to the # next row. self._linefeed()
python
def _print(self, char): """ Print a character at the current cursor position and advance the cursor. """ # Don't make bugs where we try to print a screen. assert len(char) == 1 try: try: # Python 3 char = self.decoder(bytes(char, self.encoding))[0] except TypeError: # Python 2.x char = self.decoder(char)[0] except UnicodeDecodeError: char = "?" if self.current_charset == "g0" and self.g0 is not None: char = char.translate(self.g0) elif self.current_charset == "g1" and self.g1 is not None: char = char.translate(self.g1) row = self.display[self.y] self.display[self.y] = row[:self.x] + char + row[self.x+1:] attrs = self.attributes[self.y] self.attributes[self.y] = attrs[:self.x] + [self.cursor_attributes] + \ attrs[self.x+1:] self.x += 1 if self.x >= self.size[1]: # If this was the last column in a row, move the cursor to the # next row. self._linefeed()
Print a character at the current cursor position and advance the cursor.
https://github.com/samfoo/vt102/blob/ff5be883bc9a880a422b09bb87b210d7c408cf2c/vt102/__init__.py#L513-L550
samfoo/vt102
vt102/__init__.py
screen._index
def _index(self): """ Move the cursor down one row in the same column. If the cursor is at the last row, create a new row at the bottom. """ if self.y + 1 >= self.size[0]: # If the cursor is currently on the last row, then spawn another # and scroll down (removing the top row). self.display = self.display[1:] + [u" " * self.size[1]] else: # If the cursor is anywhere else, then just move it to the # next line. self.y += 1
python
def _index(self): """ Move the cursor down one row in the same column. If the cursor is at the last row, create a new row at the bottom. """ if self.y + 1 >= self.size[0]: # If the cursor is currently on the last row, then spawn another # and scroll down (removing the top row). self.display = self.display[1:] + [u" " * self.size[1]] else: # If the cursor is anywhere else, then just move it to the # next line. self.y += 1
Move the cursor down one row in the same column. If the cursor is at the last row, create a new row at the bottom.
https://github.com/samfoo/vt102/blob/ff5be883bc9a880a422b09bb87b210d7c408cf2c/vt102/__init__.py#L559-L572
samfoo/vt102
vt102/__init__.py
screen._reverse_index
def _reverse_index(self): """ Move the cursor up one row in the same column. If the cursor is at the first row, create a new row at the top. """ if self.y == 0: # If the cursor is currently at the first row, then scroll the # screen up. self.display = [u" " * self.size[1]] + self.display[:-1] else: # If the cursor is anywhere other than the first row than just move # it up by one row. self.y -= 1
python
def _reverse_index(self): """ Move the cursor up one row in the same column. If the cursor is at the first row, create a new row at the top. """ if self.y == 0: # If the cursor is currently at the first row, then scroll the # screen up. self.display = [u" " * self.size[1]] + self.display[:-1] else: # If the cursor is anywhere other than the first row than just move # it up by one row. self.y -= 1
Move the cursor up one row in the same column. If the cursor is at the first row, create a new row at the top.
https://github.com/samfoo/vt102/blob/ff5be883bc9a880a422b09bb87b210d7c408cf2c/vt102/__init__.py#L574-L586
samfoo/vt102
vt102/__init__.py
screen._next_tab_stop
def _next_tab_stop(self): """ Return the x value of the next available tabstop or the x value of the margin if there are no more tabstops. """ for stop in sorted(self.tabstops): if self.x < stop: return stop return self.size[1] - 1
python
def _next_tab_stop(self): """ Return the x value of the next available tabstop or the x value of the margin if there are no more tabstops. """ for stop in sorted(self.tabstops): if self.x < stop: return stop return self.size[1] - 1
Return the x value of the next available tabstop or the x value of the margin if there are no more tabstops.
https://github.com/samfoo/vt102/blob/ff5be883bc9a880a422b09bb87b210d7c408cf2c/vt102/__init__.py#L604-L613
samfoo/vt102
vt102/__init__.py
screen._restore_cursor
def _restore_cursor(self): """ Set the current cursor position to whatever cursor is on top of the stack. """ if len(self.cursor_save_stack): self.x, self.y = self.cursor_save_stack.pop()
python
def _restore_cursor(self): """ Set the current cursor position to whatever cursor is on top of the stack. """ if len(self.cursor_save_stack): self.x, self.y = self.cursor_save_stack.pop()
Set the current cursor position to whatever cursor is on top of the stack.
https://github.com/samfoo/vt102/blob/ff5be883bc9a880a422b09bb87b210d7c408cf2c/vt102/__init__.py#L637-L644
samfoo/vt102
vt102/__init__.py
screen._insert_line
def _insert_line(self, count=1): """ Inserts lines at line with cursor. Lines displayed below cursor move down. Lines moved past the bottom margin are lost. """ trimmed = self.display[:self.y+1] + \ [u" " * self.size[1]] * count + \ self.display[self.y+1:self.y+count+1] self.display = trimmed[:self.size[0]]
python
def _insert_line(self, count=1): """ Inserts lines at line with cursor. Lines displayed below cursor move down. Lines moved past the bottom margin are lost. """ trimmed = self.display[:self.y+1] + \ [u" " * self.size[1]] * count + \ self.display[self.y+1:self.y+count+1] self.display = trimmed[:self.size[0]]
Inserts lines at line with cursor. Lines displayed below cursor move down. Lines moved past the bottom margin are lost.
https://github.com/samfoo/vt102/blob/ff5be883bc9a880a422b09bb87b210d7c408cf2c/vt102/__init__.py#L646-L654
samfoo/vt102
vt102/__init__.py
screen._delete_line
def _delete_line(self, count=1): """ Deletes count lines, starting at line with cursor. As lines are deleted, lines displayed below cursor move up. Lines added to bottom of screen have spaces with same character attributes as last line moved up. """ self.display = self.display[:self.y] + \ self.display[self.y+1:] self.display.append([u" " * self.size[1]] * count) self.attributes = self.attributes[:self.y] + \ self.attributes[self.y+1:] last_attributes = self.attributes[-1] for _ in xrange(count): self.attributes.append(copy(last_attributes))
python
def _delete_line(self, count=1): """ Deletes count lines, starting at line with cursor. As lines are deleted, lines displayed below cursor move up. Lines added to bottom of screen have spaces with same character attributes as last line moved up. """ self.display = self.display[:self.y] + \ self.display[self.y+1:] self.display.append([u" " * self.size[1]] * count) self.attributes = self.attributes[:self.y] + \ self.attributes[self.y+1:] last_attributes = self.attributes[-1] for _ in xrange(count): self.attributes.append(copy(last_attributes))
Deletes count lines, starting at line with cursor. As lines are deleted, lines displayed below cursor move up. Lines added to bottom of screen have spaces with same character attributes as last line moved up.
https://github.com/samfoo/vt102/blob/ff5be883bc9a880a422b09bb87b210d7c408cf2c/vt102/__init__.py#L656-L670
samfoo/vt102
vt102/__init__.py
screen._delete_character
def _delete_character(self, count=1): """ Deletes count characters, starting with the character at cursor position. When a character is deleted, all characters to the right of cursor move left. """ # First resize the text display row = self.display[self.y] count = min(count, self.size[1] - self.x) row = row[:self.x] + row[self.x+count:] + u" " * count self.display[self.y] = row # Then resize the attribute array too attrs = self.attributes[self.y] attrs = attrs[:self.x] + attrs[self.x+count:] + [self.default_attributes] * count self.attributes[self.y] = attrs
python
def _delete_character(self, count=1): """ Deletes count characters, starting with the character at cursor position. When a character is deleted, all characters to the right of cursor move left. """ # First resize the text display row = self.display[self.y] count = min(count, self.size[1] - self.x) row = row[:self.x] + row[self.x+count:] + u" " * count self.display[self.y] = row # Then resize the attribute array too attrs = self.attributes[self.y] attrs = attrs[:self.x] + attrs[self.x+count:] + [self.default_attributes] * count self.attributes[self.y] = attrs
Deletes count characters, starting with the character at cursor position. When a character is deleted, all characters to the right of cursor move left.
https://github.com/samfoo/vt102/blob/ff5be883bc9a880a422b09bb87b210d7c408cf2c/vt102/__init__.py#L672-L688
samfoo/vt102
vt102/__init__.py
screen._erase_in_line
def _erase_in_line(self, type_of=0): """ Erases the row in a specific way, depending on the type_of. """ row = self.display[self.y] attrs = self.attributes[self.y] if type_of == 0: # Erase from the cursor to the end of line, including the cursor row = row[:self.x] + u" " * (self.size[1] - self.x) attrs = attrs[:self.x] + [self.default_attributes] * (self.size[1] - self.x) elif type_of == 1: # Erase from the beginning of the line to the cursor, including it row = u" " * (self.x+1) + row[self.x+1:] attrs = [self.default_attributes] * (self.x+1) + attrs[self.x+1:] elif type_of == 2: # Erase the entire line. row = u" " * self.size[1] attrs = [self.default_attributes] * self.size[1] self.display[self.y] = row self.attributes[self.y] = attrs
python
def _erase_in_line(self, type_of=0): """ Erases the row in a specific way, depending on the type_of. """ row = self.display[self.y] attrs = self.attributes[self.y] if type_of == 0: # Erase from the cursor to the end of line, including the cursor row = row[:self.x] + u" " * (self.size[1] - self.x) attrs = attrs[:self.x] + [self.default_attributes] * (self.size[1] - self.x) elif type_of == 1: # Erase from the beginning of the line to the cursor, including it row = u" " * (self.x+1) + row[self.x+1:] attrs = [self.default_attributes] * (self.x+1) + attrs[self.x+1:] elif type_of == 2: # Erase the entire line. row = u" " * self.size[1] attrs = [self.default_attributes] * self.size[1] self.display[self.y] = row self.attributes[self.y] = attrs
Erases the row in a specific way, depending on the type_of.
https://github.com/samfoo/vt102/blob/ff5be883bc9a880a422b09bb87b210d7c408cf2c/vt102/__init__.py#L690-L711
samfoo/vt102
vt102/__init__.py
screen._cursor_down
def _cursor_down(self, count=1): """ Moves cursor down count lines in same column. Cursor stops at bottom margin. """ self.y = min(self.size[0] - 1, self.y + count)
python
def _cursor_down(self, count=1): """ Moves cursor down count lines in same column. Cursor stops at bottom margin. """ self.y = min(self.size[0] - 1, self.y + count)
Moves cursor down count lines in same column. Cursor stops at bottom margin.
https://github.com/samfoo/vt102/blob/ff5be883bc9a880a422b09bb87b210d7c408cf2c/vt102/__init__.py#L765-L770
samfoo/vt102
vt102/__init__.py
screen._cursor_forward
def _cursor_forward(self, count=1): """ Moves cursor right count columns. Cursor stops at right margin. """ self.x = min(self.size[1] - 1, self.x + count)
python
def _cursor_forward(self, count=1): """ Moves cursor right count columns. Cursor stops at right margin. """ self.x = min(self.size[1] - 1, self.x + count)
Moves cursor right count columns. Cursor stops at right margin.
https://github.com/samfoo/vt102/blob/ff5be883bc9a880a422b09bb87b210d7c408cf2c/vt102/__init__.py#L778-L782
samfoo/vt102
vt102/__init__.py
screen._cursor_position
def _cursor_position(self, row=0, column=0): """ Set the cursor to a specific row and column. Obnoxiously row/column is 1 based, instead of zero based, so we need to compensate. I know I've created bugs in here somehow. Confoundingly, inputs of 0 are still acceptable, and should move to the beginning of the row/column as if they were 1. *sigh* """ if row == 0: row = 1 if column == 0: column = 1 self.y = min(row - 1, self.size[0] - 1) self.x = min(column - 1, self.size[1] - 1)
python
def _cursor_position(self, row=0, column=0): """ Set the cursor to a specific row and column. Obnoxiously row/column is 1 based, instead of zero based, so we need to compensate. I know I've created bugs in here somehow. Confoundingly, inputs of 0 are still acceptable, and should move to the beginning of the row/column as if they were 1. *sigh* """ if row == 0: row = 1 if column == 0: column = 1 self.y = min(row - 1, self.size[0] - 1) self.x = min(column - 1, self.size[1] - 1)
Set the cursor to a specific row and column. Obnoxiously row/column is 1 based, instead of zero based, so we need to compensate. I know I've created bugs in here somehow. Confoundingly, inputs of 0 are still acceptable, and should move to the beginning of the row/column as if they were 1. *sigh*
https://github.com/samfoo/vt102/blob/ff5be883bc9a880a422b09bb87b210d7c408cf2c/vt102/__init__.py#L784-L800
samfoo/vt102
vt102/__init__.py
screen._text_attr
def _text_attr(self, attr): """ Given a text attribute, set the current cursor appropriately. """ attr = text[attr] if attr == "reset": self.cursor_attributes = self.default_attributes elif attr == "underline-off": self.cursor_attributes = self._remove_text_attr("underline") elif attr == "blink-off": self.cursor_attributes = self._remove_text_attr("blink") elif attr == "reverse-off": self.cursor_attributes = self._remove_text_attr("reverse") else: self.cursor_attributes = self._add_text_attr(attr)
python
def _text_attr(self, attr): """ Given a text attribute, set the current cursor appropriately. """ attr = text[attr] if attr == "reset": self.cursor_attributes = self.default_attributes elif attr == "underline-off": self.cursor_attributes = self._remove_text_attr("underline") elif attr == "blink-off": self.cursor_attributes = self._remove_text_attr("blink") elif attr == "reverse-off": self.cursor_attributes = self._remove_text_attr("reverse") else: self.cursor_attributes = self._add_text_attr(attr)
Given a text attribute, set the current cursor appropriately.
https://github.com/samfoo/vt102/blob/ff5be883bc9a880a422b09bb87b210d7c408cf2c/vt102/__init__.py#L820-L834
samfoo/vt102
vt102/__init__.py
screen._color_attr
def _color_attr(self, ground, attr): """ Given a color attribute, set the current cursor appropriately. """ attr = colors[ground][attr] attrs = self.cursor_attributes if ground == "foreground": self.cursor_attributes = (attrs[0], attr, attrs[2]) elif ground == "background": self.cursor_attributes = (attrs[0], attrs[1], attr)
python
def _color_attr(self, ground, attr): """ Given a color attribute, set the current cursor appropriately. """ attr = colors[ground][attr] attrs = self.cursor_attributes if ground == "foreground": self.cursor_attributes = (attrs[0], attr, attrs[2]) elif ground == "background": self.cursor_attributes = (attrs[0], attrs[1], attr)
Given a color attribute, set the current cursor appropriately.
https://github.com/samfoo/vt102/blob/ff5be883bc9a880a422b09bb87b210d7c408cf2c/vt102/__init__.py#L836-L845
samfoo/vt102
vt102/__init__.py
screen._set_attr
def _set_attr(self, attr): """ Given some text attribute, set the current cursor attributes appropriately. """ if attr in text: self._text_attr(attr) elif attr in colors["foreground"]: self._color_attr("foreground", attr) elif attr in colors["background"]: self._color_attr("background", attr)
python
def _set_attr(self, attr): """ Given some text attribute, set the current cursor attributes appropriately. """ if attr in text: self._text_attr(attr) elif attr in colors["foreground"]: self._color_attr("foreground", attr) elif attr in colors["background"]: self._color_attr("background", attr)
Given some text attribute, set the current cursor attributes appropriately.
https://github.com/samfoo/vt102/blob/ff5be883bc9a880a422b09bb87b210d7c408cf2c/vt102/__init__.py#L847-L857
samfoo/vt102
vt102/__init__.py
screen._select_graphic_rendition
def _select_graphic_rendition(self, *attrs): """ Set the current text attribute. """ if len(attrs) == 0: # No arguments means that we're really trying to do a reset. attrs = [0] for attr in attrs: self._set_attr(attr)
python
def _select_graphic_rendition(self, *attrs): """ Set the current text attribute. """ if len(attrs) == 0: # No arguments means that we're really trying to do a reset. attrs = [0] for attr in attrs: self._set_attr(attr)
Set the current text attribute.
https://github.com/samfoo/vt102/blob/ff5be883bc9a880a422b09bb87b210d7c408cf2c/vt102/__init__.py#L859-L869
gmr/flatdict
flatdict.py
FlatDict.as_dict
def as_dict(self): """Return the :class:`~flatdict.FlatDict` as a :class:`dict` :rtype: dict """ out = dict({}) for key in self.keys(): if self._has_delimiter(key): pk, ck = key.split(self._delimiter, 1) if self._has_delimiter(ck): ck = ck.split(self._delimiter, 1)[0] if isinstance(self._values[pk], FlatDict) and pk not in out: out[pk] = dict() if isinstance(self._values[pk][ck], FlatDict): out[pk][ck] = self._values[pk][ck].as_dict() else: out[pk][ck] = self._values[pk][ck] else: out[key] = self._values[key] return out
python
def as_dict(self): """Return the :class:`~flatdict.FlatDict` as a :class:`dict` :rtype: dict """ out = dict({}) for key in self.keys(): if self._has_delimiter(key): pk, ck = key.split(self._delimiter, 1) if self._has_delimiter(ck): ck = ck.split(self._delimiter, 1)[0] if isinstance(self._values[pk], FlatDict) and pk not in out: out[pk] = dict() if isinstance(self._values[pk][ck], FlatDict): out[pk][ck] = self._values[pk][ck].as_dict() else: out[pk][ck] = self._values[pk][ck] else: out[key] = self._values[key] return out
Return the :class:`~flatdict.FlatDict` as a :class:`dict` :rtype: dict
https://github.com/gmr/flatdict/blob/40bfa64972b2dc148643116db786aa106e7d7d56/flatdict.py#L168-L188
gmr/flatdict
flatdict.py
FlatDict.keys
def keys(self): """Return a copy of the flat dictionary's list of keys. See the note for :meth:`flatdict.FlatDict.items`. :rtype: list """ keys = [] for key, value in self._values.items(): if isinstance(value, (FlatDict, dict)): nested = [self._delimiter.join([key, k]) for k in value.keys()] keys += nested if nested else [key] else: keys.append(key) return sorted(keys)
python
def keys(self): """Return a copy of the flat dictionary's list of keys. See the note for :meth:`flatdict.FlatDict.items`. :rtype: list """ keys = [] for key, value in self._values.items(): if isinstance(value, (FlatDict, dict)): nested = [self._delimiter.join([key, k]) for k in value.keys()] keys += nested if nested else [key] else: keys.append(key) return sorted(keys)
Return a copy of the flat dictionary's list of keys. See the note for :meth:`flatdict.FlatDict.items`. :rtype: list
https://github.com/gmr/flatdict/blob/40bfa64972b2dc148643116db786aa106e7d7d56/flatdict.py#L276-L290
gmr/flatdict
flatdict.py
FlatDict.pop
def pop(self, key, default=NO_DEFAULT): """If key is in the flat dictionary, remove it and return its value, else return default. If default is not given and key is not in the dictionary, :exc:`KeyError` is raised. :param mixed key: The key name :param mixed default: The default value :rtype: mixed """ if key not in self and default != NO_DEFAULT: return default value = self[key] self.__delitem__(key) return value
python
def pop(self, key, default=NO_DEFAULT): """If key is in the flat dictionary, remove it and return its value, else return default. If default is not given and key is not in the dictionary, :exc:`KeyError` is raised. :param mixed key: The key name :param mixed default: The default value :rtype: mixed """ if key not in self and default != NO_DEFAULT: return default value = self[key] self.__delitem__(key) return value
If key is in the flat dictionary, remove it and return its value, else return default. If default is not given and key is not in the dictionary, :exc:`KeyError` is raised. :param mixed key: The key name :param mixed default: The default value :rtype: mixed
https://github.com/gmr/flatdict/blob/40bfa64972b2dc148643116db786aa106e7d7d56/flatdict.py#L292-L306
gmr/flatdict
flatdict.py
FlatDict.setdefault
def setdefault(self, key, default): """If key is in the flat dictionary, return its value. If not, insert key with a value of default and return default. default defaults to ``None``. :param mixed key: The key name :param mixed default: The default value :rtype: mixed """ if key not in self or not self.__getitem__(key): self.__setitem__(key, default) return self.__getitem__(key)
python
def setdefault(self, key, default): """If key is in the flat dictionary, return its value. If not, insert key with a value of default and return default. default defaults to ``None``. :param mixed key: The key name :param mixed default: The default value :rtype: mixed """ if key not in self or not self.__getitem__(key): self.__setitem__(key, default) return self.__getitem__(key)
If key is in the flat dictionary, return its value. If not, insert key with a value of default and return default. default defaults to ``None``. :param mixed key: The key name :param mixed default: The default value :rtype: mixed
https://github.com/gmr/flatdict/blob/40bfa64972b2dc148643116db786aa106e7d7d56/flatdict.py#L308-L320
gmr/flatdict
flatdict.py
FlatDict.set_delimiter
def set_delimiter(self, delimiter): """Override the default or passed in delimiter with a new value. If the requested delimiter already exists in a key, a :exc:`ValueError` will be raised. :param str delimiter: The delimiter to use :raises: ValueError """ for key in self.keys(): if delimiter in key: raise ValueError('Key {!r} collides with delimiter {!r}', key, delimiter) self._delimiter = delimiter for key in self._values.keys(): if isinstance(self._values[key], FlatDict): self._values[key].set_delimiter(delimiter)
python
def set_delimiter(self, delimiter): """Override the default or passed in delimiter with a new value. If the requested delimiter already exists in a key, a :exc:`ValueError` will be raised. :param str delimiter: The delimiter to use :raises: ValueError """ for key in self.keys(): if delimiter in key: raise ValueError('Key {!r} collides with delimiter {!r}', key, delimiter) self._delimiter = delimiter for key in self._values.keys(): if isinstance(self._values[key], FlatDict): self._values[key].set_delimiter(delimiter)
Override the default or passed in delimiter with a new value. If the requested delimiter already exists in a key, a :exc:`ValueError` will be raised. :param str delimiter: The delimiter to use :raises: ValueError
https://github.com/gmr/flatdict/blob/40bfa64972b2dc148643116db786aa106e7d7d56/flatdict.py#L322-L338
gmr/flatdict
flatdict.py
FlatterDict.as_dict
def as_dict(self): """Return the :class:`~flatdict.FlatterDict` as a nested :class:`dict`. :rtype: dict """ out = dict({}) for key in self.keys(): if self._has_delimiter(key): pk, ck = key.split(self._delimiter, 1) if self._has_delimiter(ck): ck = ck.split(self._delimiter, 1)[0] if isinstance(self._values[pk], FlatterDict) and pk not in out: out[pk] = dict() if isinstance(self._values[pk][ck], FlatterDict): if self._values[pk][ck].original_type == tuple: out[pk][ck] = tuple(self._child_as_list(pk, ck)) elif self._values[pk][ck].original_type == list: out[pk][ck] = self._child_as_list(pk, ck) elif self._values[pk][ck].original_type == set: out[pk][ck] = set(self._child_as_list(pk, ck)) elif self._values[pk][ck].original_type == dict: out[pk][ck] = self._values[pk][ck].as_dict() else: out[pk][ck] = self._values[pk][ck] else: out[key] = self._values[key] return out
python
def as_dict(self): """Return the :class:`~flatdict.FlatterDict` as a nested :class:`dict`. :rtype: dict """ out = dict({}) for key in self.keys(): if self._has_delimiter(key): pk, ck = key.split(self._delimiter, 1) if self._has_delimiter(ck): ck = ck.split(self._delimiter, 1)[0] if isinstance(self._values[pk], FlatterDict) and pk not in out: out[pk] = dict() if isinstance(self._values[pk][ck], FlatterDict): if self._values[pk][ck].original_type == tuple: out[pk][ck] = tuple(self._child_as_list(pk, ck)) elif self._values[pk][ck].original_type == list: out[pk][ck] = self._child_as_list(pk, ck) elif self._values[pk][ck].original_type == set: out[pk][ck] = set(self._child_as_list(pk, ck)) elif self._values[pk][ck].original_type == dict: out[pk][ck] = self._values[pk][ck].as_dict() else: out[pk][ck] = self._values[pk][ck] else: out[key] = self._values[key] return out
Return the :class:`~flatdict.FlatterDict` as a nested :class:`dict`. :rtype: dict
https://github.com/gmr/flatdict/blob/40bfa64972b2dc148643116db786aa106e7d7d56/flatdict.py#L420-L448
gmr/flatdict
flatdict.py
FlatterDict._child_as_list
def _child_as_list(self, pk, ck): """Returns a list of values from the child FlatterDict instance with string based integer keys. :param str pk: The parent key :param str ck: The child key :rtype: list """ return [self._values[pk][ck][k] for k in sorted(self._values[pk][ck].keys(), key=lambda x: int(x))]
python
def _child_as_list(self, pk, ck): """Returns a list of values from the child FlatterDict instance with string based integer keys. :param str pk: The parent key :param str ck: The child key :rtype: list """ return [self._values[pk][ck][k] for k in sorted(self._values[pk][ck].keys(), key=lambda x: int(x))]
Returns a list of values from the child FlatterDict instance with string based integer keys. :param str pk: The parent key :param str ck: The child key :rtype: list
https://github.com/gmr/flatdict/blob/40bfa64972b2dc148643116db786aa106e7d7d56/flatdict.py#L450-L461
eugene-eeo/graphlite
graphlite/query.py
V.gen_query
def gen_query(self): """ Generate an SQL query for the edge object. """ return ( SQL.forwards_relation(self.src, self.rel) if self.dst is None else SQL.inverse_relation(self.dst, self.rel) )
python
def gen_query(self): """ Generate an SQL query for the edge object. """ return ( SQL.forwards_relation(self.src, self.rel) if self.dst is None else SQL.inverse_relation(self.dst, self.rel) )
Generate an SQL query for the edge object.
https://github.com/eugene-eeo/graphlite/blob/8d17e9549ee8610570dcde1b427431a2584395b7/graphlite/query.py#L43-L50
eugene-eeo/graphlite
graphlite/query.py
Query.derived
def derived(self, statement, params=(), replace=False): """ Returns a new query object set up correctly with the *statement* and *params* appended to the end of the new instance's internal query and params, along with the current instance's connection. :param statement: The SQL query string to append. :param params: The parameters to append. :param replace: Whether to replace the entire SQL query. """ return Query( db=self.db, sql=(statement,) if replace else self.sql + (statement,), params=self.params + params, )
python
def derived(self, statement, params=(), replace=False): """ Returns a new query object set up correctly with the *statement* and *params* appended to the end of the new instance's internal query and params, along with the current instance's connection. :param statement: The SQL query string to append. :param params: The parameters to append. :param replace: Whether to replace the entire SQL query. """ return Query( db=self.db, sql=(statement,) if replace else self.sql + (statement,), params=self.params + params, )
Returns a new query object set up correctly with the *statement* and *params* appended to the end of the new instance's internal query and params, along with the current instance's connection. :param statement: The SQL query string to append. :param params: The parameters to append. :param replace: Whether to replace the entire SQL query.
https://github.com/eugene-eeo/graphlite/blob/8d17e9549ee8610570dcde1b427431a2584395b7/graphlite/query.py#L87-L103
eugene-eeo/graphlite
graphlite/query.py
Query.traverse
def traverse(self, edge): """ Traverse the graph, and selecting the destination nodes for a particular relation that the selected nodes are a source of, i.e. select the friends of my friends. You can traverse indefinitely. :param edge: The edge query. If the edge's destination node is specified then the source nodes will be selected. """ query = self.statement rel, dst = edge.rel, edge.dst statement, params = ( SQL.compound_fwd_query(query, rel) if dst is None else SQL.compound_inv_query(query, rel, dst) ) return self.derived(statement, params, replace=True)
python
def traverse(self, edge): """ Traverse the graph, and selecting the destination nodes for a particular relation that the selected nodes are a source of, i.e. select the friends of my friends. You can traverse indefinitely. :param edge: The edge query. If the edge's destination node is specified then the source nodes will be selected. """ query = self.statement rel, dst = edge.rel, edge.dst statement, params = ( SQL.compound_fwd_query(query, rel) if dst is None else SQL.compound_inv_query(query, rel, dst) ) return self.derived(statement, params, replace=True)
Traverse the graph, and selecting the destination nodes for a particular relation that the selected nodes are a source of, i.e. select the friends of my friends. You can traverse indefinitely. :param edge: The edge query. If the edge's destination node is specified then the source nodes will be selected.
https://github.com/eugene-eeo/graphlite/blob/8d17e9549ee8610570dcde1b427431a2584395b7/graphlite/query.py#L117-L134
eugene-eeo/graphlite
graphlite/graph.py
Graph.setup_sql
def setup_sql(self, graphs): """ Sets up the SQL tables for the graph object, and creates indexes as well. :param graphs: The graphs to create. """ with closing(self.db.cursor()) as cursor: for table in graphs: cursor.execute(SQL.CREATE_TABLE % (table)) for index in SQL.INDEXES: cursor.execute(index % (table)) self.db.commit()
python
def setup_sql(self, graphs): """ Sets up the SQL tables for the graph object, and creates indexes as well. :param graphs: The graphs to create. """ with closing(self.db.cursor()) as cursor: for table in graphs: cursor.execute(SQL.CREATE_TABLE % (table)) for index in SQL.INDEXES: cursor.execute(index % (table)) self.db.commit()
Sets up the SQL tables for the graph object, and creates indexes as well. :param graphs: The graphs to create.
https://github.com/eugene-eeo/graphlite/blob/8d17e9549ee8610570dcde1b427431a2584395b7/graphlite/graph.py#L21-L33
eugene-eeo/graphlite
graphlite/sql.py
remove
def remove(src, rel, dst): """ Returns an SQL statement that removes edges from the SQL backing store. Either `src` or `dst` may be specified, even both. :param src: The source node. :param rel: The relation. :param dst: The destination node. """ smt = 'DELETE FROM %s' % rel queries = [] params = [] if src is not None: queries.append('src = ?') params.append(src) if dst is not None: queries.append('dst = ?') params.append(dst) if not queries: return smt, params smt = '%s WHERE %s' % (smt, ' AND '.join(queries)) return smt, params
python
def remove(src, rel, dst): """ Returns an SQL statement that removes edges from the SQL backing store. Either `src` or `dst` may be specified, even both. :param src: The source node. :param rel: The relation. :param dst: The destination node. """ smt = 'DELETE FROM %s' % rel queries = [] params = [] if src is not None: queries.append('src = ?') params.append(src) if dst is not None: queries.append('dst = ?') params.append(dst) if not queries: return smt, params smt = '%s WHERE %s' % (smt, ' AND '.join(queries)) return smt, params
Returns an SQL statement that removes edges from the SQL backing store. Either `src` or `dst` may be specified, even both. :param src: The source node. :param rel: The relation. :param dst: The destination node.
https://github.com/eugene-eeo/graphlite/blob/8d17e9549ee8610570dcde1b427431a2584395b7/graphlite/sql.py#L28-L53
eugene-eeo/graphlite
graphlite/sql.py
limit
def limit(lower, upper): """ Returns a SQlite-compliant LIMIT statement that takes the *lower* and *upper* bounds into account. :param lower: The lower bound. :param upper: The upper bound. """ offset = lower or 0 lim = (upper - offset) if upper else -1 smt = 'LIMIT %d OFFSET %d' % (lim, offset) return smt, ()
python
def limit(lower, upper): """ Returns a SQlite-compliant LIMIT statement that takes the *lower* and *upper* bounds into account. :param lower: The lower bound. :param upper: The upper bound. """ offset = lower or 0 lim = (upper - offset) if upper else -1 smt = 'LIMIT %d OFFSET %d' % (lim, offset) return smt, ()
Returns a SQlite-compliant LIMIT statement that takes the *lower* and *upper* bounds into account. :param lower: The lower bound. :param upper: The upper bound.
https://github.com/eugene-eeo/graphlite/blob/8d17e9549ee8610570dcde1b427431a2584395b7/graphlite/sql.py#L120-L131
eugene-eeo/graphlite
graphlite/transaction.py
Transaction.perform_ops
def perform_ops(self): """ Performs the stored operations on the database connection. """ with self.db: with closing(self.db.cursor()) as cursor: cursor.execute('BEGIN TRANSACTION') self._perform_ops(cursor)
python
def perform_ops(self): """ Performs the stored operations on the database connection. """ with self.db: with closing(self.db.cursor()) as cursor: cursor.execute('BEGIN TRANSACTION') self._perform_ops(cursor)
Performs the stored operations on the database connection.
https://github.com/eugene-eeo/graphlite/blob/8d17e9549ee8610570dcde1b427431a2584395b7/graphlite/transaction.py#L85-L93
wtolson/gnsq
gnsq/httpclient.py
HTTPClient.from_url
def from_url(cls, url, **kwargs): """Create a client from a url.""" url = urllib3.util.parse_url(url) if url.host: kwargs.setdefault('host', url.host) if url.port: kwargs.setdefault('port', url.port) if url.scheme == 'https': kwargs.setdefault('connection_class', urllib3.HTTPSConnectionPool) return cls(**kwargs)
python
def from_url(cls, url, **kwargs): """Create a client from a url.""" url = urllib3.util.parse_url(url) if url.host: kwargs.setdefault('host', url.host) if url.port: kwargs.setdefault('port', url.port) if url.scheme == 'https': kwargs.setdefault('connection_class', urllib3.HTTPSConnectionPool) return cls(**kwargs)
Create a client from a url.
https://github.com/wtolson/gnsq/blob/0fd02578b2c9c5fa30626d78579db2a46c10edac/gnsq/httpclient.py#L28-L40
wtolson/gnsq
gnsq/nsqd.py
NsqdTCPClient.connect
def connect(self): """Initialize connection to the nsqd.""" if self.state == DISCONNECTED: raise errors.NSQException('connection already closed') if self.is_connected: return stream = Stream(self.address, self.port, self.timeout) stream.connect() self.stream = stream self.state = CONNECTED self.send(nsq.MAGIC_V2)
python
def connect(self): """Initialize connection to the nsqd.""" if self.state == DISCONNECTED: raise errors.NSQException('connection already closed') if self.is_connected: return stream = Stream(self.address, self.port, self.timeout) stream.connect() self.stream = stream self.state = CONNECTED self.send(nsq.MAGIC_V2)
Initialize connection to the nsqd.
https://github.com/wtolson/gnsq/blob/0fd02578b2c9c5fa30626d78579db2a46c10edac/gnsq/nsqd.py#L205-L218
wtolson/gnsq
gnsq/nsqd.py
NsqdTCPClient.close_stream
def close_stream(self): """Close the underlying socket.""" if not self.is_connected: return self.stream.close() self.state = DISCONNECTED self.on_close.send(self)
python
def close_stream(self): """Close the underlying socket.""" if not self.is_connected: return self.stream.close() self.state = DISCONNECTED self.on_close.send(self)
Close the underlying socket.
https://github.com/wtolson/gnsq/blob/0fd02578b2c9c5fa30626d78579db2a46c10edac/gnsq/nsqd.py#L220-L227
wtolson/gnsq
gnsq/nsqd.py
NsqdTCPClient.read_response
def read_response(self): """Read an individual response from nsqd. :returns: tuple of the frame type and the processed data. """ response = self._read_response() frame, data = nsq.unpack_response(response) self.last_response = time.time() if frame not in self._frame_handlers: raise errors.NSQFrameError('unknown frame {}'.format(frame)) frame_handler = self._frame_handlers[frame] processed_data = frame_handler(data) return frame, processed_data
python
def read_response(self): """Read an individual response from nsqd. :returns: tuple of the frame type and the processed data. """ response = self._read_response() frame, data = nsq.unpack_response(response) self.last_response = time.time() if frame not in self._frame_handlers: raise errors.NSQFrameError('unknown frame {}'.format(frame)) frame_handler = self._frame_handlers[frame] processed_data = frame_handler(data) return frame, processed_data
Read an individual response from nsqd. :returns: tuple of the frame type and the processed data.
https://github.com/wtolson/gnsq/blob/0fd02578b2c9c5fa30626d78579db2a46c10edac/gnsq/nsqd.py#L244-L259
wtolson/gnsq
gnsq/nsqd.py
NsqdTCPClient.identify
def identify(self): """Update client metadata on the server and negotiate features. :returns: nsqd response data if there was feature negotiation, otherwise ``None`` """ self.send(nsq.identify({ # nsqd 0.2.28+ 'client_id': self.client_id, 'hostname': self.hostname, # nsqd 0.2.19+ 'feature_negotiation': True, 'heartbeat_interval': self.heartbeat_interval, # nsqd 0.2.21+ 'output_buffer_size': self.output_buffer_size, 'output_buffer_timeout': self.output_buffer_timeout, # nsqd 0.2.22+ 'tls_v1': self.tls_v1, # nsqd 0.2.23+ 'snappy': self.snappy, 'deflate': self.deflate, 'deflate_level': self.deflate_level, # nsqd nsqd 0.2.25+ 'sample_rate': self.sample_rate, 'user_agent': self.user_agent, })) frame, data = self.read_response() if frame == nsq.FRAME_TYPE_ERROR: raise data if data == nsq.OK: return try: data = json.loads(data.decode('utf-8')) except ValueError: self.close_stream() raise errors.NSQException( 'failed to parse IDENTIFY response JSON from nsqd: ' '{!r}'.format(data)) self.max_ready_count = data.get('max_rdy_count', self.max_ready_count) if self.tls_v1 and data.get('tls_v1'): self.upgrade_to_tls() if self.snappy and data.get('snappy'): self.upgrade_to_snappy() elif self.deflate and data.get('deflate'): self.deflate_level = data.get('deflate_level', self.deflate_level) self.upgrade_to_defalte() if self.auth_secret and data.get('auth_required'): self.auth() return data
python
def identify(self): """Update client metadata on the server and negotiate features. :returns: nsqd response data if there was feature negotiation, otherwise ``None`` """ self.send(nsq.identify({ # nsqd 0.2.28+ 'client_id': self.client_id, 'hostname': self.hostname, # nsqd 0.2.19+ 'feature_negotiation': True, 'heartbeat_interval': self.heartbeat_interval, # nsqd 0.2.21+ 'output_buffer_size': self.output_buffer_size, 'output_buffer_timeout': self.output_buffer_timeout, # nsqd 0.2.22+ 'tls_v1': self.tls_v1, # nsqd 0.2.23+ 'snappy': self.snappy, 'deflate': self.deflate, 'deflate_level': self.deflate_level, # nsqd nsqd 0.2.25+ 'sample_rate': self.sample_rate, 'user_agent': self.user_agent, })) frame, data = self.read_response() if frame == nsq.FRAME_TYPE_ERROR: raise data if data == nsq.OK: return try: data = json.loads(data.decode('utf-8')) except ValueError: self.close_stream() raise errors.NSQException( 'failed to parse IDENTIFY response JSON from nsqd: ' '{!r}'.format(data)) self.max_ready_count = data.get('max_rdy_count', self.max_ready_count) if self.tls_v1 and data.get('tls_v1'): self.upgrade_to_tls() if self.snappy and data.get('snappy'): self.upgrade_to_snappy() elif self.deflate and data.get('deflate'): self.deflate_level = data.get('deflate_level', self.deflate_level) self.upgrade_to_defalte() if self.auth_secret and data.get('auth_required'): self.auth() return data
Update client metadata on the server and negotiate features. :returns: nsqd response data if there was feature negotiation, otherwise ``None``
https://github.com/wtolson/gnsq/blob/0fd02578b2c9c5fa30626d78579db2a46c10edac/gnsq/nsqd.py#L329-L393
wtolson/gnsq
gnsq/nsqd.py
NsqdTCPClient.auth
def auth(self): """Send authorization secret to nsqd.""" self.send(nsq.auth(self.auth_secret)) frame, data = self.read_response() if frame == nsq.FRAME_TYPE_ERROR: raise data try: response = json.loads(data.decode('utf-8')) except ValueError: self.close_stream() raise errors.NSQException( 'failed to parse AUTH response JSON from nsqd: ' '{!r}'.format(data)) self.on_auth.send(self, response=response) return response
python
def auth(self): """Send authorization secret to nsqd.""" self.send(nsq.auth(self.auth_secret)) frame, data = self.read_response() if frame == nsq.FRAME_TYPE_ERROR: raise data try: response = json.loads(data.decode('utf-8')) except ValueError: self.close_stream() raise errors.NSQException( 'failed to parse AUTH response JSON from nsqd: ' '{!r}'.format(data)) self.on_auth.send(self, response=response) return response
Send authorization secret to nsqd.
https://github.com/wtolson/gnsq/blob/0fd02578b2c9c5fa30626d78579db2a46c10edac/gnsq/nsqd.py#L395-L412
wtolson/gnsq
gnsq/nsqd.py
NsqdTCPClient.subscribe
def subscribe(self, topic, channel): """Subscribe to a nsq `topic` and `channel`.""" self.send(nsq.subscribe(topic, channel))
python
def subscribe(self, topic, channel): """Subscribe to a nsq `topic` and `channel`.""" self.send(nsq.subscribe(topic, channel))
Subscribe to a nsq `topic` and `channel`.
https://github.com/wtolson/gnsq/blob/0fd02578b2c9c5fa30626d78579db2a46c10edac/gnsq/nsqd.py#L414-L416