repo | path | func_name | original_string | language | code | code_tokens | docstring | docstring_tokens | sha | url | partition
---|---|---|---|---|---|---|---|---|---|---|---|
pmuller/versions | versions/packages.py | Package.build_options | def build_options(self):
"""The package build options.
:returns: :func:`set` of build options strings.
"""
if self.version.build_metadata:
return set(self.version.build_metadata.split('.'))
else:
return set() | python | def build_options(self):
"""The package build options.
:returns: :func:`set` of build options strings.
"""
if self.version.build_metadata:
return set(self.version.build_metadata.split('.'))
else:
return set() | [
"def",
"build_options",
"(",
"self",
")",
":",
"if",
"self",
".",
"version",
".",
"build_metadata",
":",
"return",
"set",
"(",
"self",
".",
"version",
".",
"build_metadata",
".",
"split",
"(",
"'.'",
")",
")",
"else",
":",
"return",
"set",
"(",
")"
] | The package build options.
:returns: :func:`set` of build options strings. | [
"The",
"package",
"build",
"options",
"."
] | 951bc3fd99b6a675190f11ee0752af1d7ff5b440 | https://github.com/pmuller/versions/blob/951bc3fd99b6a675190f11ee0752af1d7ff5b440/versions/packages.py#L84-L93 | train |
bitesofcode/projexui | projexui/widgets/xserialedit.py | XSerialEdit.clearSelection | def clearSelection(self):
"""
Clears the selected text for this edit.
"""
first = None
editors = self.editors()
for editor in editors:
if not editor.selectedText():
continue
first = first or editor
editor.backspace()
for editor in editors:
editor.setFocus()
if first:
first.setFocus() | python | def clearSelection(self):
"""
Clears the selected text for this edit.
"""
first = None
editors = self.editors()
for editor in editors:
if not editor.selectedText():
continue
first = first or editor
editor.backspace()
for editor in editors:
editor.setFocus()
if first:
first.setFocus() | [
"def",
"clearSelection",
"(",
"self",
")",
":",
"first",
"=",
"None",
"editors",
"=",
"self",
".",
"editors",
"(",
")",
"for",
"editor",
"in",
"editors",
":",
"if",
"not",
"editor",
".",
"selectedText",
"(",
")",
":",
"continue",
"first",
"=",
"first",
"or",
"editor",
"editor",
".",
"backspace",
"(",
")",
"for",
"editor",
"in",
"editors",
":",
"editor",
".",
"setFocus",
"(",
")",
"if",
"first",
":",
"first",
".",
"setFocus",
"(",
")"
] | Clears the selected text for this edit. | [
"Clears",
"the",
"selected",
"text",
"for",
"this",
"edit",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xserialedit.py#L48-L65 | train |
bitesofcode/projexui | projexui/widgets/xserialedit.py | XSerialEdit.cut | def cut(self):
"""
Cuts the text from the serial to the clipboard.
"""
text = self.selectedText()
for editor in self.editors():
editor.cut()
QtGui.QApplication.clipboard().setText(text) | python | def cut(self):
"""
Cuts the text from the serial to the clipboard.
"""
text = self.selectedText()
for editor in self.editors():
editor.cut()
QtGui.QApplication.clipboard().setText(text) | [
"def",
"cut",
"(",
"self",
")",
":",
"text",
"=",
"self",
".",
"selectedText",
"(",
")",
"for",
"editor",
"in",
"self",
".",
"editors",
"(",
")",
":",
"editor",
".",
"cut",
"(",
")",
"QtGui",
".",
"QApplication",
".",
"clipboard",
"(",
")",
".",
"setText",
"(",
"text",
")"
] | Cuts the text from the serial to the clipboard. | [
"Cuts",
"the",
"text",
"from",
"the",
"serial",
"to",
"the",
"clipboard",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xserialedit.py#L82-L90 | train |
bitesofcode/projexui | projexui/widgets/xserialedit.py | XSerialEdit.goBack | def goBack(self):
"""
Moves the cursor to the end of the previous editor
"""
index = self.indexOf(self.currentEditor())
if index == -1:
return
previous = self.editorAt(index - 1)
if previous:
previous.setFocus()
previous.setCursorPosition(self.sectionLength()) | python | def goBack(self):
"""
Moves the cursor to the end of the previous editor
"""
index = self.indexOf(self.currentEditor())
if index == -1:
return
previous = self.editorAt(index - 1)
if previous:
previous.setFocus()
previous.setCursorPosition(self.sectionLength()) | [
"def",
"goBack",
"(",
"self",
")",
":",
"index",
"=",
"self",
".",
"indexOf",
"(",
"self",
".",
"currentEditor",
"(",
")",
")",
"if",
"index",
"==",
"-",
"1",
":",
"return",
"previous",
"=",
"self",
".",
"editorAt",
"(",
"index",
"-",
"1",
")",
"if",
"previous",
":",
"previous",
".",
"setFocus",
"(",
")",
"previous",
".",
"setCursorPosition",
"(",
"self",
".",
"sectionLength",
"(",
")",
")"
] | Moves the cursor to the end of the previous editor | [
"Moves",
"the",
"cursor",
"to",
"the",
"end",
"of",
"the",
"previous",
"editor"
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xserialedit.py#L206-L217 | train |
bitesofcode/projexui | projexui/widgets/xserialedit.py | XSerialEdit.goForward | def goForward(self):
"""
Moves the cursor to the beginning of the next editor.
"""
index = self.indexOf(self.currentEditor())
if index == -1:
return
next = self.editorAt(index + 1)
if next:
next.setFocus()
next.setCursorPosition(0) | python | def goForward(self):
"""
Moves the cursor to the beginning of the next editor.
"""
index = self.indexOf(self.currentEditor())
if index == -1:
return
next = self.editorAt(index + 1)
if next:
next.setFocus()
next.setCursorPosition(0) | [
"def",
"goForward",
"(",
"self",
")",
":",
"index",
"=",
"self",
".",
"indexOf",
"(",
"self",
".",
"currentEditor",
"(",
")",
")",
"if",
"index",
"==",
"-",
"1",
":",
"return",
"next",
"=",
"self",
".",
"editorAt",
"(",
"index",
"+",
"1",
")",
"if",
"next",
":",
"next",
".",
"setFocus",
"(",
")",
"next",
".",
"setCursorPosition",
"(",
"0",
")"
] | Moves the cursor to the beginning of the next editor. | [
"Moves",
"the",
"cursor",
"to",
"the",
"beginning",
"of",
"the",
"next",
"editor",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xserialedit.py#L219-L230 | train |
bitesofcode/projexui | projexui/widgets/xserialedit.py | XSerialEdit.selectAll | def selectAll(self):
"""
Selects the text within all the editors.
"""
self.blockEditorHandling(True)
for editor in self.editors():
editor.selectAll()
self.blockEditorHandling(False) | python | def selectAll(self):
"""
Selects the text within all the editors.
"""
self.blockEditorHandling(True)
for editor in self.editors():
editor.selectAll()
self.blockEditorHandling(False) | [
"def",
"selectAll",
"(",
"self",
")",
":",
"self",
".",
"blockEditorHandling",
"(",
"True",
")",
"for",
"editor",
"in",
"self",
".",
"editors",
"(",
")",
":",
"editor",
".",
"selectAll",
"(",
")",
"self",
".",
"blockEditorHandling",
"(",
"False",
")"
] | Selects the text within all the editors. | [
"Selects",
"the",
"text",
"within",
"all",
"the",
"editors",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xserialedit.py#L315-L322 | train |
johnnoone/aioconsul | aioconsul/client/agent_endpoint.py | AgentEndpoint.disable | async def disable(self, reason=None):
"""Enters maintenance mode
Parameters:
reason (str): Reason of disabling
Returns:
bool: ``True`` on success
"""
params = {"enable": True, "reason": reason}
response = await self._api.put("/v1/agent/maintenance", params=params)
return response.status == 200 | python | async def disable(self, reason=None):
"""Enters maintenance mode
Parameters:
reason (str): Reason of disabling
Returns:
bool: ``True`` on success
"""
params = {"enable": True, "reason": reason}
response = await self._api.put("/v1/agent/maintenance", params=params)
return response.status == 200 | [
"async",
"def",
"disable",
"(",
"self",
",",
"reason",
"=",
"None",
")",
":",
"params",
"=",
"{",
"\"enable\"",
":",
"True",
",",
"\"reason\"",
":",
"reason",
"}",
"response",
"=",
"await",
"self",
".",
"_api",
".",
"put",
"(",
"\"/v1/agent/maintenance\"",
",",
"params",
"=",
"params",
")",
"return",
"response",
".",
"status",
"==",
"200"
] | Enters maintenance mode
Parameters:
reason (str): Reason of disabling
Returns:
bool: ``True`` on success | [
"Enters",
"maintenance",
"mode"
] | 02f7a529d7dc2e49bed942111067aa5faf320e90 | https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/agent_endpoint.py#L86-L96 | train |
johnnoone/aioconsul | aioconsul/client/agent_endpoint.py | AgentEndpoint.enable | async def enable(self, reason=None):
"""Resumes normal operation
Parameters:
reason (str): Reason of enabling
Returns:
bool: ``True`` on success
"""
params = {"enable": False, "reason": reason}
response = await self._api.put("/v1/agent/maintenance", params=params)
return response.status == 200 | python | async def enable(self, reason=None):
"""Resumes normal operation
Parameters:
reason (str): Reason of enabling
Returns:
bool: ``True`` on success
"""
params = {"enable": False, "reason": reason}
response = await self._api.put("/v1/agent/maintenance", params=params)
return response.status == 200 | [
"async",
"def",
"enable",
"(",
"self",
",",
"reason",
"=",
"None",
")",
":",
"params",
"=",
"{",
"\"enable\"",
":",
"False",
",",
"\"reason\"",
":",
"reason",
"}",
"response",
"=",
"await",
"self",
".",
"_api",
".",
"put",
"(",
"\"/v1/agent/maintenance\"",
",",
"params",
"=",
"params",
")",
"return",
"response",
".",
"status",
"==",
"200"
] | Resumes normal operation
Parameters:
reason (str): Reason of enabling
Returns:
bool: ``True`` on success | [
"Resumes",
"normal",
"operation"
] | 02f7a529d7dc2e49bed942111067aa5faf320e90 | https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/agent_endpoint.py#L98-L108 | train |
fitnr/buoyant | buoyant/timezone.py | parse_datetime | def parse_datetime(dt):
'''Parse an ISO datetime, which Python does buggily.'''
d = datetime.strptime(dt[:-1], ISOFORMAT)
if dt[-1:] == 'Z':
return timezone('utc').localize(d)
else:
return d | python | def parse_datetime(dt):
'''Parse an ISO datetime, which Python does buggily.'''
d = datetime.strptime(dt[:-1], ISOFORMAT)
if dt[-1:] == 'Z':
return timezone('utc').localize(d)
else:
return d | [
"def",
"parse_datetime",
"(",
"dt",
")",
":",
"d",
"=",
"datetime",
".",
"strptime",
"(",
"dt",
"[",
":",
"-",
"1",
"]",
",",
"ISOFORMAT",
")",
"if",
"dt",
"[",
"-",
"1",
":",
"]",
"==",
"'Z'",
":",
"return",
"timezone",
"(",
"'utc'",
")",
".",
"localize",
"(",
"d",
")",
"else",
":",
"return",
"d"
] | Parse an ISO datetime, which Python does buggily. | [
"Parse",
"an",
"ISO",
"datetime",
"which",
"Python",
"does",
"buggily",
"."
] | ef7a74f9ebd4774629508ccf2c9abb43aa0235c9 | https://github.com/fitnr/buoyant/blob/ef7a74f9ebd4774629508ccf2c9abb43aa0235c9/buoyant/timezone.py#L21-L28 | train |
bitesofcode/projexui | projexui/widgets/xorbbrowserwidget/xorbbrowserwidget.py | XOrbBrowserWidget.refreshRecords | def refreshRecords( self ):
"""
Refreshes the records being loaded by this browser.
"""
table_type = self.tableType()
if ( not table_type ):
self._records = RecordSet()
return False
search = nativestring(self.uiSearchTXT.text())
query = self.query().copy()
terms, search_query = Q.fromSearch(search)
if ( search_query ):
query &= search_query
self._records = table_type.select(where = query).search(terms)
return True | python | def refreshRecords( self ):
"""
Refreshes the records being loaded by this browser.
"""
table_type = self.tableType()
if ( not table_type ):
self._records = RecordSet()
return False
search = nativestring(self.uiSearchTXT.text())
query = self.query().copy()
terms, search_query = Q.fromSearch(search)
if ( search_query ):
query &= search_query
self._records = table_type.select(where = query).search(terms)
return True | [
"def",
"refreshRecords",
"(",
"self",
")",
":",
"table_type",
"=",
"self",
".",
"tableType",
"(",
")",
"if",
"(",
"not",
"table_type",
")",
":",
"self",
".",
"_records",
"=",
"RecordSet",
"(",
")",
"return",
"False",
"search",
"=",
"nativestring",
"(",
"self",
".",
"uiSearchTXT",
".",
"text",
"(",
")",
")",
"query",
"=",
"self",
".",
"query",
"(",
")",
".",
"copy",
"(",
")",
"terms",
",",
"search_query",
"=",
"Q",
".",
"fromSearch",
"(",
"search",
")",
"if",
"(",
"search_query",
")",
":",
"query",
"&=",
"search_query",
"self",
".",
"_records",
"=",
"table_type",
".",
"select",
"(",
"where",
"=",
"query",
")",
".",
"search",
"(",
"terms",
")",
"return",
"True"
] | Refreshes the records being loaded by this browser. | [
"Refreshes",
"the",
"records",
"being",
"loaded",
"by",
"this",
"browser",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xorbbrowserwidget/xorbbrowserwidget.py#L476-L494 | train |
bitesofcode/projexui | projexui/widgets/xorbbrowserwidget/xorbbrowserwidget.py | XOrbBrowserWidget.refreshResults | def refreshResults( self ):
"""
Joins together the queries from the fixed system, the search, and the
query builder to generate a query for the browser to display.
"""
if ( self.currentMode() == XOrbBrowserWidget.Mode.Detail ):
self.refreshDetails()
elif ( self.currentMode() == XOrbBrowserWidget.Mode.Card ):
self.refreshCards()
else:
self.refreshThumbnails() | python | def refreshResults( self ):
"""
Joins together the queries from the fixed system, the search, and the
query builder to generate a query for the browser to display.
"""
if ( self.currentMode() == XOrbBrowserWidget.Mode.Detail ):
self.refreshDetails()
elif ( self.currentMode() == XOrbBrowserWidget.Mode.Card ):
self.refreshCards()
else:
self.refreshThumbnails() | [
"def",
"refreshResults",
"(",
"self",
")",
":",
"if",
"(",
"self",
".",
"currentMode",
"(",
")",
"==",
"XOrbBrowserWidget",
".",
"Mode",
".",
"Detail",
")",
":",
"self",
".",
"refreshDetails",
"(",
")",
"elif",
"(",
"self",
".",
"currentMode",
"(",
")",
"==",
"XOrbBrowserWidget",
".",
"Mode",
".",
"Card",
")",
":",
"self",
".",
"refreshCards",
"(",
")",
"else",
":",
"self",
".",
"refreshThumbnails",
"(",
")"
] | Joins together the queries from the fixed system, the search, and the
query builder to generate a query for the browser to display. | [
"Joins",
"together",
"the",
"queries",
"from",
"the",
"fixed",
"system",
"the",
"search",
"and",
"the",
"query",
"builder",
"to",
"generate",
"a",
"query",
"for",
"the",
"browser",
"to",
"display",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xorbbrowserwidget/xorbbrowserwidget.py#L496-L506 | train |
bitesofcode/projexui | projexui/widgets/xorbbrowserwidget/xorbbrowserwidget.py | XOrbBrowserWidget.refreshCards | def refreshCards( self ):
"""
Refreshes the results for the cards view of the browser.
"""
cards = self.cardWidget()
factory = self.factory()
self.setUpdatesEnabled(False)
self.blockSignals(True)
cards.setUpdatesEnabled(False)
cards.blockSignals(True)
cards.clear()
QApplication.instance().processEvents()
if ( self.isGroupingActive() ):
grouping = self.records().grouped()
for groupName, records in sorted(grouping.items()):
self._loadCardGroup(groupName, records, cards)
else:
for record in self.records():
widget = factory.createCard(cards, record)
if ( not widget ):
continue
widget.adjustSize()
# create the card item
item = QTreeWidgetItem(cards)
item.setSizeHint(0, QSize(0, widget.height()))
cards.setItemWidget(item, 0, widget)
cards.setUpdatesEnabled(True)
cards.blockSignals(False)
self.setUpdatesEnabled(True)
self.blockSignals(False) | python | def refreshCards( self ):
"""
Refreshes the results for the cards view of the browser.
"""
cards = self.cardWidget()
factory = self.factory()
self.setUpdatesEnabled(False)
self.blockSignals(True)
cards.setUpdatesEnabled(False)
cards.blockSignals(True)
cards.clear()
QApplication.instance().processEvents()
if ( self.isGroupingActive() ):
grouping = self.records().grouped()
for groupName, records in sorted(grouping.items()):
self._loadCardGroup(groupName, records, cards)
else:
for record in self.records():
widget = factory.createCard(cards, record)
if ( not widget ):
continue
widget.adjustSize()
# create the card item
item = QTreeWidgetItem(cards)
item.setSizeHint(0, QSize(0, widget.height()))
cards.setItemWidget(item, 0, widget)
cards.setUpdatesEnabled(True)
cards.blockSignals(False)
self.setUpdatesEnabled(True)
self.blockSignals(False) | [
"def",
"refreshCards",
"(",
"self",
")",
":",
"cards",
"=",
"self",
".",
"cardWidget",
"(",
")",
"factory",
"=",
"self",
".",
"factory",
"(",
")",
"self",
".",
"setUpdatesEnabled",
"(",
"False",
")",
"self",
".",
"blockSignals",
"(",
"True",
")",
"cards",
".",
"setUpdatesEnabled",
"(",
"False",
")",
"cards",
".",
"blockSignals",
"(",
"True",
")",
"cards",
".",
"clear",
"(",
")",
"QApplication",
".",
"instance",
"(",
")",
".",
"processEvents",
"(",
")",
"if",
"(",
"self",
".",
"isGroupingActive",
"(",
")",
")",
":",
"grouping",
"=",
"self",
".",
"records",
"(",
")",
".",
"grouped",
"(",
")",
"for",
"groupName",
",",
"records",
"in",
"sorted",
"(",
"grouping",
".",
"items",
"(",
")",
")",
":",
"self",
".",
"_loadCardGroup",
"(",
"groupName",
",",
"records",
",",
"cards",
")",
"else",
":",
"for",
"record",
"in",
"self",
".",
"records",
"(",
")",
":",
"widget",
"=",
"factory",
".",
"createCard",
"(",
"cards",
",",
"record",
")",
"if",
"(",
"not",
"widget",
")",
":",
"continue",
"widget",
".",
"adjustSize",
"(",
")",
"# create the card item\r",
"item",
"=",
"QTreeWidgetItem",
"(",
"cards",
")",
"item",
".",
"setSizeHint",
"(",
"0",
",",
"QSize",
"(",
"0",
",",
"widget",
".",
"height",
"(",
")",
")",
")",
"cards",
".",
"setItemWidget",
"(",
"item",
",",
"0",
",",
"widget",
")",
"cards",
".",
"setUpdatesEnabled",
"(",
"True",
")",
"cards",
".",
"blockSignals",
"(",
"False",
")",
"self",
".",
"setUpdatesEnabled",
"(",
"True",
")",
"self",
".",
"blockSignals",
"(",
"False",
")"
] | Refreshes the results for the cards view of the browser. | [
"Refreshes",
"the",
"results",
"for",
"the",
"cards",
"view",
"of",
"the",
"browser",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xorbbrowserwidget/xorbbrowserwidget.py#L508-L546 | train |
bitesofcode/projexui | projexui/widgets/xorbbrowserwidget/xorbbrowserwidget.py | XOrbBrowserWidget.refreshDetails | def refreshDetails( self ):
"""
Refreshes the results for the details view of the browser.
"""
# start off by filtering based on the group selection
tree = self.uiRecordsTREE
tree.blockSignals(True)
tree.setRecordSet(self.records())
tree.blockSignals(False) | python | def refreshDetails( self ):
"""
Refreshes the results for the details view of the browser.
"""
# start off by filtering based on the group selection
tree = self.uiRecordsTREE
tree.blockSignals(True)
tree.setRecordSet(self.records())
tree.blockSignals(False) | [
"def",
"refreshDetails",
"(",
"self",
")",
":",
"# start off by filtering based on the group selection\r",
"tree",
"=",
"self",
".",
"uiRecordsTREE",
"tree",
".",
"blockSignals",
"(",
"True",
")",
"tree",
".",
"setRecordSet",
"(",
"self",
".",
"records",
"(",
")",
")",
"tree",
".",
"blockSignals",
"(",
"False",
")"
] | Refreshes the results for the details view of the browser. | [
"Refreshes",
"the",
"results",
"for",
"the",
"details",
"view",
"of",
"the",
"browser",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xorbbrowserwidget/xorbbrowserwidget.py#L548-L556 | train |
bitesofcode/projexui | projexui/widgets/xorbbrowserwidget/xorbbrowserwidget.py | XOrbBrowserWidget.refreshThumbnails | def refreshThumbnails( self ):
"""
Refreshes the thumbnails view of the browser.
"""
# clear existing items
widget = self.thumbnailWidget()
widget.setUpdatesEnabled(False)
widget.blockSignals(True)
widget.clear()
widget.setIconSize(self.thumbnailSize())
factory = self.factory()
# load grouped thumbnails (only allow 1 level of grouping)
if ( self.isGroupingActive() ):
grouping = self.records().grouped()
for groupName, records in sorted(grouping.items()):
self._loadThumbnailGroup(groupName, records)
# load ungrouped thumbnails
else:
# load the records into the thumbnail
for record in self.records():
thumbnail = factory.thumbnail(record)
text = factory.thumbnailText(record)
RecordListWidgetItem(thumbnail, text, record, widget)
widget.setUpdatesEnabled(True)
widget.blockSignals(False) | python | def refreshThumbnails( self ):
"""
Refreshes the thumbnails view of the browser.
"""
# clear existing items
widget = self.thumbnailWidget()
widget.setUpdatesEnabled(False)
widget.blockSignals(True)
widget.clear()
widget.setIconSize(self.thumbnailSize())
factory = self.factory()
# load grouped thumbnails (only allow 1 level of grouping)
if ( self.isGroupingActive() ):
grouping = self.records().grouped()
for groupName, records in sorted(grouping.items()):
self._loadThumbnailGroup(groupName, records)
# load ungrouped thumbnails
else:
# load the records into the thumbnail
for record in self.records():
thumbnail = factory.thumbnail(record)
text = factory.thumbnailText(record)
RecordListWidgetItem(thumbnail, text, record, widget)
widget.setUpdatesEnabled(True)
widget.blockSignals(False) | [
"def",
"refreshThumbnails",
"(",
"self",
")",
":",
"# clear existing items\r",
"widget",
"=",
"self",
".",
"thumbnailWidget",
"(",
")",
"widget",
".",
"setUpdatesEnabled",
"(",
"False",
")",
"widget",
".",
"blockSignals",
"(",
"True",
")",
"widget",
".",
"clear",
"(",
")",
"widget",
".",
"setIconSize",
"(",
"self",
".",
"thumbnailSize",
"(",
")",
")",
"factory",
"=",
"self",
".",
"factory",
"(",
")",
"# load grouped thumbnails (only allow 1 level of grouping)\r",
"if",
"(",
"self",
".",
"isGroupingActive",
"(",
")",
")",
":",
"grouping",
"=",
"self",
".",
"records",
"(",
")",
".",
"grouped",
"(",
")",
"for",
"groupName",
",",
"records",
"in",
"sorted",
"(",
"grouping",
".",
"items",
"(",
")",
")",
":",
"self",
".",
"_loadThumbnailGroup",
"(",
"groupName",
",",
"records",
")",
"# load ungrouped thumbnails\r",
"else",
":",
"# load the records into the thumbnail\r",
"for",
"record",
"in",
"self",
".",
"records",
"(",
")",
":",
"thumbnail",
"=",
"factory",
".",
"thumbnail",
"(",
"record",
")",
"text",
"=",
"factory",
".",
"thumbnailText",
"(",
"record",
")",
"RecordListWidgetItem",
"(",
"thumbnail",
",",
"text",
",",
"record",
",",
"widget",
")",
"widget",
".",
"setUpdatesEnabled",
"(",
"True",
")",
"widget",
".",
"blockSignals",
"(",
"False",
")"
] | Refreshes the thumbnails view of the browser. | [
"Refreshes",
"the",
"thumbnails",
"view",
"of",
"the",
"browser",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xorbbrowserwidget/xorbbrowserwidget.py#L558-L587 | train |
bitesofcode/projexui | projexui/widgets/xorbbrowserwidget/xorbbrowserwidget.py | XOrbBrowserWidget.showGroupMenu | def showGroupMenu( self ):
"""
Displays the group menu to the user for modification.
"""
group_active = self.isGroupingActive()
group_by = self.groupBy()
menu = XMenu(self)
menu.setTitle('Grouping Options')
menu.setShowTitle(True)
menu.addAction('Edit Advanced Grouping')
menu.addSeparator()
action = menu.addAction('No Grouping')
action.setCheckable(True)
action.setChecked(not group_active)
action = menu.addAction('Advanced')
action.setCheckable(True)
action.setChecked(group_by == self.GroupByAdvancedKey and group_active)
if ( group_by == self.GroupByAdvancedKey ):
font = action.font()
font.setBold(True)
action.setFont(font)
menu.addSeparator()
# add dynamic options from the table schema
tableType = self.tableType()
if ( tableType ):
columns = tableType.schema().columns()
columns.sort(key = lambda x: x.displayName())
for column in columns:
action = menu.addAction(column.displayName())
action.setCheckable(True)
action.setChecked(group_by == column.displayName() and
group_active)
if ( column.displayName() == group_by ):
font = action.font()
font.setBold(True)
action.setFont(font)
point = QPoint(0, self.uiGroupOptionsBTN.height())
action = menu.exec_(self.uiGroupOptionsBTN.mapToGlobal(point))
if ( not action ):
return
elif ( action.text() == 'Edit Advanced Grouping' ):
print 'edit advanced grouping options'
elif ( action.text() == 'No Grouping' ):
self.setGroupingActive(False)
elif ( action.text() == 'Advanced' ):
self.uiGroupBTN.blockSignals(True)
self.setGroupBy(self.GroupByAdvancedKey)
self.setGroupingActive(True)
self.uiGroupBTN.blockSignals(False)
self.refreshResults()
else:
self.uiGroupBTN.blockSignals(True)
self.setGroupBy(nativestring(action.text()))
self.setGroupingActive(True)
self.uiGroupBTN.blockSignals(False)
self.refreshResults() | python | def showGroupMenu( self ):
"""
Displays the group menu to the user for modification.
"""
group_active = self.isGroupingActive()
group_by = self.groupBy()
menu = XMenu(self)
menu.setTitle('Grouping Options')
menu.setShowTitle(True)
menu.addAction('Edit Advanced Grouping')
menu.addSeparator()
action = menu.addAction('No Grouping')
action.setCheckable(True)
action.setChecked(not group_active)
action = menu.addAction('Advanced')
action.setCheckable(True)
action.setChecked(group_by == self.GroupByAdvancedKey and group_active)
if ( group_by == self.GroupByAdvancedKey ):
font = action.font()
font.setBold(True)
action.setFont(font)
menu.addSeparator()
# add dynamic options from the table schema
tableType = self.tableType()
if ( tableType ):
columns = tableType.schema().columns()
columns.sort(key = lambda x: x.displayName())
for column in columns:
action = menu.addAction(column.displayName())
action.setCheckable(True)
action.setChecked(group_by == column.displayName() and
group_active)
if ( column.displayName() == group_by ):
font = action.font()
font.setBold(True)
action.setFont(font)
point = QPoint(0, self.uiGroupOptionsBTN.height())
action = menu.exec_(self.uiGroupOptionsBTN.mapToGlobal(point))
if ( not action ):
return
elif ( action.text() == 'Edit Advanced Grouping' ):
print 'edit advanced grouping options'
elif ( action.text() == 'No Grouping' ):
self.setGroupingActive(False)
elif ( action.text() == 'Advanced' ):
self.uiGroupBTN.blockSignals(True)
self.setGroupBy(self.GroupByAdvancedKey)
self.setGroupingActive(True)
self.uiGroupBTN.blockSignals(False)
self.refreshResults()
else:
self.uiGroupBTN.blockSignals(True)
self.setGroupBy(nativestring(action.text()))
self.setGroupingActive(True)
self.uiGroupBTN.blockSignals(False)
self.refreshResults() | [
"def",
"showGroupMenu",
"(",
"self",
")",
":",
"group_active",
"=",
"self",
".",
"isGroupingActive",
"(",
")",
"group_by",
"=",
"self",
".",
"groupBy",
"(",
")",
"menu",
"=",
"XMenu",
"(",
"self",
")",
"menu",
".",
"setTitle",
"(",
"'Grouping Options'",
")",
"menu",
".",
"setShowTitle",
"(",
"True",
")",
"menu",
".",
"addAction",
"(",
"'Edit Advanced Grouping'",
")",
"menu",
".",
"addSeparator",
"(",
")",
"action",
"=",
"menu",
".",
"addAction",
"(",
"'No Grouping'",
")",
"action",
".",
"setCheckable",
"(",
"True",
")",
"action",
".",
"setChecked",
"(",
"not",
"group_active",
")",
"action",
"=",
"menu",
".",
"addAction",
"(",
"'Advanced'",
")",
"action",
".",
"setCheckable",
"(",
"True",
")",
"action",
".",
"setChecked",
"(",
"group_by",
"==",
"self",
".",
"GroupByAdvancedKey",
"and",
"group_active",
")",
"if",
"(",
"group_by",
"==",
"self",
".",
"GroupByAdvancedKey",
")",
":",
"font",
"=",
"action",
".",
"font",
"(",
")",
"font",
".",
"setBold",
"(",
"True",
")",
"action",
".",
"setFont",
"(",
"font",
")",
"menu",
".",
"addSeparator",
"(",
")",
"# add dynamic options from the table schema\r",
"tableType",
"=",
"self",
".",
"tableType",
"(",
")",
"if",
"(",
"tableType",
")",
":",
"columns",
"=",
"tableType",
".",
"schema",
"(",
")",
".",
"columns",
"(",
")",
"columns",
".",
"sort",
"(",
"key",
"=",
"lambda",
"x",
":",
"x",
".",
"displayName",
"(",
")",
")",
"for",
"column",
"in",
"columns",
":",
"action",
"=",
"menu",
".",
"addAction",
"(",
"column",
".",
"displayName",
"(",
")",
")",
"action",
".",
"setCheckable",
"(",
"True",
")",
"action",
".",
"setChecked",
"(",
"group_by",
"==",
"column",
".",
"displayName",
"(",
")",
"and",
"group_active",
")",
"if",
"(",
"column",
".",
"displayName",
"(",
")",
"==",
"group_by",
")",
":",
"font",
"=",
"action",
".",
"font",
"(",
")",
"font",
".",
"setBold",
"(",
"True",
")",
"action",
".",
"setFont",
"(",
"font",
")",
"point",
"=",
"QPoint",
"(",
"0",
",",
"self",
".",
"uiGroupOptionsBTN",
".",
"height",
"(",
")",
")",
"action",
"=",
"menu",
".",
"exec_",
"(",
"self",
".",
"uiGroupOptionsBTN",
".",
"mapToGlobal",
"(",
"point",
")",
")",
"if",
"(",
"not",
"action",
")",
":",
"return",
"elif",
"(",
"action",
".",
"text",
"(",
")",
"==",
"'Edit Advanced Grouping'",
")",
":",
"print",
"'edit advanced grouping options'",
"elif",
"(",
"action",
".",
"text",
"(",
")",
"==",
"'No Grouping'",
")",
":",
"self",
".",
"setGroupingActive",
"(",
"False",
")",
"elif",
"(",
"action",
".",
"text",
"(",
")",
"==",
"'Advanced'",
")",
":",
"self",
".",
"uiGroupBTN",
".",
"blockSignals",
"(",
"True",
")",
"self",
".",
"setGroupBy",
"(",
"self",
".",
"GroupByAdvancedKey",
")",
"self",
".",
"setGroupingActive",
"(",
"True",
")",
"self",
".",
"uiGroupBTN",
".",
"blockSignals",
"(",
"False",
")",
"self",
".",
"refreshResults",
"(",
")",
"else",
":",
"self",
".",
"uiGroupBTN",
".",
"blockSignals",
"(",
"True",
")",
"self",
".",
"setGroupBy",
"(",
"nativestring",
"(",
"action",
".",
"text",
"(",
")",
")",
")",
"self",
".",
"setGroupingActive",
"(",
"True",
")",
"self",
".",
"uiGroupBTN",
".",
"blockSignals",
"(",
"False",
")",
"self",
".",
"refreshResults",
"(",
")"
] | Displays the group menu to the user for modification. | [
"Displays",
"the",
"group",
"menu",
"to",
"the",
"user",
"for",
"modification",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xorbbrowserwidget/xorbbrowserwidget.py#L743-L811 | train |
musashiXXX/django-clamav-upload | clamav_upload/__init__.py | get_settings | def get_settings():
"""
This function returns a dict containing default settings
"""
s = getattr(settings, 'CLAMAV_UPLOAD', {})
s = {
'CONTENT_TYPE_CHECK_ENABLED': s.get('CONTENT_TYPE_CHECK_ENABLED', False),
# LAST_HANDLER is not a user configurable option; we return
# it with the settings dict simply because it's convenient.
'LAST_HANDLER': getattr(settings, 'FILE_UPLOAD_HANDLERS')[-1]
}
return s | python | def get_settings():
"""
This function returns a dict containing default settings
"""
s = getattr(settings, 'CLAMAV_UPLOAD', {})
s = {
'CONTENT_TYPE_CHECK_ENABLED': s.get('CONTENT_TYPE_CHECK_ENABLED', False),
# LAST_HANDLER is not a user configurable option; we return
# it with the settings dict simply because it's convenient.
'LAST_HANDLER': getattr(settings, 'FILE_UPLOAD_HANDLERS')[-1]
}
return s | [
"def",
"get_settings",
"(",
")",
":",
"s",
"=",
"getattr",
"(",
"settings",
",",
"'CLAMAV_UPLOAD'",
",",
"{",
"}",
")",
"s",
"=",
"{",
"'CONTENT_TYPE_CHECK_ENABLED'",
":",
"s",
".",
"get",
"(",
"'CONTENT_TYPE_CHECK_ENABLED'",
",",
"False",
")",
",",
"# LAST_HANDLER is not a user configurable option; we return",
"# it with the settings dict simply because it's convenient.",
"'LAST_HANDLER'",
":",
"getattr",
"(",
"settings",
",",
"'FILE_UPLOAD_HANDLERS'",
")",
"[",
"-",
"1",
"]",
"}",
"return",
"s"
] | This function returns a dict containing default settings | [
"This",
"function",
"returns",
"a",
"dict",
"containing",
"default",
"settings"
] | 00ea8baaa127d98ffb0919aaa2c3aeec9bb58fd5 | https://github.com/musashiXXX/django-clamav-upload/blob/00ea8baaa127d98ffb0919aaa2c3aeec9bb58fd5/clamav_upload/__init__.py#L21-L32 | train |
bitesofcode/projexui | projexui/xsettings.py | XmlFormat.clear | def clear(self):
"""
Clears the settings for this XML format.
"""
self._xroot = ElementTree.Element('settings')
self._xroot.set('version', '1.0')
self._xstack = [self._xroot] | python | def clear(self):
"""
Clears the settings for this XML format.
"""
self._xroot = ElementTree.Element('settings')
self._xroot.set('version', '1.0')
self._xstack = [self._xroot] | [
"def",
"clear",
"(",
"self",
")",
":",
"self",
".",
"_xroot",
"=",
"ElementTree",
".",
"Element",
"(",
"'settings'",
")",
"self",
".",
"_xroot",
".",
"set",
"(",
"'version'",
",",
"'1.0'",
")",
"self",
".",
"_xstack",
"=",
"[",
"self",
".",
"_xroot",
"]"
] | Clears the settings for this XML format. | [
"Clears",
"the",
"settings",
"for",
"this",
"XML",
"format",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/xsettings.py#L119-L125 | train |
bitesofcode/projexui | projexui/xsettings.py | XSettings.clear | def clear(self):
"""
Clears out all the settings for this instance.
"""
if self._customFormat:
self._customFormat.clear()
else:
super(XSettings, self).clear() | python | def clear(self):
"""
Clears out all the settings for this instance.
"""
if self._customFormat:
self._customFormat.clear()
else:
super(XSettings, self).clear() | [
"def",
"clear",
"(",
"self",
")",
":",
"if",
"self",
".",
"_customFormat",
":",
"self",
".",
"_customFormat",
".",
"clear",
"(",
")",
"else",
":",
"super",
"(",
"XSettings",
",",
"self",
")",
".",
"clear",
"(",
")"
] | Clears out all the settings for this instance. | [
"Clears",
"out",
"all",
"the",
"settings",
"for",
"this",
"instance",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/xsettings.py#L519-L526 | train |
bitesofcode/projexui | projexui/xsettings.py | XSettings.endGroup | def endGroup(self):
"""
Ends the current group of xml data.
"""
if self._customFormat:
self._customFormat.endGroup()
else:
super(XSettings, self).endGroup() | python | def endGroup(self):
"""
Ends the current group of xml data.
"""
if self._customFormat:
self._customFormat.endGroup()
else:
super(XSettings, self).endGroup() | [
"def",
"endGroup",
"(",
"self",
")",
":",
"if",
"self",
".",
"_customFormat",
":",
"self",
".",
"_customFormat",
".",
"endGroup",
"(",
")",
"else",
":",
"super",
"(",
"XSettings",
",",
"self",
")",
".",
"endGroup",
"(",
")"
] | Ends the current group of xml data. | [
"Ends",
"the",
"current",
"group",
"of",
"xml",
"data",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/xsettings.py#L548-L555 | train |
bitesofcode/projexui | projexui/xsettings.py | XSettings.load | def load(self):
"""
Loads the settings from disk for this XSettings object, if it is a custom format.
"""
# load the custom format
if self._customFormat and os.path.exists(self.fileName()):
self._customFormat.load(self.fileName()) | python | def load(self):
"""
Loads the settings from disk for this XSettings object, if it is a custom format.
"""
# load the custom format
if self._customFormat and os.path.exists(self.fileName()):
self._customFormat.load(self.fileName()) | [
"def",
"load",
"(",
"self",
")",
":",
"# load the custom format\r",
"if",
"self",
".",
"_customFormat",
"and",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"fileName",
"(",
")",
")",
":",
"self",
".",
"_customFormat",
".",
"load",
"(",
"self",
".",
"fileName",
"(",
")",
")"
] | Loads the settings from disk for this XSettings object, if it is a custom format. | [
"Loads",
"the",
"settings",
"from",
"disk",
"for",
"this",
"XSettings",
"object",
"if",
"it",
"is",
"a",
"custom",
"format",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/xsettings.py#L557-L563 | train |
bitesofcode/projexui | projexui/xsettings.py | XSettings.sync | def sync(self):
"""
Syncs the information for this settings out to the file system.
"""
if self._customFormat:
self._customFormat.save(self.fileName())
else:
super(XSettings, self).sync() | python | def sync(self):
"""
Syncs the information for this settings out to the file system.
"""
if self._customFormat:
self._customFormat.save(self.fileName())
else:
super(XSettings, self).sync() | [
"def",
"sync",
"(",
"self",
")",
":",
"if",
"self",
".",
"_customFormat",
":",
"self",
".",
"_customFormat",
".",
"save",
"(",
"self",
".",
"fileName",
"(",
")",
")",
"else",
":",
"super",
"(",
"XSettings",
",",
"self",
")",
".",
"sync",
"(",
")"
] | Syncs the information for this settings out to the file system. | [
"Syncs",
"the",
"information",
"for",
"this",
"settings",
"out",
"to",
"the",
"file",
"system",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/xsettings.py#L608-L615 | train |
bitesofcode/projexui | projexui/widgets/xorbquerywidget/xorbquerycontainer.py | XOrbQueryContainer.clear | def clear(self):
"""
Clears out the widgets for this query builder.
"""
layout = self._entryWidget.layout()
for i in range(layout.count() - 1):
widget = layout.itemAt(i).widget()
widget.close() | python | def clear(self):
"""
Clears out the widgets for this query builder.
"""
layout = self._entryWidget.layout()
for i in range(layout.count() - 1):
widget = layout.itemAt(i).widget()
widget.close() | [
"def",
"clear",
"(",
"self",
")",
":",
"layout",
"=",
"self",
".",
"_entryWidget",
".",
"layout",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"layout",
".",
"count",
"(",
")",
"-",
"1",
")",
":",
"widget",
"=",
"layout",
".",
"itemAt",
"(",
"i",
")",
".",
"widget",
"(",
")",
"widget",
".",
"close",
"(",
")"
] | Clears out the widgets for this query builder. | [
"Clears",
"out",
"the",
"widgets",
"for",
"this",
"query",
"builder",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xorbquerywidget/xorbquerycontainer.py#L83-L90 | train |
bitesofcode/projexui | projexui/widgets/xorbquerywidget/xorbquerycontainer.py | XOrbQueryContainer.moveDown | def moveDown(self, entry):
"""
Moves the current query down one entry.
"""
if not entry:
return
entries = self.entries()
next = entries[entries.index(entry) + 1]
entry_q = entry.query()
next_q = next.query()
next.setQuery(entry_q)
entry.setQuery(next_q) | python | def moveDown(self, entry):
"""
Moves the current query down one entry.
"""
if not entry:
return
entries = self.entries()
next = entries[entries.index(entry) + 1]
entry_q = entry.query()
next_q = next.query()
next.setQuery(entry_q)
entry.setQuery(next_q) | [
"def",
"moveDown",
"(",
"self",
",",
"entry",
")",
":",
"if",
"not",
"entry",
":",
"return",
"entries",
"=",
"self",
".",
"entries",
"(",
")",
"next",
"=",
"entries",
"[",
"entries",
".",
"index",
"(",
"entry",
")",
"+",
"1",
"]",
"entry_q",
"=",
"entry",
".",
"query",
"(",
")",
"next_q",
"=",
"next",
".",
"query",
"(",
")",
"next",
".",
"setQuery",
"(",
"entry_q",
")",
"entry",
".",
"setQuery",
"(",
"next_q",
")"
] | Moves the current query down one entry. | [
"Moves",
"the",
"current",
"query",
"down",
"one",
"entry",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xorbquerywidget/xorbquerycontainer.py#L168-L182 | train |
bitesofcode/projexui | projexui/widgets/xganttwidget/xganttwidget.py | XGanttWidget._selectTree | def _selectTree( self ):
"""
Matches the tree selection to the views selection.
"""
self.uiGanttTREE.blockSignals(True)
self.uiGanttTREE.clearSelection()
for item in self.uiGanttVIEW.scene().selectedItems():
item.treeItem().setSelected(True)
self.uiGanttTREE.blockSignals(False) | python | def _selectTree( self ):
"""
Matches the tree selection to the views selection.
"""
self.uiGanttTREE.blockSignals(True)
self.uiGanttTREE.clearSelection()
for item in self.uiGanttVIEW.scene().selectedItems():
item.treeItem().setSelected(True)
self.uiGanttTREE.blockSignals(False) | [
"def",
"_selectTree",
"(",
"self",
")",
":",
"self",
".",
"uiGanttTREE",
".",
"blockSignals",
"(",
"True",
")",
"self",
".",
"uiGanttTREE",
".",
"clearSelection",
"(",
")",
"for",
"item",
"in",
"self",
".",
"uiGanttVIEW",
".",
"scene",
"(",
")",
".",
"selectedItems",
"(",
")",
":",
"item",
".",
"treeItem",
"(",
")",
".",
"setSelected",
"(",
"True",
")",
"self",
".",
"uiGanttTREE",
".",
"blockSignals",
"(",
"False",
")"
] | Matches the tree selection to the views selection. | [
"Matches",
"the",
"tree",
"selection",
"to",
"the",
"views",
"selection",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xganttwidget/xganttwidget.py#L158-L166 | train |
bitesofcode/projexui | projexui/widgets/xganttwidget/xganttwidget.py | XGanttWidget._selectView | def _selectView( self ):
"""
Matches the view selection to the trees selection.
"""
scene = self.uiGanttVIEW.scene()
scene.blockSignals(True)
scene.clearSelection()
for item in self.uiGanttTREE.selectedItems():
item.viewItem().setSelected(True)
scene.blockSignals(False)
curr_item = self.uiGanttTREE.currentItem()
vitem = curr_item.viewItem()
if vitem:
self.uiGanttVIEW.centerOn(vitem) | python | def _selectView( self ):
"""
Matches the view selection to the trees selection.
"""
scene = self.uiGanttVIEW.scene()
scene.blockSignals(True)
scene.clearSelection()
for item in self.uiGanttTREE.selectedItems():
item.viewItem().setSelected(True)
scene.blockSignals(False)
curr_item = self.uiGanttTREE.currentItem()
vitem = curr_item.viewItem()
if vitem:
self.uiGanttVIEW.centerOn(vitem) | [
"def",
"_selectView",
"(",
"self",
")",
":",
"scene",
"=",
"self",
".",
"uiGanttVIEW",
".",
"scene",
"(",
")",
"scene",
".",
"blockSignals",
"(",
"True",
")",
"scene",
".",
"clearSelection",
"(",
")",
"for",
"item",
"in",
"self",
".",
"uiGanttTREE",
".",
"selectedItems",
"(",
")",
":",
"item",
".",
"viewItem",
"(",
")",
".",
"setSelected",
"(",
"True",
")",
"scene",
".",
"blockSignals",
"(",
"False",
")",
"curr_item",
"=",
"self",
".",
"uiGanttTREE",
".",
"currentItem",
"(",
")",
"vitem",
"=",
"curr_item",
".",
"viewItem",
"(",
")",
"if",
"vitem",
":",
"self",
".",
"uiGanttVIEW",
".",
"centerOn",
"(",
"vitem",
")"
] | Matches the view selection to the trees selection. | [
"Matches",
"the",
"view",
"selection",
"to",
"the",
"trees",
"selection",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xganttwidget/xganttwidget.py#L168-L183 | train |
bitesofcode/projexui | projexui/widgets/xganttwidget/xganttwidget.py | XGanttWidget._updateViewRect | def _updateViewRect( self ):
"""
Updates the view rect to match the current tree value.
"""
if not self.updatesEnabled():
return
header_h = self._cellHeight * 2
rect = self.uiGanttVIEW.scene().sceneRect()
sbar_max = self.uiGanttTREE.verticalScrollBar().maximum()
sbar_max += self.uiGanttTREE.viewport().height() + header_h
widget_max = self.uiGanttVIEW.height()
widget_max -= (self.uiGanttVIEW.horizontalScrollBar().height() + 10)
rect.setHeight(max(widget_max, sbar_max))
self.uiGanttVIEW.scene().setSceneRect(rect) | python | def _updateViewRect( self ):
"""
Updates the view rect to match the current tree value.
"""
if not self.updatesEnabled():
return
header_h = self._cellHeight * 2
rect = self.uiGanttVIEW.scene().sceneRect()
sbar_max = self.uiGanttTREE.verticalScrollBar().maximum()
sbar_max += self.uiGanttTREE.viewport().height() + header_h
widget_max = self.uiGanttVIEW.height()
widget_max -= (self.uiGanttVIEW.horizontalScrollBar().height() + 10)
rect.setHeight(max(widget_max, sbar_max))
self.uiGanttVIEW.scene().setSceneRect(rect) | [
"def",
"_updateViewRect",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"updatesEnabled",
"(",
")",
":",
"return",
"header_h",
"=",
"self",
".",
"_cellHeight",
"*",
"2",
"rect",
"=",
"self",
".",
"uiGanttVIEW",
".",
"scene",
"(",
")",
".",
"sceneRect",
"(",
")",
"sbar_max",
"=",
"self",
".",
"uiGanttTREE",
".",
"verticalScrollBar",
"(",
")",
".",
"maximum",
"(",
")",
"sbar_max",
"+=",
"self",
".",
"uiGanttTREE",
".",
"viewport",
"(",
")",
".",
"height",
"(",
")",
"+",
"header_h",
"widget_max",
"=",
"self",
".",
"uiGanttVIEW",
".",
"height",
"(",
")",
"widget_max",
"-=",
"(",
"self",
".",
"uiGanttVIEW",
".",
"horizontalScrollBar",
"(",
")",
".",
"height",
"(",
")",
"+",
"10",
")",
"rect",
".",
"setHeight",
"(",
"max",
"(",
"widget_max",
",",
"sbar_max",
")",
")",
"self",
".",
"uiGanttVIEW",
".",
"scene",
"(",
")",
".",
"setSceneRect",
"(",
"rect",
")"
] | Updates the view rect to match the current tree value. | [
"Updates",
"the",
"view",
"rect",
"to",
"match",
"the",
"current",
"tree",
"value",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xganttwidget/xganttwidget.py#L185-L200 | train |
bitesofcode/projexui | projexui/widgets/xganttwidget/xganttwidget.py | XGanttWidget.syncView | def syncView(self):
"""
Syncs all the items to the view.
"""
if not self.updatesEnabled():
return
for item in self.topLevelItems():
try:
item.syncView(recursive=True)
except AttributeError:
continue | python | def syncView(self):
"""
Syncs all the items to the view.
"""
if not self.updatesEnabled():
return
for item in self.topLevelItems():
try:
item.syncView(recursive=True)
except AttributeError:
continue | [
"def",
"syncView",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"updatesEnabled",
"(",
")",
":",
"return",
"for",
"item",
"in",
"self",
".",
"topLevelItems",
"(",
")",
":",
"try",
":",
"item",
".",
"syncView",
"(",
"recursive",
"=",
"True",
")",
"except",
"AttributeError",
":",
"continue"
] | Syncs all the items to the view. | [
"Syncs",
"all",
"the",
"items",
"to",
"the",
"view",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xganttwidget/xganttwidget.py#L627-L638 | train |
tueda/python-form | form/datapath.py | get_data_path | def get_data_path(package, resource):
# type: (str, str) -> str
"""Return the full file path of a resource of a package."""
loader = pkgutil.get_loader(package)
if loader is None or not hasattr(loader, 'get_data'):
raise PackageResourceError("Failed to load package: '{0}'".format(
package))
mod = sys.modules.get(package) or loader.load_module(package)
if mod is None or not hasattr(mod, '__file__'):
raise PackageResourceError("Failed to load module: '{0}'".format(
package))
parts = resource.split('/')
parts.insert(0, os.path.dirname(mod.__file__))
resource_name = os.path.join(*parts)
return resource_name | python | def get_data_path(package, resource):
# type: (str, str) -> str
"""Return the full file path of a resource of a package."""
loader = pkgutil.get_loader(package)
if loader is None or not hasattr(loader, 'get_data'):
raise PackageResourceError("Failed to load package: '{0}'".format(
package))
mod = sys.modules.get(package) or loader.load_module(package)
if mod is None or not hasattr(mod, '__file__'):
raise PackageResourceError("Failed to load module: '{0}'".format(
package))
parts = resource.split('/')
parts.insert(0, os.path.dirname(mod.__file__))
resource_name = os.path.join(*parts)
return resource_name | [
"def",
"get_data_path",
"(",
"package",
",",
"resource",
")",
":",
"# type: (str, str) -> str",
"loader",
"=",
"pkgutil",
".",
"get_loader",
"(",
"package",
")",
"if",
"loader",
"is",
"None",
"or",
"not",
"hasattr",
"(",
"loader",
",",
"'get_data'",
")",
":",
"raise",
"PackageResourceError",
"(",
"\"Failed to load package: '{0}'\"",
".",
"format",
"(",
"package",
")",
")",
"mod",
"=",
"sys",
".",
"modules",
".",
"get",
"(",
"package",
")",
"or",
"loader",
".",
"load_module",
"(",
"package",
")",
"if",
"mod",
"is",
"None",
"or",
"not",
"hasattr",
"(",
"mod",
",",
"'__file__'",
")",
":",
"raise",
"PackageResourceError",
"(",
"\"Failed to load module: '{0}'\"",
".",
"format",
"(",
"package",
")",
")",
"parts",
"=",
"resource",
".",
"split",
"(",
"'/'",
")",
"parts",
".",
"insert",
"(",
"0",
",",
"os",
".",
"path",
".",
"dirname",
"(",
"mod",
".",
"__file__",
")",
")",
"resource_name",
"=",
"os",
".",
"path",
".",
"join",
"(",
"*",
"parts",
")",
"return",
"resource_name"
] | Return the full file path of a resource of a package. | [
"Return",
"the",
"full",
"file",
"path",
"of",
"a",
"resource",
"of",
"a",
"package",
"."
] | 1e5a8464f7a7a6cbbb32411fc2ea3615fd48334b | https://github.com/tueda/python-form/blob/1e5a8464f7a7a6cbbb32411fc2ea3615fd48334b/form/datapath.py#L11-L25 | train |
bitesofcode/projexui | projexui/widgets/xloggerwidget/xloggerwidget.py | XLoggerWidget.scrollToEnd | def scrollToEnd(self):
"""
Scrolls to the end for this console edit.
"""
vsbar = self.verticalScrollBar()
vsbar.setValue(vsbar.maximum())
hbar = self.horizontalScrollBar()
hbar.setValue(0) | python | def scrollToEnd(self):
"""
Scrolls to the end for this console edit.
"""
vsbar = self.verticalScrollBar()
vsbar.setValue(vsbar.maximum())
hbar = self.horizontalScrollBar()
hbar.setValue(0) | [
"def",
"scrollToEnd",
"(",
"self",
")",
":",
"vsbar",
"=",
"self",
".",
"verticalScrollBar",
"(",
")",
"vsbar",
".",
"setValue",
"(",
"vsbar",
".",
"maximum",
"(",
")",
")",
"hbar",
"=",
"self",
".",
"horizontalScrollBar",
"(",
")",
"hbar",
".",
"setValue",
"(",
"0",
")"
] | Scrolls to the end for this console edit. | [
"Scrolls",
"to",
"the",
"end",
"for",
"this",
"console",
"edit",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xloggerwidget/xloggerwidget.py#L423-L431 | train |
bitesofcode/projexui | projexui/widgets/xorbgridedit/xorbgridedit.py | XOrbGridEdit.assignQuery | def assignQuery(self):
"""
Assigns the query from the query widget to the edit.
"""
self.uiRecordTREE.setQuery(self._queryWidget.query(), autoRefresh=True) | python | def assignQuery(self):
"""
Assigns the query from the query widget to the edit.
"""
self.uiRecordTREE.setQuery(self._queryWidget.query(), autoRefresh=True) | [
"def",
"assignQuery",
"(",
"self",
")",
":",
"self",
".",
"uiRecordTREE",
".",
"setQuery",
"(",
"self",
".",
"_queryWidget",
".",
"query",
"(",
")",
",",
"autoRefresh",
"=",
"True",
")"
] | Assigns the query from the query widget to the edit. | [
"Assigns",
"the",
"query",
"from",
"the",
"query",
"widget",
"to",
"the",
"edit",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xorbgridedit/xorbgridedit.py#L115-L119 | train |
bitesofcode/projexui | projexui/widgets/xorbgridedit/xorbgridedit.py | XOrbGridEdit.refresh | def refresh(self):
"""
Commits changes stored in the interface to the database.
"""
table = self.tableType()
if table:
table.markTableCacheExpired()
self.uiRecordTREE.searchRecords(self.uiSearchTXT.text()) | python | def refresh(self):
"""
Commits changes stored in the interface to the database.
"""
table = self.tableType()
if table:
table.markTableCacheExpired()
self.uiRecordTREE.searchRecords(self.uiSearchTXT.text()) | [
"def",
"refresh",
"(",
"self",
")",
":",
"table",
"=",
"self",
".",
"tableType",
"(",
")",
"if",
"table",
":",
"table",
".",
"markTableCacheExpired",
"(",
")",
"self",
".",
"uiRecordTREE",
".",
"searchRecords",
"(",
"self",
".",
"uiSearchTXT",
".",
"text",
"(",
")",
")"
] | Commits changes stored in the interface to the database. | [
"Commits",
"changes",
"stored",
"in",
"the",
"interface",
"to",
"the",
"database",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xorbgridedit/xorbgridedit.py#L186-L194 | train |
evansd/django-envsettings | envsettings/base.py | URLSettingsBase.parse | def parse(self, url):
"""
Return a configuration dict from a URL
"""
parsed_url = urlparse.urlparse(url)
try:
default_config = self.CONFIG[parsed_url.scheme]
except KeyError:
raise ValueError(
'unrecognised URL scheme for {}: {}'.format(
self.__class__.__name__, url))
handler = self.get_handler_for_scheme(parsed_url.scheme)
config = copy.deepcopy(default_config)
return handler(parsed_url, config) | python | def parse(self, url):
"""
Return a configuration dict from a URL
"""
parsed_url = urlparse.urlparse(url)
try:
default_config = self.CONFIG[parsed_url.scheme]
except KeyError:
raise ValueError(
'unrecognised URL scheme for {}: {}'.format(
self.__class__.__name__, url))
handler = self.get_handler_for_scheme(parsed_url.scheme)
config = copy.deepcopy(default_config)
return handler(parsed_url, config) | [
"def",
"parse",
"(",
"self",
",",
"url",
")",
":",
"parsed_url",
"=",
"urlparse",
".",
"urlparse",
"(",
"url",
")",
"try",
":",
"default_config",
"=",
"self",
".",
"CONFIG",
"[",
"parsed_url",
".",
"scheme",
"]",
"except",
"KeyError",
":",
"raise",
"ValueError",
"(",
"'unrecognised URL scheme for {}: {}'",
".",
"format",
"(",
"self",
".",
"__class__",
".",
"__name__",
",",
"url",
")",
")",
"handler",
"=",
"self",
".",
"get_handler_for_scheme",
"(",
"parsed_url",
".",
"scheme",
")",
"config",
"=",
"copy",
".",
"deepcopy",
"(",
"default_config",
")",
"return",
"handler",
"(",
"parsed_url",
",",
"config",
")"
] | Return a configuration dict from a URL | [
"Return",
"a",
"configuration",
"dict",
"from",
"a",
"URL"
] | 541932af261d5369f211f836a238dc020ee316e8 | https://github.com/evansd/django-envsettings/blob/541932af261d5369f211f836a238dc020ee316e8/envsettings/base.py#L91-L104 | train |
evansd/django-envsettings | envsettings/base.py | URLSettingsBase.get_auto_config | def get_auto_config(self):
"""
Walk over all available auto_config methods, passing them the current
environment and seeing if they return a configuration URL
"""
methods = [m for m in dir(self) if m.startswith('auto_config_')]
for method_name in sorted(methods):
auto_config_method = getattr(self, method_name)
url = auto_config_method(self.env)
if url:
return url | python | def get_auto_config(self):
"""
Walk over all available auto_config methods, passing them the current
environment and seeing if they return a configuration URL
"""
methods = [m for m in dir(self) if m.startswith('auto_config_')]
for method_name in sorted(methods):
auto_config_method = getattr(self, method_name)
url = auto_config_method(self.env)
if url:
return url | [
"def",
"get_auto_config",
"(",
"self",
")",
":",
"methods",
"=",
"[",
"m",
"for",
"m",
"in",
"dir",
"(",
"self",
")",
"if",
"m",
".",
"startswith",
"(",
"'auto_config_'",
")",
"]",
"for",
"method_name",
"in",
"sorted",
"(",
"methods",
")",
":",
"auto_config_method",
"=",
"getattr",
"(",
"self",
",",
"method_name",
")",
"url",
"=",
"auto_config_method",
"(",
"self",
".",
"env",
")",
"if",
"url",
":",
"return",
"url"
] | Walk over all available auto_config methods, passing them the current
environment and seeing if they return a configuration URL | [
"Walk",
"over",
"all",
"available",
"auto_config",
"methods",
"passing",
"them",
"the",
"current",
"environment",
"and",
"seeing",
"if",
"they",
"return",
"a",
"configuration",
"URL"
] | 541932af261d5369f211f836a238dc020ee316e8 | https://github.com/evansd/django-envsettings/blob/541932af261d5369f211f836a238dc020ee316e8/envsettings/base.py#L114-L124 | train |
intelsdi-x/snap-plugin-lib-py | snap_plugin/v1/processor_proxy.py | _ProcessorProxy.Process | def Process(self, request, context):
"""Dispatches the request to the plugins process method"""
LOG.debug("Process called")
try:
metrics = self.plugin.process(
[Metric(pb=m) for m in request.Metrics],
ConfigMap(pb=request.Config)
)
return MetricsReply(metrics=[m.pb for m in metrics])
except Exception as err:
msg = "message: {}\n\nstack trace: {}".format(
err, traceback.format_exc())
return MetricsReply(metrics=[], error=msg) | python | def Process(self, request, context):
"""Dispatches the request to the plugins process method"""
LOG.debug("Process called")
try:
metrics = self.plugin.process(
[Metric(pb=m) for m in request.Metrics],
ConfigMap(pb=request.Config)
)
return MetricsReply(metrics=[m.pb for m in metrics])
except Exception as err:
msg = "message: {}\n\nstack trace: {}".format(
err, traceback.format_exc())
return MetricsReply(metrics=[], error=msg) | [
"def",
"Process",
"(",
"self",
",",
"request",
",",
"context",
")",
":",
"LOG",
".",
"debug",
"(",
"\"Process called\"",
")",
"try",
":",
"metrics",
"=",
"self",
".",
"plugin",
".",
"process",
"(",
"[",
"Metric",
"(",
"pb",
"=",
"m",
")",
"for",
"m",
"in",
"request",
".",
"Metrics",
"]",
",",
"ConfigMap",
"(",
"pb",
"=",
"request",
".",
"Config",
")",
")",
"return",
"MetricsReply",
"(",
"metrics",
"=",
"[",
"m",
".",
"pb",
"for",
"m",
"in",
"metrics",
"]",
")",
"except",
"Exception",
"as",
"err",
":",
"msg",
"=",
"\"message: {}\\n\\nstack trace: {}\"",
".",
"format",
"(",
"err",
",",
"traceback",
".",
"format_exc",
"(",
")",
")",
"return",
"MetricsReply",
"(",
"metrics",
"=",
"[",
"]",
",",
"error",
"=",
"msg",
")"
] | Dispatches the request to the plugins process method | [
"Dispatches",
"the",
"request",
"to",
"the",
"plugins",
"process",
"method"
] | 8da5d00ac5f9d2b48a7239563ac7788209891ca4 | https://github.com/intelsdi-x/snap-plugin-lib-py/blob/8da5d00ac5f9d2b48a7239563ac7788209891ca4/snap_plugin/v1/processor_proxy.py#L36-L48 | train |
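A sketch of the dispatch-and-wrap pattern in the Process method above, using plain Python stand-ins for the gRPC request and reply types: the plugin's process() result is wrapped in a reply, and any exception comes back in the reply's error field instead of propagating. ReplyStub and DoublingPlugin are invented stand-ins, not real snap_plugin classes.

import traceback

class ReplyStub:
    def __init__(self, metrics, error=""):
        self.metrics, self.error = metrics, error

class DoublingPlugin:
    def process(self, metrics, config):
        return [m * 2 for m in metrics]

def process_request(plugin, metrics, config):
    try:
        return ReplyStub(plugin.process(metrics, config))
    except Exception as err:
        msg = "message: {}\n\nstack trace: {}".format(err, traceback.format_exc())
        return ReplyStub([], error=msg)   # errors travel back inside the reply

print(process_request(DoublingPlugin(), [1, 2, 3], {}).metrics)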
johnnoone/aioconsul | aioconsul/client/members_endpoint.py | MembersEndpoint.join | async def join(self, address, *, wan=None):
"""Triggers the local agent to join a node
Parameters:
address (str): Address of node
wan (bool): Attempt to join using the WAN pool
Returns:
bool: ``True`` on success
This endpoint is used to instruct the agent to attempt to connect to
a given address. For agents running in server mode, providing the ``wan``
parameter causes the agent to attempt to join using the WAN pool.
"""
response = await self._api.get("/v1/agent/join", address,
params={"wan": wan})
return response.status == 200 | python | async def join(self, address, *, wan=None):
"""Triggers the local agent to join a node
Parameters:
address (str): Address of node
wan (bool): Attempt to join using the WAN pool
Returns:
bool: ``True`` on success
This endpoint is used to instruct the agent to attempt to connect to
a given address. For agents running in server mode, providing the ``wan``
parameter causes the agent to attempt to join using the WAN pool.
"""
response = await self._api.get("/v1/agent/join", address,
params={"wan": wan})
return response.status == 200 | [
"async",
"def",
"join",
"(",
"self",
",",
"address",
",",
"*",
",",
"wan",
"=",
"None",
")",
":",
"response",
"=",
"await",
"self",
".",
"_api",
".",
"get",
"(",
"\"/v1/agent/join\"",
",",
"address",
",",
"params",
"=",
"{",
"\"wan\"",
":",
"wan",
"}",
")",
"return",
"response",
".",
"status",
"==",
"200"
] | Triggers the local agent to join a node
Parameters:
address (str): Address of node
wan (bool): Attempt to join using the WAN pool
Returns:
bool: ``True`` on success
This endpoint is used to instruct the agent to attempt to connect to
a given address. For agents running in server mode, providing the ``wan``
parameter causes the agent to attempt to join using the WAN pool. | [
"Triggers",
"the",
"local",
"agent",
"to",
"join",
"a",
"node"
] | 02f7a529d7dc2e49bed942111067aa5faf320e90 | https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/members_endpoint.py#L44-L59 | train |
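A hedged usage sketch for the join() coroutine documented above. Only the join(address, wan=...) call is taken from the record; the aioconsul.Client constructor and the client.members attribute path are assumptions made for illustration.

import asyncio
import aioconsul

async def main():
    client = aioconsul.Client("http://127.0.0.1:8500")   # assumed constructor
    joined = await client.members.join("10.0.0.2", wan=False)
    print("joined" if joined else "join failed")

asyncio.get_event_loop().run_until_complete(main())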
johnnoone/aioconsul | aioconsul/client/members_endpoint.py | MembersEndpoint.force_leave | async def force_leave(self, node):
"""Forces removal of a node
Parameters:
node (ObjectID): Node name
Returns:
bool: ``True`` on success
This endpoint is used to instruct the agent to force a node into the
``left`` state. If a node fails unexpectedly, then it will be in a
``failed`` state. Once in the ``failed`` state, Consul will attempt to
reconnect, and the services and checks belonging to that node will not
be cleaned up. Forcing a node into the ``left`` state allows its old
entries to be removed.
"""
node_id = extract_attr(node, keys=["Node", "ID"])
response = await self._get("/v1/agent/force-leave", node_id)
return response.status == 200 | python | async def force_leave(self, node):
"""Forces removal of a node
Parameters:
node (ObjectID): Node name
Returns:
bool: ``True`` on success
This endpoint is used to instruct the agent to force a node into the
``left`` state. If a node fails unexpectedly, then it will be in a
``failed`` state. Once in the ``failed`` state, Consul will attempt to
reconnect, and the services and checks belonging to that node will not
be cleaned up. Forcing a node into the ``left`` state allows its old
entries to be removed.
"""
node_id = extract_attr(node, keys=["Node", "ID"])
response = await self._get("/v1/agent/force-leave", node_id)
return response.status == 200 | [
"async",
"def",
"force_leave",
"(",
"self",
",",
"node",
")",
":",
"node_id",
"=",
"extract_attr",
"(",
"node",
",",
"keys",
"=",
"[",
"\"Node\"",
",",
"\"ID\"",
"]",
")",
"response",
"=",
"await",
"self",
".",
"_get",
"(",
"\"/v1/agent/force-leave\"",
",",
"node_id",
")",
"return",
"response",
".",
"status",
"==",
"200"
] | Forces removal of a node
Parameters:
node (ObjectID): Node name
Returns:
bool: ``True`` on success
This endpoint is used to instruct the agent to force a node into the
``left`` state. If a node fails unexpectedly, then it will be in a
``failed`` state. Once in the ``failed`` state, Consul will attempt to
reconnect, and the services and checks belonging to that node will not
be cleaned up. Forcing a node into the ``left`` state allows its old
entries to be removed. | [
"Forces",
"removal",
"of",
"a",
"node"
] | 02f7a529d7dc2e49bed942111067aa5faf320e90 | https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/members_endpoint.py#L61-L78 | train |
bitesofcode/projexui | projexui/widgets/xviewwidget/xviewpanelmenu.py | XViewBaseMenu.gotoNext | def gotoNext(self):
"""
Goes to the next panel tab.
"""
index = self._currentPanel.currentIndex() + 1
if ( self._currentPanel.count() == index ):
index = 0
self._currentPanel.setCurrentIndex(index) | python | def gotoNext(self):
"""
Goes to the next panel tab.
"""
index = self._currentPanel.currentIndex() + 1
if ( self._currentPanel.count() == index ):
index = 0
self._currentPanel.setCurrentIndex(index) | [
"def",
"gotoNext",
"(",
"self",
")",
":",
"index",
"=",
"self",
".",
"_currentPanel",
".",
"currentIndex",
"(",
")",
"+",
"1",
"if",
"(",
"self",
".",
"_currentPanel",
".",
"count",
"(",
")",
"==",
"index",
")",
":",
"index",
"=",
"0",
"self",
".",
"_currentPanel",
".",
"setCurrentIndex",
"(",
"index",
")"
] | Goes to the next panel tab. | [
"Goes",
"to",
"the",
"next",
"panel",
"tab",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xviewwidget/xviewpanelmenu.py#L136-L144 | train |
bitesofcode/projexui | projexui/widgets/xviewwidget/xviewpanelmenu.py | XViewBaseMenu.gotoPrevious | def gotoPrevious(self):
"""
Goes to the previous panel tab.
"""
index = self._currentPanel.currentIndex() - 1
if index < 0:
index = self._currentPanel.count() - 1
self._currentPanel.setCurrentIndex(index) | python | def gotoPrevious(self):
"""
Goes to the previous panel tab.
"""
index = self._currentPanel.currentIndex() - 1
if index < 0:
index = self._currentPanel.count() - 1
self._currentPanel.setCurrentIndex(index) | [
"def",
"gotoPrevious",
"(",
"self",
")",
":",
"index",
"=",
"self",
".",
"_currentPanel",
".",
"currentIndex",
"(",
")",
"-",
"1",
"if",
"index",
"<",
"0",
":",
"index",
"=",
"self",
".",
"_currentPanel",
".",
"count",
"(",
")",
"-",
"1",
"self",
".",
"_currentPanel",
".",
"setCurrentIndex",
"(",
"index",
")"
] | Goes to the previous panel tab. | [
"Goes",
"to",
"the",
"previous",
"panel",
"tab",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xviewwidget/xviewpanelmenu.py#L146-L154 | train |
bitesofcode/projexui | projexui/widgets/xviewwidget/xviewpanelmenu.py | XViewBaseMenu.newPanelTab | def newPanelTab(self):
"""
Creates a new panel with a copy of the current widget.
"""
view = self._currentPanel.currentView()
# duplicate the current view
if view:
new_view = view.duplicate(self._currentPanel)
self._currentPanel.addTab(new_view, new_view.windowTitle()) | python | def newPanelTab(self):
"""
Creates a new panel with a copy of the current widget.
"""
view = self._currentPanel.currentView()
# duplicate the current view
if view:
new_view = view.duplicate(self._currentPanel)
self._currentPanel.addTab(new_view, new_view.windowTitle()) | [
"def",
"newPanelTab",
"(",
"self",
")",
":",
"view",
"=",
"self",
".",
"_currentPanel",
".",
"currentView",
"(",
")",
"# duplicate the current view",
"if",
"view",
":",
"new_view",
"=",
"view",
".",
"duplicate",
"(",
"self",
".",
"_currentPanel",
")",
"self",
".",
"_currentPanel",
".",
"addTab",
"(",
"new_view",
",",
"new_view",
".",
"windowTitle",
"(",
")",
")"
] | Creates a new panel with a copy of the current widget. | [
"Creates",
"a",
"new",
"panel",
"with",
"a",
"copy",
"of",
"the",
"current",
"widget",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xviewwidget/xviewpanelmenu.py#L156-L165 | train |
bitesofcode/projexui | projexui/widgets/xviewwidget/xviewpanelmenu.py | XViewBaseMenu.renamePanel | def renamePanel(self):
"""
Prompts the user for a custom name for the current panel tab.
"""
index = self._currentPanel.currentIndex()
title = self._currentPanel.tabText(index)
new_title, accepted = QInputDialog.getText( self,
'Rename Tab',
'Name:',
QLineEdit.Normal,
title )
if accepted:
widget = self._currentPanel.currentView()
widget.setWindowTitle(new_title)
self._currentPanel.setTabText(index, new_title) | python | def renamePanel(self):
"""
Prompts the user for a custom name for the current panel tab.
"""
index = self._currentPanel.currentIndex()
title = self._currentPanel.tabText(index)
new_title, accepted = QInputDialog.getText( self,
'Rename Tab',
'Name:',
QLineEdit.Normal,
title )
if accepted:
widget = self._currentPanel.currentView()
widget.setWindowTitle(new_title)
self._currentPanel.setTabText(index, new_title) | [
"def",
"renamePanel",
"(",
"self",
")",
":",
"index",
"=",
"self",
".",
"_currentPanel",
".",
"currentIndex",
"(",
")",
"title",
"=",
"self",
".",
"_currentPanel",
".",
"tabText",
"(",
"index",
")",
"new_title",
",",
"accepted",
"=",
"QInputDialog",
".",
"getText",
"(",
"self",
",",
"'Rename Tab'",
",",
"'Name:'",
",",
"QLineEdit",
".",
"Normal",
",",
"title",
")",
"if",
"accepted",
":",
"widget",
"=",
"self",
".",
"_currentPanel",
".",
"currentView",
"(",
")",
"widget",
".",
"setWindowTitle",
"(",
"new_title",
")",
"self",
".",
"_currentPanel",
".",
"setTabText",
"(",
"index",
",",
"new_title",
")"
] | Prompts the user for a custom name for the current panel tab. | [
"Prompts",
"the",
"user",
"for",
"a",
"custom",
"name",
"for",
"the",
"current",
"panel",
"tab",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xviewwidget/xviewpanelmenu.py#L167-L183 | train |
bitesofcode/projexui | projexui/widgets/xenumbox.py | XEnumBox.reload | def reload(self):
"""
Reloads the contents for this box.
"""
enum = self._enum
if not enum:
return
self.clear()
if not self.isRequired():
self.addItem('')
if self.sortByKey():
self.addItems(sorted(enum.keys()))
else:
items = enum.items()
items.sort(key = lambda x: x[1])
self.addItems(map(lambda x: x[0], items)) | python | def reload(self):
"""
Reloads the contents for this box.
"""
enum = self._enum
if not enum:
return
self.clear()
if not self.isRequired():
self.addItem('')
if self.sortByKey():
self.addItems(sorted(enum.keys()))
else:
items = enum.items()
items.sort(key = lambda x: x[1])
self.addItems(map(lambda x: x[0], items)) | [
"def",
"reload",
"(",
"self",
")",
":",
"enum",
"=",
"self",
".",
"_enum",
"if",
"not",
"enum",
":",
"return",
"self",
".",
"clear",
"(",
")",
"if",
"not",
"self",
".",
"isRequired",
"(",
")",
":",
"self",
".",
"addItem",
"(",
"''",
")",
"if",
"self",
".",
"sortByKey",
"(",
")",
":",
"self",
".",
"addItems",
"(",
"sorted",
"(",
"enum",
".",
"keys",
"(",
")",
")",
")",
"else",
":",
"items",
"=",
"enum",
".",
"items",
"(",
")",
"items",
".",
"sort",
"(",
"key",
"=",
"lambda",
"x",
":",
"x",
"[",
"1",
"]",
")",
"self",
".",
"addItems",
"(",
"map",
"(",
"lambda",
"x",
":",
"x",
"[",
"0",
"]",
",",
"items",
")",
")"
] | Reloads the contents for this box. | [
"Reloads",
"the",
"contents",
"for",
"this",
"box",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xenumbox.py#L124-L143 | train |
bitesofcode/projexui | projexui/widgets/ximageslider/ximageslider.py | XImageSlider.recalculate | def recalculate(self):
"""
Recalculates the slider scene for this widget.
"""
# recalculate the scene geometry
scene = self.scene()
w = self.calculateSceneWidth()
scene.setSceneRect(0, 0, w, self.height())
# recalculate the item layout
spacing = self.spacing()
x = self.width() / 4.0
y = self.height() / 2.0
for item in self.items():
pmap = item.pixmap()
item.setPos(x, y - pmap.height() / 1.5)
x += pmap.size().width() + spacing | python | def recalculate(self):
"""
Recalculates the slider scene for this widget.
"""
# recalculate the scene geometry
scene = self.scene()
w = self.calculateSceneWidth()
scene.setSceneRect(0, 0, w, self.height())
# recalculate the item layout
spacing = self.spacing()
x = self.width() / 4.0
y = self.height() / 2.0
for item in self.items():
pmap = item.pixmap()
item.setPos(x, y - pmap.height() / 1.5)
x += pmap.size().width() + spacing | [
"def",
"recalculate",
"(",
"self",
")",
":",
"# recalculate the scene geometry\r",
"scene",
"=",
"self",
".",
"scene",
"(",
")",
"w",
"=",
"self",
".",
"calculateSceneWidth",
"(",
")",
"scene",
".",
"setSceneRect",
"(",
"0",
",",
"0",
",",
"w",
",",
"self",
".",
"height",
"(",
")",
")",
"# recalculate the item layout\r",
"spacing",
"=",
"self",
".",
"spacing",
"(",
")",
"x",
"=",
"self",
".",
"width",
"(",
")",
"/",
"4.0",
"y",
"=",
"self",
".",
"height",
"(",
")",
"/",
"2.0",
"for",
"item",
"in",
"self",
".",
"items",
"(",
")",
":",
"pmap",
"=",
"item",
".",
"pixmap",
"(",
")",
"item",
".",
"setPos",
"(",
"x",
",",
"y",
"-",
"pmap",
".",
"height",
"(",
")",
"/",
"1.5",
")",
"x",
"+=",
"pmap",
".",
"size",
"(",
")",
".",
"width",
"(",
")",
"+",
"spacing"
] | Recalculates the slider scene for this widget. | [
"Recalcualtes",
"the",
"slider",
"scene",
"for",
"this",
"widget",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/ximageslider/ximageslider.py#L106-L122 | train |
ReneNulschDE/mercedesmejsonpy | mercedesmejsonpy/controller.py | Controller._check_token | def _check_token(self):
""" Simple Mercedes me API.
"""
need_token = (self._token_info is None or
self.auth_handler.is_token_expired(self._token_info))
if need_token:
new_token = \
self.auth_handler.refresh_access_token(
self._token_info['refresh_token'])
# skip when refresh failed
if new_token is None:
return
self._token_info = new_token
self._auth_header = {"content-type": "application/json",
"Authorization": "Bearer {}".format(
self._token_info.get('access_token'))} | python | def _check_token(self):
""" Simple Mercedes me API.
"""
need_token = (self._token_info is None or
self.auth_handler.is_token_expired(self._token_info))
if need_token:
new_token = \
self.auth_handler.refresh_access_token(
self._token_info['refresh_token'])
# skip when refresh failed
if new_token is None:
return
self._token_info = new_token
self._auth_header = {"content-type": "application/json",
"Authorization": "Bearer {}".format(
self._token_info.get('access_token'))} | [
"def",
"_check_token",
"(",
"self",
")",
":",
"need_token",
"=",
"(",
"self",
".",
"_token_info",
"is",
"None",
"or",
"self",
".",
"auth_handler",
".",
"is_token_expired",
"(",
"self",
".",
"_token_info",
")",
")",
"if",
"need_token",
":",
"new_token",
"=",
"self",
".",
"auth_handler",
".",
"refresh_access_token",
"(",
"self",
".",
"_token_info",
"[",
"'refresh_token'",
"]",
")",
"# skip when refresh failed",
"if",
"new_token",
"is",
"None",
":",
"return",
"self",
".",
"_token_info",
"=",
"new_token",
"self",
".",
"_auth_header",
"=",
"{",
"\"content-type\"",
":",
"\"application/json\"",
",",
"\"Authorization\"",
":",
"\"Bearer {}\"",
".",
"format",
"(",
"self",
".",
"_token_info",
".",
"get",
"(",
"'access_token'",
")",
")",
"}"
] | Simple Mercedes me API. | [
"Simple",
"Mercedes",
"me",
"API",
"."
] | 0618a0b49d6bb46599d11a8f66dc8d08d112ceec | https://github.com/ReneNulschDE/mercedesmejsonpy/blob/0618a0b49d6bb46599d11a8f66dc8d08d112ceec/mercedesmejsonpy/controller.py#L159-L176 | train |
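Self-contained sketch of the refresh-when-expired flow used by _check_token above, with a fake handler standing in for the real Mercedes me OAuth handler; the class, field names and expiry bookkeeping are illustrative assumptions.

import time

class FakeAuthHandler:
    def is_token_expired(self, token_info):
        return token_info is None or token_info["expires_at"] <= time.time()

    def refresh_access_token(self, refresh_token):
        return {"access_token": "new-access-token",
                "refresh_token": refresh_token,
                "expires_at": time.time() + 3600}

def ensure_token(auth, token_info):
    if auth.is_token_expired(token_info):
        old_refresh = token_info["refresh_token"] if token_info else None
        new_token = auth.refresh_access_token(old_refresh)
        if new_token is None:
            return token_info, None       # keep the old state when refresh fails
        token_info = new_token
    header = {"content-type": "application/json",
              "Authorization": "Bearer {}".format(token_info["access_token"])}
    return token_info, header

print(ensure_token(FakeAuthHandler(), None)[1])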
ReneNulschDE/mercedesmejsonpy | mercedesmejsonpy/controller.py | Controller.get_location | def get_location(self, car_id):
""" get refreshed location information.
"""
_LOGGER.debug("get_location for %s called", car_id)
api_result = self._retrieve_api_result(car_id, API_LOCATION)
_LOGGER.debug("get_location result: %s", api_result)
location = Location()
for loc_option in LOCATION_OPTIONS:
curr_loc_option = api_result.get(loc_option)
value = CarAttribute(
curr_loc_option.get("value"),
curr_loc_option.get("retrievalstatus"),
curr_loc_option.get("timestamp"))
setattr(location, loc_option, value)
return location | python | def get_location(self, car_id):
""" get refreshed location information.
"""
_LOGGER.debug("get_location for %s called", car_id)
api_result = self._retrieve_api_result(car_id, API_LOCATION)
_LOGGER.debug("get_location result: %s", api_result)
location = Location()
for loc_option in LOCATION_OPTIONS:
curr_loc_option = api_result.get(loc_option)
value = CarAttribute(
curr_loc_option.get("value"),
curr_loc_option.get("retrievalstatus"),
curr_loc_option.get("timestamp"))
setattr(location, loc_option, value)
return location | [
"def",
"get_location",
"(",
"self",
",",
"car_id",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"get_location for %s called\"",
",",
"car_id",
")",
"api_result",
"=",
"self",
".",
"_retrieve_api_result",
"(",
"car_id",
",",
"API_LOCATION",
")",
"_LOGGER",
".",
"debug",
"(",
"\"get_location result: %s\"",
",",
"api_result",
")",
"location",
"=",
"Location",
"(",
")",
"for",
"loc_option",
"in",
"LOCATION_OPTIONS",
":",
"curr_loc_option",
"=",
"api_result",
".",
"get",
"(",
"loc_option",
")",
"value",
"=",
"CarAttribute",
"(",
"curr_loc_option",
".",
"get",
"(",
"\"value\"",
")",
",",
"curr_loc_option",
".",
"get",
"(",
"\"retrievalstatus\"",
")",
",",
"curr_loc_option",
".",
"get",
"(",
"\"timestamp\"",
")",
")",
"setattr",
"(",
"location",
",",
"loc_option",
",",
"value",
")",
"return",
"location"
] | get refreshed location information. | [
"get",
"refreshed",
"location",
"information",
"."
] | 0618a0b49d6bb46599d11a8f66dc8d08d112ceec | https://github.com/ReneNulschDE/mercedesmejsonpy/blob/0618a0b49d6bb46599d11a8f66dc8d08d112ceec/mercedesmejsonpy/controller.py#L233-L254 | train |
johnnoone/aioconsul | aioconsul/client/acl_endpoint.py | ACLEndpoint.create | async def create(self, token):
"""Creates a new token with a given policy
Parameters:
token (Object): Token specification
Returns:
Object: token ID
The create endpoint is used to make a new token.
A token has a name, a type, and a set of ACL rules.
The request body may take the form::
{
"Name": "my-app-token",
"Type": "client",
"Rules": ""
}
None of the fields are mandatory. The **Name** and **Rules** fields
default to being blank, and the **Type** defaults to "client".
**Name** is opaque to Consul. To aid human operators, it should
be a meaningful indicator of the ACL's purpose.
**Type** is either **client** or **management**. A management token
is comparable to a root user and has the ability to perform any action
including creating, modifying and deleting ACLs.
**ID** field may be provided, and if omitted a random UUID will be
generated.
The format of **Rules** is
`documented here <https://www.consul.io/docs/internals/acl.html>`_.
A successful response body will return the **ID** of the newly
created ACL, like so::
{
"ID": "adf4238a-882b-9ddc-4a9d-5b6758e4159e"
}
"""
token = encode_token(token)
response = await self._api.put("/v1/acl/create", data=token)
return response.body | python | async def create(self, token):
"""Creates a new token with a given policy
Parameters:
token (Object): Token specification
Returns:
Object: token ID
The create endpoint is used to make a new token.
A token has a name, a type, and a set of ACL rules.
The request body may take the form::
{
"Name": "my-app-token",
"Type": "client",
"Rules": ""
}
None of the fields are mandatory. The **Name** and **Rules** fields
default to being blank, and the **Type** defaults to "client".
**Name** is opaque to Consul. To aid human operators, it should
be a meaningful indicator of the ACL's purpose.
**Type** is either **client** or **management**. A management token
is comparable to a root user and has the ability to perform any action
including creating, modifying and deleting ACLs.
**ID** field may be provided, and if omitted a random UUID will be
generated.
The format of **Rules** is
`documented here <https://www.consul.io/docs/internals/acl.html>`_.
A successful response body will return the **ID** of the newly
created ACL, like so::
{
"ID": "adf4238a-882b-9ddc-4a9d-5b6758e4159e"
}
"""
token = encode_token(token)
response = await self._api.put("/v1/acl/create", data=token)
return response.body | [
"async",
"def",
"create",
"(",
"self",
",",
"token",
")",
":",
"token",
"=",
"encode_token",
"(",
"token",
")",
"response",
"=",
"await",
"self",
".",
"_api",
".",
"put",
"(",
"\"/v1/acl/create\"",
",",
"data",
"=",
"token",
")",
"return",
"response",
".",
"body"
] | Creates a new token with a given policy
Parameters:
token (Object): Token specification
Returns:
Object: token ID
The create endpoint is used to make a new token.
A token has a name, a type, and a set of ACL rules.
The request body may take the form::
{
"Name": "my-app-token",
"Type": "client",
"Rules": ""
}
None of the fields are mandatory. The **Name** and **Rules** fields
default to being blank, and the **Type** defaults to "client".
**Name** is opaque to Consul. To aid human operators, it should
be a meaningful indicator of the ACL's purpose.
**Type** is either **client** or **management**. A management token
is comparable to a root user and has the ability to perform any action
including creating, modifying and deleting ACLs.
**ID** field may be provided, and if omitted a random UUID will be
generated.
The format of **Rules** is
`documented here <https://www.consul.io/docs/internals/acl.html>`_.
A successful response body will return the **ID** of the newly
created ACL, like so::
{
"ID": "adf4238a-882b-9ddc-4a9d-5b6758e4159e"
} | [
"Creates",
"a",
"new",
"token",
"with",
"a",
"given",
"policy"
] | 02f7a529d7dc2e49bed942111067aa5faf320e90 | https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/acl_endpoint.py#L15-L59 | train |
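A hedged usage sketch for the create() coroutine documented above. The token payload mirrors the documented request body; the aioconsul.Client constructor and the client.acl attribute path are assumptions, and a management token would normally be configured on the client.

import asyncio
import aioconsul

async def main():
    client = aioconsul.Client("http://127.0.0.1:8500")   # assumed constructor
    result = await client.acl.create({
        "Name": "my-app-token",
        "Type": "client",
        "Rules": "",                      # policy rules go here, per the record
    })
    print(result["ID"])                   # ID of the newly created ACL

asyncio.get_event_loop().run_until_complete(main())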
johnnoone/aioconsul | aioconsul/client/acl_endpoint.py | ACLEndpoint.destroy | async def destroy(self, token):
"""Destroys a given token.
Parameters:
token (ObjectID): Token ID
Returns:
bool: ``True`` on success
"""
token_id = extract_attr(token, keys=["ID"])
response = await self._api.put("/v1/acl/destroy", token_id)
return response.body | python | async def destroy(self, token):
"""Destroys a given token.
Parameters:
token (ObjectID): Token ID
Returns:
bool: ``True`` on success
"""
token_id = extract_attr(token, keys=["ID"])
response = await self._api.put("/v1/acl/destroy", token_id)
return response.body | [
"async",
"def",
"destroy",
"(",
"self",
",",
"token",
")",
":",
"token_id",
"=",
"extract_attr",
"(",
"token",
",",
"keys",
"=",
"[",
"\"ID\"",
"]",
")",
"response",
"=",
"await",
"self",
".",
"_api",
".",
"put",
"(",
"\"/v1/acl/destroy\"",
",",
"token_id",
")",
"return",
"response",
".",
"body"
] | Destroys a given token.
Parameters:
token (ObjectID): Token ID
Returns:
bool: ``True`` on success | [
"Destroys",
"a",
"given",
"token",
"."
] | 02f7a529d7dc2e49bed942111067aa5faf320e90 | https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/acl_endpoint.py#L93-L103 | train |
johnnoone/aioconsul | aioconsul/client/acl_endpoint.py | ACLEndpoint.info | async def info(self, token):
"""Queries the policy of a given token.
Parameters:
token (ObjectID): Token ID
Returns:
ObjectMeta: where value is token
Raises:
NotFound:
It returns a body like this::
{
"CreateIndex": 3,
"ModifyIndex": 3,
"ID": "8f246b77-f3e1-ff88-5b48-8ec93abf3e05",
"Name": "Client Token",
"Type": "client",
"Rules": {
"key": {
"": {
"policy": "read"
},
"private/": {
"policy": "deny"
}
}
}
}
"""
token_id = extract_attr(token, keys=["ID"])
response = await self._api.get("/v1/acl/info", token_id)
meta = extract_meta(response.headers)
try:
result = decode_token(response.body[0])
except IndexError:
raise NotFound(response.body, meta=meta)
return consul(result, meta=meta) | python | async def info(self, token):
"""Queries the policy of a given token.
Parameters:
token (ObjectID): Token ID
Returns:
ObjectMeta: where value is token
Raises:
NotFound:
It returns a body like this::
{
"CreateIndex": 3,
"ModifyIndex": 3,
"ID": "8f246b77-f3e1-ff88-5b48-8ec93abf3e05",
"Name": "Client Token",
"Type": "client",
"Rules": {
"key": {
"": {
"policy": "read"
},
"private/": {
"policy": "deny"
}
}
}
}
"""
token_id = extract_attr(token, keys=["ID"])
response = await self._api.get("/v1/acl/info", token_id)
meta = extract_meta(response.headers)
try:
result = decode_token(response.body[0])
except IndexError:
raise NotFound(response.body, meta=meta)
return consul(result, meta=meta) | [
"async",
"def",
"info",
"(",
"self",
",",
"token",
")",
":",
"token_id",
"=",
"extract_attr",
"(",
"token",
",",
"keys",
"=",
"[",
"\"ID\"",
"]",
")",
"response",
"=",
"await",
"self",
".",
"_api",
".",
"get",
"(",
"\"/v1/acl/info\"",
",",
"token_id",
")",
"meta",
"=",
"extract_meta",
"(",
"response",
".",
"headers",
")",
"try",
":",
"result",
"=",
"decode_token",
"(",
"response",
".",
"body",
"[",
"0",
"]",
")",
"except",
"IndexError",
":",
"raise",
"NotFound",
"(",
"response",
".",
"body",
",",
"meta",
"=",
"meta",
")",
"return",
"consul",
"(",
"result",
",",
"meta",
"=",
"meta",
")"
] | Queries the policy of a given token.
Parameters:
token (ObjectID): Token ID
Returns:
ObjectMeta: where value is token
Raises:
NotFound:
It returns a body like this::
{
"CreateIndex": 3,
"ModifyIndex": 3,
"ID": "8f246b77-f3e1-ff88-5b48-8ec93abf3e05",
"Name": "Client Token",
"Type": "client",
"Rules": {
"key": {
"": {
"policy": "read"
},
"private/": {
"policy": "deny"
}
}
}
} | [
"Queries",
"the",
"policy",
"of",
"a",
"given",
"token",
"."
] | 02f7a529d7dc2e49bed942111067aa5faf320e90 | https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/acl_endpoint.py#L107-L144 | train |
johnnoone/aioconsul | aioconsul/client/acl_endpoint.py | ACLEndpoint.clone | async def clone(self, token):
"""Creates a new token by cloning an existing token
Parameters:
token (ObjectID): Token ID
Returns:
ObjectMeta: where value is token ID
This allows a token to serve as a template for others, making it
simple to generate new tokens without complex rule management.
As with create, a successful response body will return the ID of the
newly created ACL, like so::
{
"ID": "adf4238a-882b-9ddc-4a9d-5b6758e4159e"
}
"""
token_id = extract_attr(token, keys=["ID"])
response = await self._api.put("/v1/acl/clone", token_id)
return consul(response) | python | async def clone(self, token):
"""Creates a new token by cloning an existing token
Parameters:
token (ObjectID): Token ID
Returns:
ObjectMeta: where value is token ID
This allows a token to serve as a template for others, making it
simple to generate new tokens without complex rule management.
As with create, a successful response body will return the ID of the
newly created ACL, like so::
{
"ID": "adf4238a-882b-9ddc-4a9d-5b6758e4159e"
}
"""
token_id = extract_attr(token, keys=["ID"])
response = await self._api.put("/v1/acl/clone", token_id)
return consul(response) | [
"async",
"def",
"clone",
"(",
"self",
",",
"token",
")",
":",
"token_id",
"=",
"extract_attr",
"(",
"token",
",",
"keys",
"=",
"[",
"\"ID\"",
"]",
")",
"response",
"=",
"await",
"self",
".",
"_api",
".",
"put",
"(",
"\"/v1/acl/clone\"",
",",
"token_id",
")",
"return",
"consul",
"(",
"response",
")"
] | Creates a new token by cloning an existing token
Parameters:
token (ObjectID): Token ID
Returns:
ObjectMeta: where value is token ID
This allows a token to serve as a template for others, making it
simple to generate new tokens without complex rule management.
As with create, a successful response body will return the ID of the
newly created ACL, like so::
{
"ID": "adf4238a-882b-9ddc-4a9d-5b6758e4159e"
} | [
"Creates",
"a",
"new",
"token",
"by",
"cloning",
"an",
"existing",
"token"
] | 02f7a529d7dc2e49bed942111067aa5faf320e90 | https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/acl_endpoint.py#L146-L166 | train |
johnnoone/aioconsul | aioconsul/client/acl_endpoint.py | ACLEndpoint.items | async def items(self):
"""Lists all the active tokens
Returns:
ObjectMeta: where value is a list of tokens
It returns a body like this::
[
{
"CreateIndex": 3,
"ModifyIndex": 3,
"ID": "8f246b77-f3e1-ff88-5b48-8ec93abf3e05",
"Name": "Client Token",
"Type": "client",
"Rules": {
"key": {
"": { "policy": "read" },
"private/": { "policy": "deny" }
}
}
}
]
"""
response = await self._api.get("/v1/acl/list")
results = [decode_token(r) for r in response.body]
return consul(results, meta=extract_meta(response.headers)) | python | async def items(self):
"""Lists all the active tokens
Returns:
ObjectMeta: where value is a list of tokens
It returns a body like this::
[
{
"CreateIndex": 3,
"ModifyIndex": 3,
"ID": "8f246b77-f3e1-ff88-5b48-8ec93abf3e05",
"Name": "Client Token",
"Type": "client",
"Rules": {
"key": {
"": { "policy": "read" },
"private/": { "policy": "deny" }
}
}
}
]
"""
response = await self._api.get("/v1/acl/list")
results = [decode_token(r) for r in response.body]
return consul(results, meta=extract_meta(response.headers)) | [
"async",
"def",
"items",
"(",
"self",
")",
":",
"response",
"=",
"await",
"self",
".",
"_api",
".",
"get",
"(",
"\"/v1/acl/list\"",
")",
"results",
"=",
"[",
"decode_token",
"(",
"r",
")",
"for",
"r",
"in",
"response",
".",
"body",
"]",
"return",
"consul",
"(",
"results",
",",
"meta",
"=",
"extract_meta",
"(",
"response",
".",
"headers",
")",
")"
] | Lists all the active tokens
Returns:
ObjectMeta: where value is a list of tokens
It returns a body like this::
[
{
"CreateIndex": 3,
"ModifyIndex": 3,
"ID": "8f246b77-f3e1-ff88-5b48-8ec93abf3e05",
"Name": "Client Token",
"Type": "client",
"Rules": {
"key": {
"": { "policy": "read" },
"private/": { "policy": "deny" }
}
}
}
] | [
"Lists",
"all",
"the",
"active",
"tokens"
] | 02f7a529d7dc2e49bed942111067aa5faf320e90 | https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/acl_endpoint.py#L168-L194 | train |
johnnoone/aioconsul | aioconsul/client/acl_endpoint.py | ACLEndpoint.replication | async def replication(self, *, dc=None):
"""Checks status of ACL replication
Parameters:
dc (str): Specify datacenter that will be used.
Defaults to the agent's local datacenter.
Returns:
Object: Replication information
Returns the status of the ACL replication process in the datacenter.
This is intended to be used by operators, or by automation checking
the health of ACL replication.
By default, the datacenter of the agent is queried; however, the dc
can be provided using the "dc" parameter.
It returns a body like this::
{
"Enabled": True,
"Running": True,
"SourceDatacenter": "dc1",
"ReplicatedIndex": 1976,
"LastSuccess": datetime(2016, 8, 5, 6, 28, 58, tzinfo=tzutc()),
"LastError": datetime(2016, 8, 5, 6, 28, 58, tzinfo=tzutc())
}
**Enabled** reports whether ACL replication is enabled for the
datacenter.
**Running** reports whether the ACL replication process is running.
The process may take approximately 60 seconds to begin running after
a leader election occurs.
**SourceDatacenter** is the authoritative ACL datacenter that ACLs
are being replicated from, and will match the acl_datacenter
configuration.
**ReplicatedIndex** is the last index that was successfully replicated.
You can compare this to the Index meta returned by the items() endpoint
to determine if the replication process has gotten all available ACLs.
Note that replication runs as a background process approximately every
30 seconds, and that local updates are rate limited to 100
updates/second, so it may take several minutes to perform the
initial sync of a large set of ACLs. After the initial sync, replica
lag should be on the order of about 30 seconds.
**LastSuccess** is the UTC time of the last successful sync operation.
Note that since ACL replication is done with a blocking query, this
may not update for up to 5 minutes if there have been no ACL changes
to replicate. A zero value of "0001-01-01T00:00:00Z" will be present
if no sync has been successful.
**LastError** is the UTC time of the last error encountered during a
sync operation. If this time is later than LastSuccess, you can assume
the replication process is not in a good state. A zero value of
"0001-01-01T00:00:00Z" will be present if no sync has resulted in an
error.
"""
params = {"dc": dc}
response = await self._api.get("/v1/acl/replication", params=params)
return response.body | python | async def replication(self, *, dc=None):
"""Checks status of ACL replication
Parameters:
dc (str): Specify datacenter that will be used.
Defaults to the agent's local datacenter.
Returns:
Object: Replication information
Returns the status of the ACL replication process in the datacenter.
This is intended to be used by operators, or by automation checking
the health of ACL replication.
By default, the datacenter of the agent is queried; however, the dc
can be provided using the "dc" parameter.
It returns a body like this::
{
"Enabled": True,
"Running": True,
"SourceDatacenter": "dc1",
"ReplicatedIndex": 1976,
"LastSuccess": datetime(2016, 8, 5, 6, 28, 58, tzinfo=tzutc()),
"LastError": datetime(2016, 8, 5, 6, 28, 58, tzinfo=tzutc())
}
**Enabled** reports whether ACL replication is enabled for the
datacenter.
**Running** reports whether the ACL replication process is running.
The process may take approximately 60 seconds to begin running after
a leader election occurs.
**SourceDatacenter** is the authoritative ACL datacenter that ACLs
are being replicated from, and will match the acl_datacenter
configuration.
**ReplicatedIndex** is the last index that was successfully replicated.
You can compare this to the Index meta returned by the items() endpoint
to determine if the replication process has gotten all available ACLs.
Note that replication runs as a background process approximately every
30 seconds, and that local updates are rate limited to 100
updates/second, so it may take several minutes to perform the
initial sync of a large set of ACLs. After the initial sync, replica
lag should be on the order of about 30 seconds.
**LastSuccess** is the UTC time of the last successful sync operation.
Note that since ACL replication is done with a blocking query, this
may not update for up to 5 minutes if there have been no ACL changes
to replicate. A zero value of "0001-01-01T00:00:00Z" will be present
if no sync has been successful.
**LastError** is the UTC time of the last error encountered during a
sync operation. If this time is later than LastSuccess, you can assume
the replication process is not in a good state. A zero value of
"0001-01-01T00:00:00Z" will be present if no sync has resulted in an
error.
"""
params = {"dc": dc}
response = await self._api.get("/v1/acl/replication", params=params)
return response.body | [
"async",
"def",
"replication",
"(",
"self",
",",
"*",
",",
"dc",
"=",
"None",
")",
":",
"params",
"=",
"{",
"\"dc\"",
":",
"dc",
"}",
"response",
"=",
"await",
"self",
".",
"_api",
".",
"get",
"(",
"\"/v1/acl/replication\"",
",",
"params",
"=",
"params",
")",
"return",
"response",
".",
"body"
] | Checks status of ACL replication
Parameters:
dc (str): Specify datacenter that will be used.
Defaults to the agent's local datacenter.
Returns:
Object: Replication information
Returns the status of the ACL replication process in the datacenter.
This is intended to be used by operators, or by automation checking
the health of ACL replication.
By default, the datacenter of the agent is queried; however, the dc
can be provided using the "dc" parameter.
It returns a body like this::
{
"Enabled": True,
"Running": True,
"SourceDatacenter": "dc1",
"ReplicatedIndex": 1976,
"LastSuccess": datetime(2016, 8, 5, 6, 28, 58, tzinfo=tzutc()),
"LastError": datetime(2016, 8, 5, 6, 28, 58, tzinfo=tzutc())
}
**Enabled** reports whether ACL replication is enabled for the
datacenter.
**Running** reports whether the ACL replication process is running.
The process may take approximately 60 seconds to begin running after
a leader election occurs.
**SourceDatacenter** is the authoritative ACL datacenter that ACLs
are being replicated from, and will match the acl_datacenter
configuration.
**ReplicatedIndex** is the last index that was successfully replicated.
You can compare this to the Index meta returned by the items() endpoint
to determine if the replication process has gotten all available ACLs.
Note that replication runs as a background process approximately every
30 seconds, and that local updates are rate limited to 100
updates/second, so it may take several minutes to perform the
initial sync of a large set of ACLs. After the initial sync, replica
lag should be on the order of about 30 seconds.
**LastSuccess** is the UTC time of the last successful sync operation.
Note that since ACL replication is done with a blocking query, this
may not update for up to 5 minutes if there have been no ACL changes
to replicate. A zero value of "0001-01-01T00:00:00Z" will be present
if no sync has been successful.
**LastError** is the UTC time of the last error encountered during a
sync operation. If this time is later than LastSuccess, you can assume
the replication process is not in a good state. A zero value of
"0001-01-01T00:00:00Z" will be present if no sync has resulted in an
error. | [
"Checks",
"status",
"of",
"ACL",
"replication"
] | 02f7a529d7dc2e49bed942111067aa5faf320e90 | https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/acl_endpoint.py#L196-L257 | train |
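A hedged sketch of the operator health check suggested by the replication() docstring above; it only reads keys documented in the record. The aioconsul.Client constructor and the client.acl attribute path are assumptions.

import asyncio
import aioconsul

async def check_acl_replication():
    client = aioconsul.Client("http://127.0.0.1:8500")   # assumed constructor
    status = await client.acl.replication(dc="dc1")
    in_error = status["LastError"] > status["LastSuccess"]
    print("enabled:", status["Enabled"],
          "running:", status["Running"],
          "replicated index:", status["ReplicatedIndex"],
          "in error state:", in_error)

asyncio.get_event_loop().run_until_complete(check_acl_replication())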
mikhaildubov/AST-text-analysis | east/asts/utils.py | make_unique_endings | def make_unique_endings(strings_collection):
"""
Make each string in the collection end with a unique character.
Essential for correct building of a generalized annotated suffix tree.
Returns the updated strings collection, encoded in Unicode.
max strings_collection ~ 1.100.000
"""
res = []
for i in range(len(strings_collection)):
# NOTE(msdubov): a trick to handle 'narrow' python installation issues.
hex_code = hex(consts.String.UNICODE_SPECIAL_SYMBOLS_START+i)
hex_code = r"\U" + "0" * (8 - len(hex_code) + 2) + hex_code[2:]
res.append(strings_collection[i] + hex_code.decode("unicode-escape"))
return res | python | def make_unique_endings(strings_collection):
"""
Make each string in the collection end with a unique character.
Essential for correct building of a generalized annotated suffix tree.
Returns the updated strings collection, encoded in Unicode.
max strings_collection ~ 1.100.000
"""
res = []
for i in range(len(strings_collection)):
# NOTE(msdubov): a trick to handle 'narrow' python installation issues.
hex_code = hex(consts.String.UNICODE_SPECIAL_SYMBOLS_START+i)
hex_code = r"\U" + "0" * (8 - len(hex_code) + 2) + hex_code[2:]
res.append(strings_collection[i] + hex_code.decode("unicode-escape"))
return res | [
"def",
"make_unique_endings",
"(",
"strings_collection",
")",
":",
"res",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"strings_collection",
")",
")",
":",
"# NOTE(msdubov): a trick to handle 'narrow' python installation issues.",
"hex_code",
"=",
"hex",
"(",
"consts",
".",
"String",
".",
"UNICODE_SPECIAL_SYMBOLS_START",
"+",
"i",
")",
"hex_code",
"=",
"r\"\\U\"",
"+",
"\"0\"",
"*",
"(",
"8",
"-",
"len",
"(",
"hex_code",
")",
"+",
"2",
")",
"+",
"hex_code",
"[",
"2",
":",
"]",
"res",
".",
"append",
"(",
"strings_collection",
"[",
"i",
"]",
"+",
"hex_code",
".",
"decode",
"(",
"\"unicode-escape\"",
")",
")",
"return",
"res"
] | Make each string in the collection end with a unique character.
Essential for correct building of a generalized annotated suffix tree.
Returns the updated strings collection, encoded in Unicode.
max strings_collection ~ 1.100.000 | [
"Make",
"each",
"string",
"in",
"the",
"collection",
"end",
"with",
"a",
"unique",
"character",
".",
"Essential",
"for",
"correct",
"builiding",
"of",
"a",
"generalized",
"annotated",
"suffix",
"tree",
".",
"Returns",
"the",
"updated",
"strings",
"collection",
"encoded",
"in",
"Unicode",
"."
] | 055ad8d2492c100bbbaa25309ec1074bdf1dfaa5 | https://github.com/mikhaildubov/AST-text-analysis/blob/055ad8d2492c100bbbaa25309ec1074bdf1dfaa5/east/asts/utils.py#L25-L40 | train |
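A self-contained Python 3 sketch of the idea documented above: append a distinct code point to every string so that no two texts share an ending when the generalized annotated suffix tree is built. The 0xE000 private-use start value is an illustrative choice, not the library's UNICODE_SPECIAL_SYMBOLS_START constant.

def make_unique_endings_py3(strings_collection):
    start = 0xE000                        # Unicode private-use area (assumption)
    return [s + chr(start + i) for i, s in enumerate(strings_collection)]

for doc in make_unique_endings_py3(["abra", "cadabra"]):
    print(repr(doc))                      # 'abra\ue000', then 'cadabra\ue001'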
johnnoone/aioconsul | aioconsul/client/coordinate_endpoint.py | CoordinateEndpoint.datacenters | async def datacenters(self):
"""Queries for WAN coordinates of Consul servers
Returns:
Mapping: WAN network coordinates for all Consul
servers, organized by DCs.
It returns a body like this::
{
"dc1": {
"Datacenter": "dc1",
"Coordinates": [
{
"Node": "agent-one",
"Coord": {
"Adjustment": 0,
"Error": 1.5,
"Height": 0,
"Vec": [0,0,0,0,0,0,0,0]
}
}
]
}
}
This endpoint serves data out of the server's local Serf data about
the WAN, so its results may vary as requests are handled by different
servers in the cluster.
Also, it does not support blocking queries or any consistency modes.
"""
response = await self._api.get("/v1/coordinate/datacenters")
return {data["Datacenter"]: data for data in response.body} | python | async def datacenters(self):
"""Queries for WAN coordinates of Consul servers
Returns:
Mapping: WAN network coordinates for all Consul
servers, organized by DCs.
It returns a body like this::
{
"dc1": {
"Datacenter": "dc1",
"Coordinates": [
{
"Node": "agent-one",
"Coord": {
"Adjustment": 0,
"Error": 1.5,
"Height": 0,
"Vec": [0,0,0,0,0,0,0,0]
}
}
]
}
}
This endpoint serves data out of the server's local Serf data about
the WAN, so its results may vary as requests are handled by different
servers in the cluster.
Also, it does not support blocking queries or any consistency modes.
"""
response = await self._api.get("/v1/coordinate/datacenters")
return {data["Datacenter"]: data for data in response.body} | [
"async",
"def",
"datacenters",
"(",
"self",
")",
":",
"response",
"=",
"await",
"self",
".",
"_api",
".",
"get",
"(",
"\"/v1/coordinate/datacenters\"",
")",
"return",
"{",
"data",
"[",
"\"Datacenter\"",
"]",
":",
"data",
"for",
"data",
"in",
"response",
".",
"body",
"}"
] | Queries for WAN coordinates of Consul servers
Returns:
Mapping: WAN network coordinates for all Consul
servers, organized by DCs.
It returns a body like this::
{
"dc1": {
"Datacenter": "dc1",
"Coordinates": [
{
"Node": "agent-one",
"Coord": {
"Adjustment": 0,
"Error": 1.5,
"Height": 0,
"Vec": [0,0,0,0,0,0,0,0]
}
}
]
}
}
This endpoint serves data out of the server's local Serf data about
the WAN, so its results may vary as requests are handled by different
servers in the cluster.
Also, it does not support blocking queries or any consistency modes. | [
"Queries",
"for",
"WAN",
"coordinates",
"of",
"Consul",
"servers"
] | 02f7a529d7dc2e49bed942111067aa5faf320e90 | https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/coordinate_endpoint.py#L15-L48 | train |
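A hedged usage sketch for the datacenters() coroutine above, printing the server nodes per datacenter from the documented result shape. The aioconsul.Client constructor and the client.coordinate attribute path are assumptions.

import asyncio
import aioconsul

async def show_wan_coordinates():
    client = aioconsul.Client("http://127.0.0.1:8500")   # assumed constructor
    by_dc = await client.coordinate.datacenters()
    for dc, data in by_dc.items():
        nodes = [entry["Node"] for entry in data["Coordinates"]]
        print(dc, "->", nodes)

asyncio.get_event_loop().run_until_complete(show_wan_coordinates())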
bitesofcode/projexui | projexui/widgets/xlistwidget.py | XListWidget.resizeToContents | def resizeToContents(self):
"""
Resizes the list widget to fit its contents vertically.
"""
if self.count():
item = self.item(self.count() - 1)
rect = self.visualItemRect(item)
height = rect.bottom() + 8
height = max(28, height)
self.setFixedHeight(height)
else:
self.setFixedHeight(self.minimumHeight()) | python | def resizeToContents(self):
"""
Resizes the list widget to fit its contents vertically.
"""
if self.count():
item = self.item(self.count() - 1)
rect = self.visualItemRect(item)
height = rect.bottom() + 8
height = max(28, height)
self.setFixedHeight(height)
else:
self.setFixedHeight(self.minimumHeight()) | [
"def",
"resizeToContents",
"(",
"self",
")",
":",
"if",
"self",
".",
"count",
"(",
")",
":",
"item",
"=",
"self",
".",
"item",
"(",
"self",
".",
"count",
"(",
")",
"-",
"1",
")",
"rect",
"=",
"self",
".",
"visualItemRect",
"(",
"item",
")",
"height",
"=",
"rect",
".",
"bottom",
"(",
")",
"+",
"8",
"height",
"=",
"max",
"(",
"28",
",",
"height",
")",
"self",
".",
"setFixedHeight",
"(",
"height",
")",
"else",
":",
"self",
".",
"setFixedHeight",
"(",
"self",
".",
"minimumHeight",
"(",
")",
")"
] | Resizes the list widget to fit its contents vertically. | [
"Resizes",
"the",
"list",
"widget",
"to",
"fit",
"its",
"contents",
"vertically",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xlistwidget.py#L535-L546 | train |
talkincode/txradius | txradius/openvpn/daemon.py | parse_status_file | def parse_status_file(status_file,nas_addr):
''' parse openvpn status log
'''
session_users = {}
flag1 = False
flag2 = False
with open(status_file) as stlines:
for line in stlines:
if line.startswith("Common Name"):
flag1 = True
continue
if line.startswith("ROUTING TABLE"):
flag1 = False
continue
if line.startswith("Virtual Address"):
flag2 = True
continue
if line.startswith("GLOBAL STATS"):
flag2 = False
continue
if flag1:
try:
username,realaddr,inbytes,outbytes,_ = line.split(',')
realip,realport = realaddr.split(':')
session_id = md5(nas_addr + realip + realport).hexdigest()
session_users.setdefault(session_id, {}).update(dict(
session_id=session_id,
username=username,
realip=realip,
realport=realport,
inbytes=inbytes,
outbytes=outbytes
))
except:
traceback.print_exc()
if flag2:
try:
userip,username,realaddr,_ = line.split(',')
realip,realport = realaddr.split(':')
session_id = md5(nas_addr + realip + realport).hexdigest()
session_users.setdefault(session_id, {}).update(dict(
session_id=session_id,
username=username,
realip=realip,
realport=realport,
userip=userip,
))
except:
traceback.print_exc()
return session_users | python | def parse_status_file(status_file,nas_addr):
''' parse openvpn status log
'''
session_users = {}
flag1 = False
flag2 = False
with open(status_file) as stlines:
for line in stlines:
if line.startswith("Common Name"):
flag1 = True
continue
if line.startswith("ROUTING TABLE"):
flag1 = False
continue
if line.startswith("Virtual Address"):
flag2 = True
continue
if line.startswith("GLOBAL STATS"):
flag2 = False
continue
if flag1:
try:
username,realaddr,inbytes,outbytes,_ = line.split(',')
realip,realport = realaddr.split(':')
session_id = md5(nas_addr + realip + realport).hexdigest()
session_users.setdefault(session_id, {}).update(dict(
session_id=session_id,
username=username,
realip=realip,
realport=realport,
inbytes=inbytes,
outbytes=outbytes
))
except:
traceback.print_exc()
if flag2:
try:
userip,username,realaddr,_ = line.split(',')
realip,realport = realaddr.split(':')
session_id = md5(nas_addr + realip + realport).hexdigest()
session_users.setdefault(session_id, {}).update(dict(
session_id=session_id,
username=username,
realip=realip,
realport=realport,
userip=userip,
))
except:
traceback.print_exc()
return session_users | [
"def",
"parse_status_file",
"(",
"status_file",
",",
"nas_addr",
")",
":",
"session_users",
"=",
"{",
"}",
"flag1",
"=",
"False",
"flag2",
"=",
"False",
"with",
"open",
"(",
"status_file",
")",
"as",
"stlines",
":",
"for",
"line",
"in",
"stlines",
":",
"if",
"line",
".",
"startswith",
"(",
"\"Common Name\"",
")",
":",
"flag1",
"=",
"True",
"continue",
"if",
"line",
".",
"startswith",
"(",
"\"ROUTING TABLE\"",
")",
":",
"flag1",
"=",
"False",
"continue",
"if",
"line",
".",
"startswith",
"(",
"\"Virtual Address\"",
")",
":",
"flag2",
"=",
"True",
"continue",
"if",
"line",
".",
"startswith",
"(",
"\"GLOBAL STATS\"",
")",
":",
"flag2",
"=",
"False",
"continue",
"if",
"flag1",
":",
"try",
":",
"username",
",",
"realaddr",
",",
"inbytes",
",",
"outbytes",
",",
"_",
"=",
"line",
".",
"split",
"(",
"','",
")",
"realip",
",",
"realport",
"=",
"realaddr",
".",
"split",
"(",
"':'",
")",
"session_id",
"=",
"md5",
"(",
"nas_addr",
"+",
"realip",
"+",
"realport",
")",
".",
"hexdigest",
"(",
")",
"session_users",
".",
"setdefault",
"(",
"session_id",
",",
"{",
"}",
")",
".",
"update",
"(",
"dict",
"(",
"session_id",
"=",
"session_id",
",",
"username",
"=",
"username",
",",
"realip",
"=",
"realip",
",",
"realport",
"=",
"realport",
",",
"inbytes",
"=",
"inbytes",
",",
"outbytes",
"=",
"outbytes",
")",
")",
"except",
":",
"traceback",
".",
"print_exc",
"(",
")",
"if",
"flag2",
":",
"try",
":",
"userip",
",",
"username",
",",
"realaddr",
",",
"_",
"=",
"line",
".",
"split",
"(",
"','",
")",
"realip",
",",
"realport",
"=",
"realaddr",
".",
"split",
"(",
"':'",
")",
"session_id",
"=",
"md5",
"(",
"nas_addr",
"+",
"realip",
"+",
"realport",
")",
".",
"hexdigest",
"(",
")",
"session_users",
".",
"setdefault",
"(",
"session_id",
",",
"{",
"}",
")",
".",
"update",
"(",
"dict",
"(",
"session_id",
"=",
"session_id",
",",
"username",
"=",
"username",
",",
"realip",
"=",
"realip",
",",
"realport",
"=",
"realport",
",",
"userip",
"=",
"userip",
",",
")",
")",
"except",
":",
"traceback",
".",
"print_exc",
"(",
")",
"return",
"session_users"
] | parse openvpn status log | [
"parse",
"openvpn",
"status",
"log"
] | b86fdbc9be41183680b82b07d3a8e8ea10926e01 | https://github.com/talkincode/txradius/blob/b86fdbc9be41183680b82b07d3a8e8ea10926e01/txradius/openvpn/daemon.py#L21-L77 | train |
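A self-contained Python 3 sketch of the section-based parsing shown above, run against an inline sample of the OpenVPN status format. The sample rows are invented; the md5(nas_addr + ip + port) session id and the "Common Name" / "ROUTING TABLE" / "Virtual Address" / "GLOBAL STATS" section markers follow the record.

import hashlib

SAMPLE = """OpenVPN CLIENT LIST
Common Name,Real Address,Bytes Received,Bytes Sent,Connected Since
alice,198.51.100.7:52011,1024,2048,Thu Jan 1 00:00:00 2016
ROUTING TABLE
Virtual Address,Common Name,Real Address,Last Ref
10.8.0.6,alice,198.51.100.7:52011,Thu Jan 1 00:00:00 2016
GLOBAL STATS
"""

def parse_status_text(text, nas_addr):
    sessions, in_clients, in_routes = {}, False, False
    for line in text.splitlines():
        if line.startswith("Common Name"):
            in_clients = True
            continue
        if line.startswith("ROUTING TABLE"):
            in_clients = False
            continue
        if line.startswith("Virtual Address"):
            in_routes = True
            continue
        if line.startswith("GLOBAL STATS"):
            in_routes = False
            continue
        if in_clients:
            username, realaddr, inbytes, outbytes, _ = line.split(",")
            realip, realport = realaddr.split(":")
            sid = hashlib.md5((nas_addr + realip + realport).encode()).hexdigest()
            sessions.setdefault(sid, {}).update(
                username=username, realip=realip, realport=realport,
                inbytes=inbytes, outbytes=outbytes)
        elif in_routes:
            userip, username, realaddr, _ = line.split(",")
            realip, realport = realaddr.split(":")
            sid = hashlib.md5((nas_addr + realip + realport).encode()).hexdigest()
            sessions.setdefault(sid, {}).update(username=username, userip=userip)
    return sessions

print(parse_status_text(SAMPLE, "192.0.2.1"))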
talkincode/txradius | txradius/openvpn/daemon.py | update_status | def update_status(dbfile,status_file,nas_addr):
''' update status db
'''
try:
total = 0
params = []
for sid, su in parse_status_file(status_file, nas_addr).items():
if 'session_id' in su and 'inbytes' in su and 'outbytes' in su:
params.append((su['inbytes'],su['outbytes'],su['session_id']))
total += 1
statusdb.batch_update_client(dbfile,params)
log.msg('update_status total = %s' % total)
except Exception, e:
log.err('batch update status error')
log.err(e) | python | def update_status(dbfile,status_file,nas_addr):
''' update status db
'''
try:
total = 0
params = []
for sid, su in parse_status_file(status_file, nas_addr).items():
if 'session_id' in su and 'inbytes' in su and 'outbytes' in su:
params.append((su['inbytes'],su['outbytes'],su['session_id']))
total += 1
statusdb.batch_update_client(dbfile,params)
log.msg('update_status total = %s' % total)
except Exception, e:
log.err('batch update status error')
log.err(e) | [
"def",
"update_status",
"(",
"dbfile",
",",
"status_file",
",",
"nas_addr",
")",
":",
"try",
":",
"total",
"=",
"0",
"params",
"=",
"[",
"]",
"for",
"sid",
",",
"su",
"in",
"parse_status_file",
"(",
"status_file",
",",
"nas_addr",
")",
".",
"items",
"(",
")",
":",
"if",
"'session_id'",
"in",
"su",
"and",
"'inbytes'",
"in",
"su",
"and",
"'outbytes'",
"in",
"su",
":",
"params",
".",
"append",
"(",
"(",
"su",
"[",
"'inbytes'",
"]",
",",
"su",
"[",
"'outbytes'",
"]",
",",
"su",
"[",
"'session_id'",
"]",
")",
")",
"total",
"+=",
"1",
"statusdb",
".",
"batch_update_client",
"(",
"dbfile",
",",
"params",
")",
"log",
".",
"msg",
"(",
"'update_status total = %s'",
"%",
"total",
")",
"except",
"Exception",
",",
"e",
":",
"log",
".",
"err",
"(",
"'batch update status error'",
")",
"log",
".",
"err",
"(",
"e",
")"
] | update status db | [
"update",
"status",
"db"
] | b86fdbc9be41183680b82b07d3a8e8ea10926e01 | https://github.com/talkincode/txradius/blob/b86fdbc9be41183680b82b07d3a8e8ea10926e01/txradius/openvpn/daemon.py#L79-L93 | train |
talkincode/txradius | txradius/openvpn/daemon.py | accounting | def accounting(dbfile,config):
''' update radius accounting
'''
try:
nas_id = config.get('DEFAULT', 'nas_id')
nas_addr = config.get('DEFAULT', 'nas_addr')
secret = config.get('DEFAULT', 'radius_secret')
radius_addr = config.get('DEFAULT', 'radius_addr')
radius_acct_port = config.getint('DEFAULT', 'radius_acct_port')
radius_timeout = config.getint('DEFAULT', 'radius_timeout')
status_dbfile = config.get('DEFAULT', 'statusdb')
clients = statusdb.query_client(status_dbfile)
ctime = int(time.time())
for cli in clients:
if (ctime - int(cli['uptime'])) < int(cli['acct_interval']):
continue
session_id = cli['session_id']
req = {'User-Name':cli['username']}
req['Acct-Status-Type'] = ACCT_UPDATE
req['Acct-Session-Id'] = session_id
req["Acct-Output-Octets"] = int(cli['outbytes'])
req["Acct-Input-Octets"] = int(cli['inbytes'])
req['Acct-Session-Time'] = (ctime - int(cli['ctime']))
req["NAS-IP-Address"] = nas_addr
req["NAS-Port-Id"] = '0/0/0:0.0'
req["NAS-Port"] = 0
req["Service-Type"] = "Login-User"
req["NAS-Identifier"] = nas_id
req["Called-Station-Id"] = '00:00:00:00:00:00'
req["Calling-Station-Id"] = '00:00:00:00:00:00'
req["Framed-IP-Address"] = cli['userip']
def update_uptime(radresp):
statusdb.update_client_uptime(status_dbfile,session_id)
log.msg('online<%s> client accounting update'%session_id)
def onresp(r):
try:
update_uptime(r)
except Exception as e:
log.err('online update uptime error')
log.err(e)
d = client.send_acct(str(secret), get_dictionary(), radius_addr,
acctport=radius_acct_port, debug=True,**req)
d.addCallbacks(onresp,log.err)
except Exception, e:
log.err('accounting error')
log.err(e) | python | def accounting(dbfile,config):
''' update radius accounting
'''
try:
nas_id = config.get('DEFAULT', 'nas_id')
nas_addr = config.get('DEFAULT', 'nas_addr')
secret = config.get('DEFAULT', 'radius_secret')
radius_addr = config.get('DEFAULT', 'radius_addr')
radius_acct_port = config.getint('DEFAULT', 'radius_acct_port')
radius_timeout = config.getint('DEFAULT', 'radius_timeout')
status_dbfile = config.get('DEFAULT', 'statusdb')
clients = statusdb.query_client(status_dbfile)
ctime = int(time.time())
for cli in clients:
if (ctime - int(cli['uptime'])) < int(cli['acct_interval']):
continue
session_id = cli['session_id']
req = {'User-Name':cli['username']}
req['Acct-Status-Type'] = ACCT_UPDATE
req['Acct-Session-Id'] = session_id
req["Acct-Output-Octets"] = int(cli['outbytes'])
req["Acct-Input-Octets"] = int(cli['inbytes'])
req['Acct-Session-Time'] = (ctime - int(cli['ctime']))
req["NAS-IP-Address"] = nas_addr
req["NAS-Port-Id"] = '0/0/0:0.0'
req["NAS-Port"] = 0
req["Service-Type"] = "Login-User"
req["NAS-Identifier"] = nas_id
req["Called-Station-Id"] = '00:00:00:00:00:00'
req["Calling-Station-Id"] = '00:00:00:00:00:00'
req["Framed-IP-Address"] = cli['userip']
def update_uptime(radresp):
statusdb.update_client_uptime(status_dbfile,session_id)
log.msg('online<%s> client accounting update'%session_id)
def onresp(r):
try:
update_uptime(r)
except Exception as e:
log.err('online update uptime error')
log.err(e)
d = client.send_acct(str(secret), get_dictionary(), radius_addr,
acctport=radius_acct_port, debug=True,**req)
d.addCallbacks(onresp,log.err)
except Exception, e:
log.err('accounting error')
log.err(e) | [
"def",
"accounting",
"(",
"dbfile",
",",
"config",
")",
":",
"try",
":",
"nas_id",
"=",
"config",
".",
"get",
"(",
"'DEFAULT'",
",",
"'nas_id'",
")",
"nas_addr",
"=",
"config",
".",
"get",
"(",
"'DEFAULT'",
",",
"'nas_addr'",
")",
"secret",
"=",
"config",
".",
"get",
"(",
"'DEFAULT'",
",",
"'radius_secret'",
")",
"radius_addr",
"=",
"config",
".",
"get",
"(",
"'DEFAULT'",
",",
"'radius_addr'",
")",
"radius_acct_port",
"=",
"config",
".",
"getint",
"(",
"'DEFAULT'",
",",
"'radius_acct_port'",
")",
"radius_timeout",
"=",
"config",
".",
"getint",
"(",
"'DEFAULT'",
",",
"'radius_timeout'",
")",
"status_dbfile",
"=",
"config",
".",
"get",
"(",
"'DEFAULT'",
",",
"'statusdb'",
")",
"clients",
"=",
"statusdb",
".",
"query_client",
"(",
"status_dbfile",
")",
"ctime",
"=",
"int",
"(",
"time",
".",
"time",
"(",
")",
")",
"for",
"cli",
"in",
"clients",
":",
"if",
"(",
"ctime",
"-",
"int",
"(",
"cli",
"[",
"'uptime'",
"]",
")",
")",
"<",
"int",
"(",
"cli",
"[",
"'acct_interval'",
"]",
")",
":",
"continue",
"session_id",
"=",
"cli",
"[",
"'session_id'",
"]",
"req",
"=",
"{",
"'User-Name'",
":",
"cli",
"[",
"'username'",
"]",
"}",
"req",
"[",
"'Acct-Status-Type'",
"]",
"=",
"ACCT_UPDATE",
"req",
"[",
"'Acct-Session-Id'",
"]",
"=",
"session_id",
"req",
"[",
"\"Acct-Output-Octets\"",
"]",
"=",
"int",
"(",
"cli",
"[",
"'outbytes'",
"]",
")",
"req",
"[",
"\"Acct-Input-Octets\"",
"]",
"=",
"int",
"(",
"cli",
"[",
"'inbytes'",
"]",
")",
"req",
"[",
"'Acct-Session-Time'",
"]",
"=",
"(",
"ctime",
"-",
"int",
"(",
"cli",
"[",
"'ctime'",
"]",
")",
")",
"req",
"[",
"\"NAS-IP-Address\"",
"]",
"=",
"nas_addr",
"req",
"[",
"\"NAS-Port-Id\"",
"]",
"=",
"'0/0/0:0.0'",
"req",
"[",
"\"NAS-Port\"",
"]",
"=",
"0",
"req",
"[",
"\"Service-Type\"",
"]",
"=",
"\"Login-User\"",
"req",
"[",
"\"NAS-Identifier\"",
"]",
"=",
"nas_id",
"req",
"[",
"\"Called-Station-Id\"",
"]",
"=",
"'00:00:00:00:00:00'",
"req",
"[",
"\"Calling-Station-Id\"",
"]",
"=",
"'00:00:00:00:00:00'",
"req",
"[",
"\"Framed-IP-Address\"",
"]",
"=",
"cli",
"[",
"'userip'",
"]",
"def",
"update_uptime",
"(",
"radresp",
")",
":",
"statusdb",
".",
"update_client_uptime",
"(",
"status_dbfile",
",",
"session_id",
")",
"log",
".",
"msg",
"(",
"'online<%s> client accounting update'",
"%",
"session_id",
")",
"def",
"onresp",
"(",
"r",
")",
":",
"try",
":",
"update_uptime",
"(",
"r",
")",
"except",
"Exception",
"as",
"e",
":",
"log",
".",
"err",
"(",
"'online update uptime error'",
")",
"log",
".",
"err",
"(",
"e",
")",
"d",
"=",
"client",
".",
"send_acct",
"(",
"str",
"(",
"secret",
")",
",",
"get_dictionary",
"(",
")",
",",
"radius_addr",
",",
"acctport",
"=",
"radius_acct_port",
",",
"debug",
"=",
"True",
",",
"*",
"*",
"req",
")",
"d",
".",
"addCallbacks",
"(",
"onresp",
",",
"log",
".",
"err",
")",
"except",
"Exception",
",",
"e",
":",
"log",
".",
"err",
"(",
"'accounting error'",
")",
"log",
".",
"err",
"(",
"e",
")"
] | update radius accounting | [
"update",
"radius",
"accounting"
] | b86fdbc9be41183680b82b07d3a8e8ea10926e01 | https://github.com/talkincode/txradius/blob/b86fdbc9be41183680b82b07d3a8e8ea10926e01/txradius/openvpn/daemon.py#L95-L147 | train |
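A minimal sketch of driving the accounting() helper above for a single pass, outside the daemon's 60-second loop. It assumes a parsed Python 2 ConfigParser object whose [DEFAULT] section carries the keys the function reads (nas_id, nas_addr, radius_secret, radius_addr, radius_acct_port, radius_timeout, statusdb); the config path below is hypothetical.
# Hedged usage sketch (Python 2, matching the source).
from ConfigParser import ConfigParser
from txradius.openvpn import daemon

config = ConfigParser()
config.read('/etc/openvpn/txradius.conf')   # hypothetical path
# The dbfile argument is not used inside accounting(); the function reads
# 'statusdb' from the config instead, so None is enough here.
daemon.accounting(None, config)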
talkincode/txradius | txradius/openvpn/daemon.py | main | def main(conf):
""" OpenVPN status daemon
"""
config = init_config(conf)
nas_addr = config.get('DEFAULT', 'nas_addr')
status_file = config.get('DEFAULT', 'statusfile')
status_dbfile = config.get('DEFAULT', 'statusdb')
nas_coa_port = config.get('DEFAULT', 'nas_coa_port')
def do_update_status_task():
d = deferToThread(update_status, status_dbfile, status_file, nas_addr)
d.addCallback(log.msg,'do_update_status_task done!')
d.addErrback(log.err)
reactor.callLater(60.0,do_update_status_task)
def do_accounting_task():
d = deferToThread(accounting, status_dbfile, config)
d.addCallback(log.msg,'do_accounting_task done!')
d.addErrback(log.err)
reactor.callLater(60.0,do_accounting_task)
do_update_status_task()
do_accounting_task()
coa_protocol = Authorized(config)
reactor.listenUDP(int(nas_coa_port), coa_protocol, interface='0.0.0.0')
reactor.run() | python | def main(conf):
""" OpenVPN status daemon
"""
config = init_config(conf)
nas_addr = config.get('DEFAULT', 'nas_addr')
status_file = config.get('DEFAULT', 'statusfile')
status_dbfile = config.get('DEFAULT', 'statusdb')
nas_coa_port = config.get('DEFAULT', 'nas_coa_port')
def do_update_status_task():
d = deferToThread(update_status, status_dbfile, status_file, nas_addr)
d.addCallback(log.msg,'do_update_status_task done!')
d.addErrback(log.err)
reactor.callLater(60.0,do_update_status_task)
def do_accounting_task():
d = deferToThread(accounting, status_dbfile, config)
d.addCallback(log.msg,'do_accounting_task done!')
d.addErrback(log.err)
reactor.callLater(60.0,do_accounting_task)
do_update_status_task()
do_accounting_task()
coa_protocol = Authorized(config)
reactor.listenUDP(int(nas_coa_port), coa_protocol, interface='0.0.0.0')
reactor.run() | [
"def",
"main",
"(",
"conf",
")",
":",
"config",
"=",
"init_config",
"(",
"conf",
")",
"nas_addr",
"=",
"config",
".",
"get",
"(",
"'DEFAULT'",
",",
"'nas_addr'",
")",
"status_file",
"=",
"config",
".",
"get",
"(",
"'DEFAULT'",
",",
"'statusfile'",
")",
"status_dbfile",
"=",
"config",
".",
"get",
"(",
"'DEFAULT'",
",",
"'statusdb'",
")",
"nas_coa_port",
"=",
"config",
".",
"get",
"(",
"'DEFAULT'",
",",
"'nas_coa_port'",
")",
"def",
"do_update_status_task",
"(",
")",
":",
"d",
"=",
"deferToThread",
"(",
"update_status",
",",
"status_dbfile",
",",
"status_file",
",",
"nas_addr",
")",
"d",
".",
"addCallback",
"(",
"log",
".",
"msg",
",",
"'do_update_status_task done!'",
")",
"d",
".",
"addErrback",
"(",
"log",
".",
"err",
")",
"reactor",
".",
"callLater",
"(",
"60.0",
",",
"do_update_status_task",
")",
"def",
"do_accounting_task",
"(",
")",
":",
"d",
"=",
"deferToThread",
"(",
"accounting",
",",
"status_dbfile",
",",
"config",
")",
"d",
".",
"addCallback",
"(",
"log",
".",
"msg",
",",
"'do_accounting_task done!'",
")",
"d",
".",
"addErrback",
"(",
"log",
".",
"err",
")",
"reactor",
".",
"callLater",
"(",
"60.0",
",",
"do_accounting_task",
")",
"do_update_status_task",
"(",
")",
"do_accounting_task",
"(",
")",
"coa_protocol",
"=",
"Authorized",
"(",
"config",
")",
"reactor",
".",
"listenUDP",
"(",
"int",
"(",
"nas_coa_port",
")",
",",
"coa_protocol",
",",
"interface",
"=",
"'0.0.0.0'",
")",
"reactor",
".",
"run",
"(",
")"
] | OpenVPN status daemon | [
"OpenVPN",
"status",
"daemon"
] | b86fdbc9be41183680b82b07d3a8e8ea10926e01 | https://github.com/talkincode/txradius/blob/b86fdbc9be41183680b82b07d3a8e8ea10926e01/txradius/openvpn/daemon.py#L202-L228 | train |
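main() above wires two recurring 60-second jobs (status-file import and interim accounting) plus a CoA listener onto one Twisted reactor, then blocks. A hedged sketch of starting it; the config path is a placeholder, and the listed keys are simply those the function and its helpers read via config.get()/getint().
# Hedged usage sketch (Python 2, matching the source). The [DEFAULT] section
# of the INI file must provide at least: nas_id, nas_addr, radius_secret,
# radius_addr, radius_acct_port, radius_timeout, statusfile, statusdb,
# nas_coa_port.
from txradius.openvpn.daemon import main
main('/etc/openvpn/txradius.conf')   # blocks inside reactor.run()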
neithere/eav-django | eav/managers.py | BaseEntityManager._filter_by_simple_schema | def _filter_by_simple_schema(self, qs, lookup, sublookup, value, schema):
"""
Filters given entity queryset by an attribute which is linked to given
schema and has given value in the field for schema's datatype.
"""
value_lookup = 'attrs__value_%s' % schema.datatype
if sublookup:
value_lookup = '%s__%s' % (value_lookup, sublookup)
return {
'attrs__schema': schema,
str(value_lookup): value
} | python | def _filter_by_simple_schema(self, qs, lookup, sublookup, value, schema):
"""
Filters given entity queryset by an attribute which is linked to given
schema and has given value in the field for schema's datatype.
"""
value_lookup = 'attrs__value_%s' % schema.datatype
if sublookup:
value_lookup = '%s__%s' % (value_lookup, sublookup)
return {
'attrs__schema': schema,
str(value_lookup): value
} | [
"def",
"_filter_by_simple_schema",
"(",
"self",
",",
"qs",
",",
"lookup",
",",
"sublookup",
",",
"value",
",",
"schema",
")",
":",
"value_lookup",
"=",
"'attrs__value_%s'",
"%",
"schema",
".",
"datatype",
"if",
"sublookup",
":",
"value_lookup",
"=",
"'%s__%s'",
"%",
"(",
"value_lookup",
",",
"sublookup",
")",
"return",
"{",
"'attrs__schema'",
":",
"schema",
",",
"str",
"(",
"value_lookup",
")",
":",
"value",
"}"
] | Filters given entity queryset by an attribute which is linked to given
schema and has given value in the field for schema's datatype. | [
"Filters",
"given",
"entity",
"queryset",
"by",
"an",
"attribute",
"which",
"is",
"linked",
"to",
"given",
"schema",
"and",
"has",
"given",
"value",
"in",
"the",
"field",
"for",
"schema",
"s",
"datatype",
"."
] | 7f2e9fe17bbe740622cfb38f6ce0e8413b7da3d7 | https://github.com/neithere/eav-django/blob/7f2e9fe17bbe740622cfb38f6ce0e8413b7da3d7/eav/managers.py#L121-L132 | train |
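_filter_by_simple_schema() above only assembles the ORM lookup dict; the calling manager applies it with qs.filter(**lookup). A hedged sketch of the dict produced for a filter such as size__gte=5 when 'size' is a schema with datatype 'int' (the names are illustrative, not from the repo).
# Hedged sketch of the returned lookup; size_schema stands in for the Schema
# instance that the manager resolved from the attribute name.
size_schema = object()
lookup = {
    'attrs__schema': size_schema,
    'attrs__value_int__gte': 5,   # value field picked from schema.datatype, sublookup appended
}
# The caller then applies it as: qs.filter(**lookup)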
neithere/eav-django | eav/managers.py | BaseEntityManager._filter_by_m2m_schema | def _filter_by_m2m_schema(self, qs, lookup, sublookup, value, schema, model=None):
"""
Filters given entity queryset by an attribute which is linked to given
many-to-many schema.
"""
model = model or self.model
schemata = dict((s.name, s) for s in model.get_schemata_for_model()) # TODO cache this dict, see above too
try:
schema = schemata[lookup]
except KeyError:
# TODO: smarter error message, i.e. how could this happen and what to do
raise ValueError(u'Could not find schema for lookup "%s"' % lookup)
sublookup = '__%s'%sublookup if sublookup else ''
return {
'attrs__schema': schema,
'attrs__choice%s'%sublookup: value, # TODO: can we filter by id, not name?
} | python | def _filter_by_m2m_schema(self, qs, lookup, sublookup, value, schema, model=None):
"""
Filters given entity queryset by an attribute which is linked to given
many-to-many schema.
"""
model = model or self.model
schemata = dict((s.name, s) for s in model.get_schemata_for_model()) # TODO cache this dict, see above too
try:
schema = schemata[lookup]
except KeyError:
# TODO: smarter error message, i.e. how could this happen and what to do
raise ValueError(u'Could not find schema for lookup "%s"' % lookup)
sublookup = '__%s'%sublookup if sublookup else ''
return {
'attrs__schema': schema,
'attrs__choice%s'%sublookup: value, # TODO: can we filter by id, not name?
} | [
"def",
"_filter_by_m2m_schema",
"(",
"self",
",",
"qs",
",",
"lookup",
",",
"sublookup",
",",
"value",
",",
"schema",
",",
"model",
"=",
"None",
")",
":",
"model",
"=",
"model",
"or",
"self",
".",
"model",
"schemata",
"=",
"dict",
"(",
"(",
"s",
".",
"name",
",",
"s",
")",
"for",
"s",
"in",
"model",
".",
"get_schemata_for_model",
"(",
")",
")",
"# TODO cache this dict, see above too",
"try",
":",
"schema",
"=",
"schemata",
"[",
"lookup",
"]",
"except",
"KeyError",
":",
"# TODO: smarter error message, i.e. how could this happen and what to do",
"raise",
"ValueError",
"(",
"u'Could not find schema for lookup \"%s\"'",
"%",
"lookup",
")",
"sublookup",
"=",
"'__%s'",
"%",
"sublookup",
"if",
"sublookup",
"else",
"''",
"return",
"{",
"'attrs__schema'",
":",
"schema",
",",
"'attrs__choice%s'",
"%",
"sublookup",
":",
"value",
",",
"# TODO: can we filter by id, not name?",
"}"
] | Filters given entity queryset by an attribute which is linked to given
many-to-many schema. | [
"Filters",
"given",
"entity",
"queryset",
"by",
"an",
"attribute",
"which",
"is",
"linked",
"to",
"given",
"many",
"-",
"to",
"-",
"many",
"schema",
"."
] | 7f2e9fe17bbe740622cfb38f6ce0e8413b7da3d7 | https://github.com/neithere/eav-django/blob/7f2e9fe17bbe740622cfb38f6ce0e8413b7da3d7/eav/managers.py#L173-L189 | train |
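For many-to-many schemata the value lives on the related choice object, so the lookup built above goes through attrs__choice rather than a typed value column. A hedged sketch for a filter like colours__name='red' (names illustrative, not from the repo).
# Hedged sketch of the returned lookup for an m2m schema.
colour_schema = object()   # placeholder for the resolved Schema instance
lookup = {
    'attrs__schema': colour_schema,
    'attrs__choice__name': 'red',   # the 'name' sublookup is appended after 'choice'
}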
neithere/eav-django | eav/managers.py | BaseEntityManager.create | def create(self, **kwargs):
"""
Creates entity instance and related Attr instances.
Note that while entity instances may filter schemata by fields, that
filtering does not take place here. Attribute of any schema will be saved
successfully as long as such schema exists.
Note that we cannot create attribute with no pre-defined schema because
we must know attribute type in order to properly put value into the DB.
"""
fields = self.model._meta.get_all_field_names()
schemata = dict((s.name, s) for s in self.model.get_schemata_for_model())
# check if all attributes are known
possible_names = set(fields) | set(schemata.keys())
wrong_names = set(kwargs.keys()) - possible_names
if wrong_names:
raise NameError('Cannot create %s: unknown attribute(s) "%s". '
'Available fields: (%s). Available schemata: (%s).'
% (self.model._meta.object_name, '", "'.join(wrong_names),
', '.join(fields), ', '.join(schemata)))
# init entity with fields
instance = self.model(**dict((k,v) for k,v in kwargs.items() if k in fields))
# set attributes; instance will check schemata on save
for name, value in kwargs.items():
setattr(instance, name, value)
# save instance and EAV attributes
instance.save(force_insert=True)
return instance | python | def create(self, **kwargs):
"""
Creates entity instance and related Attr instances.
Note that while entity instances may filter schemata by fields, that
filtering does not take place here. Attribute of any schema will be saved
successfully as long as such schema exists.
Note that we cannot create attribute with no pre-defined schema because
we must know attribute type in order to properly put value into the DB.
"""
fields = self.model._meta.get_all_field_names()
schemata = dict((s.name, s) for s in self.model.get_schemata_for_model())
# check if all attributes are known
possible_names = set(fields) | set(schemata.keys())
wrong_names = set(kwargs.keys()) - possible_names
if wrong_names:
raise NameError('Cannot create %s: unknown attribute(s) "%s". '
'Available fields: (%s). Available schemata: (%s).'
% (self.model._meta.object_name, '", "'.join(wrong_names),
', '.join(fields), ', '.join(schemata)))
# init entity with fields
instance = self.model(**dict((k,v) for k,v in kwargs.items() if k in fields))
# set attributes; instance will check schemata on save
for name, value in kwargs.items():
setattr(instance, name, value)
# save instance and EAV attributes
instance.save(force_insert=True)
return instance | [
"def",
"create",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"fields",
"=",
"self",
".",
"model",
".",
"_meta",
".",
"get_all_field_names",
"(",
")",
"schemata",
"=",
"dict",
"(",
"(",
"s",
".",
"name",
",",
"s",
")",
"for",
"s",
"in",
"self",
".",
"model",
".",
"get_schemata_for_model",
"(",
")",
")",
"# check if all attributes are known",
"possible_names",
"=",
"set",
"(",
"fields",
")",
"|",
"set",
"(",
"schemata",
".",
"keys",
"(",
")",
")",
"wrong_names",
"=",
"set",
"(",
"kwargs",
".",
"keys",
"(",
")",
")",
"-",
"possible_names",
"if",
"wrong_names",
":",
"raise",
"NameError",
"(",
"'Cannot create %s: unknown attribute(s) \"%s\". '",
"'Available fields: (%s). Available schemata: (%s).'",
"%",
"(",
"self",
".",
"model",
".",
"_meta",
".",
"object_name",
",",
"'\", \"'",
".",
"join",
"(",
"wrong_names",
")",
",",
"', '",
".",
"join",
"(",
"fields",
")",
",",
"', '",
".",
"join",
"(",
"schemata",
")",
")",
")",
"# init entity with fields",
"instance",
"=",
"self",
".",
"model",
"(",
"*",
"*",
"dict",
"(",
"(",
"k",
",",
"v",
")",
"for",
"k",
",",
"v",
"in",
"kwargs",
".",
"items",
"(",
")",
"if",
"k",
"in",
"fields",
")",
")",
"# set attributes; instance will check schemata on save",
"for",
"name",
",",
"value",
"in",
"kwargs",
".",
"items",
"(",
")",
":",
"setattr",
"(",
"instance",
",",
"name",
",",
"value",
")",
"# save instance and EAV attributes",
"instance",
".",
"save",
"(",
"force_insert",
"=",
"True",
")",
"return",
"instance"
] | Creates entity instance and related Attr instances.
Note that while entity instances may filter schemata by fields, that
filtering does not take place here. Attribute of any schema will be saved
successfully as long as such schema exists.
Note that we cannot create attribute with no pre-defined schema because
we must know attribute type in order to properly put value into the DB. | [
"Creates",
"entity",
"instance",
"and",
"related",
"Attr",
"instances",
"."
] | 7f2e9fe17bbe740622cfb38f6ce0e8413b7da3d7 | https://github.com/neithere/eav-django/blob/7f2e9fe17bbe740622cfb38f6ce0e8413b7da3d7/eav/managers.py#L191-L225 | train |
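create() above splits the keyword arguments into real model fields and EAV schema names, rejects anything it cannot place with a NameError, and saves once with force_insert. A hedged usage sketch; the Fruit model and its 'colour' schema are purely illustrative.
# Hedged usage sketch for an EAV-enabled model manager.
fruit = Fruit.objects.create(name='apple', colour='green')
# 'name' is a concrete model field, 'colour' is matched to a Schema by name;
# an unknown keyword such as taste='sweet' would raise NameError instead.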
bitesofcode/projexui | projexui/widgets/xviewwidget/xviewprofilemanagermenu.py | XViewProfileManagerMenu.removeProfile | def removeProfile( self ):
"""
Removes the current profile from the system.
"""
manager = self.parent()
prof = manager.currentProfile()
opts = QMessageBox.Yes | QMessageBox.No
question = 'Are you sure you want to remove "%s"?' % prof.name()
answer = QMessageBox.question( self, 'Remove Profile', question, opts)
if ( answer == QMessageBox.Yes ):
manager.removeProfile(prof) | python | def removeProfile( self ):
"""
Removes the current profile from the system.
"""
manager = self.parent()
prof = manager.currentProfile()
opts = QMessageBox.Yes | QMessageBox.No
question = 'Are you sure you want to remove "%s"?' % prof.name()
answer = QMessageBox.question( self, 'Remove Profile', question, opts)
if ( answer == QMessageBox.Yes ):
manager.removeProfile(prof) | [
"def",
"removeProfile",
"(",
"self",
")",
":",
"manager",
"=",
"self",
".",
"parent",
"(",
")",
"prof",
"=",
"manager",
".",
"currentProfile",
"(",
")",
"opts",
"=",
"QMessageBox",
".",
"Yes",
"|",
"QMessageBox",
".",
"No",
"question",
"=",
"'Are you sure you want to remove \"%s\"?'",
"%",
"prof",
".",
"name",
"(",
")",
"answer",
"=",
"QMessageBox",
".",
"question",
"(",
"self",
",",
"'Remove Profile'",
",",
"question",
",",
"opts",
")",
"if",
"(",
"answer",
"==",
"QMessageBox",
".",
"Yes",
")",
":",
"manager",
".",
"removeProfile",
"(",
"prof",
")"
] | Removes the current profile from the system. | [
"Removes",
"the",
"current",
"profile",
"from",
"the",
"system",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xviewwidget/xviewprofilemanagermenu.py#L44-L55 | train |
bitesofcode/projexui | projexui/widgets/xviewwidget/xviewprofilemanagermenu.py | XViewProfileManagerMenu.saveProfile | def saveProfile( self ):
"""
Saves the current profile to the current settings from the view widget.
"""
manager = self.parent()
prof = manager.currentProfile()
# save the current profile
save_prof = manager.viewWidget().saveProfile()
prof.setXmlElement(save_prof.xmlElement()) | python | def saveProfile( self ):
"""
Saves the current profile to the current settings from the view widget.
"""
manager = self.parent()
prof = manager.currentProfile()
# save the current profile
save_prof = manager.viewWidget().saveProfile()
prof.setXmlElement(save_prof.xmlElement()) | [
"def",
"saveProfile",
"(",
"self",
")",
":",
"manager",
"=",
"self",
".",
"parent",
"(",
")",
"prof",
"=",
"manager",
".",
"currentProfile",
"(",
")",
"# save the current profile",
"save_prof",
"=",
"manager",
".",
"viewWidget",
"(",
")",
".",
"saveProfile",
"(",
")",
"prof",
".",
"setXmlElement",
"(",
"save_prof",
".",
"xmlElement",
"(",
")",
")"
] | Saves the current profile to the current settings from the view widget. | [
"Saves",
"the",
"current",
"profile",
"to",
"the",
"current",
"settings",
"from",
"the",
"view",
"widget",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xviewwidget/xviewprofilemanagermenu.py#L57-L66 | train |
bitesofcode/projexui | projexui/widgets/xviewwidget/xviewprofilemanagermenu.py | XViewProfileManagerMenu.saveProfileAs | def saveProfileAs( self ):
"""
Saves the current profile as a new profile to the manager.
"""
name, ok = QInputDialog.getText(self, 'Create Profile', 'Name:')
if ( not name ):
return
manager = self.parent()
prof = manager.viewWidget().saveProfile()
prof.setName(nativestring(name))
self.parent().addProfile(prof) | python | def saveProfileAs( self ):
"""
Saves the current profile as a new profile to the manager.
"""
name, ok = QInputDialog.getText(self, 'Create Profile', 'Name:')
if ( not name ):
return
manager = self.parent()
prof = manager.viewWidget().saveProfile()
prof.setName(nativestring(name))
self.parent().addProfile(prof) | [
"def",
"saveProfileAs",
"(",
"self",
")",
":",
"name",
",",
"ok",
"=",
"QInputDialog",
".",
"getText",
"(",
"self",
",",
"'Create Profile'",
",",
"'Name:'",
")",
"if",
"(",
"not",
"name",
")",
":",
"return",
"manager",
"=",
"self",
".",
"parent",
"(",
")",
"prof",
"=",
"manager",
".",
"viewWidget",
"(",
")",
".",
"saveProfile",
"(",
")",
"prof",
".",
"setName",
"(",
"nativestring",
"(",
"name",
")",
")",
"self",
".",
"parent",
"(",
")",
".",
"addProfile",
"(",
"prof",
")"
] | Saves the current profile as a new profile to the manager. | [
"Saves",
"the",
"current",
"profile",
"as",
"a",
"new",
"profile",
"to",
"the",
"manager",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xviewwidget/xviewprofilemanagermenu.py#L68-L79 | train |
Gbps/fastlog | fastlog/hexdump.py | hexdump | def hexdump(logger, s, width=16, skip=True, hexii=False, begin=0, highlight=None):
r"""
Return a hexdump-dump of a string.
Arguments:
logger(FastLogger): Logger object
s(str): The data to hexdump.
width(int): The number of characters per line
skip(bool): Set to True, if repeated lines should be replaced by a "*"
hexii(bool): Set to True, if a hexii-dump should be returned instead of a hexdump.
begin(int): Offset of the first byte to print in the left column
highlight(iterable): Byte values to highlight.
Returns:
A hexdump-dump in the form of a string.
Examples:
>>> print hexdump("abc")
00000000 61 62 63 │abc│
00000003
>>> print hexdump('A'*32)
00000000 41 41 41 41 41 41 41 41 41 41 41 41 41 41 41 41 │AAAA│AAAA│AAAA│AAAA│
*
00000020
>>> print hexdump('A'*32, width=8)
00000000 41 41 41 41 41 41 41 41 │AAAA│AAAA│
*
00000020
>>> print hexdump(list(map(chr, range(256))))
00000000 00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f │····│····│····│····│
00000010 10 11 12 13 14 15 16 17 18 19 1a 1b 1c 1d 1e 1f │····│····│····│····│
00000020 20 21 22 23 24 25 26 27 28 29 2a 2b 2c 2d 2e 2f │ !"#│$%&'│()*+│,-./│
00000030 30 31 32 33 34 35 36 37 38 39 3a 3b 3c 3d 3e 3f │0123│4567│89:;│<=>?│
00000040 40 41 42 43 44 45 46 47 48 49 4a 4b 4c 4d 4e 4f │@ABC│DEFG│HIJK│LMNO│
00000050 50 51 52 53 54 55 56 57 58 59 5a 5b 5c 5d 5e 5f │PQRS│TUVW│XYZ[│\]^_│
00000060 60 61 62 63 64 65 66 67 68 69 6a 6b 6c 6d 6e 6f │`abc│defg│hijk│lmno│
00000070 70 71 72 73 74 75 76 77 78 79 7a 7b 7c 7d 7e 7f │pqrs│tuvw│xyz{│|}~·│
00000080 80 81 82 83 84 85 86 87 88 89 8a 8b 8c 8d 8e 8f │····│····│····│····│
00000090 90 91 92 93 94 95 96 97 98 99 9a 9b 9c 9d 9e 9f │····│····│····│····│
000000a0 a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 aa ab ac ad ae af │····│····│····│····│
000000b0 b0 b1 b2 b3 b4 b5 b6 b7 b8 b9 ba bb bc bd be bf │····│····│····│····│
000000c0 c0 c1 c2 c3 c4 c5 c6 c7 c8 c9 ca cb cc cd ce cf │····│····│····│····│
000000d0 d0 d1 d2 d3 d4 d5 d6 d7 d8 d9 da db dc dd de df │····│····│····│····│
000000e0 e0 e1 e2 e3 e4 e5 e6 e7 e8 e9 ea eb ec ed ee ef │····│····│····│····│
000000f0 f0 f1 f2 f3 f4 f5 f6 f7 f8 f9 fa fb fc fd fe ff │····│····│····│····│
00000100
>>> print hexdump(list(map(chr, range(256))), hexii=True)
00000000 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f │
00000010 10 11 12 13 14 15 16 17 18 19 1a 1b 1c 1d 1e 1f │
00000020 20 .! ." .# .$ .% .& .' .( .) .* .+ ., .- .. ./ │
00000030 .0 .1 .2 .3 .4 .5 .6 .7 .8 .9 .: .; .< .= .> .? │
00000040 .@ .A .B .C .D .E .F .G .H .I .J .K .L .M .N .O │
00000050 .P .Q .R .S .T .U .V .W .X .Y .Z .[ .\ .] .^ ._ │
00000060 .` .a .b .c .d .e .f .g .h .i .j .k .l .m .n .o │
00000070 .p .q .r .s .t .u .v .w .x .y .z .{ .| .} .~ 7f │
00000080 80 81 82 83 84 85 86 87 88 89 8a 8b 8c 8d 8e 8f │
00000090 90 91 92 93 94 95 96 97 98 99 9a 9b 9c 9d 9e 9f │
000000a0 a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 aa ab ac ad ae af │
000000b0 b0 b1 b2 b3 b4 b5 b6 b7 b8 b9 ba bb bc bd be bf │
000000c0 c0 c1 c2 c3 c4 c5 c6 c7 c8 c9 ca cb cc cd ce cf │
000000d0 d0 d1 d2 d3 d4 d5 d6 d7 d8 d9 da db dc dd de df │
000000e0 e0 e1 e2 e3 e4 e5 e6 e7 e8 e9 ea eb ec ed ee ef │
000000f0 f0 f1 f2 f3 f4 f5 f6 f7 f8 f9 fa fb fc fd fe ## │
00000100
>>> print hexdump('X' * 64)
00000000 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 │XXXX│XXXX│XXXX│XXXX│
*
00000040
>>> print hexdump('X' * 64, skip=False)
00000000 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 │XXXX│XXXX│XXXX│XXXX│
00000010 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 │XXXX│XXXX│XXXX│XXXX│
00000020 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 │XXXX│XXXX│XXXX│XXXX│
00000030 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 │XXXX│XXXX│XXXX│XXXX│
00000040
>>> print hexdump(fit({0x10: 'X'*0x20, 0x50-1: '\xff'*20}, length=0xc0) + '\x00'*32, cyclic=1, hexii=1)
00000000 .a .a .a .a .b .a .a .a .c .a .a .a .d .a .a .a │
00000010 .X .X .X .X .X .X .X .X .X .X .X .X .X .X .X .X │
*
00000030 .m .a .a .a .n .a .a .a .o .a .a .a .p .a .a .a │
00000040 .q .a .a .a .r .a .a .a .s .a .a .a .t .a .a ## │
00000050 ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## │
00000060 ## ## ## .a .z .a .a .b .b .a .a .b .c .a .a .b │
00000070 .d .a .a .b .e .a .a .b .f .a .a .b .g .a .a .b │
*
000000c0 │
*
000000e0
>>> print hexdump('A'*16, width=9)
00000000 41 41 41 41 41 41 41 41 41 │AAAA│AAAA│A│
00000009 41 41 41 41 41 41 41 │AAAA│AAA│
00000010
>>> print hexdump('A'*16, width=10)
00000000 41 41 41 41 41 41 41 41 41 41 │AAAA│AAAA│AA│
0000000a 41 41 41 41 41 41 │AAAA│AA│
00000010
>>> print hexdump('A'*16, width=11)
00000000 41 41 41 41 41 41 41 41 41 41 41 │AAAA│AAAA│AAA│
0000000b 41 41 41 41 41 │AAAA│A│
00000010
>>> print hexdump('A'*16, width=12)
00000000 41 41 41 41 41 41 41 41 41 41 41 41 │AAAA│AAAA│AAAA│
0000000c 41 41 41 41 │AAAA││
00000010
>>> print hexdump('A'*16, width=13)
00000000 41 41 41 41 41 41 41 41 41 41 41 41 41 │AAAA│AAAA│AAAA│A│
0000000d 41 41 41 │AAA│
00000010
>>> print hexdump('A'*16, width=14)
00000000 41 41 41 41 41 41 41 41 41 41 41 41 41 41 │AAAA│AAAA│AAAA│AA│
0000000e 41 41 │AA│
00000010
>>> print hexdump('A'*16, width=15)
00000000 41 41 41 41 41 41 41 41 41 41 41 41 41 41 41 │AAAA│AAAA│AAAA│AAA│
0000000f 41 │A│
00000010
"""
s = _flat(s)
return '\n'.join(hexdump_iter(logger, StringIO(s),
width,
skip,
hexii,
begin,
highlight)) | python | def hexdump(logger, s, width=16, skip=True, hexii=False, begin=0, highlight=None):
r"""
Return a hexdump-dump of a string.
Arguments:
logger(FastLogger): Logger object
s(str): The data to hexdump.
width(int): The number of characters per line
skip(bool): Set to True, if repeated lines should be replaced by a "*"
hexii(bool): Set to True, if a hexii-dump should be returned instead of a hexdump.
begin(int): Offset of the first byte to print in the left column
highlight(iterable): Byte values to highlight.
Returns:
A hexdump-dump in the form of a string.
Examples:
>>> print hexdump("abc")
00000000 61 62 63 │abc│
00000003
>>> print hexdump('A'*32)
00000000 41 41 41 41 41 41 41 41 41 41 41 41 41 41 41 41 │AAAA│AAAA│AAAA│AAAA│
*
00000020
>>> print hexdump('A'*32, width=8)
00000000 41 41 41 41 41 41 41 41 │AAAA│AAAA│
*
00000020
>>> print hexdump(list(map(chr, range(256))))
00000000 00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f │····│····│····│····│
00000010 10 11 12 13 14 15 16 17 18 19 1a 1b 1c 1d 1e 1f │····│····│····│····│
00000020 20 21 22 23 24 25 26 27 28 29 2a 2b 2c 2d 2e 2f │ !"#│$%&'│()*+│,-./│
00000030 30 31 32 33 34 35 36 37 38 39 3a 3b 3c 3d 3e 3f │0123│4567│89:;│<=>?│
00000040 40 41 42 43 44 45 46 47 48 49 4a 4b 4c 4d 4e 4f │@ABC│DEFG│HIJK│LMNO│
00000050 50 51 52 53 54 55 56 57 58 59 5a 5b 5c 5d 5e 5f │PQRS│TUVW│XYZ[│\]^_│
00000060 60 61 62 63 64 65 66 67 68 69 6a 6b 6c 6d 6e 6f │`abc│defg│hijk│lmno│
00000070 70 71 72 73 74 75 76 77 78 79 7a 7b 7c 7d 7e 7f │pqrs│tuvw│xyz{│|}~·│
00000080 80 81 82 83 84 85 86 87 88 89 8a 8b 8c 8d 8e 8f │····│····│····│····│
00000090 90 91 92 93 94 95 96 97 98 99 9a 9b 9c 9d 9e 9f │····│····│····│····│
000000a0 a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 aa ab ac ad ae af │····│····│····│····│
000000b0 b0 b1 b2 b3 b4 b5 b6 b7 b8 b9 ba bb bc bd be bf │····│····│····│····│
000000c0 c0 c1 c2 c3 c4 c5 c6 c7 c8 c9 ca cb cc cd ce cf │····│····│····│····│
000000d0 d0 d1 d2 d3 d4 d5 d6 d7 d8 d9 da db dc dd de df │····│····│····│····│
000000e0 e0 e1 e2 e3 e4 e5 e6 e7 e8 e9 ea eb ec ed ee ef │····│····│····│····│
000000f0 f0 f1 f2 f3 f4 f5 f6 f7 f8 f9 fa fb fc fd fe ff │····│····│····│····│
00000100
>>> print hexdump(list(map(chr, range(256))), hexii=True)
00000000 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f │
00000010 10 11 12 13 14 15 16 17 18 19 1a 1b 1c 1d 1e 1f │
00000020 20 .! ." .# .$ .% .& .' .( .) .* .+ ., .- .. ./ │
00000030 .0 .1 .2 .3 .4 .5 .6 .7 .8 .9 .: .; .< .= .> .? │
00000040 .@ .A .B .C .D .E .F .G .H .I .J .K .L .M .N .O │
00000050 .P .Q .R .S .T .U .V .W .X .Y .Z .[ .\ .] .^ ._ │
00000060 .` .a .b .c .d .e .f .g .h .i .j .k .l .m .n .o │
00000070 .p .q .r .s .t .u .v .w .x .y .z .{ .| .} .~ 7f │
00000080 80 81 82 83 84 85 86 87 88 89 8a 8b 8c 8d 8e 8f │
00000090 90 91 92 93 94 95 96 97 98 99 9a 9b 9c 9d 9e 9f │
000000a0 a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 aa ab ac ad ae af │
000000b0 b0 b1 b2 b3 b4 b5 b6 b7 b8 b9 ba bb bc bd be bf │
000000c0 c0 c1 c2 c3 c4 c5 c6 c7 c8 c9 ca cb cc cd ce cf │
000000d0 d0 d1 d2 d3 d4 d5 d6 d7 d8 d9 da db dc dd de df │
000000e0 e0 e1 e2 e3 e4 e5 e6 e7 e8 e9 ea eb ec ed ee ef │
000000f0 f0 f1 f2 f3 f4 f5 f6 f7 f8 f9 fa fb fc fd fe ## │
00000100
>>> print hexdump('X' * 64)
00000000 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 │XXXX│XXXX│XXXX│XXXX│
*
00000040
>>> print hexdump('X' * 64, skip=False)
00000000 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 │XXXX│XXXX│XXXX│XXXX│
00000010 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 │XXXX│XXXX│XXXX│XXXX│
00000020 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 │XXXX│XXXX│XXXX│XXXX│
00000030 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 │XXXX│XXXX│XXXX│XXXX│
00000040
>>> print hexdump(fit({0x10: 'X'*0x20, 0x50-1: '\xff'*20}, length=0xc0) + '\x00'*32, cyclic=1, hexii=1)
00000000 .a .a .a .a .b .a .a .a .c .a .a .a .d .a .a .a │
00000010 .X .X .X .X .X .X .X .X .X .X .X .X .X .X .X .X │
*
00000030 .m .a .a .a .n .a .a .a .o .a .a .a .p .a .a .a │
00000040 .q .a .a .a .r .a .a .a .s .a .a .a .t .a .a ## │
00000050 ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## │
00000060 ## ## ## .a .z .a .a .b .b .a .a .b .c .a .a .b │
00000070 .d .a .a .b .e .a .a .b .f .a .a .b .g .a .a .b │
*
000000c0 │
*
000000e0
>>> print hexdump('A'*16, width=9)
00000000 41 41 41 41 41 41 41 41 41 │AAAA│AAAA│A│
00000009 41 41 41 41 41 41 41 │AAAA│AAA│
00000010
>>> print hexdump('A'*16, width=10)
00000000 41 41 41 41 41 41 41 41 41 41 │AAAA│AAAA│AA│
0000000a 41 41 41 41 41 41 │AAAA│AA│
00000010
>>> print hexdump('A'*16, width=11)
00000000 41 41 41 41 41 41 41 41 41 41 41 │AAAA│AAAA│AAA│
0000000b 41 41 41 41 41 │AAAA│A│
00000010
>>> print hexdump('A'*16, width=12)
00000000 41 41 41 41 41 41 41 41 41 41 41 41 │AAAA│AAAA│AAAA│
0000000c 41 41 41 41 │AAAA││
00000010
>>> print hexdump('A'*16, width=13)
00000000 41 41 41 41 41 41 41 41 41 41 41 41 41 │AAAA│AAAA│AAAA│A│
0000000d 41 41 41 │AAA│
00000010
>>> print hexdump('A'*16, width=14)
00000000 41 41 41 41 41 41 41 41 41 41 41 41 41 41 │AAAA│AAAA│AAAA│AA│
0000000e 41 41 │AA│
00000010
>>> print hexdump('A'*16, width=15)
00000000 41 41 41 41 41 41 41 41 41 41 41 41 41 41 41 │AAAA│AAAA│AAAA│AAA│
0000000f 41 │A│
00000010
"""
s = _flat(s)
return '\n'.join(hexdump_iter(logger, StringIO(s),
width,
skip,
hexii,
begin,
highlight)) | [
"def",
"hexdump",
"(",
"logger",
",",
"s",
",",
"width",
"=",
"16",
",",
"skip",
"=",
"True",
",",
"hexii",
"=",
"False",
",",
"begin",
"=",
"0",
",",
"highlight",
"=",
"None",
")",
":",
"s",
"=",
"_flat",
"(",
"s",
")",
"return",
"'\\n'",
".",
"join",
"(",
"hexdump_iter",
"(",
"logger",
",",
"StringIO",
"(",
"s",
")",
",",
"width",
",",
"skip",
",",
"hexii",
",",
"begin",
",",
"highlight",
")",
")"
] | r"""
Return a hexdump-dump of a string.
Arguments:
logger(FastLogger): Logger object
s(str): The data to hexdump.
width(int): The number of characters per line
skip(bool): Set to True, if repeated lines should be replaced by a "*"
hexii(bool): Set to True, if a hexii-dump should be returned instead of a hexdump.
begin(int): Offset of the first byte to print in the left column
highlight(iterable): Byte values to highlight.
Returns:
A hexdump-dump in the form of a string.
Examples:
>>> print hexdump("abc")
00000000 61 62 63 │abc│
00000003
>>> print hexdump('A'*32)
00000000 41 41 41 41 41 41 41 41 41 41 41 41 41 41 41 41 │AAAA│AAAA│AAAA│AAAA│
*
00000020
>>> print hexdump('A'*32, width=8)
00000000 41 41 41 41 41 41 41 41 │AAAA│AAAA│
*
00000020
>>> print hexdump(list(map(chr, range(256))))
00000000 00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f │····│····│····│····│
00000010 10 11 12 13 14 15 16 17 18 19 1a 1b 1c 1d 1e 1f │····│····│····│····│
00000020 20 21 22 23 24 25 26 27 28 29 2a 2b 2c 2d 2e 2f │ !"#│$%&'│()*+│,-./│
00000030 30 31 32 33 34 35 36 37 38 39 3a 3b 3c 3d 3e 3f │0123│4567│89:;│<=>?│
00000040 40 41 42 43 44 45 46 47 48 49 4a 4b 4c 4d 4e 4f │@ABC│DEFG│HIJK│LMNO│
00000050 50 51 52 53 54 55 56 57 58 59 5a 5b 5c 5d 5e 5f │PQRS│TUVW│XYZ[│\]^_│
00000060 60 61 62 63 64 65 66 67 68 69 6a 6b 6c 6d 6e 6f │`abc│defg│hijk│lmno│
00000070 70 71 72 73 74 75 76 77 78 79 7a 7b 7c 7d 7e 7f │pqrs│tuvw│xyz{│|}~·│
00000080 80 81 82 83 84 85 86 87 88 89 8a 8b 8c 8d 8e 8f │····│····│····│····│
00000090 90 91 92 93 94 95 96 97 98 99 9a 9b 9c 9d 9e 9f │····│····│····│····│
000000a0 a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 aa ab ac ad ae af │····│····│····│····│
000000b0 b0 b1 b2 b3 b4 b5 b6 b7 b8 b9 ba bb bc bd be bf │····│····│····│····│
000000c0 c0 c1 c2 c3 c4 c5 c6 c7 c8 c9 ca cb cc cd ce cf │····│····│····│····│
000000d0 d0 d1 d2 d3 d4 d5 d6 d7 d8 d9 da db dc dd de df │····│····│····│····│
000000e0 e0 e1 e2 e3 e4 e5 e6 e7 e8 e9 ea eb ec ed ee ef │····│····│····│····│
000000f0 f0 f1 f2 f3 f4 f5 f6 f7 f8 f9 fa fb fc fd fe ff │····│····│····│····│
00000100
>>> print hexdump(list(map(chr, range(256))), hexii=True)
00000000 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f │
00000010 10 11 12 13 14 15 16 17 18 19 1a 1b 1c 1d 1e 1f │
00000020 20 .! ." .# .$ .% .& .' .( .) .* .+ ., .- .. ./ │
00000030 .0 .1 .2 .3 .4 .5 .6 .7 .8 .9 .: .; .< .= .> .? │
00000040 .@ .A .B .C .D .E .F .G .H .I .J .K .L .M .N .O │
00000050 .P .Q .R .S .T .U .V .W .X .Y .Z .[ .\ .] .^ ._ │
00000060 .` .a .b .c .d .e .f .g .h .i .j .k .l .m .n .o │
00000070 .p .q .r .s .t .u .v .w .x .y .z .{ .| .} .~ 7f │
00000080 80 81 82 83 84 85 86 87 88 89 8a 8b 8c 8d 8e 8f │
00000090 90 91 92 93 94 95 96 97 98 99 9a 9b 9c 9d 9e 9f │
000000a0 a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 aa ab ac ad ae af │
000000b0 b0 b1 b2 b3 b4 b5 b6 b7 b8 b9 ba bb bc bd be bf │
000000c0 c0 c1 c2 c3 c4 c5 c6 c7 c8 c9 ca cb cc cd ce cf │
000000d0 d0 d1 d2 d3 d4 d5 d6 d7 d8 d9 da db dc dd de df │
000000e0 e0 e1 e2 e3 e4 e5 e6 e7 e8 e9 ea eb ec ed ee ef │
000000f0 f0 f1 f2 f3 f4 f5 f6 f7 f8 f9 fa fb fc fd fe ## │
00000100
>>> print hexdump('X' * 64)
00000000 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 │XXXX│XXXX│XXXX│XXXX│
*
00000040
>>> print hexdump('X' * 64, skip=False)
00000000 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 │XXXX│XXXX│XXXX│XXXX│
00000010 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 │XXXX│XXXX│XXXX│XXXX│
00000020 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 │XXXX│XXXX│XXXX│XXXX│
00000030 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 │XXXX│XXXX│XXXX│XXXX│
00000040
>>> print hexdump(fit({0x10: 'X'*0x20, 0x50-1: '\xff'*20}, length=0xc0) + '\x00'*32, cyclic=1, hexii=1)
00000000 .a .a .a .a .b .a .a .a .c .a .a .a .d .a .a .a │
00000010 .X .X .X .X .X .X .X .X .X .X .X .X .X .X .X .X │
*
00000030 .m .a .a .a .n .a .a .a .o .a .a .a .p .a .a .a │
00000040 .q .a .a .a .r .a .a .a .s .a .a .a .t .a .a ## │
00000050 ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## │
00000060 ## ## ## .a .z .a .a .b .b .a .a .b .c .a .a .b │
00000070 .d .a .a .b .e .a .a .b .f .a .a .b .g .a .a .b │
*
000000c0 │
*
000000e0
>>> print hexdump('A'*16, width=9)
00000000 41 41 41 41 41 41 41 41 41 │AAAA│AAAA│A│
00000009 41 41 41 41 41 41 41 │AAAA│AAA│
00000010
>>> print hexdump('A'*16, width=10)
00000000 41 41 41 41 41 41 41 41 41 41 │AAAA│AAAA│AA│
0000000a 41 41 41 41 41 41 │AAAA│AA│
00000010
>>> print hexdump('A'*16, width=11)
00000000 41 41 41 41 41 41 41 41 41 41 41 │AAAA│AAAA│AAA│
0000000b 41 41 41 41 41 │AAAA│A│
00000010
>>> print hexdump('A'*16, width=12)
00000000 41 41 41 41 41 41 41 41 41 41 41 41 │AAAA│AAAA│AAAA│
0000000c 41 41 41 41 │AAAA││
00000010
>>> print hexdump('A'*16, width=13)
00000000 41 41 41 41 41 41 41 41 41 41 41 41 41 │AAAA│AAAA│AAAA│A│
0000000d 41 41 41 │AAA│
00000010
>>> print hexdump('A'*16, width=14)
00000000 41 41 41 41 41 41 41 41 41 41 41 41 41 41 │AAAA│AAAA│AAAA│AA│
0000000e 41 41 │AA│
00000010
>>> print hexdump('A'*16, width=15)
00000000 41 41 41 41 41 41 41 41 41 41 41 41 41 41 41 │AAAA│AAAA│AAAA│AAA│
0000000f 41 │A│
00000010 | [
"r",
"Return",
"a",
"hexdump",
"-",
"dump",
"of",
"a",
"string",
"."
] | 8edb2327d72191510302c4654ffaa1691fe31277 | https://github.com/Gbps/fastlog/blob/8edb2327d72191510302c4654ffaa1691fe31277/fastlog/hexdump.py#L183-L314 | train |
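hexdump() above flattens its input and joins the lines produced by hexdump_iter over a StringIO. A minimal call sketch; note that, unlike the doctests, the real signature takes the logger first, and how a FastLogger is constructed is an assumption here, so a placeholder is used.
# Hedged usage sketch (Python 2 str data, matching the source).
from fastlog.hexdump import hexdump
logger = None   # placeholder; replace with your FastLogger instance
print(hexdump(logger, 'ABC\x00\xff' * 8, width=8))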
bitesofcode/projexui | projexui/widgets/xlineedit.py | XLineEdit.adjustText | def adjustText(self):
"""
Updates the text based on the current format options.
"""
pos = self.cursorPosition()
self.blockSignals(True)
super(XLineEdit, self).setText(self.formatText(self.text()))
self.setCursorPosition(pos)
self.blockSignals(False) | python | def adjustText(self):
"""
Updates the text based on the current format options.
"""
pos = self.cursorPosition()
self.blockSignals(True)
super(XLineEdit, self).setText(self.formatText(self.text()))
self.setCursorPosition(pos)
self.blockSignals(False) | [
"def",
"adjustText",
"(",
"self",
")",
":",
"pos",
"=",
"self",
".",
"cursorPosition",
"(",
")",
"self",
".",
"blockSignals",
"(",
"True",
")",
"super",
"(",
"XLineEdit",
",",
"self",
")",
".",
"setText",
"(",
"self",
".",
"formatText",
"(",
"self",
".",
"text",
"(",
")",
")",
")",
"self",
".",
"setCursorPosition",
"(",
"pos",
")",
"self",
".",
"blockSignals",
"(",
"False",
")"
] | Updates the text based on the current format options. | [
"Updates",
"the",
"text",
"based",
"on",
"the",
"current",
"format",
"options",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xlineedit.py#L110-L118 | train |
bitesofcode/projexui | projexui/widgets/xlineedit.py | XLineEdit.adjustButtons | def adjustButtons( self ):
"""
Adjusts the placement of the buttons for this line edit.
"""
y = 1
for btn in self.buttons():
btn.setIconSize(self.iconSize())
btn.setFixedSize(QSize(self.height() - 2, self.height() - 2))
# adjust the location for the left buttons
left_buttons = self._buttons.get(Qt.AlignLeft, [])
x = (self.cornerRadius() / 2.0) + 2
for btn in left_buttons:
btn.move(x, y)
x += btn.width()
# adjust the location for the right buttons
right_buttons = self._buttons.get(Qt.AlignRight, [])
w = self.width()
bwidth = sum([btn.width() for btn in right_buttons])
bwidth += (self.cornerRadius() / 2.0) + 1
for btn in right_buttons:
btn.move(w - bwidth, y)
bwidth -= btn.width()
self._buttonWidth = sum([btn.width() for btn in self.buttons()])
self.adjustTextMargins() | python | def adjustButtons( self ):
"""
Adjusts the placement of the buttons for this line edit.
"""
y = 1
for btn in self.buttons():
btn.setIconSize(self.iconSize())
btn.setFixedSize(QSize(self.height() - 2, self.height() - 2))
# adjust the location for the left buttons
left_buttons = self._buttons.get(Qt.AlignLeft, [])
x = (self.cornerRadius() / 2.0) + 2
for btn in left_buttons:
btn.move(x, y)
x += btn.width()
# adjust the location for the right buttons
right_buttons = self._buttons.get(Qt.AlignRight, [])
w = self.width()
bwidth = sum([btn.width() for btn in right_buttons])
bwidth += (self.cornerRadius() / 2.0) + 1
for btn in right_buttons:
btn.move(w - bwidth, y)
bwidth -= btn.width()
self._buttonWidth = sum([btn.width() for btn in self.buttons()])
self.adjustTextMargins() | [
"def",
"adjustButtons",
"(",
"self",
")",
":",
"y",
"=",
"1",
"for",
"btn",
"in",
"self",
".",
"buttons",
"(",
")",
":",
"btn",
".",
"setIconSize",
"(",
"self",
".",
"iconSize",
"(",
")",
")",
"btn",
".",
"setFixedSize",
"(",
"QSize",
"(",
"self",
".",
"height",
"(",
")",
"-",
"2",
",",
"self",
".",
"height",
"(",
")",
"-",
"2",
")",
")",
"# adjust the location for the left buttons",
"left_buttons",
"=",
"self",
".",
"_buttons",
".",
"get",
"(",
"Qt",
".",
"AlignLeft",
",",
"[",
"]",
")",
"x",
"=",
"(",
"self",
".",
"cornerRadius",
"(",
")",
"/",
"2.0",
")",
"+",
"2",
"for",
"btn",
"in",
"left_buttons",
":",
"btn",
".",
"move",
"(",
"x",
",",
"y",
")",
"x",
"+=",
"btn",
".",
"width",
"(",
")",
"# adjust the location for the right buttons",
"right_buttons",
"=",
"self",
".",
"_buttons",
".",
"get",
"(",
"Qt",
".",
"AlignRight",
",",
"[",
"]",
")",
"w",
"=",
"self",
".",
"width",
"(",
")",
"bwidth",
"=",
"sum",
"(",
"[",
"btn",
".",
"width",
"(",
")",
"for",
"btn",
"in",
"right_buttons",
"]",
")",
"bwidth",
"+=",
"(",
"self",
".",
"cornerRadius",
"(",
")",
"/",
"2.0",
")",
"+",
"1",
"for",
"btn",
"in",
"right_buttons",
":",
"btn",
".",
"move",
"(",
"w",
"-",
"bwidth",
",",
"y",
")",
"bwidth",
"-=",
"btn",
".",
"width",
"(",
")",
"self",
".",
"_buttonWidth",
"=",
"sum",
"(",
"[",
"btn",
".",
"width",
"(",
")",
"for",
"btn",
"in",
"self",
".",
"buttons",
"(",
")",
"]",
")",
"self",
".",
"adjustTextMargins",
"(",
")"
] | Adjusts the placement of the buttons for this line edit. | [
"Adjusts",
"the",
"placement",
"of",
"the",
"buttons",
"for",
"this",
"line",
"edit",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xlineedit.py#L152-L182 | train |
bitesofcode/projexui | projexui/widgets/xlineedit.py | XLineEdit.adjustTextMargins | def adjustTextMargins( self ):
"""
Adjusts the margins for the text based on the contents to be displayed.
"""
left_buttons = self._buttons.get(Qt.AlignLeft, [])
if left_buttons:
bwidth = left_buttons[-1].pos().x() + left_buttons[-1].width() - 4
else:
bwidth = 0 + (max(8, self.cornerRadius()) - 8)
ico = self.icon()
if ico and not ico.isNull():
bwidth += self.iconSize().width()
self.setTextMargins(bwidth, 0, 0, 0) | python | def adjustTextMargins( self ):
"""
Adjusts the margins for the text based on the contents to be displayed.
"""
left_buttons = self._buttons.get(Qt.AlignLeft, [])
if left_buttons:
bwidth = left_buttons[-1].pos().x() + left_buttons[-1].width() - 4
else:
bwidth = 0 + (max(8, self.cornerRadius()) - 8)
ico = self.icon()
if ico and not ico.isNull():
bwidth += self.iconSize().width()
self.setTextMargins(bwidth, 0, 0, 0) | [
"def",
"adjustTextMargins",
"(",
"self",
")",
":",
"left_buttons",
"=",
"self",
".",
"_buttons",
".",
"get",
"(",
"Qt",
".",
"AlignLeft",
",",
"[",
"]",
")",
"if",
"left_buttons",
":",
"bwidth",
"=",
"left_buttons",
"[",
"-",
"1",
"]",
".",
"pos",
"(",
")",
".",
"x",
"(",
")",
"+",
"left_buttons",
"[",
"-",
"1",
"]",
".",
"width",
"(",
")",
"-",
"4",
"else",
":",
"bwidth",
"=",
"0",
"+",
"(",
"max",
"(",
"8",
",",
"self",
".",
"cornerRadius",
"(",
")",
")",
"-",
"8",
")",
"ico",
"=",
"self",
".",
"icon",
"(",
")",
"if",
"ico",
"and",
"not",
"ico",
".",
"isNull",
"(",
")",
":",
"bwidth",
"+=",
"self",
".",
"iconSize",
"(",
")",
".",
"width",
"(",
")",
"self",
".",
"setTextMargins",
"(",
"bwidth",
",",
"0",
",",
"0",
",",
"0",
")"
] | Adjusts the margins for the text based on the contents to be displayed. | [
"Adjusts",
"the",
"margins",
"for",
"the",
"text",
"based",
"on",
"the",
"contents",
"to",
"be",
"displayed",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xlineedit.py#L184-L199 | train |
bitesofcode/projexui | projexui/widgets/xlineedit.py | XLineEdit.clear | def clear(self):
"""
Clears the text from the edit.
"""
super(XLineEdit, self).clear()
self.textEntered.emit('')
self.textChanged.emit('')
self.textEdited.emit('') | python | def clear(self):
"""
Clears the text from the edit.
"""
super(XLineEdit, self).clear()
self.textEntered.emit('')
self.textChanged.emit('')
self.textEdited.emit('') | [
"def",
"clear",
"(",
"self",
")",
":",
"super",
"(",
"XLineEdit",
",",
"self",
")",
".",
"clear",
"(",
")",
"self",
".",
"textEntered",
".",
"emit",
"(",
"''",
")",
"self",
".",
"textChanged",
".",
"emit",
"(",
"''",
")",
"self",
".",
"textEdited",
".",
"emit",
"(",
"''",
")"
] | Clears the text from the edit. | [
"Clears",
"the",
"text",
"from",
"the",
"edit",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xlineedit.py#L237-L245 | train |
Gbps/fastlog | fastlog/termcap.py | get | def get(cap, *args, **kwargs):
"""
Get a terminal capability exposes through the `curses` module.
"""
# Hack for readthedocs.org
if 'READTHEDOCS' in os.environ:
return ''
if kwargs != {}:
raise TypeError("get(): No such argument %r" % kwargs.popitem()[0])
if _cache == {}:
# Fix for BPython
try:
curses.setupterm()
except:
pass
s = _cache.get(cap)
if not s:
s = curses.tigetstr(cap)
if s == None:
s = curses.tigetnum(cap)
if s == -2:
s = curses.tigetflag(cap)
if s == -1:
# default to empty string so tparm doesn't fail
s = ''
else:
s = bool(s)
_cache[cap] = s
# if 's' is not set 'curses.tparm' will throw an error if given arguments
if args and s:
r = curses.tparm(s, *args)
return r.decode('utf-8')
else:
if isinstance(s, bytes):
return s.decode('utf-8')
else:
return s | python | def get(cap, *args, **kwargs):
"""
Get a terminal capability exposes through the `curses` module.
"""
# Hack for readthedocs.org
if 'READTHEDOCS' in os.environ:
return ''
if kwargs != {}:
raise TypeError("get(): No such argument %r" % kwargs.popitem()[0])
if _cache == {}:
# Fix for BPython
try:
curses.setupterm()
except:
pass
s = _cache.get(cap)
if not s:
s = curses.tigetstr(cap)
if s == None:
s = curses.tigetnum(cap)
if s == -2:
s = curses.tigetflag(cap)
if s == -1:
# default to empty string so tparm doesn't fail
s = ''
else:
s = bool(s)
_cache[cap] = s
# if 's' is not set 'curses.tparm' will throw an error if given arguments
if args and s:
r = curses.tparm(s, *args)
return r.decode('utf-8')
else:
if isinstance(s, bytes):
return s.decode('utf-8')
else:
return s | [
"def",
"get",
"(",
"cap",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# Hack for readthedocs.org",
"if",
"'READTHEDOCS'",
"in",
"os",
".",
"environ",
":",
"return",
"''",
"if",
"kwargs",
"!=",
"{",
"}",
":",
"raise",
"TypeError",
"(",
"\"get(): No such argument %r\"",
"%",
"kwargs",
".",
"popitem",
"(",
")",
"[",
"0",
"]",
")",
"if",
"_cache",
"==",
"{",
"}",
":",
"# Fix for BPython",
"try",
":",
"curses",
".",
"setupterm",
"(",
")",
"except",
":",
"pass",
"s",
"=",
"_cache",
".",
"get",
"(",
"cap",
")",
"if",
"not",
"s",
":",
"s",
"=",
"curses",
".",
"tigetstr",
"(",
"cap",
")",
"if",
"s",
"==",
"None",
":",
"s",
"=",
"curses",
".",
"tigetnum",
"(",
"cap",
")",
"if",
"s",
"==",
"-",
"2",
":",
"s",
"=",
"curses",
".",
"tigetflag",
"(",
"cap",
")",
"if",
"s",
"==",
"-",
"1",
":",
"# default to empty string so tparm doesn't fail",
"s",
"=",
"''",
"else",
":",
"s",
"=",
"bool",
"(",
"s",
")",
"_cache",
"[",
"cap",
"]",
"=",
"s",
"# if 's' is not set 'curses.tparm' will throw an error if given arguments",
"if",
"args",
"and",
"s",
":",
"r",
"=",
"curses",
".",
"tparm",
"(",
"s",
",",
"*",
"args",
")",
"return",
"r",
".",
"decode",
"(",
"'utf-8'",
")",
"else",
":",
"if",
"isinstance",
"(",
"s",
",",
"bytes",
")",
":",
"return",
"s",
".",
"decode",
"(",
"'utf-8'",
")",
"else",
":",
"return",
"s"
] | Get a terminal capability exposes through the `curses` module. | [
"Get",
"a",
"terminal",
"capability",
"exposes",
"through",
"the",
"curses",
"module",
"."
] | 8edb2327d72191510302c4654ffaa1691fe31277 | https://github.com/Gbps/fastlog/blob/8edb2327d72191510302c4654ffaa1691fe31277/fastlog/termcap.py#L9-L49 | train |
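get() above memoizes tigetstr/tigetnum/tigetflag lookups and only routes through tparm when both a capability string and arguments are present. A hedged sketch using standard terminfo capability names.
# Hedged usage sketch; capability names are standard terminfo names.
from fastlog import termcap
bold = termcap.get('bold')      # string capability ('' if the terminal lacks it)
red = termcap.get('setaf', 1)   # parameterised capability, runs through tparm
cols = termcap.get('cols')      # numeric capability
print(bold + red + 'highlighted')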
andylockran/heatmiserV3 | heatmiserV3/connection.py | HeatmiserUH1.registerThermostat | def registerThermostat(self, thermostat):
"""Registers a thermostat with the UH1"""
try:
type(thermostat) == heatmiser.HeatmiserThermostat
if thermostat.address in self.thermostats.keys():
raise ValueError("Key already present")
else:
self.thermostats[thermostat.address] = thermostat
except ValueError:
pass
except Exception as e:
logging.info("You're not adding a HeatmiiserThermostat Object")
logging.info(e.message)
return self._serport | python | def registerThermostat(self, thermostat):
"""Registers a thermostat with the UH1"""
try:
type(thermostat) == heatmiser.HeatmiserThermostat
if thermostat.address in self.thermostats.keys():
raise ValueError("Key already present")
else:
self.thermostats[thermostat.address] = thermostat
except ValueError:
pass
except Exception as e:
logging.info("You're not adding a HeatmiiserThermostat Object")
logging.info(e.message)
return self._serport | [
"def",
"registerThermostat",
"(",
"self",
",",
"thermostat",
")",
":",
"try",
":",
"type",
"(",
"thermostat",
")",
"==",
"heatmiser",
".",
"HeatmiserThermostat",
"if",
"thermostat",
".",
"address",
"in",
"self",
".",
"thermostats",
".",
"keys",
"(",
")",
":",
"raise",
"ValueError",
"(",
"\"Key already present\"",
")",
"else",
":",
"self",
".",
"thermostats",
"[",
"thermostat",
".",
"address",
"]",
"=",
"thermostat",
"except",
"ValueError",
":",
"pass",
"except",
"Exception",
"as",
"e",
":",
"logging",
".",
"info",
"(",
"\"You're not adding a HeatmiiserThermostat Object\"",
")",
"logging",
".",
"info",
"(",
"e",
".",
"message",
")",
"return",
"self",
".",
"_serport"
] | Registers a thermostat with the UH1 | [
"Registers",
"a",
"thermostat",
"with",
"the",
"UH1"
] | bd8638f5fd1f85d16c908020252f58a0cc4f6ac0 | https://github.com/andylockran/heatmiserV3/blob/bd8638f5fd1f85d16c908020252f58a0cc4f6ac0/heatmiserV3/connection.py#L49-L62 | train |
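registerThermostat() above keys thermostats by their address on the shared UH1 hub and returns the underlying serial port; a duplicate address raises ValueError internally, which is swallowed, so the second registration is silently ignored. A hedged sketch; the constructor arguments shown are assumptions, not taken from this record.
# Hedged usage sketch; check connection.py / heatmiser.py for the real
# constructor signatures, the arguments below are placeholders.
from heatmiserV3 import connection, heatmiser
uh1 = connection.HeatmiserUH1('/dev/ttyUSB0', 4800)    # port and baud are placeholders
stat = heatmiser.HeatmiserThermostat(1, 'prt', uh1)    # thermostat at address 1
uh1.registerThermostat(stat)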
bitesofcode/projexui | projexui/menus/xrecentfilesmenu.py | XRecentFilesMenu.refresh | def refresh( self ):
"""
Clears out the actions for this menu and then loads the files.
"""
self.clear()
for i, filename in enumerate(self.filenames()):
name = '%i. %s' % (i+1, os.path.basename(filename))
action = self.addAction(name)
action.setData(wrapVariant(filename)) | python | def refresh( self ):
"""
Clears out the actions for this menu and then loads the files.
"""
self.clear()
for i, filename in enumerate(self.filenames()):
name = '%i. %s' % (i+1, os.path.basename(filename))
action = self.addAction(name)
action.setData(wrapVariant(filename)) | [
"def",
"refresh",
"(",
"self",
")",
":",
"self",
".",
"clear",
"(",
")",
"for",
"i",
",",
"filename",
"in",
"enumerate",
"(",
"self",
".",
"filenames",
"(",
")",
")",
":",
"name",
"=",
"'%i. %s'",
"%",
"(",
"i",
"+",
"1",
",",
"os",
".",
"path",
".",
"basename",
"(",
"filename",
")",
")",
"action",
"=",
"self",
".",
"addAction",
"(",
"name",
")",
"action",
".",
"setData",
"(",
"wrapVariant",
"(",
"filename",
")",
")"
] | Clears out the actions for this menu and then loads the files. | [
"Clears",
"out",
"the",
"actions",
"for",
"this",
"menu",
"and",
"then",
"loads",
"the",
"files",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/menus/xrecentfilesmenu.py#L84-L93 | train |
whiteclover/dbpy | db/query/insert.py | InsertQuery.values | def values(self, values):
"""The values for insert ,
it can be a dict row or list tuple row.
"""
if isinstance(values, dict):
l = []
for column in self._columns:
l.append(values[column])
self._values.append(tuple(l))
else:
self._values.append(values)
return self | python | def values(self, values):
"""The values for insert ,
it can be a dict row or list tuple row.
"""
if isinstance(values, dict):
l = []
for column in self._columns:
l.append(values[column])
self._values.append(tuple(l))
else:
self._values.append(values)
return self | [
"def",
"values",
"(",
"self",
",",
"values",
")",
":",
"if",
"isinstance",
"(",
"values",
",",
"dict",
")",
":",
"l",
"=",
"[",
"]",
"for",
"column",
"in",
"self",
".",
"_columns",
":",
"l",
".",
"append",
"(",
"values",
"[",
"column",
"]",
")",
"self",
".",
"_values",
".",
"append",
"(",
"tuple",
"(",
"l",
")",
")",
"else",
":",
"self",
".",
"_values",
".",
"append",
"(",
"values",
")",
"return",
"self"
] | The values for insert ,
it can be a dict row or list tuple row. | [
"The",
"values",
"for",
"insert",
"it",
"can",
"be",
"a",
"dict",
"row",
"or",
"list",
"tuple",
"row",
"."
] | 3d9ce85f55cfb39cced22081e525f79581b26b3a | https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/db/query/insert.py#L36-L47 | train |
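values() above appends one row per call and is chainable; dict rows are re-ordered to match the declared columns before being stored as a tuple, while sequence rows are appended as given. A small standalone sketch of that reordering step.
# Hedged sketch of the dict-row handling, independent of the query builder.
columns = ('username', 'email')
row = {'email': 'a@example.com', 'username': 'alice'}
ordered = tuple(row[c] for c in columns)   # ('alice', 'a@example.com')
# A sequence row, e.g. ('bob', 'b@example.com'), would be appended unchanged.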
bitesofcode/projexui | projexui/widgets/xratingslider.py | XRatingSlider.adjustMinimumWidth | def adjustMinimumWidth( self ):
"""
Modifies the minimum width to factor in the size of the pixmaps and the
number for the maximum.
"""
pw = self.pixmapSize().width()
# allow 1 pixel space between the icons
self.setMinimumWidth(pw * self.maximum() + 3 * self.maximum()) | python | def adjustMinimumWidth( self ):
"""
Modifies the minimum width to factor in the size of the pixmaps and the
number for the maximum.
"""
pw = self.pixmapSize().width()
# allow 1 pixel space between the icons
self.setMinimumWidth(pw * self.maximum() + 3 * self.maximum()) | [
"def",
"adjustMinimumWidth",
"(",
"self",
")",
":",
"pw",
"=",
"self",
".",
"pixmapSize",
"(",
")",
".",
"width",
"(",
")",
"# allow 1 pixel space between the icons\r",
"self",
".",
"setMinimumWidth",
"(",
"pw",
"*",
"self",
".",
"maximum",
"(",
")",
"+",
"3",
"*",
"self",
".",
"maximum",
"(",
")",
")"
] | Modifies the minimum width to factor in the size of the pixmaps and the
number for the maximum. | [
"Modifies",
"the",
"minimum",
"width",
"to",
"factor",
"in",
"the",
"size",
"of",
"the",
"pixmaps",
"and",
"the",
"number",
"for",
"the",
"maximum",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xratingslider.py#L44-L52 | train |
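adjustMinimumWidth() above reserves one pixmap width plus a 3 px gap per rating step. A worked instance of that arithmetic with illustrative numbers.
# Hedged sketch of the width computation, mirroring pw * maximum + 3 * maximum.
pixmap_width, maximum = 22, 5                        # 22 px star icons, 5-star maximum
min_width = pixmap_width * maximum + 3 * maximum     # 22*5 + 3*5 == 125 px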
talkincode/txradius | txradius/openvpn/client_disconnect.py | cli | def cli(conf):
""" OpenVPN client_disconnect method
"""
config = init_config(conf)
nas_id = config.get('DEFAULT', 'nas_id')
secret = config.get('DEFAULT', 'radius_secret')
nas_addr = config.get('DEFAULT', 'nas_addr')
radius_addr = config.get('DEFAULT', 'radius_addr')
radius_acct_port = config.getint('DEFAULT', 'radius_acct_port')
radius_timeout = config.getint('DEFAULT', 'radius_timeout')
status_dbfile = config.get('DEFAULT', 'statusdb')
username = os.environ.get('username')
userip = os.environ.get('ifconfig_pool_remote_ip')
realip = os.environ.get('trusted_ip')
realport = os.environ.get('trusted_port')
session_id = md5(nas_addr + realip + realport).hexdigest()
req = {'User-Name':username}
req['Acct-Status-Type'] = ACCT_STOP
req['Acct-Session-Id'] = session_id
req["Acct-Output-Octets"] = 0
req["Acct-Input-Octets"] = 0
req['Acct-Session-Time'] = 0
req["NAS-IP-Address"] = nas_addr
req["NAS-Port-Id"] = '0/0/0:0.0'
req["NAS-Port"] = 0
req["Service-Type"] = "Login-User"
req["NAS-Identifier"] = nas_id
req["Called-Station-Id"] = '00:00:00:00:00:00'
req["Calling-Station-Id"] = '00:00:00:00:00:00'
req["Framed-IP-Address"] = userip
def shutdown(exitcode=0):
reactor.addSystemEventTrigger('after', 'shutdown', os._exit,exitcode)
reactor.stop()
def onresp(r):
try:
statusdb.del_client(status_dbfile,session_id)
log.msg('delete online<%s> client from db'%session_id)
except Exception as e:
log.err('del client online error')
log.err(e)
shutdown(0)
def onerr(e):
log.err(e)
shutdown(1)
d = client.send_acct(str(secret), get_dictionary(), radius_addr,
acctport=radius_acct_port, debug=True,**req)
d.addCallbacks(onresp,onerr)
reactor.callLater(radius_timeout,shutdown,1)
reactor.run() | python | def cli(conf):
""" OpenVPN client_disconnect method
"""
config = init_config(conf)
nas_id = config.get('DEFAULT', 'nas_id')
secret = config.get('DEFAULT', 'radius_secret')
nas_addr = config.get('DEFAULT', 'nas_addr')
radius_addr = config.get('DEFAULT', 'radius_addr')
radius_acct_port = config.getint('DEFAULT', 'radius_acct_port')
radius_timeout = config.getint('DEFAULT', 'radius_timeout')
status_dbfile = config.get('DEFAULT', 'statusdb')
username = os.environ.get('username')
userip = os.environ.get('ifconfig_pool_remote_ip')
realip = os.environ.get('trusted_ip')
realport = os.environ.get('trusted_port')
session_id = md5(nas_addr + realip + realport).hexdigest()
req = {'User-Name':username}
req['Acct-Status-Type'] = ACCT_STOP
req['Acct-Session-Id'] = session_id
req["Acct-Output-Octets"] = 0
req["Acct-Input-Octets"] = 0
req['Acct-Session-Time'] = 0
req["NAS-IP-Address"] = nas_addr
req["NAS-Port-Id"] = '0/0/0:0.0'
req["NAS-Port"] = 0
req["Service-Type"] = "Login-User"
req["NAS-Identifier"] = nas_id
req["Called-Station-Id"] = '00:00:00:00:00:00'
req["Calling-Station-Id"] = '00:00:00:00:00:00'
req["Framed-IP-Address"] = userip
def shutdown(exitcode=0):
reactor.addSystemEventTrigger('after', 'shutdown', os._exit,exitcode)
reactor.stop()
def onresp(r):
try:
statusdb.del_client(status_dbfile,session_id)
log.msg('delete online<%s> client from db'%session_id)
except Exception as e:
log.err('del client online error')
log.err(e)
shutdown(0)
def onerr(e):
log.err(e)
shutdown(1)
d = client.send_acct(str(secret), get_dictionary(), radius_addr,
acctport=radius_acct_port, debug=True,**req)
d.addCallbacks(onresp,onerr)
reactor.callLater(radius_timeout,shutdown,1)
reactor.run() | [
"def",
"cli",
"(",
"conf",
")",
":",
"config",
"=",
"init_config",
"(",
"conf",
")",
"nas_id",
"=",
"config",
".",
"get",
"(",
"'DEFAULT'",
",",
"'nas_id'",
")",
"secret",
"=",
"config",
".",
"get",
"(",
"'DEFAULT'",
",",
"'radius_secret'",
")",
"nas_addr",
"=",
"config",
".",
"get",
"(",
"'DEFAULT'",
",",
"'nas_addr'",
")",
"radius_addr",
"=",
"config",
".",
"get",
"(",
"'DEFAULT'",
",",
"'radius_addr'",
")",
"radius_acct_port",
"=",
"config",
".",
"getint",
"(",
"'DEFAULT'",
",",
"'radius_acct_port'",
")",
"radius_timeout",
"=",
"config",
".",
"getint",
"(",
"'DEFAULT'",
",",
"'radius_timeout'",
")",
"status_dbfile",
"=",
"config",
".",
"get",
"(",
"'DEFAULT'",
",",
"'statusdb'",
")",
"username",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'username'",
")",
"userip",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'ifconfig_pool_remote_ip'",
")",
"realip",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'trusted_ip'",
")",
"realport",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'trusted_port'",
")",
"session_id",
"=",
"md5",
"(",
"nas_addr",
"+",
"realip",
"+",
"realport",
")",
".",
"hexdigest",
"(",
")",
"req",
"=",
"{",
"'User-Name'",
":",
"username",
"}",
"req",
"[",
"'Acct-Status-Type'",
"]",
"=",
"ACCT_STOP",
"req",
"[",
"'Acct-Session-Id'",
"]",
"=",
"session_id",
"req",
"[",
"\"Acct-Output-Octets\"",
"]",
"=",
"0",
"req",
"[",
"\"Acct-Input-Octets\"",
"]",
"=",
"0",
"req",
"[",
"'Acct-Session-Time'",
"]",
"=",
"0",
"req",
"[",
"\"NAS-IP-Address\"",
"]",
"=",
"nas_addr",
"req",
"[",
"\"NAS-Port-Id\"",
"]",
"=",
"'0/0/0:0.0'",
"req",
"[",
"\"NAS-Port\"",
"]",
"=",
"0",
"req",
"[",
"\"Service-Type\"",
"]",
"=",
"\"Login-User\"",
"req",
"[",
"\"NAS-Identifier\"",
"]",
"=",
"nas_id",
"req",
"[",
"\"Called-Station-Id\"",
"]",
"=",
"'00:00:00:00:00:00'",
"req",
"[",
"\"Calling-Station-Id\"",
"]",
"=",
"'00:00:00:00:00:00'",
"req",
"[",
"\"Framed-IP-Address\"",
"]",
"=",
"userip",
"def",
"shutdown",
"(",
"exitcode",
"=",
"0",
")",
":",
"reactor",
".",
"addSystemEventTrigger",
"(",
"'after'",
",",
"'shutdown'",
",",
"os",
".",
"_exit",
",",
"exitcode",
")",
"reactor",
".",
"stop",
"(",
")",
"def",
"onresp",
"(",
"r",
")",
":",
"try",
":",
"statusdb",
".",
"del_client",
"(",
"status_dbfile",
",",
"session_id",
")",
"log",
".",
"msg",
"(",
"'delete online<%s> client from db'",
"%",
"session_id",
")",
"except",
"Exception",
"as",
"e",
":",
"log",
".",
"err",
"(",
"'del client online error'",
")",
"log",
".",
"err",
"(",
"e",
")",
"shutdown",
"(",
"0",
")",
"def",
"onerr",
"(",
"e",
")",
":",
"log",
".",
"err",
"(",
"e",
")",
"shutdown",
"(",
"1",
")",
"d",
"=",
"client",
".",
"send_acct",
"(",
"str",
"(",
"secret",
")",
",",
"get_dictionary",
"(",
")",
",",
"radius_addr",
",",
"acctport",
"=",
"radius_acct_port",
",",
"debug",
"=",
"True",
",",
"*",
"*",
"req",
")",
"d",
".",
"addCallbacks",
"(",
"onresp",
",",
"onerr",
")",
"reactor",
".",
"callLater",
"(",
"radius_timeout",
",",
"shutdown",
",",
"1",
")",
"reactor",
".",
"run",
"(",
")"
] | OpenVPN client_disconnect method | [
"OpenVPN",
"client_disconnect",
"method"
] | b86fdbc9be41183680b82b07d3a8e8ea10926e01 | https://github.com/talkincode/txradius/blob/b86fdbc9be41183680b82b07d3a8e8ea10926e01/txradius/openvpn/client_disconnect.py#L19-L73 | train |
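The record above is the OpenVPN client-disconnect hook from txradius: OpenVPN exports per-client environment variables, and the script sends a RADIUS Acct-Stop for the matching session. A minimal sketch of driving it by hand follows; the config path and all values are placeholders, and if `cli` is additionally wrapped as a command-line entry point (not visible in this record), it should be invoked through that wrapper instead.

```python
# Sketch only: simulate the environment OpenVPN would provide, then run the hook.
# Every value below is a placeholder; the config file must define the DEFAULT
# options read above (nas_id, radius_secret, nas_addr, radius_addr,
# radius_acct_port, radius_timeout, statusdb).
import os
from txradius.openvpn.client_disconnect import cli

os.environ['username'] = 'alice'                    # account being disconnected
os.environ['ifconfig_pool_remote_ip'] = '10.8.0.6'  # VPN-assigned address
os.environ['trusted_ip'] = '203.0.113.10'           # client's real address
os.environ['trusted_port'] = '54321'                # client's real source port

cli('/etc/txradius/openvpn.conf')                   # blocks until the Acct-Stop round trip finishes
```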
bitesofcode/projexui | projexui/widgets/xviewwidget/xviewwidget.py | XViewWidget.viewAt | def viewAt(self, point):
"""
Looks up the view at the inputed point.
:param point | <QtCore.QPoint>
:return <projexui.widgets.xviewwidget.XView> || None
"""
widget = self.childAt(point)
if widget:
return projexui.ancestor(widget, XView)
else:
return None | python | def viewAt(self, point):
"""
Looks up the view at the inputed point.
:param point | <QtCore.QPoint>
:return <projexui.widgets.xviewwidget.XView> || None
"""
widget = self.childAt(point)
if widget:
return projexui.ancestor(widget, XView)
else:
return None | [
"def",
"viewAt",
"(",
"self",
",",
"point",
")",
":",
"widget",
"=",
"self",
".",
"childAt",
"(",
"point",
")",
"if",
"widget",
":",
"return",
"projexui",
".",
"ancestor",
"(",
"widget",
",",
"XView",
")",
"else",
":",
"return",
"None"
] | Looks up the view at the inputed point.
:param point | <QtCore.QPoint>
:return <projexui.widgets.xviewwidget.XView> || None | [
"Looks",
"up",
"the",
"view",
"at",
"the",
"inputed",
"point",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xviewwidget/xviewwidget.py#L578-L590 | train |
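A small usage sketch for the lookup above: map the global cursor position into the widget's coordinates and ask which XView sits there. `widget` stands for an existing XViewWidget inside a running Qt application, and the QtGui import path is an assumption based on how projexui wraps Qt.

```python
# Sketch: which view is under the mouse cursor right now?
from projexui.qt import QtGui   # assumed wrapper module; adjust to your Qt binding if needed

point = widget.mapFromGlobal(QtGui.QCursor.pos())   # viewAt expects widget-local coordinates
view = widget.viewAt(point)
if view is not None:
    print(view.windowTitle())
else:
    print("no view under the cursor")
```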
mojaie/chorus | chorus/draw/drawer2d.py | draw | def draw(canvas, mol):
"""Draw molecule structure image.
Args:
canvas: draw.drawable.Drawable
mol: model.graphmol.Compound
"""
mol.require("ScaleAndCenter")
mlb = mol.size2d[2]
if not mol.atom_count():
return
bond_type_fn = {
1: {
0: single_bond,
1: wedged_single,
2: dashed_wedged_single,
3: wave_single,
}, 2: {
0: cw_double,
1: counter_cw_double,
2: double_bond,
3: cross_double
}, 3: {
0: triple_bond
}
}
# Draw bonds
for u, v, bond in mol.bonds_iter():
if not bond.visible:
continue
if (u < v) == bond.is_lower_first:
f, s = (u, v)
else:
s, f = (u, v)
p1 = mol.atom(f).coords
p2 = mol.atom(s).coords
if p1 == p2:
continue # avoid zero division
if mol.atom(f).visible:
p1 = gm.t_seg(p1, p2, F_AOVL, 2)[0]
if mol.atom(s).visible:
p2 = gm.t_seg(p1, p2, F_AOVL, 1)[1]
color1 = mol.atom(f).color
color2 = mol.atom(s).color
bond_type_fn[bond.order][bond.type](
canvas, p1, p2, color1, color2, mlb)
# Draw atoms
for n, atom in mol.atoms_iter():
if not atom.visible:
continue
p = atom.coords
color = atom.color
# Determine text direction
if atom.H_count:
cosnbrs = []
hrzn = (p[0] + 1, p[1])
for nbr in mol.graph.neighbors(n):
pnbr = mol.atom(nbr).coords
try:
cosnbrs.append(gm.dot_product(hrzn, pnbr, p) /
gm.distance(p, pnbr))
except ZeroDivisionError:
pass
if not cosnbrs or min(cosnbrs) > 0:
# [atom]< or isolated node(ex. H2O, HCl)
text = atom.formula_html(True)
canvas.draw_text(p, text, color, "right")
continue
elif max(cosnbrs) < 0:
# >[atom]
text = atom.formula_html()
canvas.draw_text(p, text, color, "left")
continue
# -[atom]- or no hydrogens
text = atom.formula_html()
canvas.draw_text(p, text, color, "center") | python | def draw(canvas, mol):
"""Draw molecule structure image.
Args:
canvas: draw.drawable.Drawable
mol: model.graphmol.Compound
"""
mol.require("ScaleAndCenter")
mlb = mol.size2d[2]
if not mol.atom_count():
return
bond_type_fn = {
1: {
0: single_bond,
1: wedged_single,
2: dashed_wedged_single,
3: wave_single,
}, 2: {
0: cw_double,
1: counter_cw_double,
2: double_bond,
3: cross_double
}, 3: {
0: triple_bond
}
}
# Draw bonds
for u, v, bond in mol.bonds_iter():
if not bond.visible:
continue
if (u < v) == bond.is_lower_first:
f, s = (u, v)
else:
s, f = (u, v)
p1 = mol.atom(f).coords
p2 = mol.atom(s).coords
if p1 == p2:
continue # avoid zero division
if mol.atom(f).visible:
p1 = gm.t_seg(p1, p2, F_AOVL, 2)[0]
if mol.atom(s).visible:
p2 = gm.t_seg(p1, p2, F_AOVL, 1)[1]
color1 = mol.atom(f).color
color2 = mol.atom(s).color
bond_type_fn[bond.order][bond.type](
canvas, p1, p2, color1, color2, mlb)
# Draw atoms
for n, atom in mol.atoms_iter():
if not atom.visible:
continue
p = atom.coords
color = atom.color
# Determine text direction
if atom.H_count:
cosnbrs = []
hrzn = (p[0] + 1, p[1])
for nbr in mol.graph.neighbors(n):
pnbr = mol.atom(nbr).coords
try:
cosnbrs.append(gm.dot_product(hrzn, pnbr, p) /
gm.distance(p, pnbr))
except ZeroDivisionError:
pass
if not cosnbrs or min(cosnbrs) > 0:
# [atom]< or isolated node(ex. H2O, HCl)
text = atom.formula_html(True)
canvas.draw_text(p, text, color, "right")
continue
elif max(cosnbrs) < 0:
# >[atom]
text = atom.formula_html()
canvas.draw_text(p, text, color, "left")
continue
# -[atom]- or no hydrogens
text = atom.formula_html()
canvas.draw_text(p, text, color, "center") | [
"def",
"draw",
"(",
"canvas",
",",
"mol",
")",
":",
"mol",
".",
"require",
"(",
"\"ScaleAndCenter\"",
")",
"mlb",
"=",
"mol",
".",
"size2d",
"[",
"2",
"]",
"if",
"not",
"mol",
".",
"atom_count",
"(",
")",
":",
"return",
"bond_type_fn",
"=",
"{",
"1",
":",
"{",
"0",
":",
"single_bond",
",",
"1",
":",
"wedged_single",
",",
"2",
":",
"dashed_wedged_single",
",",
"3",
":",
"wave_single",
",",
"}",
",",
"2",
":",
"{",
"0",
":",
"cw_double",
",",
"1",
":",
"counter_cw_double",
",",
"2",
":",
"double_bond",
",",
"3",
":",
"cross_double",
"}",
",",
"3",
":",
"{",
"0",
":",
"triple_bond",
"}",
"}",
"# Draw bonds",
"for",
"u",
",",
"v",
",",
"bond",
"in",
"mol",
".",
"bonds_iter",
"(",
")",
":",
"if",
"not",
"bond",
".",
"visible",
":",
"continue",
"if",
"(",
"u",
"<",
"v",
")",
"==",
"bond",
".",
"is_lower_first",
":",
"f",
",",
"s",
"=",
"(",
"u",
",",
"v",
")",
"else",
":",
"s",
",",
"f",
"=",
"(",
"u",
",",
"v",
")",
"p1",
"=",
"mol",
".",
"atom",
"(",
"f",
")",
".",
"coords",
"p2",
"=",
"mol",
".",
"atom",
"(",
"s",
")",
".",
"coords",
"if",
"p1",
"==",
"p2",
":",
"continue",
"# avoid zero division",
"if",
"mol",
".",
"atom",
"(",
"f",
")",
".",
"visible",
":",
"p1",
"=",
"gm",
".",
"t_seg",
"(",
"p1",
",",
"p2",
",",
"F_AOVL",
",",
"2",
")",
"[",
"0",
"]",
"if",
"mol",
".",
"atom",
"(",
"s",
")",
".",
"visible",
":",
"p2",
"=",
"gm",
".",
"t_seg",
"(",
"p1",
",",
"p2",
",",
"F_AOVL",
",",
"1",
")",
"[",
"1",
"]",
"color1",
"=",
"mol",
".",
"atom",
"(",
"f",
")",
".",
"color",
"color2",
"=",
"mol",
".",
"atom",
"(",
"s",
")",
".",
"color",
"bond_type_fn",
"[",
"bond",
".",
"order",
"]",
"[",
"bond",
".",
"type",
"]",
"(",
"canvas",
",",
"p1",
",",
"p2",
",",
"color1",
",",
"color2",
",",
"mlb",
")",
"# Draw atoms",
"for",
"n",
",",
"atom",
"in",
"mol",
".",
"atoms_iter",
"(",
")",
":",
"if",
"not",
"atom",
".",
"visible",
":",
"continue",
"p",
"=",
"atom",
".",
"coords",
"color",
"=",
"atom",
".",
"color",
"# Determine text direction",
"if",
"atom",
".",
"H_count",
":",
"cosnbrs",
"=",
"[",
"]",
"hrzn",
"=",
"(",
"p",
"[",
"0",
"]",
"+",
"1",
",",
"p",
"[",
"1",
"]",
")",
"for",
"nbr",
"in",
"mol",
".",
"graph",
".",
"neighbors",
"(",
"n",
")",
":",
"pnbr",
"=",
"mol",
".",
"atom",
"(",
"nbr",
")",
".",
"coords",
"try",
":",
"cosnbrs",
".",
"append",
"(",
"gm",
".",
"dot_product",
"(",
"hrzn",
",",
"pnbr",
",",
"p",
")",
"/",
"gm",
".",
"distance",
"(",
"p",
",",
"pnbr",
")",
")",
"except",
"ZeroDivisionError",
":",
"pass",
"if",
"not",
"cosnbrs",
"or",
"min",
"(",
"cosnbrs",
")",
">",
"0",
":",
"# [atom]< or isolated node(ex. H2O, HCl)",
"text",
"=",
"atom",
".",
"formula_html",
"(",
"True",
")",
"canvas",
".",
"draw_text",
"(",
"p",
",",
"text",
",",
"color",
",",
"\"right\"",
")",
"continue",
"elif",
"max",
"(",
"cosnbrs",
")",
"<",
"0",
":",
"# >[atom]",
"text",
"=",
"atom",
".",
"formula_html",
"(",
")",
"canvas",
".",
"draw_text",
"(",
"p",
",",
"text",
",",
"color",
",",
"\"left\"",
")",
"continue",
"# -[atom]- or no hydrogens",
"text",
"=",
"atom",
".",
"formula_html",
"(",
")",
"canvas",
".",
"draw_text",
"(",
"p",
",",
"text",
",",
"color",
",",
"\"center\"",
")"
] | Draw molecule structure image.
Args:
canvas: draw.drawable.Drawable
mol: model.graphmol.Compound | [
"Draw",
"molecule",
"structure",
"image",
"."
] | fc7fe23a0272554c67671645ab07830b315eeb1b | https://github.com/mojaie/chorus/blob/fc7fe23a0272554c67671645ab07830b315eeb1b/chorus/draw/drawer2d.py#L17-L93 | train |
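A hedged sketch of calling the drawer above. Only the call signature (a Drawable canvas plus a scaled-and-centered Compound) comes from this record; the SVG canvas import path and the use of smiles_to_compound (the following record) to obtain the molecule are assumptions about the chorus package layout.

```python
# Sketch under assumptions: chorus.draw.svg.SVG is taken to be a Drawable
# implementation, and smiles_to_compound (next record) is assumed to satisfy the
# ScaleAndCenter requirement via its assign_descriptors step.
from chorus.smilessupplier import smiles_to_compound
from chorus.draw.svg import SVG        # assumed import path
from chorus.draw import drawer2d

mol = smiles_to_compound("c1ccccc1O")  # phenol
canvas = SVG()
drawer2d.draw(canvas, mol)             # bonds and atom labels are written onto the canvas
```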
mojaie/chorus | chorus/smilessupplier.py | smiles_to_compound | def smiles_to_compound(smiles, assign_descriptors=True):
"""Convert SMILES text to compound object
Raises:
ValueError: SMILES with unsupported format
"""
it = iter(smiles)
mol = molecule()
try:
for token in it:
mol(token)
result, _ = mol(None)
except KeyError as err:
raise ValueError("Unsupported Symbol: {}".format(err))
result.graph.remove_node(0)
logger.debug(result)
if assign_descriptors:
molutil.assign_descriptors(result)
return result | python | def smiles_to_compound(smiles, assign_descriptors=True):
"""Convert SMILES text to compound object
Raises:
ValueError: SMILES with unsupported format
"""
it = iter(smiles)
mol = molecule()
try:
for token in it:
mol(token)
result, _ = mol(None)
except KeyError as err:
raise ValueError("Unsupported Symbol: {}".format(err))
result.graph.remove_node(0)
logger.debug(result)
if assign_descriptors:
molutil.assign_descriptors(result)
return result | [
"def",
"smiles_to_compound",
"(",
"smiles",
",",
"assign_descriptors",
"=",
"True",
")",
":",
"it",
"=",
"iter",
"(",
"smiles",
")",
"mol",
"=",
"molecule",
"(",
")",
"try",
":",
"for",
"token",
"in",
"it",
":",
"mol",
"(",
"token",
")",
"result",
",",
"_",
"=",
"mol",
"(",
"None",
")",
"except",
"KeyError",
"as",
"err",
":",
"raise",
"ValueError",
"(",
"\"Unsupported Symbol: {}\"",
".",
"format",
"(",
"err",
")",
")",
"result",
".",
"graph",
".",
"remove_node",
"(",
"0",
")",
"logger",
".",
"debug",
"(",
"result",
")",
"if",
"assign_descriptors",
":",
"molutil",
".",
"assign_descriptors",
"(",
"result",
")",
"return",
"result"
] | Convert SMILES text to compound object
Raises:
ValueError: SMILES with unsupported format | [
"Convert",
"SMILES",
"text",
"to",
"compound",
"object"
] | fc7fe23a0272554c67671645ab07830b315eeb1b | https://github.com/mojaie/chorus/blob/fc7fe23a0272554c67671645ab07830b315eeb1b/chorus/smilessupplier.py#L316-L334 | train |
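Usage of the parser above is straightforward; the only documented failure mode is a ValueError for symbols the tokenizer does not know. A minimal sketch:

```python
# Sketch: parse a SMILES string and handle the documented error case.
from chorus.smilessupplier import smiles_to_compound

try:
    mol = smiles_to_compound("CCO")    # ethanol
    print(mol.atom_count())            # Compound exposes atom_count(), as the drawer record uses
except ValueError as err:
    print("could not parse SMILES:", err)
```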
moble/spinsfast | python/__init__.py | salm2map | def salm2map(salm, s, lmax, Ntheta, Nphi):
"""Convert mode weights of spin-weighted function to values on a grid
Parameters
----------
salm : array_like, complex, shape (..., (lmax+1)**2)
Input array representing mode weights of the spin-weighted function. This array may be
multi-dimensional, where initial dimensions may represent different times, for example, or
separate functions on the sphere. The final dimension should give the values of the mode
weights, in the order described below in the 'Notes' section.
s : int or array, int, shape (...)
Spin weight of the function. If `salm` is multidimensional and this is an array, its
dimensions must match the first dimensions of `salm`, and the different values are the spin
weights of the different functions represented by those dimensions. Otherwise, if `salm` is
multidimensional and `s` is a single integer, all functions are assumed to have the same
spin weight.
lmax : int
The largest `ell` value present in the input array.
Ntheta : int
Number of points in the output grid along the polar angle.
Nphi : int
Number of points in the output grid along the azimuthal angle.
Returns
-------
map : ndarray, complex, shape (..., Ntheta, Nphi)
Values of the spin-weighted function on grid points of the sphere. This array is shaped
like the input `salm` array, but has one extra dimension. The final two dimensions describe
the values of the function on the sphere.
See also
--------
spinsfast.map2salm : Roughly the inverse of this function.
Notes
-----
The input `salm` data should be given in increasing order of `ell` value, always starting with
(ell, m) = (0, 0) even if `s` is nonzero, proceeding to (1, -1), (1, 0), (1, 1), etc.
Explicitly, the ordering should match this:
[f_lm(ell, m) for ell in range(lmax+1) for m in range(-ell, ell+1)]
The input is converted to a contiguous complex numpy array if necessary.
The output data are presented on this grid of spherical coordinates:
np.array([[f(theta, phi)
for phi in np.linspace(0.0, 2*np.pi, num=2*lmax+1, endpoint=False)]
for theta in np.linspace(0.0, np.pi, num=2*lmax+1, endpoint=True)])
Note that `map2salm` and `salm2map` are not true inverses of each other for several reasons.
First, modes with `ell < |s|` should always be zero; they are simply assumed to be zero on input
to `salm2map`. It is also possible to define a `map` function that violates this assumption --
for example, having a nonzero average value over the sphere, if the function has nonzero spin
`s`, this is impossible. Also, it is possible to define a map of a function with so much
angular dependence that it cannot be captured with the given `lmax` value. For example, a
discontinuous function will never be perfectly resolved.
Example
-------
>>> s = -2
>>> lmax = 8
>>> Ntheta = Nphi = 2*lmax + 1
>>> modes = np.zeros(spinsfast.N_lm(lmax), dtype=np.complex128)
>>> modes[spinsfast.lm_ind(2, 2, 8)] = 1.0
>>> values = spinsfast.salm2map(modes, s, lmax, Ntheta, Nphi)
"""
if Ntheta < 2 or Nphi < 1:
raise ValueError("Input values of Ntheta={0} and Nphi={1} ".format(Ntheta, Nphi)
+ "are not allowed; they must be greater than 1 and 0, respectively.")
if lmax < 1:
raise ValueError("Input value of lmax={0} ".format(lmax)
+ "is not allowed; it must be greater than 0 and should be greater "
+ "than |s|={0}.".format(abs(s)))
import numpy as np
salm = np.ascontiguousarray(salm, dtype=np.complex128)
if salm.shape[-1] < N_lm(lmax):
raise ValueError("The input `salm` array of shape {0} is too small for the stated `lmax` of {1}. ".format(salm.shape, lmax)
+ "Perhaps you forgot to include the (zero) modes with ell<|s|.")
map = np.empty(salm.shape[:-1]+(Ntheta, Nphi), dtype=np.complex128)
if salm.ndim>1:
s = np.ascontiguousarray(s, dtype=np.intc)
if s.ndim != salm.ndim-1 or np.product(s.shape) != np.product(salm.shape[:-1]):
s = s*np.ones(salm.shape[:-1], dtype=np.intc)
_multi_salm2map(salm, map, s, lmax, Ntheta, Nphi)
else:
_salm2map(salm, map, s, lmax, Ntheta, Nphi)
return map | python | def salm2map(salm, s, lmax, Ntheta, Nphi):
"""Convert mode weights of spin-weighted function to values on a grid
Parameters
----------
salm : array_like, complex, shape (..., (lmax+1)**2)
Input array representing mode weights of the spin-weighted function. This array may be
multi-dimensional, where initial dimensions may represent different times, for example, or
separate functions on the sphere. The final dimension should give the values of the mode
weights, in the order described below in the 'Notes' section.
s : int or array, int, shape (...)
Spin weight of the function. If `salm` is multidimensional and this is an array, its
dimensions must match the first dimensions of `salm`, and the different values are the spin
weights of the different functions represented by those dimensions. Otherwise, if `salm` is
multidimensional and `s` is a single integer, all functions are assumed to have the same
spin weight.
lmax : int
The largest `ell` value present in the input array.
Ntheta : int
Number of points in the output grid along the polar angle.
Nphi : int
Number of points in the output grid along the azimuthal angle.
Returns
-------
map : ndarray, complex, shape (..., Ntheta, Nphi)
Values of the spin-weighted function on grid points of the sphere. This array is shaped
like the input `salm` array, but has one extra dimension. The final two dimensions describe
the values of the function on the sphere.
See also
--------
spinsfast.map2salm : Roughly the inverse of this function.
Notes
-----
The input `salm` data should be given in increasing order of `ell` value, always starting with
(ell, m) = (0, 0) even if `s` is nonzero, proceeding to (1, -1), (1, 0), (1, 1), etc.
Explicitly, the ordering should match this:
[f_lm(ell, m) for ell in range(lmax+1) for m in range(-ell, ell+1)]
The input is converted to a contiguous complex numpy array if necessary.
The output data are presented on this grid of spherical coordinates:
np.array([[f(theta, phi)
for phi in np.linspace(0.0, 2*np.pi, num=2*lmax+1, endpoint=False)]
for theta in np.linspace(0.0, np.pi, num=2*lmax+1, endpoint=True)])
Note that `map2salm` and `salm2map` are not true inverses of each other for several reasons.
First, modes with `ell < |s|` should always be zero; they are simply assumed to be zero on input
to `salm2map`. It is also possible to define a `map` function that violates this assumption --
for example, having a nonzero average value over the sphere, if the function has nonzero spin
`s`, this is impossible. Also, it is possible to define a map of a function with so much
angular dependence that it cannot be captured with the given `lmax` value. For example, a
discontinuous function will never be perfectly resolved.
Example
-------
>>> s = -2
>>> lmax = 8
>>> Ntheta = Nphi = 2*lmax + 1
>>> modes = np.zeros(spinsfast.N_lm(lmax), dtype=np.complex128)
>>> modes[spinsfast.lm_ind(2, 2, 8)] = 1.0
>>> values = spinsfast.salm2map(modes, s, lmax, Ntheta, Nphi)
"""
if Ntheta < 2 or Nphi < 1:
raise ValueError("Input values of Ntheta={0} and Nphi={1} ".format(Ntheta, Nphi)
+ "are not allowed; they must be greater than 1 and 0, respectively.")
if lmax < 1:
raise ValueError("Input value of lmax={0} ".format(lmax)
+ "is not allowed; it must be greater than 0 and should be greater "
+ "than |s|={0}.".format(abs(s)))
import numpy as np
salm = np.ascontiguousarray(salm, dtype=np.complex128)
if salm.shape[-1] < N_lm(lmax):
raise ValueError("The input `salm` array of shape {0} is too small for the stated `lmax` of {1}. ".format(salm.shape, lmax)
+ "Perhaps you forgot to include the (zero) modes with ell<|s|.")
map = np.empty(salm.shape[:-1]+(Ntheta, Nphi), dtype=np.complex128)
if salm.ndim>1:
s = np.ascontiguousarray(s, dtype=np.intc)
if s.ndim != salm.ndim-1 or np.product(s.shape) != np.product(salm.shape[:-1]):
s = s*np.ones(salm.shape[:-1], dtype=np.intc)
_multi_salm2map(salm, map, s, lmax, Ntheta, Nphi)
else:
_salm2map(salm, map, s, lmax, Ntheta, Nphi)
return map | [
"def",
"salm2map",
"(",
"salm",
",",
"s",
",",
"lmax",
",",
"Ntheta",
",",
"Nphi",
")",
":",
"if",
"Ntheta",
"<",
"2",
"or",
"Nphi",
"<",
"1",
":",
"raise",
"ValueError",
"(",
"\"Input values of Ntheta={0} and Nphi={1} \"",
".",
"format",
"(",
"Ntheta",
",",
"Nphi",
")",
"+",
"\"are not allowed; they must be greater than 1 and 0, respectively.\"",
")",
"if",
"lmax",
"<",
"1",
":",
"raise",
"ValueError",
"(",
"\"Input value of lmax={0} \"",
".",
"format",
"(",
"lmax",
")",
"+",
"\"is not allowed; it must be greater than 0 and should be greater \"",
"+",
"\"than |s|={0}.\"",
".",
"format",
"(",
"abs",
"(",
"s",
")",
")",
")",
"import",
"numpy",
"as",
"np",
"salm",
"=",
"np",
".",
"ascontiguousarray",
"(",
"salm",
",",
"dtype",
"=",
"np",
".",
"complex128",
")",
"if",
"salm",
".",
"shape",
"[",
"-",
"1",
"]",
"<",
"N_lm",
"(",
"lmax",
")",
":",
"raise",
"ValueError",
"(",
"\"The input `salm` array of shape {0} is too small for the stated `lmax` of {1}. \"",
".",
"format",
"(",
"salm",
".",
"shape",
",",
"lmax",
")",
"+",
"\"Perhaps you forgot to include the (zero) modes with ell<|s|.\"",
")",
"map",
"=",
"np",
".",
"empty",
"(",
"salm",
".",
"shape",
"[",
":",
"-",
"1",
"]",
"+",
"(",
"Ntheta",
",",
"Nphi",
")",
",",
"dtype",
"=",
"np",
".",
"complex128",
")",
"if",
"salm",
".",
"ndim",
">",
"1",
":",
"s",
"=",
"np",
".",
"ascontiguousarray",
"(",
"s",
",",
"dtype",
"=",
"np",
".",
"intc",
")",
"if",
"s",
".",
"ndim",
"!=",
"salm",
".",
"ndim",
"-",
"1",
"or",
"np",
".",
"product",
"(",
"s",
".",
"shape",
")",
"!=",
"np",
".",
"product",
"(",
"salm",
".",
"shape",
"[",
":",
"-",
"1",
"]",
")",
":",
"s",
"=",
"s",
"*",
"np",
".",
"ones",
"(",
"salm",
".",
"shape",
"[",
":",
"-",
"1",
"]",
",",
"dtype",
"=",
"np",
".",
"intc",
")",
"_multi_salm2map",
"(",
"salm",
",",
"map",
",",
"s",
",",
"lmax",
",",
"Ntheta",
",",
"Nphi",
")",
"else",
":",
"_salm2map",
"(",
"salm",
",",
"map",
",",
"s",
",",
"lmax",
",",
"Ntheta",
",",
"Nphi",
")",
"return",
"map"
] | Convert mode weights of spin-weighted function to values on a grid
Parameters
----------
salm : array_like, complex, shape (..., (lmax+1)**2)
Input array representing mode weights of the spin-weighted function. This array may be
multi-dimensional, where initial dimensions may represent different times, for example, or
separate functions on the sphere. The final dimension should give the values of the mode
weights, in the order described below in the 'Notes' section.
s : int or array, int, shape (...)
Spin weight of the function. If `salm` is multidimensional and this is an array, its
dimensions must match the first dimensions of `salm`, and the different values are the spin
weights of the different functions represented by those dimensions. Otherwise, if `salm` is
multidimensional and `s` is a single integer, all functions are assumed to have the same
spin weight.
lmax : int
The largest `ell` value present in the input array.
Ntheta : int
Number of points in the output grid along the polar angle.
Nphi : int
Number of points in the output grid along the azimuthal angle.
Returns
-------
map : ndarray, complex, shape (..., Ntheta, Nphi)
Values of the spin-weighted function on grid points of the sphere. This array is shaped
like the input `salm` array, but has one extra dimension. The final two dimensions describe
the values of the function on the sphere.
See also
--------
spinsfast.map2salm : Roughly the inverse of this function.
Notes
-----
The input `salm` data should be given in increasing order of `ell` value, always starting with
(ell, m) = (0, 0) even if `s` is nonzero, proceeding to (1, -1), (1, 0), (1, 1), etc.
Explicitly, the ordering should match this:
[f_lm(ell, m) for ell in range(lmax+1) for m in range(-ell, ell+1)]
The input is converted to a contiguous complex numpy array if necessary.
The output data are presented on this grid of spherical coordinates:
np.array([[f(theta, phi)
for phi in np.linspace(0.0, 2*np.pi, num=2*lmax+1, endpoint=False)]
for theta in np.linspace(0.0, np.pi, num=2*lmax+1, endpoint=True)])
Note that `map2salm` and `salm2map` are not true inverses of each other for several reasons.
First, modes with `ell < |s|` should always be zero; they are simply assumed to be zero on input
to `salm2map`. It is also possible to define a `map` function that violates this assumption --
for example, having a nonzero average value over the sphere, if the function has nonzero spin
`s`, this is impossible. Also, it is possible to define a map of a function with so much
angular dependence that it cannot be captured with the given `lmax` value. For example, a
discontinuous function will never be perfectly resolved.
Example
-------
>>> s = -2
>>> lmax = 8
>>> Ntheta = Nphi = 2*lmax + 1
>>> modes = np.zeros(spinsfast.N_lm(lmax), dtype=np.complex128)
>>> modes[spinsfast.lm_ind(2, 2, 8)] = 1.0
>>> values = spinsfast.salm2map(modes, s, lmax, Ntheta, Nphi) | [
"Convert",
"mode",
"weights",
"of",
"spin",
"-",
"weighted",
"function",
"to",
"values",
"on",
"a",
"grid"
] | 02480a3f712eb88eff5faa1d4afcbdfb0c25b865 | https://github.com/moble/spinsfast/blob/02480a3f712eb88eff5faa1d4afcbdfb0c25b865/python/__init__.py#L40-L133 | train |
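The docstring above already shows the single-function case; the sketch below exercises the multi-dimensional form it describes, where leading axes of `salm` index separate functions and `s` supplies one spin weight per function.

```python
# Sketch: transform two spin-weighted functions in one call.
import numpy as np
import spinsfast

lmax = 8
Ntheta = Nphi = 2 * lmax + 1
s = np.array([-2, 0], dtype=np.intc)                    # one spin per function

salm = np.zeros((2, spinsfast.N_lm(lmax)), dtype=np.complex128)
salm[0, spinsfast.lm_ind(2, 2, lmax)] = 1.0             # spin -2 function
salm[1, spinsfast.lm_ind(1, 0, lmax)] = 1.0             # spin 0 function

maps = spinsfast.salm2map(salm, s, lmax, Ntheta, Nphi)
print(maps.shape)                                       # (2, Ntheta, Nphi)
```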
moble/spinsfast | python/__init__.py | map2salm | def map2salm(map, s, lmax):
"""Convert values of spin-weighted function on a grid to mode weights
Parameters
----------
map : array_like, complex, shape (..., Ntheta, Nphi)
Values of the spin-weighted function on grid points of the sphere. This array may have more
than two dimensions, where initial dimensions may represent different times, for example, or
separate functions on the sphere. The final two dimensions should give the values of the
function, in the order described below in the 'Notes' section.
s : int or array, int, shape (...)
Spin weight of the function. If `amp` is multidimensional and this is an array, its
dimensions must match the first dimensions of `map`, and the different values are the spin
weights of the different functions represented by those dimensions. Otherwise, if `map` is
multidimensional and `s` is a single integer, all functions are assumed to have the same
spin weight.
lmax : int
The largest `ell` value present in the input array.
Returns
-------
salm : ndarray, complex, shape (..., (lmax+1)**2)
Mode weights of the spin-weighted function. This array is shaped like the input `map` array,
but has one less dimension. The final dimension describes the values of the mode weights on
the corresponding sphere, as described below in the 'Notes' section.
See also
--------
spinsfast.map2salm : Roughly the inverse of this function.
Notes
-----
The input data represent the values on this grid of spherical coordinates:
np.array([[map(theta, phi)
for phi in np.linspace(0.0, 2*np.pi, num=2*lmax+1, endpoint=False)]
for theta in np.linspace(0.0, np.pi, num=2*lmax+1, endpoint=True)])
The input is converted to a contiguous complex numpy array if necessary.
The output `salm` data are given in increasing order of `ell` value, always starting with
(ell, m) = (0, 0) even if `s` is nonzero, proceeding to (1, -1), (1, 0), (1, 1), etc.
Explicitly, the ordering matches this:
[map_lm(ell, m) for ell in range(lmax+1) for m in range(-ell, ell+1)]
Note that `map2salm` and `salm2map` are not true inverses of each other for several reasons.
First, modes with `ell < |s|` should always be zero; they are simply assumed to be zero on input
to `salm2map`. It is possible to define a `map` function that violates this assumption -- for
example, having a nonzero average value over the sphere, if the function has nonzero spin `s`,
this is impossible. Also, it is possible to define a map of a function with so much angular
dependence that it cannot be captured with the given `lmax` value. For example, a discontinuous
function will never be perfectly resolved.
Example
-------
>>> s = -2
>>> lmax = 8
>>> theta_phi = np.array([[[theta, phi]
for phi in np.linspace(0.0, 2*np.pi, num=2*lmax+1, endpoint=False)]
for theta in np.linspace(0.0, np.pi, num=2*lmax+1, endpoint=True)])
>>> map = np.array([[np.sqrt(3/(8*np.pi)) * np.sin(tp[0]) for tp in _] for _ in theta_phi])
>>> salm = spinsfast.map2salm(map, s, lmax)
"""
import numpy as np
map = np.ascontiguousarray(map, dtype=np.complex128)
salm = np.empty(map.shape[:-2]+(N_lm(lmax),), dtype=np.complex128)
if map.ndim>2:
s = np.ascontiguousarray(s, dtype=np.intc)
if s.ndim != map.ndim-2 or np.product(s.shape) != np.product(map.shape[:-2]):
s = s*np.ones(map.shape[:-2], dtype=np.intc)
_multi_map2salm(map, salm, s, lmax)
else:
_map2salm(map, salm, s, lmax)
return salm | python | def map2salm(map, s, lmax):
"""Convert values of spin-weighted function on a grid to mode weights
Parameters
----------
map : array_like, complex, shape (..., Ntheta, Nphi)
Values of the spin-weighted function on grid points of the sphere. This array may have more
than two dimensions, where initial dimensions may represent different times, for example, or
separate functions on the sphere. The final two dimensions should give the values of the
function, in the order described below in the 'Notes' section.
s : int or array, int, shape (...)
Spin weight of the function. If `amp` is multidimensional and this is an array, its
dimensions must match the first dimensions of `map`, and the different values are the spin
weights of the different functions represented by those dimensions. Otherwise, if `map` is
multidimensional and `s` is a single integer, all functions are assumed to have the same
spin weight.
lmax : int
The largest `ell` value present in the input array.
Returns
-------
salm : ndarray, complex, shape (..., (lmax+1)**2)
Mode weights of the spin-weighted function. This array is shaped like the input `map` array,
but has one less dimension. The final dimension describes the values of the mode weights on
the corresponding sphere, as described below in the 'Notes' section.
See also
--------
spinsfast.map2salm : Roughly the inverse of this function.
Notes
-----
The input data represent the values on this grid of spherical coordinates:
np.array([[map(theta, phi)
for phi in np.linspace(0.0, 2*np.pi, num=2*lmax+1, endpoint=False)]
for theta in np.linspace(0.0, np.pi, num=2*lmax+1, endpoint=True)])
The input is converted to a contiguous complex numpy array if necessary.
The output `salm` data are given in increasing order of `ell` value, always starting with
(ell, m) = (0, 0) even if `s` is nonzero, proceeding to (1, -1), (1, 0), (1, 1), etc.
Explicitly, the ordering matches this:
[map_lm(ell, m) for ell in range(lmax+1) for m in range(-ell, ell+1)]
Note that `map2salm` and `salm2map` are not true inverses of each other for several reasons.
First, modes with `ell < |s|` should always be zero; they are simply assumed to be zero on input
to `salm2map`. It is possible to define a `map` function that violates this assumption -- for
example, having a nonzero average value over the sphere, if the function has nonzero spin `s`,
this is impossible. Also, it is possible to define a map of a function with so much angular
dependence that it cannot be captured with the given `lmax` value. For example, a discontinuous
function will never be perfectly resolved.
Example
-------
>>> s = -2
>>> lmax = 8
>>> theta_phi = np.array([[[theta, phi]
for phi in np.linspace(0.0, 2*np.pi, num=2*lmax+1, endpoint=False)]
for theta in np.linspace(0.0, np.pi, num=2*lmax+1, endpoint=True)])
>>> map = np.array([[np.sqrt(3/(8*np.pi)) * np.sin(tp[0]) for tp in _] for _ in theta_phi])
>>> salm = spinsfast.map2salm(map, s, lmax)
"""
import numpy as np
map = np.ascontiguousarray(map, dtype=np.complex128)
salm = np.empty(map.shape[:-2]+(N_lm(lmax),), dtype=np.complex128)
if map.ndim>2:
s = np.ascontiguousarray(s, dtype=np.intc)
if s.ndim != map.ndim-2 or np.product(s.shape) != np.product(map.shape[:-2]):
s = s*np.ones(map.shape[:-2], dtype=np.intc)
_multi_map2salm(map, salm, s, lmax)
else:
_map2salm(map, salm, s, lmax)
return salm | [
"def",
"map2salm",
"(",
"map",
",",
"s",
",",
"lmax",
")",
":",
"import",
"numpy",
"as",
"np",
"map",
"=",
"np",
".",
"ascontiguousarray",
"(",
"map",
",",
"dtype",
"=",
"np",
".",
"complex128",
")",
"salm",
"=",
"np",
".",
"empty",
"(",
"map",
".",
"shape",
"[",
":",
"-",
"2",
"]",
"+",
"(",
"N_lm",
"(",
"lmax",
")",
",",
")",
",",
"dtype",
"=",
"np",
".",
"complex128",
")",
"if",
"map",
".",
"ndim",
">",
"2",
":",
"s",
"=",
"np",
".",
"ascontiguousarray",
"(",
"s",
",",
"dtype",
"=",
"np",
".",
"intc",
")",
"if",
"s",
".",
"ndim",
"!=",
"map",
".",
"ndim",
"-",
"2",
"or",
"np",
".",
"product",
"(",
"s",
".",
"shape",
")",
"!=",
"np",
".",
"product",
"(",
"map",
".",
"shape",
"[",
":",
"-",
"2",
"]",
")",
":",
"s",
"=",
"s",
"*",
"np",
".",
"ones",
"(",
"map",
".",
"shape",
"[",
":",
"-",
"2",
"]",
",",
"dtype",
"=",
"np",
".",
"intc",
")",
"_multi_map2salm",
"(",
"map",
",",
"salm",
",",
"s",
",",
"lmax",
")",
"else",
":",
"_map2salm",
"(",
"map",
",",
"salm",
",",
"s",
",",
"lmax",
")",
"return",
"salm"
] | Convert values of spin-weighted function on a grid to mode weights
Parameters
----------
map : array_like, complex, shape (..., Ntheta, Nphi)
Values of the spin-weighted function on grid points of the sphere. This array may have more
than two dimensions, where initial dimensions may represent different times, for example, or
separate functions on the sphere. The final two dimensions should give the values of the
function, in the order described below in the 'Notes' section.
s : int or array, int, shape (...)
Spin weight of the function. If `amp` is multidimensional and this is an array, its
dimensions must match the first dimensions of `map`, and the different values are the spin
weights of the different functions represented by those dimensions. Otherwise, if `map` is
multidimensional and `s` is a single integer, all functions are assumed to have the same
spin weight.
lmax : int
The largest `ell` value present in the input array.
Returns
-------
salm : ndarray, complex, shape (..., (lmax+1)**2)
Mode weights of the spin-weighted function. This array is shaped like the input `map` array,
but has one less dimension. The final dimension describes the values of the mode weights on
the corresponding sphere, as described below in the 'Notes' section.
See also
--------
spinsfast.map2salm : Roughly the inverse of this function.
Notes
-----
The input data represent the values on this grid of spherical coordinates:
np.array([[map(theta, phi)
for phi in np.linspace(0.0, 2*np.pi, num=2*lmax+1, endpoint=False)]
for theta in np.linspace(0.0, np.pi, num=2*lmax+1, endpoint=True)])
The input is converted to a contiguous complex numpy array if necessary.
The output `salm` data are given in increasing order of `ell` value, always starting with
(ell, m) = (0, 0) even if `s` is nonzero, proceeding to (1, -1), (1, 0), (1, 1), etc.
Explicitly, the ordering matches this:
[map_lm(ell, m) for ell in range(lmax+1) for m in range(-ell, ell+1)]
Note that `map2salm` and `salm2map` are not true inverses of each other for several reasons.
First, modes with `ell < |s|` should always be zero; they are simply assumed to be zero on input
to `salm2map`. It is possible to define a `map` function that violates this assumption -- for
example, having a nonzero average value over the sphere, if the function has nonzero spin `s`,
this is impossible. Also, it is possible to define a map of a function with so much angular
dependence that it cannot be captured with the given `lmax` value. For example, a discontinuous
function will never be perfectly resolved.
Example
-------
>>> s = -2
>>> lmax = 8
>>> theta_phi = np.array([[[theta, phi]
for phi in np.linspace(0.0, 2*np.pi, num=2*lmax+1, endpoint=False)]
for theta in np.linspace(0.0, np.pi, num=2*lmax+1, endpoint=True)])
>>> map = np.array([[np.sqrt(3/(8*np.pi)) * np.sin(tp[0]) for tp in _] for _ in theta_phi])
>>> salm = spinsfast.map2salm(map, s, lmax) | [
"Convert",
"values",
"of",
"spin",
"-",
"weighted",
"function",
"on",
"a",
"grid",
"to",
"mode",
"weights"
] | 02480a3f712eb88eff5faa1d4afcbdfb0c25b865 | https://github.com/moble/spinsfast/blob/02480a3f712eb88eff5faa1d4afcbdfb0c25b865/python/__init__.py#L136-L216 | train |
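Combining the two transforms above gives a quick consistency check. As both docstrings note, they are only approximately inverse, but for band-limited input whose ell < |s| modes are zero the round trip should reproduce the mode weights to numerical precision.

```python
# Sketch: salm2map followed by map2salm recovers the input modes.
import numpy as np
import spinsfast

s, lmax = -2, 8
Ntheta = Nphi = 2 * lmax + 1

salm = np.zeros(spinsfast.N_lm(lmax), dtype=np.complex128)
salm[spinsfast.lm_ind(3, -1, lmax)] = 0.5 + 0.25j

values = spinsfast.salm2map(salm, s, lmax, Ntheta, Nphi)
recovered = spinsfast.map2salm(values, s, lmax)
print(np.max(np.abs(recovered - salm)))                 # expected to be at round-off level
```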
moble/spinsfast | python/__init__.py | Imm | def Imm(extended_map, s, lmax):
"""Take the fft of the theta extended map, then zero pad and reorganize it
This is mostly an internal function, included here for backwards compatibility. See map2salm
and salm2map for more useful functions.
"""
import numpy as np
extended_map = np.ascontiguousarray(extended_map, dtype=np.complex128)
NImm = (2*lmax + 1)**2
imm = np.empty(NImm, dtype=np.complex128)
_Imm(extended_map, imm, s, lmax)
return imm | python | def Imm(extended_map, s, lmax):
"""Take the fft of the theta extended map, then zero pad and reorganize it
This is mostly an internal function, included here for backwards compatibility. See map2salm
and salm2map for more useful functions.
"""
import numpy as np
extended_map = np.ascontiguousarray(extended_map, dtype=np.complex128)
NImm = (2*lmax + 1)**2
imm = np.empty(NImm, dtype=np.complex128)
_Imm(extended_map, imm, s, lmax)
return imm | [
"def",
"Imm",
"(",
"extended_map",
",",
"s",
",",
"lmax",
")",
":",
"import",
"numpy",
"as",
"np",
"extended_map",
"=",
"np",
".",
"ascontiguousarray",
"(",
"extended_map",
",",
"dtype",
"=",
"np",
".",
"complex128",
")",
"NImm",
"=",
"(",
"2",
"*",
"lmax",
"+",
"1",
")",
"**",
"2",
"imm",
"=",
"np",
".",
"empty",
"(",
"NImm",
",",
"dtype",
"=",
"np",
".",
"complex128",
")",
"_Imm",
"(",
"extended_map",
",",
"imm",
",",
"s",
",",
"lmax",
")",
"return",
"imm"
] | Take the fft of the theta extended map, then zero pad and reorganize it
This is mostly an internal function, included here for backwards compatibility. See map2salm
and salm2map for more useful functions. | [
"Take",
"the",
"fft",
"of",
"the",
"theta",
"extended",
"map",
"then",
"zero",
"pad",
"and",
"reorganize",
"it"
] | 02480a3f712eb88eff5faa1d4afcbdfb0c25b865 | https://github.com/moble/spinsfast/blob/02480a3f712eb88eff5faa1d4afcbdfb0c25b865/python/__init__.py#L252-L264 | train |
ehansis/ozelot | ozelot/cache.py | RequestCache._query | def _query(self, url, xpath):
"""Base query for an url and xpath
Args:
url (str): URL to search
xpath (str): xpath to search (may be ``None``)
"""
return self.session.query(CachedRequest).filter(CachedRequest.url == url).filter(CachedRequest.xpath == xpath) | python | def _query(self, url, xpath):
"""Base query for an url and xpath
Args:
url (str): URL to search
xpath (str): xpath to search (may be ``None``)
"""
return self.session.query(CachedRequest).filter(CachedRequest.url == url).filter(CachedRequest.xpath == xpath) | [
"def",
"_query",
"(",
"self",
",",
"url",
",",
"xpath",
")",
":",
"return",
"self",
".",
"session",
".",
"query",
"(",
"CachedRequest",
")",
".",
"filter",
"(",
"CachedRequest",
".",
"url",
"==",
"url",
")",
".",
"filter",
"(",
"CachedRequest",
".",
"xpath",
"==",
"xpath",
")"
] | Base query for an url and xpath
Args:
url (str): URL to search
xpath (str): xpath to search (may be ``None``) | [
"Base",
"query",
"for",
"an",
"url",
"and",
"xpath"
] | 948675e02eb6fca940450f5cb814f53e97159e5b | https://github.com/ehansis/ozelot/blob/948675e02eb6fca940450f5cb814f53e97159e5b/ozelot/cache.py#L106-L113 | train |
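This helper is private; the public get() and get_timestamp() records below build on it. For illustration, an existence check mirrors what get_timestamp() does. `cache` stands for a RequestCache instance whose construction is outside this excerpt.

```python
# Sketch (internal API): has this URL/xpath pair been cached already?
url = "https://example.com/page"
already_cached = cache._query(url, None).count() > 0
print(already_cached)
```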
ehansis/ozelot | ozelot/cache.py | RequestCache.get | def get(self, url, store_on_error=False, xpath=None, rate_limit=None, log_hits=True, log_misses=True):
"""Get a URL via the cache.
If the URL exists in the cache, return the cached value. Otherwise perform the request,
store the resulting content in the cache and return it.
Throws a :class:`RuntimeError` if the request results in an error.
Args:
url (str): URL to request
        store_on_error (bool): If True, store request results in cache even if request results in
        an error. Otherwise (default) do not store results when an error occurs. Cached content
equals exception message.
xpath (str): If given (default is None), parses the response content to html, searches the first
node matching the given xpath and returns only that node (as UTF8-encoded html). Also, only
stores this node's html in the cache. Raises a ``RuntimeError`` if the xpath cannot be found
in the response.
rate_limit (float): If not None (default), wait at least this many seconds between the previous
request and the current one (this does not apply to cache hits).
log_hits (bool): If True, log cache hits
log_misses (bool): If True, log cache misses
Returns:
str: request content
"""
try:
# get cached request - if none is found, this throws a NoResultFound exception
cached = self._query(url, xpath).one()
if log_hits:
config.logger.info("Request cache hit: " + url)
# if the cached value is from a request that resulted in an error, throw an exception
if cached.status_code != requests.codes.ok:
raise RuntimeError("Cached request returned an error, code " + str(cached.status_code))
except NoResultFound:
if log_misses:
config.logger.info("Request cache miss: " + url)
# perform the request
try:
# rate limit
if rate_limit is not None and self.last_query is not None:
to_sleep = rate_limit - (datetime.datetime.now() - self.last_query).total_seconds()
if to_sleep > 0:
time.sleep(to_sleep)
self.last_query = datetime.datetime.now()
response = requests.get(url)
status_code = response.status_code
# get 'text', not 'content', because then we are sure to get unicode
content = response.text
response.close()
if xpath is not None:
doc = html.fromstring(content)
nodes = doc.xpath(xpath)
if len(nodes) == 0:
# xpath not found; set content and status code, exception is raised below
content = "xpath not found: " + xpath
status_code = ERROR_XPATH_NOT_FOUND
else:
# extract desired node only
content = html.tostring(nodes[0], encoding='unicode')
except requests.ConnectionError as e:
# on a connection error, write exception information to a response object
status_code = ERROR_CONNECTION_ERROR
content = str(e)
# a new request cache object
cached = CachedRequest(url=str(url),
content=content,
status_code=status_code,
xpath=xpath,
queried_on=datetime.datetime.now())
# if desired, store the response even if an error occurred
if status_code == requests.codes.ok or store_on_error:
self.session.add(cached)
self.session.commit()
if status_code != requests.codes.ok:
raise RuntimeError("Error processing the request, " + str(status_code) + ": " + content)
return cached.content | python | def get(self, url, store_on_error=False, xpath=None, rate_limit=None, log_hits=True, log_misses=True):
"""Get a URL via the cache.
If the URL exists in the cache, return the cached value. Otherwise perform the request,
store the resulting content in the cache and return it.
Throws a :class:`RuntimeError` if the request results in an error.
Args:
url (str): URL to request
        store_on_error (bool): If True, store request results in cache even if request results in
        an error. Otherwise (default) do not store results when an error occurs. Cached content
equals exception message.
xpath (str): If given (default is None), parses the response content to html, searches the first
node matching the given xpath and returns only that node (as UTF8-encoded html). Also, only
stores this node's html in the cache. Raises a ``RuntimeError`` if the xpath cannot be found
in the response.
rate_limit (float): If not None (default), wait at least this many seconds between the previous
request and the current one (this does not apply to cache hits).
log_hits (bool): If True, log cache hits
log_misses (bool): If True, log cache misses
Returns:
str: request content
"""
try:
# get cached request - if none is found, this throws a NoResultFound exception
cached = self._query(url, xpath).one()
if log_hits:
config.logger.info("Request cache hit: " + url)
# if the cached value is from a request that resulted in an error, throw an exception
if cached.status_code != requests.codes.ok:
raise RuntimeError("Cached request returned an error, code " + str(cached.status_code))
except NoResultFound:
if log_misses:
config.logger.info("Request cache miss: " + url)
# perform the request
try:
# rate limit
if rate_limit is not None and self.last_query is not None:
to_sleep = rate_limit - (datetime.datetime.now() - self.last_query).total_seconds()
if to_sleep > 0:
time.sleep(to_sleep)
self.last_query = datetime.datetime.now()
response = requests.get(url)
status_code = response.status_code
# get 'text', not 'content', because then we are sure to get unicode
content = response.text
response.close()
if xpath is not None:
doc = html.fromstring(content)
nodes = doc.xpath(xpath)
if len(nodes) == 0:
# xpath not found; set content and status code, exception is raised below
content = "xpath not found: " + xpath
status_code = ERROR_XPATH_NOT_FOUND
else:
# extract desired node only
content = html.tostring(nodes[0], encoding='unicode')
except requests.ConnectionError as e:
# on a connection error, write exception information to a response object
status_code = ERROR_CONNECTION_ERROR
content = str(e)
# a new request cache object
cached = CachedRequest(url=str(url),
content=content,
status_code=status_code,
xpath=xpath,
queried_on=datetime.datetime.now())
# if desired, store the response even if an error occurred
if status_code == requests.codes.ok or store_on_error:
self.session.add(cached)
self.session.commit()
if status_code != requests.codes.ok:
raise RuntimeError("Error processing the request, " + str(status_code) + ": " + content)
return cached.content | [
"def",
"get",
"(",
"self",
",",
"url",
",",
"store_on_error",
"=",
"False",
",",
"xpath",
"=",
"None",
",",
"rate_limit",
"=",
"None",
",",
"log_hits",
"=",
"True",
",",
"log_misses",
"=",
"True",
")",
":",
"try",
":",
"# get cached request - if none is found, this throws a NoResultFound exception",
"cached",
"=",
"self",
".",
"_query",
"(",
"url",
",",
"xpath",
")",
".",
"one",
"(",
")",
"if",
"log_hits",
":",
"config",
".",
"logger",
".",
"info",
"(",
"\"Request cache hit: \"",
"+",
"url",
")",
"# if the cached value is from a request that resulted in an error, throw an exception",
"if",
"cached",
".",
"status_code",
"!=",
"requests",
".",
"codes",
".",
"ok",
":",
"raise",
"RuntimeError",
"(",
"\"Cached request returned an error, code \"",
"+",
"str",
"(",
"cached",
".",
"status_code",
")",
")",
"except",
"NoResultFound",
":",
"if",
"log_misses",
":",
"config",
".",
"logger",
".",
"info",
"(",
"\"Request cache miss: \"",
"+",
"url",
")",
"# perform the request",
"try",
":",
"# rate limit",
"if",
"rate_limit",
"is",
"not",
"None",
"and",
"self",
".",
"last_query",
"is",
"not",
"None",
":",
"to_sleep",
"=",
"rate_limit",
"-",
"(",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"-",
"self",
".",
"last_query",
")",
".",
"total_seconds",
"(",
")",
"if",
"to_sleep",
">",
"0",
":",
"time",
".",
"sleep",
"(",
"to_sleep",
")",
"self",
".",
"last_query",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"response",
"=",
"requests",
".",
"get",
"(",
"url",
")",
"status_code",
"=",
"response",
".",
"status_code",
"# get 'text', not 'content', because then we are sure to get unicode",
"content",
"=",
"response",
".",
"text",
"response",
".",
"close",
"(",
")",
"if",
"xpath",
"is",
"not",
"None",
":",
"doc",
"=",
"html",
".",
"fromstring",
"(",
"content",
")",
"nodes",
"=",
"doc",
".",
"xpath",
"(",
"xpath",
")",
"if",
"len",
"(",
"nodes",
")",
"==",
"0",
":",
"# xpath not found; set content and status code, exception is raised below",
"content",
"=",
"\"xpath not found: \"",
"+",
"xpath",
"status_code",
"=",
"ERROR_XPATH_NOT_FOUND",
"else",
":",
"# extract desired node only",
"content",
"=",
"html",
".",
"tostring",
"(",
"nodes",
"[",
"0",
"]",
",",
"encoding",
"=",
"'unicode'",
")",
"except",
"requests",
".",
"ConnectionError",
"as",
"e",
":",
"# on a connection error, write exception information to a response object",
"status_code",
"=",
"ERROR_CONNECTION_ERROR",
"content",
"=",
"str",
"(",
"e",
")",
"# a new request cache object",
"cached",
"=",
"CachedRequest",
"(",
"url",
"=",
"str",
"(",
"url",
")",
",",
"content",
"=",
"content",
",",
"status_code",
"=",
"status_code",
",",
"xpath",
"=",
"xpath",
",",
"queried_on",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
")",
"# if desired, store the response even if an error occurred",
"if",
"status_code",
"==",
"requests",
".",
"codes",
".",
"ok",
"or",
"store_on_error",
":",
"self",
".",
"session",
".",
"add",
"(",
"cached",
")",
"self",
".",
"session",
".",
"commit",
"(",
")",
"if",
"status_code",
"!=",
"requests",
".",
"codes",
".",
"ok",
":",
"raise",
"RuntimeError",
"(",
"\"Error processing the request, \"",
"+",
"str",
"(",
"status_code",
")",
"+",
"\": \"",
"+",
"content",
")",
"return",
"cached",
".",
"content"
] | Get a URL via the cache.
If the URL exists in the cache, return the cached value. Otherwise perform the request,
store the resulting content in the cache and return it.
Throws a :class:`RuntimeError` if the request results in an error.
Args:
url (str): URL to request
        store_on_error (bool): If True, store request results in cache even if request results in
        an error. Otherwise (default) do not store results when an error occurs. Cached content
equals exception message.
xpath (str): If given (default is None), parses the response content to html, searches the first
node matching the given xpath and returns only that node (as UTF8-encoded html). Also, only
stores this node's html in the cache. Raises a ``RuntimeError`` if the xpath cannot be found
in the response.
rate_limit (float): If not None (default), wait at least this many seconds between the previous
request and the current one (this does not apply to cache hits).
log_hits (bool): If True, log cache hits
log_misses (bool): If True, log cache misses
Returns:
str: request content | [
"Get",
"a",
"URL",
"via",
"the",
"cache",
"."
] | 948675e02eb6fca940450f5cb814f53e97159e5b | https://github.com/ehansis/ozelot/blob/948675e02eb6fca940450f5cb814f53e97159e5b/ozelot/cache.py#L115-L202 | train |
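A typical use of the cached getter above: fetch a page, keep only one node of it, and stay under a polite request rate. `cache` again stands for a RequestCache instance; the URL and xpath are placeholders.

```python
# Sketch: cached, rate-limited, xpath-restricted fetch with the documented error handling.
try:
    content = cache.get("https://example.com/listing",
                        xpath="//table[@id='results']",   # only this node is stored and returned
                        rate_limit=1.0)                    # at most one real request per second
except RuntimeError as err:
    print("request failed or xpath missing:", err)
else:
    print(len(content), "characters of cached HTML")
```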
ehansis/ozelot | ozelot/cache.py | RequestCache.get_timestamp | def get_timestamp(self, url, xpath=None):
"""Get time stamp of cached query result.
If DB has not yet been initialized or url/xpath has not been queried yet, return None.
Args:
        url (str): URL to look up
xpath (str): xpath to search (may be ``None``)
Returns:
datetime.datetime: cached response timestamp, None if not available
"""
if not path.exists(self.db_path):
return None
if self._query(url, xpath).count() > 0:
return self._query(url, xpath).one().queried_on | python | def get_timestamp(self, url, xpath=None):
"""Get time stamp of cached query result.
If DB has not yet been initialized or url/xpath has not been queried yet, return None.
Args:
        url (str): URL to look up
xpath (str): xpath to search (may be ``None``)
Returns:
datetime.datetime: cached response timestamp, None if not available
"""
if not path.exists(self.db_path):
return None
if self._query(url, xpath).count() > 0:
return self._query(url, xpath).one().queried_on | [
"def",
"get_timestamp",
"(",
"self",
",",
"url",
",",
"xpath",
"=",
"None",
")",
":",
"if",
"not",
"path",
".",
"exists",
"(",
"self",
".",
"db_path",
")",
":",
"return",
"None",
"if",
"self",
".",
"_query",
"(",
"url",
",",
"xpath",
")",
".",
"count",
"(",
")",
">",
"0",
":",
"return",
"self",
".",
"_query",
"(",
"url",
",",
"xpath",
")",
".",
"one",
"(",
")",
".",
"queried_on"
] | Get time stamp of cached query result.
If DB has not yet been initialized or url/xpath has not been queried yet, return None.
Args:
    url (str): URL to look up
xpath (str): xpath to search (may be ``None``)
Returns:
datetime.datetime: cached response timestamp, None if not available | [
"Get",
"time",
"stamp",
"of",
"cached",
"query",
"result",
"."
] | 948675e02eb6fca940450f5cb814f53e97159e5b | https://github.com/ehansis/ozelot/blob/948675e02eb6fca940450f5cb814f53e97159e5b/ozelot/cache.py#L241-L257 | train |
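The timestamp accessor above pairs naturally with get(): it tells you how old the cached copy is, or returns None when nothing has been stored yet. A short sketch, reusing the same placeholder `cache` object:

```python
# Sketch: report the age of a cached response.
import datetime

fetched_on = cache.get_timestamp("https://example.com/listing")
if fetched_on is None:
    print("not cached yet")
else:
    print("cached copy is", datetime.datetime.now() - fetched_on, "old")
```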
gmdzy2010/dingtalk_sdk_gmdzy2010 | dingtalk_sdk_gmdzy2010/base_request.py | BaseRequest.set_logger | def set_logger(self):
"""Method to build the base logging system. By default, logging level
is set to INFO."""
logger = logging.getLogger(__name__)
logger.setLevel(level=logging.INFO)
logger_file = os.path.join(self.logs_path, 'dingtalk_sdk.logs')
logger_handler = logging.FileHandler(logger_file)
logger_handler.setLevel(logging.INFO)
logger_formatter = logging.Formatter(
'[%(asctime)s | %(name)s | %(levelname)s] %(message)s'
)
logger_handler.setFormatter(logger_formatter)
logger.addHandler(logger_handler)
return logger | python | def set_logger(self):
"""Method to build the base logging system. By default, logging level
is set to INFO."""
logger = logging.getLogger(__name__)
logger.setLevel(level=logging.INFO)
logger_file = os.path.join(self.logs_path, 'dingtalk_sdk.logs')
logger_handler = logging.FileHandler(logger_file)
logger_handler.setLevel(logging.INFO)
logger_formatter = logging.Formatter(
'[%(asctime)s | %(name)s | %(levelname)s] %(message)s'
)
logger_handler.setFormatter(logger_formatter)
logger.addHandler(logger_handler)
return logger | [
"def",
"set_logger",
"(",
"self",
")",
":",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
"__name__",
")",
"logger",
".",
"setLevel",
"(",
"level",
"=",
"logging",
".",
"INFO",
")",
"logger_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"logs_path",
",",
"'dingtalk_sdk.logs'",
")",
"logger_handler",
"=",
"logging",
".",
"FileHandler",
"(",
"logger_file",
")",
"logger_handler",
".",
"setLevel",
"(",
"logging",
".",
"INFO",
")",
"logger_formatter",
"=",
"logging",
".",
"Formatter",
"(",
"'[%(asctime)s | %(name)s | %(levelname)s] %(message)s'",
")",
"logger_handler",
".",
"setFormatter",
"(",
"logger_formatter",
")",
"logger",
".",
"addHandler",
"(",
"logger_handler",
")",
"return",
"logger"
] | Method to build the base logging system. By default, logging level
is set to INFO. | [
"Method",
"to",
"build",
"the",
"base",
"logging",
"system",
".",
"By",
"default",
"logging",
"level",
"is",
"set",
"to",
"INFO",
"."
] | b06cb1f78f89be9554dcb6101af8bc72718a9ecd | https://github.com/gmdzy2010/dingtalk_sdk_gmdzy2010/blob/b06cb1f78f89be9554dcb6101af8bc72718a9ecd/dingtalk_sdk_gmdzy2010/base_request.py#L22-L35 | train |
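A sketch of using the logger factory above. `req` stands for an instance of a BaseRequest subclass whose `logs_path` points at a writable directory; how such an instance is constructed is outside this excerpt.

```python
# Sketch: obtain the configured file logger and write to it.
logger = req.set_logger()
logger.info("request prepared")   # appended to <logs_path>/dingtalk_sdk.logs at INFO level
```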
gmdzy2010/dingtalk_sdk_gmdzy2010 | dingtalk_sdk_gmdzy2010/base_request.py | BaseRequest.get_response | def get_response(self):
"""Get the original response of requests"""
request = getattr(requests, self.request_method, None)
if request is None and self._request_method is None:
raise ValueError("A effective http request method must be set")
if self.request_url is None:
raise ValueError(
"Fatal error occurred, the class property \"request_url\" is"
"set to None, reset it with an effective url of dingtalk api."
)
response = request(self.request_url, **self.kwargs)
self.response = response
return response | python | def get_response(self):
"""Get the original response of requests"""
request = getattr(requests, self.request_method, None)
if request is None and self._request_method is None:
raise ValueError("A effective http request method must be set")
if self.request_url is None:
raise ValueError(
"Fatal error occurred, the class property \"request_url\" is"
"set to None, reset it with an effective url of dingtalk api."
)
response = request(self.request_url, **self.kwargs)
self.response = response
return response | [
"def",
"get_response",
"(",
"self",
")",
":",
"request",
"=",
"getattr",
"(",
"requests",
",",
"self",
".",
"request_method",
",",
"None",
")",
"if",
"request",
"is",
"None",
"and",
"self",
".",
"_request_method",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"A effective http request method must be set\"",
")",
"if",
"self",
".",
"request_url",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"Fatal error occurred, the class property \\\"request_url\\\" is\"",
"\"set to None, reset it with an effective url of dingtalk api.\"",
")",
"response",
"=",
"request",
"(",
"self",
".",
"request_url",
",",
"*",
"*",
"self",
".",
"kwargs",
")",
"self",
".",
"response",
"=",
"response",
"return",
"response"
] | Get the original response of requests | [
"Get",
"the",
"original",
"response",
"of",
"requests"
] | b06cb1f78f89be9554dcb6101af8bc72718a9ecd | https://github.com/gmdzy2010/dingtalk_sdk_gmdzy2010/blob/b06cb1f78f89be9554dcb6101af8bc72718a9ecd/dingtalk_sdk_gmdzy2010/base_request.py#L55-L67 | train |
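Calling the method above only needs an instance whose `request_url`, `request_method` and `kwargs` attributes have been set up, since those are all it reads. The sketch treats `req` as such an already-configured placeholder instance; whether the endpoint returns JSON is an assumption for the last line.

```python
# Sketch: perform the configured request and inspect the raw requests.Response.
response = req.get_response()
print(response.status_code)
print(response.json())            # assumes a JSON-returning DingTalk endpoint
```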
micolous/python-slackrealtime | src/slackrealtime/protocol.py | RtmProtocol.sendCommand | def sendCommand(self, **msg):
"""
Sends a raw command to the Slack server, generating a message ID automatically.
"""
assert 'type' in msg, 'Message type is required.'
msg['id'] = self.next_message_id
self.next_message_id += 1
if self.next_message_id >= maxint:
self.next_message_id = 1
self.sendMessage(json.dumps(msg))
return msg['id'] | python | def sendCommand(self, **msg):
"""
Sends a raw command to the Slack server, generating a message ID automatically.
"""
assert 'type' in msg, 'Message type is required.'
msg['id'] = self.next_message_id
self.next_message_id += 1
if self.next_message_id >= maxint:
self.next_message_id = 1
self.sendMessage(json.dumps(msg))
return msg['id'] | [
"def",
"sendCommand",
"(",
"self",
",",
"*",
"*",
"msg",
")",
":",
"assert",
"'type'",
"in",
"msg",
",",
"'Message type is required.'",
"msg",
"[",
"'id'",
"]",
"=",
"self",
".",
"next_message_id",
"self",
".",
"next_message_id",
"+=",
"1",
"if",
"self",
".",
"next_message_id",
">=",
"maxint",
":",
"self",
".",
"next_message_id",
"=",
"1",
"self",
".",
"sendMessage",
"(",
"json",
".",
"dumps",
"(",
"msg",
")",
")",
"return",
"msg",
"[",
"'id'",
"]"
] | Sends a raw command to the Slack server, generating a message ID automatically. | [
"Sends",
"a",
"raw",
"command",
"to",
"the",
"Slack",
"server",
"generating",
"a",
"message",
"ID",
"automatically",
"."
] | e9c94416f979a6582110ebba09c147de2bfe20a1 | https://github.com/micolous/python-slackrealtime/blob/e9c94416f979a6582110ebba09c147de2bfe20a1/src/slackrealtime/protocol.py#L70-L83 | train |
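A usage sketch for `sendCommand` above, assuming `proto` is an already-connected `RtmProtocol` instance obtained through the library's normal connection flow (not shown in this entry).

```python
# proto: an already-connected RtmProtocol instance (assumed, see note above)
# 'type' is the only required key; an incrementing message id is added automatically.
msg_id = proto.sendCommand(type="ping")
print("sent RTM ping, message id:", msg_id)
```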
micolous/python-slackrealtime | src/slackrealtime/protocol.py | RtmProtocol.sendChatMessage | def sendChatMessage(self, text, id=None, user=None, group=None, channel=None, parse='none', link_names=True, unfurl_links=True, unfurl_media=False, send_with_api=False, icon_emoji=None, icon_url=None, username=None, attachments=None, thread_ts=None, reply_broadcast=False):
"""
Sends a chat message to a given id, user, group or channel.
If the API token is not a bot token (xoxb), ``send_with_api`` may be set
to True. This will send messages using ``chat.postMessage`` in the Slack
API, instead of using the WebSockets channel.
This makes the message sending process a little bit slower, however
permits writing of messages containing hyperlinks, like what can be done
with Incoming and Outgoing Webhooks integrations.
Bots are not permitted by Slack to use ``chat.postMessage`` so this will
result in an error.
Note: channel names must **not** be preceeded with ``#``.
"""
if id is not None:
assert user is None, 'id and user cannot both be set.'
assert group is None, 'id and group cannot both be set.'
assert channel is None, 'id and channel cannot both be set.'
elif user is not None:
assert group is None, 'user and group cannot both be set.'
assert channel is None, 'user and channel cannot both be set.'
# Private message to user, get the IM name
id = self.meta.find_im_by_user_name(user, auto_create=True)[0]
elif group is not None:
assert channel is None, 'group and channel cannot both be set.'
# Message to private group, get the group name.
id = self.meta.find_group_by_name(group)[0]
elif channel is not None:
# Message sent to a channel
id = self.meta.find_channel_by_name(channel)[0]
else:
raise Exception, 'Should not reach here.'
if send_with_api:
return self.meta.api.chat.postMessage(
token=self.meta.token,
channel=id,
text=text,
parse=parse,
link_names=link_names,
unfurl_links=unfurl_links,
unfurl_media=unfurl_media,
icon_url=icon_url,
icon_emoji=icon_emoji,
username=username,
attachments=attachments,
thread_ts=thread_ts,
reply_broadcast=reply_broadcast,
)
else:
assert icon_url is None, 'icon_url can only be set if send_with_api is True'
assert icon_emoji is None, 'icon_emoji can only be set if send_with_api is True'
assert username is None, 'username can only be set if send_with_api is True'
return self.sendCommand(
type='message',
channel=id,
text=text,
parse=parse,
link_names=link_names,
unfurl_links=unfurl_links,
unfurl_media=unfurl_media,
thread_ts=thread_ts,
reply_broadcast=reply_broadcast,
) | python | def sendChatMessage(self, text, id=None, user=None, group=None, channel=None, parse='none', link_names=True, unfurl_links=True, unfurl_media=False, send_with_api=False, icon_emoji=None, icon_url=None, username=None, attachments=None, thread_ts=None, reply_broadcast=False):
"""
Sends a chat message to a given id, user, group or channel.
If the API token is not a bot token (xoxb), ``send_with_api`` may be set
to True. This will send messages using ``chat.postMessage`` in the Slack
API, instead of using the WebSockets channel.
This makes the message sending process a little bit slower, however
permits writing of messages containing hyperlinks, like what can be done
with Incoming and Outgoing Webhooks integrations.
Bots are not permitted by Slack to use ``chat.postMessage`` so this will
result in an error.
Note: channel names must **not** be preceeded with ``#``.
"""
if id is not None:
assert user is None, 'id and user cannot both be set.'
assert group is None, 'id and group cannot both be set.'
assert channel is None, 'id and channel cannot both be set.'
elif user is not None:
assert group is None, 'user and group cannot both be set.'
assert channel is None, 'user and channel cannot both be set.'
# Private message to user, get the IM name
id = self.meta.find_im_by_user_name(user, auto_create=True)[0]
elif group is not None:
assert channel is None, 'group and channel cannot both be set.'
# Message to private group, get the group name.
id = self.meta.find_group_by_name(group)[0]
elif channel is not None:
# Message sent to a channel
id = self.meta.find_channel_by_name(channel)[0]
else:
raise Exception, 'Should not reach here.'
if send_with_api:
return self.meta.api.chat.postMessage(
token=self.meta.token,
channel=id,
text=text,
parse=parse,
link_names=link_names,
unfurl_links=unfurl_links,
unfurl_media=unfurl_media,
icon_url=icon_url,
icon_emoji=icon_emoji,
username=username,
attachments=attachments,
thread_ts=thread_ts,
reply_broadcast=reply_broadcast,
)
else:
assert icon_url is None, 'icon_url can only be set if send_with_api is True'
assert icon_emoji is None, 'icon_emoji can only be set if send_with_api is True'
assert username is None, 'username can only be set if send_with_api is True'
return self.sendCommand(
type='message',
channel=id,
text=text,
parse=parse,
link_names=link_names,
unfurl_links=unfurl_links,
unfurl_media=unfurl_media,
thread_ts=thread_ts,
reply_broadcast=reply_broadcast,
) | [
"def",
"sendChatMessage",
"(",
"self",
",",
"text",
",",
"id",
"=",
"None",
",",
"user",
"=",
"None",
",",
"group",
"=",
"None",
",",
"channel",
"=",
"None",
",",
"parse",
"=",
"'none'",
",",
"link_names",
"=",
"True",
",",
"unfurl_links",
"=",
"True",
",",
"unfurl_media",
"=",
"False",
",",
"send_with_api",
"=",
"False",
",",
"icon_emoji",
"=",
"None",
",",
"icon_url",
"=",
"None",
",",
"username",
"=",
"None",
",",
"attachments",
"=",
"None",
",",
"thread_ts",
"=",
"None",
",",
"reply_broadcast",
"=",
"False",
")",
":",
"if",
"id",
"is",
"not",
"None",
":",
"assert",
"user",
"is",
"None",
",",
"'id and user cannot both be set.'",
"assert",
"group",
"is",
"None",
",",
"'id and group cannot both be set.'",
"assert",
"channel",
"is",
"None",
",",
"'id and channel cannot both be set.'",
"elif",
"user",
"is",
"not",
"None",
":",
"assert",
"group",
"is",
"None",
",",
"'user and group cannot both be set.'",
"assert",
"channel",
"is",
"None",
",",
"'user and channel cannot both be set.'",
"# Private message to user, get the IM name",
"id",
"=",
"self",
".",
"meta",
".",
"find_im_by_user_name",
"(",
"user",
",",
"auto_create",
"=",
"True",
")",
"[",
"0",
"]",
"elif",
"group",
"is",
"not",
"None",
":",
"assert",
"channel",
"is",
"None",
",",
"'group and channel cannot both be set.'",
"# Message to private group, get the group name.",
"id",
"=",
"self",
".",
"meta",
".",
"find_group_by_name",
"(",
"group",
")",
"[",
"0",
"]",
"elif",
"channel",
"is",
"not",
"None",
":",
"# Message sent to a channel",
"id",
"=",
"self",
".",
"meta",
".",
"find_channel_by_name",
"(",
"channel",
")",
"[",
"0",
"]",
"else",
":",
"raise",
"Exception",
",",
"'Should not reach here.'",
"if",
"send_with_api",
":",
"return",
"self",
".",
"meta",
".",
"api",
".",
"chat",
".",
"postMessage",
"(",
"token",
"=",
"self",
".",
"meta",
".",
"token",
",",
"channel",
"=",
"id",
",",
"text",
"=",
"text",
",",
"parse",
"=",
"parse",
",",
"link_names",
"=",
"link_names",
",",
"unfurl_links",
"=",
"unfurl_links",
",",
"unfurl_media",
"=",
"unfurl_media",
",",
"icon_url",
"=",
"icon_url",
",",
"icon_emoji",
"=",
"icon_emoji",
",",
"username",
"=",
"username",
",",
"attachments",
"=",
"attachments",
",",
"thread_ts",
"=",
"thread_ts",
",",
"reply_broadcast",
"=",
"reply_broadcast",
",",
")",
"else",
":",
"assert",
"icon_url",
"is",
"None",
",",
"'icon_url can only be set if send_with_api is True'",
"assert",
"icon_emoji",
"is",
"None",
",",
"'icon_emoji can only be set if send_with_api is True'",
"assert",
"username",
"is",
"None",
",",
"'username can only be set if send_with_api is True'",
"return",
"self",
".",
"sendCommand",
"(",
"type",
"=",
"'message'",
",",
"channel",
"=",
"id",
",",
"text",
"=",
"text",
",",
"parse",
"=",
"parse",
",",
"link_names",
"=",
"link_names",
",",
"unfurl_links",
"=",
"unfurl_links",
",",
"unfurl_media",
"=",
"unfurl_media",
",",
"thread_ts",
"=",
"thread_ts",
",",
"reply_broadcast",
"=",
"reply_broadcast",
",",
")"
] | Sends a chat message to a given id, user, group or channel.
If the API token is not a bot token (xoxb), ``send_with_api`` may be set
to True. This will send messages using ``chat.postMessage`` in the Slack
API, instead of using the WebSockets channel.
This makes the message sending process a little bit slower, however
permits writing of messages containing hyperlinks, like what can be done
with Incoming and Outgoing Webhooks integrations.
Bots are not permitted by Slack to use ``chat.postMessage`` so this will
result in an error.
Note: channel names must **not** be preceeded with ``#``. | [
"Sends",
"a",
"chat",
"message",
"to",
"a",
"given",
"id",
"user",
"group",
"or",
"channel",
"."
] | e9c94416f979a6582110ebba09c147de2bfe20a1 | https://github.com/micolous/python-slackrealtime/blob/e9c94416f979a6582110ebba09c147de2bfe20a1/src/slackrealtime/protocol.py#L86-L155 | train |
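Usage sketches for `sendChatMessage` above, again assuming a connected `proto` instance whose metadata (`proto.meta`) has been populated. The channel and user names are illustrative.

```python
# proto: a connected RtmProtocol with populated metadata (assumed)
# Channel names are passed without the leading '#'.
proto.sendChatMessage("Build finished.", channel="general")

# Sending to a user opens (or reuses) an IM channel via find_im_by_user_name().
proto.sendChatMessage("hi there", user="alice")

# chat.postMessage path: allows hyperlinks and a custom username,
# but requires a non-bot API token per the docstring above.
proto.sendChatMessage("<https://example.com|nightly report>",
                      channel="general",
                      send_with_api=True,
                      username="buildbot")
```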
abantos/bolt | bolt/__init__.py | run | def run():
"""
Entry point for the `bolt` executable.
"""
options = btoptions.Options()
btlog.initialize_logging(options.log_level, options.log_file)
app = btapp.get_application()
app.run() | python | def run():
"""
Entry point for the `bolt` executable.
"""
options = btoptions.Options()
btlog.initialize_logging(options.log_level, options.log_file)
app = btapp.get_application()
app.run() | [
"def",
"run",
"(",
")",
":",
"options",
"=",
"btoptions",
".",
"Options",
"(",
")",
"btlog",
".",
"initialize_logging",
"(",
"options",
".",
"log_level",
",",
"options",
".",
"log_file",
")",
"app",
"=",
"btapp",
".",
"get_application",
"(",
")",
"app",
".",
"run",
"(",
")"
] | Entry point for the `bolt` executable. | [
"Entry",
"point",
"for",
"the",
"bolt",
"executable",
"."
] | 8b6a911d4a7b1a6e870748a523c9b2b91997c773 | https://github.com/abantos/bolt/blob/8b6a911d4a7b1a6e870748a523c9b2b91997c773/bolt/__init__.py#L31-L38 | train |
mojaie/chorus | chorus/v2000writer.py | mols_to_file | def mols_to_file(mols, path):
"""Save molecules to the SDFile format file
Args:
mols: list of molecule objects
path: file path to save
"""
with open(path, 'w') as f:
f.write(mols_to_text(mols)) | python | def mols_to_file(mols, path):
"""Save molecules to the SDFile format file
Args:
mols: list of molecule objects
path: file path to save
"""
with open(path, 'w') as f:
f.write(mols_to_text(mols)) | [
"def",
"mols_to_file",
"(",
"mols",
",",
"path",
")",
":",
"with",
"open",
"(",
"path",
",",
"'w'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"mols_to_text",
"(",
"mols",
")",
")"
] | Save molecules to the SDFile format file
Args:
mols: list of molecule objects
path: file path to save | [
"Save",
"molecules",
"to",
"the",
"SDFile",
"format",
"file"
] | fc7fe23a0272554c67671645ab07830b315eeb1b | https://github.com/mojaie/chorus/blob/fc7fe23a0272554c67671645ab07830b315eeb1b/chorus/v2000writer.py#L130-L138 | train |
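A short sketch for `mols_to_file` above; `mols` is assumed to be a list of chorus molecule objects produced elsewhere (for example, parsed from an existing SD file).

```python
from chorus import v2000writer

mols = [...]  # placeholder: chorus molecule objects obtained elsewhere
v2000writer.mols_to_file(mols, "output.sdf")   # write all molecules to one SDFile
sdf_text = v2000writer.mols_to_text(mols)      # the in-memory variant used internally
```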
ShadowBlip/Neteria | neteria/client.py | NeteriaClient.listen | def listen(self):
"""Starts the client listener to listen for server responses.
Args:
None
Returns:
None
"""
logger.info("Listening on port " + str(self.listener.listen_port))
self.listener.listen() | python | def listen(self):
"""Starts the client listener to listen for server responses.
Args:
None
Returns:
None
"""
logger.info("Listening on port " + str(self.listener.listen_port))
self.listener.listen() | [
"def",
"listen",
"(",
"self",
")",
":",
"logger",
".",
"info",
"(",
"\"Listening on port \"",
"+",
"str",
"(",
"self",
".",
"listener",
".",
"listen_port",
")",
")",
"self",
".",
"listener",
".",
"listen",
"(",
")"
] | Starts the client listener to listen for server responses.
Args:
None
Returns:
None | [
"Starts",
"the",
"client",
"listener",
"to",
"listen",
"for",
"server",
"responses",
"."
] | 1a8c976eb2beeca0a5a272a34ac58b2c114495a4 | https://github.com/ShadowBlip/Neteria/blob/1a8c976eb2beeca0a5a272a34ac58b2c114495a4/neteria/client.py#L158-L170 | train |
ShadowBlip/Neteria | neteria/client.py | NeteriaClient.retransmit | def retransmit(self, data):
"""Processes messages that have been delivered from the transport
protocol.
Args:
data (dict): A dictionary containing the packet data to resend.
Returns:
None
Examples:
>>> data
{'method': 'REGISTER', 'address': ('192.168.0.20', 40080)}
"""
# Handle retransmitting REGISTER requests if we don't hear back from
# the server.
if data["method"] == "REGISTER":
if not self.registered and self.register_retries < self.max_retries:
logger.debug("<%s> Timeout exceeded. " % str(self.cuuid) + \
"Retransmitting REGISTER request.")
self.register_retries += 1
self.register(data["address"], retry=False)
else:
logger.debug("<%s> No need to retransmit." % str(self.cuuid))
if data["method"] == "EVENT":
if data["euuid"] in self.event_uuids:
# Increment the current retry count of the euuid
self.event_uuids[data["euuid"]]["retry"] += 1
if self.event_uuids[data["euuid"]]["retry"] > self.max_retries:
logger.debug("<%s> Max retries exceeded. Timed out waiting "
"for server for event: %s" % (data["cuuid"],
data["euuid"]))
logger.debug("<%s> <euuid:%s> Deleting event from currently "
"processing event uuids" % (data["cuuid"],
str(data["euuid"])))
del self.event_uuids[data["euuid"]]
else:
# Retransmit that shit
self.listener.send_datagram(
serialize_data(data, self.compression,
self.encryption, self.server_key),
self.server)
# Then we set another schedule to check again
logger.debug("<%s> <euuid:%s> Scheduling to retry in %s "
"seconds" % (data["cuuid"],
str(data["euuid"]),
str(self.timeout)))
self.listener.call_later(
self.timeout, self.retransmit, data)
else:
logger.debug("<%s> <euuid:%s> No need to "
"retransmit." % (str(self.cuuid),
str(data["euuid"]))) | python | def retransmit(self, data):
"""Processes messages that have been delivered from the transport
protocol.
Args:
data (dict): A dictionary containing the packet data to resend.
Returns:
None
Examples:
>>> data
{'method': 'REGISTER', 'address': ('192.168.0.20', 40080)}
"""
# Handle retransmitting REGISTER requests if we don't hear back from
# the server.
if data["method"] == "REGISTER":
if not self.registered and self.register_retries < self.max_retries:
logger.debug("<%s> Timeout exceeded. " % str(self.cuuid) + \
"Retransmitting REGISTER request.")
self.register_retries += 1
self.register(data["address"], retry=False)
else:
logger.debug("<%s> No need to retransmit." % str(self.cuuid))
if data["method"] == "EVENT":
if data["euuid"] in self.event_uuids:
# Increment the current retry count of the euuid
self.event_uuids[data["euuid"]]["retry"] += 1
if self.event_uuids[data["euuid"]]["retry"] > self.max_retries:
logger.debug("<%s> Max retries exceeded. Timed out waiting "
"for server for event: %s" % (data["cuuid"],
data["euuid"]))
logger.debug("<%s> <euuid:%s> Deleting event from currently "
"processing event uuids" % (data["cuuid"],
str(data["euuid"])))
del self.event_uuids[data["euuid"]]
else:
# Retransmit that shit
self.listener.send_datagram(
serialize_data(data, self.compression,
self.encryption, self.server_key),
self.server)
# Then we set another schedule to check again
logger.debug("<%s> <euuid:%s> Scheduling to retry in %s "
"seconds" % (data["cuuid"],
str(data["euuid"]),
str(self.timeout)))
self.listener.call_later(
self.timeout, self.retransmit, data)
else:
logger.debug("<%s> <euuid:%s> No need to "
"retransmit." % (str(self.cuuid),
str(data["euuid"]))) | [
"def",
"retransmit",
"(",
"self",
",",
"data",
")",
":",
"# Handle retransmitting REGISTER requests if we don't hear back from",
"# the server.",
"if",
"data",
"[",
"\"method\"",
"]",
"==",
"\"REGISTER\"",
":",
"if",
"not",
"self",
".",
"registered",
"and",
"self",
".",
"register_retries",
"<",
"self",
".",
"max_retries",
":",
"logger",
".",
"debug",
"(",
"\"<%s> Timeout exceeded. \"",
"%",
"str",
"(",
"self",
".",
"cuuid",
")",
"+",
"\"Retransmitting REGISTER request.\"",
")",
"self",
".",
"register_retries",
"+=",
"1",
"self",
".",
"register",
"(",
"data",
"[",
"\"address\"",
"]",
",",
"retry",
"=",
"False",
")",
"else",
":",
"logger",
".",
"debug",
"(",
"\"<%s> No need to retransmit.\"",
"%",
"str",
"(",
"self",
".",
"cuuid",
")",
")",
"if",
"data",
"[",
"\"method\"",
"]",
"==",
"\"EVENT\"",
":",
"if",
"data",
"[",
"\"euuid\"",
"]",
"in",
"self",
".",
"event_uuids",
":",
"# Increment the current retry count of the euuid",
"self",
".",
"event_uuids",
"[",
"data",
"[",
"\"euuid\"",
"]",
"]",
"[",
"\"retry\"",
"]",
"+=",
"1",
"if",
"self",
".",
"event_uuids",
"[",
"data",
"[",
"\"euuid\"",
"]",
"]",
"[",
"\"retry\"",
"]",
">",
"self",
".",
"max_retries",
":",
"logger",
".",
"debug",
"(",
"\"<%s> Max retries exceeded. Timed out waiting \"",
"\"for server for event: %s\"",
"%",
"(",
"data",
"[",
"\"cuuid\"",
"]",
",",
"data",
"[",
"\"euuid\"",
"]",
")",
")",
"logger",
".",
"debug",
"(",
"\"<%s> <euuid:%s> Deleting event from currently \"",
"\"processing event uuids\"",
"%",
"(",
"data",
"[",
"\"cuuid\"",
"]",
",",
"str",
"(",
"data",
"[",
"\"euuid\"",
"]",
")",
")",
")",
"del",
"self",
".",
"event_uuids",
"[",
"data",
"[",
"\"euuid\"",
"]",
"]",
"else",
":",
"# Retransmit that shit",
"self",
".",
"listener",
".",
"send_datagram",
"(",
"serialize_data",
"(",
"data",
",",
"self",
".",
"compression",
",",
"self",
".",
"encryption",
",",
"self",
".",
"server_key",
")",
",",
"self",
".",
"server",
")",
"# Then we set another schedule to check again",
"logger",
".",
"debug",
"(",
"\"<%s> <euuid:%s> Scheduling to retry in %s \"",
"\"seconds\"",
"%",
"(",
"data",
"[",
"\"cuuid\"",
"]",
",",
"str",
"(",
"data",
"[",
"\"euuid\"",
"]",
")",
",",
"str",
"(",
"self",
".",
"timeout",
")",
")",
")",
"self",
".",
"listener",
".",
"call_later",
"(",
"self",
".",
"timeout",
",",
"self",
".",
"retransmit",
",",
"data",
")",
"else",
":",
"logger",
".",
"debug",
"(",
"\"<%s> <euuid:%s> No need to \"",
"\"retransmit.\"",
"%",
"(",
"str",
"(",
"self",
".",
"cuuid",
")",
",",
"str",
"(",
"data",
"[",
"\"euuid\"",
"]",
")",
")",
")"
] | Processes messages that have been delivered from the transport
protocol.
Args:
data (dict): A dictionary containing the packet data to resend.
Returns:
None
Examples:
>>> data
{'method': 'REGISTER', 'address': ('192.168.0.20', 40080)} | [
"Processes",
"messages",
"that",
"have",
"been",
"delivered",
"from",
"the",
"transport",
"protocol",
"."
] | 1a8c976eb2beeca0a5a272a34ac58b2c114495a4 | https://github.com/ShadowBlip/Neteria/blob/1a8c976eb2beeca0a5a272a34ac58b2c114495a4/neteria/client.py#L173-L230 | train |
ShadowBlip/Neteria | neteria/client.py | NeteriaClient.handle_message | def handle_message(self, msg, host):
"""Processes messages that have been delivered from the transport
protocol
Args:
msg (string): The raw packet data delivered from the transport
protocol.
host (tuple): A tuple containing the (address, port) combination of
the message's origin.
Returns:
A formatted response to the client with the results of the processed
message.
Examples:
>>> msg
{"method": "OHAI Client", "version": "1.0"}
>>> host
('192.168.0.20', 36545)
"""
logger.debug("Executing handle_message method.")
response = None
# Unserialize the data packet
# If encryption is enabled, and we've receive the server's public key
# already, try to decrypt
if self.encryption and self.server_key:
msg_data = unserialize_data(msg, self.compression, self.encryption)
else:
msg_data = unserialize_data(msg, self.compression)
# Log the packet
logger.debug("Packet received: " + pformat(msg_data))
# If the message data is blank, return none
if not msg_data:
return response
if "method" in msg_data:
if msg_data["method"] == "OHAI Client":
logger.debug("<%s> Autodiscover response from server received "
"from: %s" % (self.cuuid, host[0]))
self.discovered_servers[host]= [msg_data["version"], msg_data["server_name"]]
# Try to register with the discovered server
if self.autoregistering:
self.register(host)
self.autoregistering = False
elif msg_data["method"] == "NOTIFY":
self.event_notifies[msg_data["euuid"]] = msg_data["event_data"]
logger.debug("<%s> Notify received" % self.cuuid)
logger.debug("<%s> Notify event buffer: %s" % (self.cuuid,
pformat(self.event_notifies)))
# Send an OK NOTIFY to the server confirming we got the message
response = serialize_data(
{"cuuid": str(self.cuuid),
"method": "OK NOTIFY",
"euuid": msg_data["euuid"]},
self.compression, self.encryption, self.server_key)
elif msg_data["method"] == "OK REGISTER":
logger.debug("<%s> Ok register received" % self.cuuid)
self.registered = True
self.server = host
# If the server sent us their public key, store it
if "encryption" in msg_data and self.encryption:
self.server_key = PublicKey(
msg_data["encryption"][0], msg_data["encryption"][1])
elif (msg_data["method"] == "LEGAL" or
msg_data["method"] == "ILLEGAL"):
logger.debug("<%s> Legality message received" % str(self.cuuid))
self.legal_check(msg_data)
# Send an OK EVENT response to the server confirming we
# received the message
response = serialize_data(
{"cuuid": str(self.cuuid),
"method": "OK EVENT",
"euuid": msg_data["euuid"]},
self.compression, self.encryption, self.server_key)
logger.debug("Packet processing completed")
return response | python | def handle_message(self, msg, host):
"""Processes messages that have been delivered from the transport
protocol
Args:
msg (string): The raw packet data delivered from the transport
protocol.
host (tuple): A tuple containing the (address, port) combination of
the message's origin.
Returns:
A formatted response to the client with the results of the processed
message.
Examples:
>>> msg
{"method": "OHAI Client", "version": "1.0"}
>>> host
('192.168.0.20', 36545)
"""
logger.debug("Executing handle_message method.")
response = None
# Unserialize the data packet
# If encryption is enabled, and we've receive the server's public key
# already, try to decrypt
if self.encryption and self.server_key:
msg_data = unserialize_data(msg, self.compression, self.encryption)
else:
msg_data = unserialize_data(msg, self.compression)
# Log the packet
logger.debug("Packet received: " + pformat(msg_data))
# If the message data is blank, return none
if not msg_data:
return response
if "method" in msg_data:
if msg_data["method"] == "OHAI Client":
logger.debug("<%s> Autodiscover response from server received "
"from: %s" % (self.cuuid, host[0]))
self.discovered_servers[host]= [msg_data["version"], msg_data["server_name"]]
# Try to register with the discovered server
if self.autoregistering:
self.register(host)
self.autoregistering = False
elif msg_data["method"] == "NOTIFY":
self.event_notifies[msg_data["euuid"]] = msg_data["event_data"]
logger.debug("<%s> Notify received" % self.cuuid)
logger.debug("<%s> Notify event buffer: %s" % (self.cuuid,
pformat(self.event_notifies)))
# Send an OK NOTIFY to the server confirming we got the message
response = serialize_data(
{"cuuid": str(self.cuuid),
"method": "OK NOTIFY",
"euuid": msg_data["euuid"]},
self.compression, self.encryption, self.server_key)
elif msg_data["method"] == "OK REGISTER":
logger.debug("<%s> Ok register received" % self.cuuid)
self.registered = True
self.server = host
# If the server sent us their public key, store it
if "encryption" in msg_data and self.encryption:
self.server_key = PublicKey(
msg_data["encryption"][0], msg_data["encryption"][1])
elif (msg_data["method"] == "LEGAL" or
msg_data["method"] == "ILLEGAL"):
logger.debug("<%s> Legality message received" % str(self.cuuid))
self.legal_check(msg_data)
# Send an OK EVENT response to the server confirming we
# received the message
response = serialize_data(
{"cuuid": str(self.cuuid),
"method": "OK EVENT",
"euuid": msg_data["euuid"]},
self.compression, self.encryption, self.server_key)
logger.debug("Packet processing completed")
return response | [
"def",
"handle_message",
"(",
"self",
",",
"msg",
",",
"host",
")",
":",
"logger",
".",
"debug",
"(",
"\"Executing handle_message method.\"",
")",
"response",
"=",
"None",
"# Unserialize the data packet",
"# If encryption is enabled, and we've receive the server's public key",
"# already, try to decrypt",
"if",
"self",
".",
"encryption",
"and",
"self",
".",
"server_key",
":",
"msg_data",
"=",
"unserialize_data",
"(",
"msg",
",",
"self",
".",
"compression",
",",
"self",
".",
"encryption",
")",
"else",
":",
"msg_data",
"=",
"unserialize_data",
"(",
"msg",
",",
"self",
".",
"compression",
")",
"# Log the packet",
"logger",
".",
"debug",
"(",
"\"Packet received: \"",
"+",
"pformat",
"(",
"msg_data",
")",
")",
"# If the message data is blank, return none",
"if",
"not",
"msg_data",
":",
"return",
"response",
"if",
"\"method\"",
"in",
"msg_data",
":",
"if",
"msg_data",
"[",
"\"method\"",
"]",
"==",
"\"OHAI Client\"",
":",
"logger",
".",
"debug",
"(",
"\"<%s> Autodiscover response from server received \"",
"\"from: %s\"",
"%",
"(",
"self",
".",
"cuuid",
",",
"host",
"[",
"0",
"]",
")",
")",
"self",
".",
"discovered_servers",
"[",
"host",
"]",
"=",
"[",
"msg_data",
"[",
"\"version\"",
"]",
",",
"msg_data",
"[",
"\"server_name\"",
"]",
"]",
"# Try to register with the discovered server",
"if",
"self",
".",
"autoregistering",
":",
"self",
".",
"register",
"(",
"host",
")",
"self",
".",
"autoregistering",
"=",
"False",
"elif",
"msg_data",
"[",
"\"method\"",
"]",
"==",
"\"NOTIFY\"",
":",
"self",
".",
"event_notifies",
"[",
"msg_data",
"[",
"\"euuid\"",
"]",
"]",
"=",
"msg_data",
"[",
"\"event_data\"",
"]",
"logger",
".",
"debug",
"(",
"\"<%s> Notify received\"",
"%",
"self",
".",
"cuuid",
")",
"logger",
".",
"debug",
"(",
"\"<%s> Notify event buffer: %s\"",
"%",
"(",
"self",
".",
"cuuid",
",",
"pformat",
"(",
"self",
".",
"event_notifies",
")",
")",
")",
"# Send an OK NOTIFY to the server confirming we got the message",
"response",
"=",
"serialize_data",
"(",
"{",
"\"cuuid\"",
":",
"str",
"(",
"self",
".",
"cuuid",
")",
",",
"\"method\"",
":",
"\"OK NOTIFY\"",
",",
"\"euuid\"",
":",
"msg_data",
"[",
"\"euuid\"",
"]",
"}",
",",
"self",
".",
"compression",
",",
"self",
".",
"encryption",
",",
"self",
".",
"server_key",
")",
"elif",
"msg_data",
"[",
"\"method\"",
"]",
"==",
"\"OK REGISTER\"",
":",
"logger",
".",
"debug",
"(",
"\"<%s> Ok register received\"",
"%",
"self",
".",
"cuuid",
")",
"self",
".",
"registered",
"=",
"True",
"self",
".",
"server",
"=",
"host",
"# If the server sent us their public key, store it",
"if",
"\"encryption\"",
"in",
"msg_data",
"and",
"self",
".",
"encryption",
":",
"self",
".",
"server_key",
"=",
"PublicKey",
"(",
"msg_data",
"[",
"\"encryption\"",
"]",
"[",
"0",
"]",
",",
"msg_data",
"[",
"\"encryption\"",
"]",
"[",
"1",
"]",
")",
"elif",
"(",
"msg_data",
"[",
"\"method\"",
"]",
"==",
"\"LEGAL\"",
"or",
"msg_data",
"[",
"\"method\"",
"]",
"==",
"\"ILLEGAL\"",
")",
":",
"logger",
".",
"debug",
"(",
"\"<%s> Legality message received\"",
"%",
"str",
"(",
"self",
".",
"cuuid",
")",
")",
"self",
".",
"legal_check",
"(",
"msg_data",
")",
"# Send an OK EVENT response to the server confirming we",
"# received the message",
"response",
"=",
"serialize_data",
"(",
"{",
"\"cuuid\"",
":",
"str",
"(",
"self",
".",
"cuuid",
")",
",",
"\"method\"",
":",
"\"OK EVENT\"",
",",
"\"euuid\"",
":",
"msg_data",
"[",
"\"euuid\"",
"]",
"}",
",",
"self",
".",
"compression",
",",
"self",
".",
"encryption",
",",
"self",
".",
"server_key",
")",
"logger",
".",
"debug",
"(",
"\"Packet processing completed\"",
")",
"return",
"response"
] | Processes messages that have been delivered from the transport
protocol
Args:
msg (string): The raw packet data delivered from the transport
protocol.
host (tuple): A tuple containing the (address, port) combination of
the message's origin.
Returns:
A formatted response to the client with the results of the processed
message.
Examples:
>>> msg
{"method": "OHAI Client", "version": "1.0"}
>>> host
('192.168.0.20', 36545) | [
"Processes",
"messages",
"that",
"have",
"been",
"delivered",
"from",
"the",
"transport",
"protocol"
] | 1a8c976eb2beeca0a5a272a34ac58b2c114495a4 | https://github.com/ShadowBlip/Neteria/blob/1a8c976eb2beeca0a5a272a34ac58b2c114495a4/neteria/client.py#L233-L321 | train |
ShadowBlip/Neteria | neteria/client.py | NeteriaClient.autodiscover | def autodiscover(self, autoregister=True):
"""This function will send out an autodiscover broadcast to find a
Neteria server. Any servers that respond with an "OHAI CLIENT"
packet are servers that we can connect to. Servers that respond are
stored in the "discovered_servers" list.
Args:
autoregister (boolean): Whether or not to automatically register
with any responding servers. Defaults to True.
Returns:
None
Examples:
>>> myclient = neteria.client.NeteriaClient()
>>> myclient.listen()
>>> myclient.autodiscover()
>>> myclient.discovered_servers
{('192.168.0.20', 40080): u'1.0', ('192.168.0.82', 40080): '2.0'}
"""
logger.debug("<%s> Sending autodiscover message to broadcast "
"address" % str(self.cuuid))
if not self.listener.listening:
logger.warning("Neteria client is not listening. The client "
"will not be able to process responses from the server")
message = serialize_data(
{"method": "OHAI",
"version": self.version,
"cuuid": str(self.cuuid)},
self.compression, encryption=False)
if autoregister:
self.autoregistering = True
self.listener.send_datagram(
message, ("<broadcast>", self.server_port), message_type="broadcast") | python | def autodiscover(self, autoregister=True):
"""This function will send out an autodiscover broadcast to find a
Neteria server. Any servers that respond with an "OHAI CLIENT"
packet are servers that we can connect to. Servers that respond are
stored in the "discovered_servers" list.
Args:
autoregister (boolean): Whether or not to automatically register
with any responding servers. Defaults to True.
Returns:
None
Examples:
>>> myclient = neteria.client.NeteriaClient()
>>> myclient.listen()
>>> myclient.autodiscover()
>>> myclient.discovered_servers
{('192.168.0.20', 40080): u'1.0', ('192.168.0.82', 40080): '2.0'}
"""
logger.debug("<%s> Sending autodiscover message to broadcast "
"address" % str(self.cuuid))
if not self.listener.listening:
logger.warning("Neteria client is not listening. The client "
"will not be able to process responses from the server")
message = serialize_data(
{"method": "OHAI",
"version": self.version,
"cuuid": str(self.cuuid)},
self.compression, encryption=False)
if autoregister:
self.autoregistering = True
self.listener.send_datagram(
message, ("<broadcast>", self.server_port), message_type="broadcast") | [
"def",
"autodiscover",
"(",
"self",
",",
"autoregister",
"=",
"True",
")",
":",
"logger",
".",
"debug",
"(",
"\"<%s> Sending autodiscover message to broadcast \"",
"\"address\"",
"%",
"str",
"(",
"self",
".",
"cuuid",
")",
")",
"if",
"not",
"self",
".",
"listener",
".",
"listening",
":",
"logger",
".",
"warning",
"(",
"\"Neteria client is not listening. The client \"",
"\"will not be able to process responses from the server\"",
")",
"message",
"=",
"serialize_data",
"(",
"{",
"\"method\"",
":",
"\"OHAI\"",
",",
"\"version\"",
":",
"self",
".",
"version",
",",
"\"cuuid\"",
":",
"str",
"(",
"self",
".",
"cuuid",
")",
"}",
",",
"self",
".",
"compression",
",",
"encryption",
"=",
"False",
")",
"if",
"autoregister",
":",
"self",
".",
"autoregistering",
"=",
"True",
"self",
".",
"listener",
".",
"send_datagram",
"(",
"message",
",",
"(",
"\"<broadcast>\"",
",",
"self",
".",
"server_port",
")",
",",
"message_type",
"=",
"\"broadcast\"",
")"
] | This function will send out an autodiscover broadcast to find a
Neteria server. Any servers that respond with an "OHAI CLIENT"
packet are servers that we can connect to. Servers that respond are
stored in the "discovered_servers" list.
Args:
autoregister (boolean): Whether or not to automatically register
with any responding servers. Defaults to True.
Returns:
None
Examples:
>>> myclient = neteria.client.NeteriaClient()
>>> myclient.listen()
>>> myclient.autodiscover()
>>> myclient.discovered_servers
{('192.168.0.20', 40080): u'1.0', ('192.168.0.82', 40080): '2.0'} | [
"This",
"function",
"will",
"send",
"out",
"an",
"autodiscover",
"broadcast",
"to",
"find",
"a",
"Neteria",
"server",
".",
"Any",
"servers",
"that",
"respond",
"with",
"an",
"OHAI",
"CLIENT",
"packet",
"are",
"servers",
"that",
"we",
"can",
"connect",
"to",
".",
"Servers",
"that",
"respond",
"are",
"stored",
"in",
"the",
"discovered_servers",
"list",
"."
] | 1a8c976eb2beeca0a5a272a34ac58b2c114495a4 | https://github.com/ShadowBlip/Neteria/blob/1a8c976eb2beeca0a5a272a34ac58b2c114495a4/neteria/client.py#L324-L360 | train |
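A discovery-flow sketch based on the docstring example in the `autodiscover` entry above; the polling loop is only one possible way to wait for the asynchronous registration to complete.

```python
import time
import neteria.client

client = neteria.client.NeteriaClient()
client.listen()                         # start the UDP listener first
client.autodiscover(autoregister=True)  # broadcast OHAI and register with responders

while not client.registered:            # registration completes asynchronously
    time.sleep(0.2)
print("registered with server:", client.server)
print("servers seen:", client.discovered_servers)
```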
ShadowBlip/Neteria | neteria/client.py | NeteriaClient.register | def register(self, address, retry=True):
"""This function will send a register packet to the discovered Neteria
server.
Args:
address (tuple): A tuple of the (address, port) to send the register
request to.
retry (boolean): Whether or not we want to reset the current number
of registration retries to 0.
Returns:
None
Examples:
>>> address
('192.168.0.20', 40080)
"""
logger.debug("<%s> Sending REGISTER request to: %s" % (str(self.cuuid),
str(address)))
if not self.listener.listening:
logger.warning("Neteria client is not listening.")
# Construct the message to send
message = {"method": "REGISTER", "cuuid": str(self.cuuid)}
# If we have encryption enabled, send our public key with our REGISTER
# request
if self.encryption:
message["encryption"] = [self.encryption.n, self.encryption.e]
# Send a REGISTER to the server
self.listener.send_datagram(
serialize_data(message, self.compression,
encryption=False), address)
if retry:
# Reset the current number of REGISTER retries
self.register_retries = 0
# Schedule a task to run in x seconds to check to see if we've timed
# out in receiving a response from the server
self.listener.call_later(
self.timeout, self.retransmit, {"method": "REGISTER",
"address": address}) | python | def register(self, address, retry=True):
"""This function will send a register packet to the discovered Neteria
server.
Args:
address (tuple): A tuple of the (address, port) to send the register
request to.
retry (boolean): Whether or not we want to reset the current number
of registration retries to 0.
Returns:
None
Examples:
>>> address
('192.168.0.20', 40080)
"""
logger.debug("<%s> Sending REGISTER request to: %s" % (str(self.cuuid),
str(address)))
if not self.listener.listening:
logger.warning("Neteria client is not listening.")
# Construct the message to send
message = {"method": "REGISTER", "cuuid": str(self.cuuid)}
# If we have encryption enabled, send our public key with our REGISTER
# request
if self.encryption:
message["encryption"] = [self.encryption.n, self.encryption.e]
# Send a REGISTER to the server
self.listener.send_datagram(
serialize_data(message, self.compression,
encryption=False), address)
if retry:
# Reset the current number of REGISTER retries
self.register_retries = 0
# Schedule a task to run in x seconds to check to see if we've timed
# out in receiving a response from the server
self.listener.call_later(
self.timeout, self.retransmit, {"method": "REGISTER",
"address": address}) | [
"def",
"register",
"(",
"self",
",",
"address",
",",
"retry",
"=",
"True",
")",
":",
"logger",
".",
"debug",
"(",
"\"<%s> Sending REGISTER request to: %s\"",
"%",
"(",
"str",
"(",
"self",
".",
"cuuid",
")",
",",
"str",
"(",
"address",
")",
")",
")",
"if",
"not",
"self",
".",
"listener",
".",
"listening",
":",
"logger",
".",
"warning",
"(",
"\"Neteria client is not listening.\"",
")",
"# Construct the message to send",
"message",
"=",
"{",
"\"method\"",
":",
"\"REGISTER\"",
",",
"\"cuuid\"",
":",
"str",
"(",
"self",
".",
"cuuid",
")",
"}",
"# If we have encryption enabled, send our public key with our REGISTER",
"# request",
"if",
"self",
".",
"encryption",
":",
"message",
"[",
"\"encryption\"",
"]",
"=",
"[",
"self",
".",
"encryption",
".",
"n",
",",
"self",
".",
"encryption",
".",
"e",
"]",
"# Send a REGISTER to the server",
"self",
".",
"listener",
".",
"send_datagram",
"(",
"serialize_data",
"(",
"message",
",",
"self",
".",
"compression",
",",
"encryption",
"=",
"False",
")",
",",
"address",
")",
"if",
"retry",
":",
"# Reset the current number of REGISTER retries",
"self",
".",
"register_retries",
"=",
"0",
"# Schedule a task to run in x seconds to check to see if we've timed",
"# out in receiving a response from the server",
"self",
".",
"listener",
".",
"call_later",
"(",
"self",
".",
"timeout",
",",
"self",
".",
"retransmit",
",",
"{",
"\"method\"",
":",
"\"REGISTER\"",
",",
"\"address\"",
":",
"address",
"}",
")"
] | This function will send a register packet to the discovered Neteria
server.
Args:
address (tuple): A tuple of the (address, port) to send the register
request to.
retry (boolean): Whether or not we want to reset the current number
of registration retries to 0.
Returns:
None
Examples:
>>> address
('192.168.0.20', 40080) | [
"This",
"function",
"will",
"send",
"a",
"register",
"packet",
"to",
"the",
"discovered",
"Neteria",
"server",
"."
] | 1a8c976eb2beeca0a5a272a34ac58b2c114495a4 | https://github.com/ShadowBlip/Neteria/blob/1a8c976eb2beeca0a5a272a34ac58b2c114495a4/neteria/client.py#L363-L408 | train |
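If the server address is already known, `register` can be called directly instead of broadcasting; continuing the `client` sketch above, with the address tuple taken from the docstring example.

```python
client.register(("192.168.0.20", 40080))   # retried up to max_retries if the server does not answer
```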
ShadowBlip/Neteria | neteria/client.py | NeteriaClient.event | def event(self, event_data, priority="normal", event_method="EVENT"):
"""This function will send event packets to the server. This is the
main method you would use to send data from your application to the
server.
Whenever an event is sent to the server, a universally unique event id
(euuid) is created for each event and stored in the "event_uuids"
dictionary. This dictionary contains a list of all events that are
currently waiting for a response from the server. The event will only
be removed from this dictionary if the server responds with LEGAL or
ILLEGAL or if the request times out.
Args:
event_data (dict): The event data to send to the server. This data
will be passed through the server's middleware to determine if the
event is legal or not, and then processed by the server it is legal
priority (string): The event's priority informs the server of whether
or not the client is going to wait for a confirmation message from
the server indicating whether its event was LEGAL or ILLEGAL.
Setting this to "normal" informs the server that the client will
wait for a response from the server before processing the event.
Setting this to "high" informs the server that the client will NOT
wait for a response. Defaults to "normal".
event_method (string): The type of event to send to the server. Valid
methods are "EVENT", "AUTH". Defaults to "EVENT".
Returns:
A universally unique identifier (uuid) of the event.
Examples:
>>> event_data
>>> priority
"""
logger.debug("event: " + str(event_data))
# Generate an event UUID for this event
euuid = uuid.uuid1()
logger.debug("<%s> <euuid:%s> Sending event data to server: "
"%s" % (str(self.cuuid), str(euuid), str(self.server)))
if not self.listener.listening:
logger.warning("Neteria client is not listening.")
# If we're not even registered, don't even bother.
if not self.registered:
logger.warning("<%s> <euuid:%s> Client is currently not registered. "
"Event not sent." % (str(self.cuuid), str(euuid)))
return False
# Send the event data to the server
packet = {"method": event_method,
"cuuid": str(self.cuuid),
"euuid": str(euuid),
"event_data": event_data,
"timestamp": str(datetime.now()),
"retry": 0,
"priority": priority}
self.listener.send_datagram(
serialize_data(packet, self.compression,
self.encryption, self.server_key),
self.server)
logger.debug("<%s> Sending EVENT Packet: %s" % (str(self.cuuid),
pformat(packet)))
# Set the sent event to our event buffer to see if we need to roll back
# or anything
self.event_uuids[str(euuid)] = packet
# Now we need to reschedule a timeout/retransmit check
logger.debug("<%s> Scheduling retry in %s seconds" % (str(self.cuuid),
str(self.timeout)))
self.listener.call_later(self.timeout, self.retransmit, packet)
return euuid | python | def event(self, event_data, priority="normal", event_method="EVENT"):
"""This function will send event packets to the server. This is the
main method you would use to send data from your application to the
server.
Whenever an event is sent to the server, a universally unique event id
(euuid) is created for each event and stored in the "event_uuids"
dictionary. This dictionary contains a list of all events that are
currently waiting for a response from the server. The event will only
be removed from this dictionary if the server responds with LEGAL or
ILLEGAL or if the request times out.
Args:
event_data (dict): The event data to send to the server. This data
will be passed through the server's middleware to determine if the
event is legal or not, and then processed by the server it is legal
priority (string): The event's priority informs the server of whether
or not the client is going to wait for a confirmation message from
the server indicating whether its event was LEGAL or ILLEGAL.
Setting this to "normal" informs the server that the client will
wait for a response from the server before processing the event.
Setting this to "high" informs the server that the client will NOT
wait for a response. Defaults to "normal".
event_method (string): The type of event to send to the server. Valid
methods are "EVENT", "AUTH". Defaults to "EVENT".
Returns:
A universally unique identifier (uuid) of the event.
Examples:
>>> event_data
>>> priority
"""
logger.debug("event: " + str(event_data))
# Generate an event UUID for this event
euuid = uuid.uuid1()
logger.debug("<%s> <euuid:%s> Sending event data to server: "
"%s" % (str(self.cuuid), str(euuid), str(self.server)))
if not self.listener.listening:
logger.warning("Neteria client is not listening.")
# If we're not even registered, don't even bother.
if not self.registered:
logger.warning("<%s> <euuid:%s> Client is currently not registered. "
"Event not sent." % (str(self.cuuid), str(euuid)))
return False
# Send the event data to the server
packet = {"method": event_method,
"cuuid": str(self.cuuid),
"euuid": str(euuid),
"event_data": event_data,
"timestamp": str(datetime.now()),
"retry": 0,
"priority": priority}
self.listener.send_datagram(
serialize_data(packet, self.compression,
self.encryption, self.server_key),
self.server)
logger.debug("<%s> Sending EVENT Packet: %s" % (str(self.cuuid),
pformat(packet)))
# Set the sent event to our event buffer to see if we need to roll back
# or anything
self.event_uuids[str(euuid)] = packet
# Now we need to reschedule a timeout/retransmit check
logger.debug("<%s> Scheduling retry in %s seconds" % (str(self.cuuid),
str(self.timeout)))
self.listener.call_later(self.timeout, self.retransmit, packet)
return euuid | [
"def",
"event",
"(",
"self",
",",
"event_data",
",",
"priority",
"=",
"\"normal\"",
",",
"event_method",
"=",
"\"EVENT\"",
")",
":",
"logger",
".",
"debug",
"(",
"\"event: \"",
"+",
"str",
"(",
"event_data",
")",
")",
"# Generate an event UUID for this event",
"euuid",
"=",
"uuid",
".",
"uuid1",
"(",
")",
"logger",
".",
"debug",
"(",
"\"<%s> <euuid:%s> Sending event data to server: \"",
"\"%s\"",
"%",
"(",
"str",
"(",
"self",
".",
"cuuid",
")",
",",
"str",
"(",
"euuid",
")",
",",
"str",
"(",
"self",
".",
"server",
")",
")",
")",
"if",
"not",
"self",
".",
"listener",
".",
"listening",
":",
"logger",
".",
"warning",
"(",
"\"Neteria client is not listening.\"",
")",
"# If we're not even registered, don't even bother.",
"if",
"not",
"self",
".",
"registered",
":",
"logger",
".",
"warning",
"(",
"\"<%s> <euuid:%s> Client is currently not registered. \"",
"\"Event not sent.\"",
"%",
"(",
"str",
"(",
"self",
".",
"cuuid",
")",
",",
"str",
"(",
"euuid",
")",
")",
")",
"return",
"False",
"# Send the event data to the server",
"packet",
"=",
"{",
"\"method\"",
":",
"event_method",
",",
"\"cuuid\"",
":",
"str",
"(",
"self",
".",
"cuuid",
")",
",",
"\"euuid\"",
":",
"str",
"(",
"euuid",
")",
",",
"\"event_data\"",
":",
"event_data",
",",
"\"timestamp\"",
":",
"str",
"(",
"datetime",
".",
"now",
"(",
")",
")",
",",
"\"retry\"",
":",
"0",
",",
"\"priority\"",
":",
"priority",
"}",
"self",
".",
"listener",
".",
"send_datagram",
"(",
"serialize_data",
"(",
"packet",
",",
"self",
".",
"compression",
",",
"self",
".",
"encryption",
",",
"self",
".",
"server_key",
")",
",",
"self",
".",
"server",
")",
"logger",
".",
"debug",
"(",
"\"<%s> Sending EVENT Packet: %s\"",
"%",
"(",
"str",
"(",
"self",
".",
"cuuid",
")",
",",
"pformat",
"(",
"packet",
")",
")",
")",
"# Set the sent event to our event buffer to see if we need to roll back",
"# or anything",
"self",
".",
"event_uuids",
"[",
"str",
"(",
"euuid",
")",
"]",
"=",
"packet",
"# Now we need to reschedule a timeout/retransmit check",
"logger",
".",
"debug",
"(",
"\"<%s> Scheduling retry in %s seconds\"",
"%",
"(",
"str",
"(",
"self",
".",
"cuuid",
")",
",",
"str",
"(",
"self",
".",
"timeout",
")",
")",
")",
"self",
".",
"listener",
".",
"call_later",
"(",
"self",
".",
"timeout",
",",
"self",
".",
"retransmit",
",",
"packet",
")",
"return",
"euuid"
] | This function will send event packets to the server. This is the
main method you would use to send data from your application to the
server.
Whenever an event is sent to the server, a universally unique event id
(euuid) is created for each event and stored in the "event_uuids"
dictionary. This dictionary contains a list of all events that are
currently waiting for a response from the server. The event will only
be removed from this dictionary if the server responds with LEGAL or
ILLEGAL or if the request times out.
Args:
event_data (dict): The event data to send to the server. This data
will be passed through the server's middleware to determine if the
event is legal or not, and then processed by the server it is legal
priority (string): The event's priority informs the server of whether
or not the client is going to wait for a confirmation message from
the server indicating whether its event was LEGAL or ILLEGAL.
Setting this to "normal" informs the server that the client will
wait for a response from the server before processing the event.
Setting this to "high" informs the server that the client will NOT
wait for a response. Defaults to "normal".
event_method (string): The type of event to send to the server. Valid
methods are "EVENT", "AUTH". Defaults to "EVENT".
Returns:
A universally unique identifier (uuid) of the event.
Examples:
>>> event_data
>>> priority | [
"This",
"function",
"will",
"send",
"event",
"packets",
"to",
"the",
"server",
".",
"This",
"is",
"the",
"main",
"method",
"you",
"would",
"use",
"to",
"send",
"data",
"from",
"your",
"application",
"to",
"the",
"server",
"."
] | 1a8c976eb2beeca0a5a272a34ac58b2c114495a4 | https://github.com/ShadowBlip/Neteria/blob/1a8c976eb2beeca0a5a272a34ac58b2c114495a4/neteria/client.py#L411-L487 | train |
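An event-sending sketch continuing the same `client`; the payload dictionary is application-defined and purely illustrative.

```python
# The payload below is an illustration only; Neteria does not prescribe its contents.
euuid = client.event({"action": "move", "direction": "north"})

# Later: the euuid stays in client.event_uuids until the server answers (or retries run out);
# events ruled ILLEGAL are moved to client.event_rollbacks for the application to undo.
if str(euuid) in client.event_rollbacks:
    print("server rejected event", euuid)
```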
ShadowBlip/Neteria | neteria/client.py | NeteriaClient.legal_check | def legal_check(self, message):
"""This method handles event legality check messages from the server.
Args:
message (dict): The unserialized legality dictionary received from
the server.
Returns:
None
Examples:
>>> message
"""
# If the event was legal, remove it from our event buffer
if message["method"] == "LEGAL":
logger.debug("<%s> <euuid:%s> Event LEGAL" % (str(self.cuuid),
message["euuid"]))
logger.debug("<%s> <euuid:%s> Removing event from event "
"buffer." % (str(self.cuuid), message["euuid"]))
# If the message was a high priority, then we keep track of legal
# events too
if message["priority"] == "high":
self.event_confirmations[
message["euuid"]] = self.event_uuids[message["euuid"]]
logger.debug("<%s> <euuid:%s> Event was high priority. Adding "
"to confirmations buffer." % (str(self.cuuid),
message["euuid"]))
logger.debug("<%s> <euuid:%s> Current event confirmation "
"buffer: %s" % (str(self.cuuid),
message["euuid"],
pformat(self.event_confirmations)))
# Try and remove the event from the currently processing events
try:
del self.event_uuids[message["euuid"]]
except KeyError:
logger.warning("<%s> <euuid:%s> Euuid does not exist in event "
"buffer. Key was removed before we could process "
"it." % (str(self.cuuid), message["euuid"]))
# If the event was illegal, remove it from our event buffer and add it
# to our rollback list
elif message["method"] == "ILLEGAL":
logger.debug("<%s> <euuid:%s> Event ILLEGAL" % (str(self.cuuid),
message["euuid"]))
logger.debug("<%s> <euuid:%s> Removing event from event buffer and "
"adding to rollback buffer." % (str(self.cuuid),
message["euuid"]))
self.event_rollbacks[
message["euuid"]] = self.event_uuids[message["euuid"]]
del self.event_uuids[message["euuid"]] | python | def legal_check(self, message):
"""This method handles event legality check messages from the server.
Args:
message (dict): The unserialized legality dictionary received from
the server.
Returns:
None
Examples:
>>> message
"""
# If the event was legal, remove it from our event buffer
if message["method"] == "LEGAL":
logger.debug("<%s> <euuid:%s> Event LEGAL" % (str(self.cuuid),
message["euuid"]))
logger.debug("<%s> <euuid:%s> Removing event from event "
"buffer." % (str(self.cuuid), message["euuid"]))
# If the message was a high priority, then we keep track of legal
# events too
if message["priority"] == "high":
self.event_confirmations[
message["euuid"]] = self.event_uuids[message["euuid"]]
logger.debug("<%s> <euuid:%s> Event was high priority. Adding "
"to confirmations buffer." % (str(self.cuuid),
message["euuid"]))
logger.debug("<%s> <euuid:%s> Current event confirmation "
"buffer: %s" % (str(self.cuuid),
message["euuid"],
pformat(self.event_confirmations)))
# Try and remove the event from the currently processing events
try:
del self.event_uuids[message["euuid"]]
except KeyError:
logger.warning("<%s> <euuid:%s> Euuid does not exist in event "
"buffer. Key was removed before we could process "
"it." % (str(self.cuuid), message["euuid"]))
# If the event was illegal, remove it from our event buffer and add it
# to our rollback list
elif message["method"] == "ILLEGAL":
logger.debug("<%s> <euuid:%s> Event ILLEGAL" % (str(self.cuuid),
message["euuid"]))
logger.debug("<%s> <euuid:%s> Removing event from event buffer and "
"adding to rollback buffer." % (str(self.cuuid),
message["euuid"]))
self.event_rollbacks[
message["euuid"]] = self.event_uuids[message["euuid"]]
del self.event_uuids[message["euuid"]] | [
"def",
"legal_check",
"(",
"self",
",",
"message",
")",
":",
"# If the event was legal, remove it from our event buffer",
"if",
"message",
"[",
"\"method\"",
"]",
"==",
"\"LEGAL\"",
":",
"logger",
".",
"debug",
"(",
"\"<%s> <euuid:%s> Event LEGAL\"",
"%",
"(",
"str",
"(",
"self",
".",
"cuuid",
")",
",",
"message",
"[",
"\"euuid\"",
"]",
")",
")",
"logger",
".",
"debug",
"(",
"\"<%s> <euuid:%s> Removing event from event \"",
"\"buffer.\"",
"%",
"(",
"str",
"(",
"self",
".",
"cuuid",
")",
",",
"message",
"[",
"\"euuid\"",
"]",
")",
")",
"# If the message was a high priority, then we keep track of legal",
"# events too",
"if",
"message",
"[",
"\"priority\"",
"]",
"==",
"\"high\"",
":",
"self",
".",
"event_confirmations",
"[",
"message",
"[",
"\"euuid\"",
"]",
"]",
"=",
"self",
".",
"event_uuids",
"[",
"message",
"[",
"\"euuid\"",
"]",
"]",
"logger",
".",
"debug",
"(",
"\"<%s> <euuid:%s> Event was high priority. Adding \"",
"\"to confirmations buffer.\"",
"%",
"(",
"str",
"(",
"self",
".",
"cuuid",
")",
",",
"message",
"[",
"\"euuid\"",
"]",
")",
")",
"logger",
".",
"debug",
"(",
"\"<%s> <euuid:%s> Current event confirmation \"",
"\"buffer: %s\"",
"%",
"(",
"str",
"(",
"self",
".",
"cuuid",
")",
",",
"message",
"[",
"\"euuid\"",
"]",
",",
"pformat",
"(",
"self",
".",
"event_confirmations",
")",
")",
")",
"# Try and remove the event from the currently processing events",
"try",
":",
"del",
"self",
".",
"event_uuids",
"[",
"message",
"[",
"\"euuid\"",
"]",
"]",
"except",
"KeyError",
":",
"logger",
".",
"warning",
"(",
"\"<%s> <euuid:%s> Euuid does not exist in event \"",
"\"buffer. Key was removed before we could process \"",
"\"it.\"",
"%",
"(",
"str",
"(",
"self",
".",
"cuuid",
")",
",",
"message",
"[",
"\"euuid\"",
"]",
")",
")",
"# If the event was illegal, remove it from our event buffer and add it",
"# to our rollback list",
"elif",
"message",
"[",
"\"method\"",
"]",
"==",
"\"ILLEGAL\"",
":",
"logger",
".",
"debug",
"(",
"\"<%s> <euuid:%s> Event ILLEGAL\"",
"%",
"(",
"str",
"(",
"self",
".",
"cuuid",
")",
",",
"message",
"[",
"\"euuid\"",
"]",
")",
")",
"logger",
".",
"debug",
"(",
"\"<%s> <euuid:%s> Removing event from event buffer and \"",
"\"adding to rollback buffer.\"",
"%",
"(",
"str",
"(",
"self",
".",
"cuuid",
")",
",",
"message",
"[",
"\"euuid\"",
"]",
")",
")",
"self",
".",
"event_rollbacks",
"[",
"message",
"[",
"\"euuid\"",
"]",
"]",
"=",
"self",
".",
"event_uuids",
"[",
"message",
"[",
"\"euuid\"",
"]",
"]",
"del",
"self",
".",
"event_uuids",
"[",
"message",
"[",
"\"euuid\"",
"]",
"]"
] | This method handles event legality check messages from the server.
Args:
message (dict): The unserialized legality dictionary received from
the server.
Returns:
None
Examples:
>>> message | [
"This",
"method",
"handles",
"event",
"legality",
"check",
"messages",
"from",
"the",
"server",
"."
] | 1a8c976eb2beeca0a5a272a34ac58b2c114495a4 | https://github.com/ShadowBlip/Neteria/blob/1a8c976eb2beeca0a5a272a34ac58b2c114495a4/neteria/client.py#L490-L543 | train |
Locu-Unofficial/locu-python | locu/api.py | VenueApiClient.search | def search(self, category = None, cuisine = None, location = (None, None), radius = None, tl_coord = (None, None), \
br_coord = (None, None), name = None, country = None, locality = None, \
region = None, postal_code = None, street_address = None,\
website_url = None, has_menu = None, open_at = None):
"""
Locu Venue Search API Call Wrapper
Args:
*Note that none of the arguments are required
category : List of category types that need to be filtered by: ['restaurant', 'spa', 'beauty salon', 'gym', 'laundry', 'hair care', 'other']
type : [string]
cuisine : List of cuisine types that need to be filtered by: ['american', 'italian', ...]
type : [string]
location : Tuple that consists of (latitude, longtitude) coordinates
type : tuple(float, float)
radius : Radius around the given lat, long
type : float
tl_coord : Tuple that consists of (latitude, longtitude) for bounding box top left coordinates
type : tuple(float, float)
br_coord : Tuple that consists of (latitude, longtitude) for bounding box bottom right coordinates
type : tuple(float, float)
name : Name of the venue
type : string
country : Country where venue is located
type : string
locality : Locality. Ex 'San Francisco'
type : string
region : Region/state. Ex. 'CA'
type : string
postal_code : Postal code
type : string
street_address : Address
type : string
open_at : Search for venues open at the specified time
type : datetime
website_url : Filter by the a website url
type : string
has_menu : Filter venues that have menus in them
type : boolean
Returns:
A dictionary with a data returned by the server
Raises:
HttpException with the error message from the server
"""
params = self._get_params(category = category, cuisine = cuisine, location = location, radius = radius, tl_coord = tl_coord, \
br_coord = br_coord, name = name, country = country, locality = locality, \
region = region, postal_code = postal_code, street_address = street_address, \
website_url = website_url, has_menu = has_menu, open_at = open_at)
return self._create_query('search', params) | python | def search(self, category = None, cuisine = None, location = (None, None), radius = None, tl_coord = (None, None), \
br_coord = (None, None), name = None, country = None, locality = None, \
region = None, postal_code = None, street_address = None,\
website_url = None, has_menu = None, open_at = None):
"""
Locu Venue Search API Call Wrapper
Args:
*Note that none of the arguments are required
category : List of category types that need to be filtered by: ['restaurant', 'spa', 'beauty salon', 'gym', 'laundry', 'hair care', 'other']
type : [string]
cuisine : List of cuisine types that need to be filtered by: ['american', 'italian', ...]
type : [string]
location : Tuple that consists of (latitude, longtitude) coordinates
type : tuple(float, float)
radius : Radius around the given lat, long
type : float
tl_coord : Tuple that consists of (latitude, longtitude) for bounding box top left coordinates
type : tuple(float, float)
br_coord : Tuple that consists of (latitude, longtitude) for bounding box bottom right coordinates
type : tuple(float, float)
name : Name of the venue
type : string
country : Country where venue is located
type : string
locality : Locality. Ex 'San Francisco'
type : string
region : Region/state. Ex. 'CA'
type : string
postal_code : Postal code
type : string
street_address : Address
type : string
open_at : Search for venues open at the specified time
type : datetime
website_url : Filter by the a website url
type : string
has_menu : Filter venues that have menus in them
type : boolean
Returns:
A dictionary with a data returned by the server
Raises:
HttpException with the error message from the server
"""
params = self._get_params(category = category, cuisine = cuisine, location = location, radius = radius, tl_coord = tl_coord, \
br_coord = br_coord, name = name, country = country, locality = locality, \
region = region, postal_code = postal_code, street_address = street_address, \
website_url = website_url, has_menu = has_menu, open_at = open_at)
return self._create_query('search', params) | [
"def",
"search",
"(",
"self",
",",
"category",
"=",
"None",
",",
"cuisine",
"=",
"None",
",",
"location",
"=",
"(",
"None",
",",
"None",
")",
",",
"radius",
"=",
"None",
",",
"tl_coord",
"=",
"(",
"None",
",",
"None",
")",
",",
"br_coord",
"=",
"(",
"None",
",",
"None",
")",
",",
"name",
"=",
"None",
",",
"country",
"=",
"None",
",",
"locality",
"=",
"None",
",",
"region",
"=",
"None",
",",
"postal_code",
"=",
"None",
",",
"street_address",
"=",
"None",
",",
"website_url",
"=",
"None",
",",
"has_menu",
"=",
"None",
",",
"open_at",
"=",
"None",
")",
":",
"params",
"=",
"self",
".",
"_get_params",
"(",
"category",
"=",
"category",
",",
"cuisine",
"=",
"cuisine",
",",
"location",
"=",
"location",
",",
"radius",
"=",
"radius",
",",
"tl_coord",
"=",
"tl_coord",
",",
"br_coord",
"=",
"br_coord",
",",
"name",
"=",
"name",
",",
"country",
"=",
"country",
",",
"locality",
"=",
"locality",
",",
"region",
"=",
"region",
",",
"postal_code",
"=",
"postal_code",
",",
"street_address",
"=",
"street_address",
",",
"website_url",
"=",
"website_url",
",",
"has_menu",
"=",
"has_menu",
",",
"open_at",
"=",
"open_at",
")",
"return",
"self",
".",
"_create_query",
"(",
"'search'",
",",
"params",
")"
] | Locu Venue Search API Call Wrapper
Args:
*Note that none of the arguments are required
category : List of category types that need to be filtered by: ['restaurant', 'spa', 'beauty salon', 'gym', 'laundry', 'hair care', 'other']
type : [string]
cuisine : List of cuisine types that need to be filtered by: ['american', 'italian', ...]
type : [string]
location : Tuple that consists of (latitude, longtitude) coordinates
type : tuple(float, float)
radius : Radius around the given lat, long
type : float
tl_coord : Tuple that consists of (latitude, longtitude) for bounding box top left coordinates
type : tuple(float, float)
br_coord : Tuple that consists of (latitude, longtitude) for bounding box bottom right coordinates
type : tuple(float, float)
name : Name of the venue
type : string
country : Country where venue is located
type : string
locality : Locality. Ex 'San Francisco'
type : string
region : Region/state. Ex. 'CA'
type : string
postal_code : Postal code
type : string
street_address : Address
type : string
open_at : Search for venues open at the specified time
type : datetime
website_url : Filter by the a website url
type : string
has_menu : Filter venues that have menus in them
type : boolean
Returns:
A dictionary with a data returned by the server
Raises:
HttpException with the error message from the server | [
"Locu",
"Venue",
"Search",
"API",
"Call",
"Wrapper"
] | fcdf136b68333ab7055e623591801dd35df3bc45 | https://github.com/Locu-Unofficial/locu-python/blob/fcdf136b68333ab7055e623591801dd35df3bc45/locu/api.py#L147-L199 | train |
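A minimal usage sketch for the search wrapper in the record above. The import path and the constructor taking an API key string are assumptions based on the repo layout (locu/api.py) and common client conventions; they are not shown in this record.

# Hedged sketch, assuming VenueApiClient is constructed with a Locu API key.
from locu.api import VenueApiClient

client = VenueApiClient("YOUR_LOCU_API_KEY")   # placeholder key
results = client.search(
    category=["restaurant"],
    location=(37.7749, -122.4194),             # San Francisco
    radius=2000.0,
    has_menu=True,
)
for venue in results.get("objects", []):
    print(venue.get("name"), venue.get("website_url"))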
Locu-Unofficial/locu-python | locu/api.py | VenueApiClient.search_next | def search_next(self, obj):
"""
Takes the dictionary that is returned by 'search' or 'search_next' function and gets the next batch of results
Args:
obj: dictionary returned by the 'search' or 'search_next' function
Returns:
A dictionary with a data returned by the server
Raises:
HttpException with the error message from the server
"""
if 'meta' in obj and 'next' in obj['meta'] and obj['meta']['next'] != None:
uri = self.api_url % obj['meta']['next']
header, content = self._http_uri_request(uri)
resp = json.loads(content)
if not self._is_http_response_ok(header):
error = resp.get('error_message', 'Unknown Error')
raise HttpException(header.status, header.reason, error)
return resp
return {} | python | def search_next(self, obj):
"""
Takes the dictionary that is returned by 'search' or 'search_next' function and gets the next batch of results
Args:
obj: dictionary returned by the 'search' or 'search_next' function
Returns:
A dictionary with a data returned by the server
Raises:
HttpException with the error message from the server
"""
if 'meta' in obj and 'next' in obj['meta'] and obj['meta']['next'] != None:
uri = self.api_url % obj['meta']['next']
header, content = self._http_uri_request(uri)
resp = json.loads(content)
if not self._is_http_response_ok(header):
error = resp.get('error_message', 'Unknown Error')
raise HttpException(header.status, header.reason, error)
return resp
return {} | [
"def",
"search_next",
"(",
"self",
",",
"obj",
")",
":",
"if",
"'meta'",
"in",
"obj",
"and",
"'next'",
"in",
"obj",
"[",
"'meta'",
"]",
"and",
"obj",
"[",
"'meta'",
"]",
"[",
"'next'",
"]",
"!=",
"None",
":",
"uri",
"=",
"self",
".",
"api_url",
"%",
"obj",
"[",
"'meta'",
"]",
"[",
"'next'",
"]",
"header",
",",
"content",
"=",
"self",
".",
"_http_uri_request",
"(",
"uri",
")",
"resp",
"=",
"json",
".",
"loads",
"(",
"content",
")",
"if",
"not",
"self",
".",
"_is_http_response_ok",
"(",
"header",
")",
":",
"error",
"=",
"resp",
".",
"get",
"(",
"'error_message'",
",",
"'Unknown Error'",
")",
"raise",
"HttpException",
"(",
"header",
".",
"status",
",",
"header",
".",
"reason",
",",
"error",
")",
"return",
"resp",
"return",
"{",
"}"
] | Takes the dictionary that is returned by 'search' or 'search_next' function and gets the next batch of results
Args:
obj: dictionary returned by the 'search' or 'search_next' function
Returns:
A dictionary with a data returned by the server
Raises:
HttpException with the error message from the server | [
"Takes",
"the",
"dictionary",
"that",
"is",
"returned",
"by",
"search",
"or",
"search_next",
"function",
"and",
"gets",
"the",
"next",
"batch",
"of",
"results"
] | fcdf136b68333ab7055e623591801dd35df3bc45 | https://github.com/Locu-Unofficial/locu-python/blob/fcdf136b68333ab7055e623591801dd35df3bc45/locu/api.py#L201-L222 | train |
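Building on the sketch above, pagination works by feeding each response back into search_next until the wrapper returns an empty dict (which it does once meta.next is None). The client object is the same assumed instance from the previous example.

# Hedged pagination sketch; assumes `client` from the earlier search example.
page = client.search(locality="San Francisco", region="CA")
all_venues = list(page.get("objects", []))
while page:
    page = client.search_next(page)            # returns {} once meta.next is None
    all_venues.extend(page.get("objects", []))
print(len(all_venues), "venues collected")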
Locu-Unofficial/locu-python | locu/api.py | VenueApiClient.get_details | def get_details(self, ids):
"""
Locu Venue Details API Call Wrapper
Args:
list of ids : ids of a particular venues to get insights about. Can process up to 5 ids
"""
if isinstance(ids, list):
if len(ids) > 5:
ids = ids[:5]
id_param = ';'.join(ids) + '/'
else:
ids = str(ids)
id_param = ids + '/'
header, content = self._http_request(id_param)
resp = json.loads(content)
if not self._is_http_response_ok(header):
error = resp.get('error_message', 'Unknown Error')
raise HttpException(header.status, header.reason, error)
return resp | python | def get_details(self, ids):
"""
Locu Venue Details API Call Wrapper
Args:
list of ids : ids of a particular venues to get insights about. Can process up to 5 ids
"""
if isinstance(ids, list):
if len(ids) > 5:
ids = ids[:5]
id_param = ';'.join(ids) + '/'
else:
ids = str(ids)
id_param = ids + '/'
header, content = self._http_request(id_param)
resp = json.loads(content)
if not self._is_http_response_ok(header):
error = resp.get('error_message', 'Unknown Error')
raise HttpException(header.status, header.reason, error)
return resp | [
"def",
"get_details",
"(",
"self",
",",
"ids",
")",
":",
"if",
"isinstance",
"(",
"ids",
",",
"list",
")",
":",
"if",
"len",
"(",
"ids",
")",
">",
"5",
":",
"ids",
"=",
"ids",
"[",
":",
"5",
"]",
"id_param",
"=",
"';'",
".",
"join",
"(",
"ids",
")",
"+",
"'/'",
"else",
":",
"ids",
"=",
"str",
"(",
"ids",
")",
"id_param",
"=",
"ids",
"+",
"'/'",
"header",
",",
"content",
"=",
"self",
".",
"_http_request",
"(",
"id_param",
")",
"resp",
"=",
"json",
".",
"loads",
"(",
"content",
")",
"if",
"not",
"self",
".",
"_is_http_response_ok",
"(",
"header",
")",
":",
"error",
"=",
"resp",
".",
"get",
"(",
"'error_message'",
",",
"'Unknown Error'",
")",
"raise",
"HttpException",
"(",
"header",
".",
"status",
",",
"header",
".",
"reason",
",",
"error",
")",
"return",
"resp"
] | Locu Venue Details API Call Wrapper
Args:
list of ids : ids of a particular venues to get insights about. Can process up to 5 ids | [
"Locu",
"Venue",
"Details",
"API",
"Call",
"Wrapper"
] | fcdf136b68333ab7055e623591801dd35df3bc45 | https://github.com/Locu-Unofficial/locu-python/blob/fcdf136b68333ab7055e623591801dd35df3bc45/locu/api.py#L280-L302 | train |
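A short hedged example for get_details: the wrapper accepts a single id or a list, silently truncating lists longer than five. The ids below are placeholders, not real Locu venue ids.

# Hedged sketch, assuming `client` from the earlier examples; ids are placeholders.
details = client.get_details(["9cd2508687bbb3ff6a49", "4d7a3a54f964a520c44d"])
for obj in details.get("objects", []):
    print(obj.get("name"), "has menu:", obj.get("has_menu"))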
Locu-Unofficial/locu-python | locu/api.py | VenueApiClient.get_menus | def get_menus(self, id):
"""
Given a venue id returns a list of menus associated with a venue
"""
resp = self.get_details([id])
menus = []
for obj in resp['objects']:
if obj['has_menu']:
menus += obj['menus']
return menus | python | def get_menus(self, id):
"""
Given a venue id returns a list of menus associated with a venue
"""
resp = self.get_details([id])
menus = []
for obj in resp['objects']:
if obj['has_menu']:
menus += obj['menus']
return menus | [
"def",
"get_menus",
"(",
"self",
",",
"id",
")",
":",
"resp",
"=",
"self",
".",
"get_details",
"(",
"[",
"id",
"]",
")",
"menus",
"=",
"[",
"]",
"for",
"obj",
"in",
"resp",
"[",
"'objects'",
"]",
":",
"if",
"obj",
"[",
"'has_menu'",
"]",
":",
"menus",
"+=",
"obj",
"[",
"'menus'",
"]",
"return",
"menus"
] | Given a venue id returns a list of menus associated with a venue | [
"Given",
"a",
"venue",
"id",
"returns",
"a",
"list",
"of",
"menus",
"associated",
"with",
"a",
"venue"
] | fcdf136b68333ab7055e623591801dd35df3bc45 | https://github.com/Locu-Unofficial/locu-python/blob/fcdf136b68333ab7055e623591801dd35df3bc45/locu/api.py#L304-L314 | train |
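And a final hedged sketch for get_menus, which flattens the menus of a single venue into one list. The record only shows menus being collected, so the per-menu key used below ("menu_name") is an assumption about the Locu response format.

# Hedged sketch, assuming `client` from the earlier examples; the venue id and
# the "menu_name" key are illustrative assumptions.
menus = client.get_menus("9cd2508687bbb3ff6a49")   # placeholder venue id
for menu in menus:
    print(menu.get("menu_name"))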