Dataset schema (one row per Python function): repo (string, 7-55 chars) | path (string, 4-127 chars) | func_name (string, 1-88 chars) | original_string (string, 75-19.8k chars) | language (1 class: python) | code (string, 75-19.8k chars) | code_tokens (list) | docstring (string, 3-17.3k chars) | docstring_tokens (list) | sha (string, 40 chars) | url (string, 87-242 chars) | partition (1 class: train)
johnnoone/aioconsul | aioconsul/client/kv_endpoint.py | KVOperations.set | python | train

def set(self, key, value, *, flags=None):
    """Sets the Key to the given Value.

    Parameters:
        key (str): Key to set
        value (Payload): Value to set; it will be encoded according to ``flags``
        flags (int): Flags to store with the value
    """
    self.append({
        "Verb": "set",
        "Key": key,
        "Value": encode_value(value, flags, base64=True).decode("utf-8"),
        "Flags": flags
    })
    return self

Source: https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/kv_endpoint.py#L356-L370
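
A short usage sketch of the chaining style this method enables. How the `ops` batch object is obtained from an aioconsul client is an assumption, not something the code above documents:

```python
# Hypothetical setup: `ops` stands in for a KVOperations instance; the
# constructor shown here is an assumption.
ops = KVOperations(api)

# Each set() appends one {"Verb": "set", ...} mapping and returns `ops`,
# so several writes can be queued fluently before execute() runs.
ops.set("config/feature", "on").set("config/limit", "100", flags=42)
```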
johnnoone/aioconsul | aioconsul/client/kv_endpoint.py | KVOperations.cas | python | train

def cas(self, key, value, *, flags=None, index):
    """Sets the Key to the given Value with check-and-set semantics.

    Parameters:
        key (str): Key to set
        value (Payload): Value to set; it will be encoded according to ``flags``
        index (ObjectIndex): Index ID
        flags (int): Flags to store with the value

    The Key will only be set if its current modify index matches the
    supplied Index.
    """
    self.append({
        "Verb": "cas",
        "Key": key,
        "Value": encode_value(value, flags, base64=True).decode("utf-8"),
        "Flags": flags,
        "Index": extract_attr(index, keys=["ModifyIndex", "Index"])
    })
    return self

Source: https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/kv_endpoint.py#L372-L391
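
A check-and-set sketch continuing the example above; the read call that yields an object carrying a ``ModifyIndex`` is an assumption:

```python
# Read first (assumed client call, inside a coroutine) to learn the
# key's current ModifyIndex...
item = await client.kv.get("config/limit")

# ...then queue a write that only applies if that index is unchanged;
# extract_attr() pulls ModifyIndex (or Index) out of the object.
ops.cas("config/limit", "200", index=item)
```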
johnnoone/aioconsul | aioconsul/client/kv_endpoint.py | KVOperations.lock | python | train

def lock(self, key, value, *, flags=None, session):
    """Locks the Key with the given Session.

    Parameters:
        key (str): Key to set
        value (Payload): Value to set; it will be encoded according to ``flags``
        session (ObjectID): Session ID

    The Key will only be set if the lock can be acquired, i.e. it is not
    already held by a different session.
    """
    self.append({
        "Verb": "lock",
        "Key": key,
        "Value": encode_value(value, flags, base64=True).decode("utf-8"),
        "Flags": flags,
        "Session": extract_attr(session, keys=["ID"])
    })
    return self

Source: https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/kv_endpoint.py#L393-L411
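
A sketch pairing the lock verb with a session; session creation mirrors the SessionEndpoint.create method shown later in this dump:

```python
from datetime import timedelta

# inside a coroutine: create a session to own the lock
session = await client.session.create({"TTL": timedelta(seconds=30)})

# queue a lock acquisition: the value is stored and the key is bound
# to the session in one transactional step
ops.lock("service/leader", "node-1", session=session)
```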
johnnoone/aioconsul | aioconsul/client/kv_endpoint.py | KVOperations.check_index | python | train

def check_index(self, key, *, index):
    """Fails the transaction if Key does not have a modify index equal
    to Index.

    Parameters:
        key (str): Key to check
        index (ObjectIndex): Index ID
    """
    self.append({
        "Verb": "check-index",
        "Key": key,
        "Index": extract_attr(index, keys=["ModifyIndex", "Index"])
    })
    return self

Source: https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/kv_endpoint.py#L471-L484
johnnoone/aioconsul | aioconsul/client/kv_endpoint.py | KVOperations.check_session | python | train

def check_session(self, key, *, session=None):
    """Fails the transaction if Key is not currently locked by Session.

    Parameters:
        key (str): Key to check
        session (ObjectID): Session ID
    """
    self.append({
        "Verb": "check-session",
        "Key": key,
        "Session": extract_attr(session, keys=["ID"])
    })
    return self

Source: https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/kv_endpoint.py#L486-L498
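
A sketch combining the two check verbs as transaction guards, continuing the examples above; all key names are illustrative:

```python
# If either precondition fails, Consul rejects the whole transaction
# and none of the queued operations is applied.
ops.check_index("config/limit", index=item) \
   .check_session("service/leader", session=session) \
   .set("config/limit", "300")
```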
johnnoone/aioconsul | aioconsul/client/kv_endpoint.py | KVOperations.execute | python | train

async def execute(self, dc=None, token=None):
    """Execute stored operations.

    Parameters:
        dc (str): Specify datacenter that will be used.
            Defaults to the agent's local datacenter.
        token (ObjectID): Token ID
    Returns:
        Collection: Results of operations.
    Raises:
        TransactionError: Transaction failed
    """
    token_id = extract_attr(token, keys=["ID"])
    try:
        response = await self._api.put(
            "/v1/txn",
            data=self.operations,
            params={
                "dc": dc,
                "token": token_id
            })
    except ConflictError as error:
        errors = {elt["OpIndex"]: elt for elt in error.value["Errors"]}
        operations = [op["KV"] for op in self.operations]
        raise TransactionError(errors, operations, error.meta) from error
    else:
        self.operations[:] = []
        results = []
        for result in response.body["Results"]:
            data = result["KV"]
            if data["Value"] is not None:
                # values come back base64-encoded; decode them in place
                data["Value"] = decode_value(data["Value"], data["Flags"])
            results.append(data)
        return results

Source: https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/kv_endpoint.py#L542-L579
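
A sketch of running the queued batch and handling a rollback; the attribute layout of ``TransactionError`` is assumed from the constructor call above:

```python
# inside a coroutine
try:
    results = await ops.execute(dc="dc1")
    for kv in results:
        # execute() has already base64-decoded each "Value"
        print(kv["Key"], kv["Value"])
except TransactionError as err:
    # built above from {OpIndex: error} plus the attempted operations
    print("transaction rolled back:", err)
```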
tueda/python-form | form/formlink.py | FormLink.write | python | train

def write(self, script):
    # type: (str) -> None
    """Send a script to FORM.

    Write the given script to the communication channel to FORM. It could
    be buffered, so FORM may not execute the sent script until
    :meth:`flush` or :meth:`read` is called.
    """
    if self._closed:
        raise IOError('tried to write to closed connection')
    script = script.strip()
    if script:
        assert self._parentout is not None
        self._parentout.write(script)
        self._parentout.write('\n')

Source: https://github.com/tueda/python-form/blob/1e5a8464f7a7a6cbbb32411fc2ea3615fd48334b/form/formlink.py#L310-L324
tueda/python-form | form/formlink.py | FormLink.flush | python | train

def flush(self):
    # type: () -> None
    """Flush the channel to FORM.

    Flush the communication channel to FORM. Because :meth:`write` is
    buffered and :meth:`read` is a blocking operation, this method is used
    for asynchronous execution of FORM scripts.
    """
    if self._closed:
        raise IOError('tried to flush closed connection')
    assert self._parentout is not None
    self._parentout.flush()

Source: https://github.com/tueda/python-form/blob/1e5a8464f7a7a6cbbb32411fc2ea3615fd48334b/form/formlink.py#L326-L337
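
A sketch of the buffered/asynchronous pattern that `write` and `flush` enable, following python-form's documented `form.open()` usage:

```python
import form  # the python-form package these methods belong to

with form.open() as link:
    link.write('''
        AutoDeclare Vector p;
        Local F = g_(0, p1, p2, p3, p4);
        trace4, 0;
        .sort
    ''')
    link.flush()           # push the buffered script to FORM now
    # ... other Python work can happen while FORM computes ...
    print(link.read('F'))  # read() blocks until FORM replies
```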
whiteclover/dbpy | db/query/delete.py | DeleteQuery.compile | python | train

def compile(self):
    """Compiles the DELETE SQL statement."""
    sql = 'DELETE FROM ' + self.dialect.quote_table(self._table)
    if self._where:
        sql += ' WHERE ' + self.compile_condition(self._where)
    if self._order_by:
        sql += ' ' + self.compile_order_by(self._order_by)
    if self._limit:
        # _limit may be stored as an int; coerce it for concatenation
        sql += ' LIMIT ' + str(self._limit)
    return sql

Source: https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/db/query/delete.py#L40-L51
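
An illustrative sketch of the builder in use. The constructor and `where()` signature are assumptions, and the exact quoting depends on the configured dialect:

```python
q = DeleteQuery('users')          # assumed constructor taking a table name
q.where(('status', 'inactive'))   # assumed WhereQuery-style condition
print(q.compile())
# roughly: DELETE FROM `users` WHERE `status` = 'inactive'
```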
whiteclover/dbpy | db/query/delete.py | DeleteQuery.clear | python | train

def clear(self):
    """Clear and reset to the original state."""
    WhereQuery.clear(self)
    self._table = None
    self._parameters = []
    self._sql = None

Source: https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/db/query/delete.py#L53-L58
bitesofcode/projexui | projexui/widgets/xtreewidget/xloaderitem.py | XLoaderItem.startLoading | python | train

def startLoading(self):
    """
    Updates this item to mark the item as loading. This will create
    a QLabel with the loading ajax spinner to indicate that progress
    is occurring.
    """
    if self._loading:
        return False

    tree = self.treeWidget()
    if not tree:
        return False

    self._loading = True
    self.setText(0, '')

    # create the label for this item
    lbl = QtGui.QLabel(self.treeWidget())
    lbl.setMovie(XLoaderWidget.getMovie())
    lbl.setAlignment(QtCore.Qt.AlignCenter)
    tree.setItemWidget(self, 0, lbl)

    try:
        tree.loadStarted.emit(self)
    except AttributeError:
        pass

    return True

Source: https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xtreewidget/xloaderitem.py#L73-L100
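
A sketch of showing a loader row while data is fetched; the tree wiring is ordinary projexui/PyQt4 boilerplate and the fetch call is hypothetical:

```python
loader = XLoaderItem()
tree.addTopLevelItem(loader)     # `tree` is an XTreeWidget instance
if loader.startLoading():        # swaps the text for a spinner QLabel
    fetch_children_async(tree)   # hypothetical data fetch; on completion,
                                 # remove or finish the loader item
```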
bitesofcode/projexui | projexui/xhistorystack.py | XHistoryStack.emitCurrentChanged | python | train

def emitCurrentChanged(self):
    """
    Emits the current index changed signal, provided signals are not blocked.
    """
    if not self.signalsBlocked():
        self.currentIndexChanged.emit(self.currentIndex())
        self.currentUrlChanged.emit(self.currentUrl())
        self.canGoBackChanged.emit(self.canGoBack())
        self.canGoForwardChanged.emit(self.canGoForward())

Source: https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/xhistorystack.py#L94-L103
bitesofcode/projexui | projexui/xhistorystack.py | XHistoryStack.goHome | python | train

def goHome(self):
    """
    Goes to the home url. If there is no home url specifically set, then
    this will go to the first url in the history. Otherwise, it will
    look to see if the home url is in the stack and go to that level; if
    the home url is not found, it will be pushed to the top of the
    stack using the push method.
    """
    if not self.canGoBack():
        return ''

    if self.homeUrl():
        self.push(self.homeUrl())

    self._blockStack = True
    self._index = 0
    self.emitCurrentChanged()
    self._blockStack = False

    return self.currentUrl()

Source: https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/xhistorystack.py#L141-L159
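
A sketch of the stack in use; `push` and `setHomeUrl` are sibling methods of the same class, and the URLs are illustrative:

```python
stack = XHistoryStack()
stack.setHomeUrl('app://dashboard')
stack.push('app://settings')
stack.push('app://settings/network')

stack.goHome()  # pushes the home url, then jumps to index 0 with signals
                # suppressed until emitCurrentChanged() fires
```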
talkincode/txradius | txradius/openvpn/user_pass_verify.py | cli | python | train

def cli(conf):
    """OpenVPN user_pass_verify method."""
    config = init_config(conf)
    nas_id = config.get('DEFAULT', 'nas_id')
    nas_addr = config.get('DEFAULT', 'nas_addr')
    secret = config.get('DEFAULT', 'radius_secret')
    radius_addr = config.get('DEFAULT', 'radius_addr')
    radius_auth_port = config.getint('DEFAULT', 'radius_auth_port')
    radius_timeout = config.getint('DEFAULT', 'radius_timeout')
    client_config_dir = config.get('DEFAULT', 'client_config_dir')

    # OpenVPN exports the credentials as environment variables
    username = os.environ.get('username')
    req = {'User-Name': username}
    req['CHAP-Challenge'] = get_challenge()
    req['CHAP-Password-Plaintext'] = os.environ.get('password')
    req["NAS-IP-Address"] = nas_addr
    req["NAS-Port-Id"] = '0/0/0:0.0'
    req["NAS-Port"] = 0
    req["Service-Type"] = "Login-User"
    req["NAS-Identifier"] = nas_id
    req["Called-Station-Id"] = '00:00:00:00:00:00'
    req["Calling-Station-Id"] = '00:00:00:00:00:00'
    # req["Framed-IP-Address"] = os.environ.get('ifconfig_pool_remote_ip')
    # log.msg("radius auth: %s" % repr(req))

    def shutdown(exitcode=0):
        reactor.addSystemEventTrigger('after', 'shutdown', os._exit, exitcode)
        reactor.stop()

    def onresp(r):
        if r.code == packet.AccessAccept:
            try:
                # write a per-client config pushing the RADIUS-assigned address
                ccdattrs = []
                userip = get_radius_addr_attr(r, 8)  # attribute 8: Framed-IP-Address
                if userip:
                    ccdattrs.append('ifconfig-push {0} 255.255.255.0'.format(userip))
                with open(os.path.join(client_config_dir, username), 'wb') as ccdfs:
                    ccdfs.write('\n'.join(ccdattrs))
            except Exception:
                traceback.print_exc()
            shutdown(0)
        else:
            shutdown(1)

    def onerr(e):
        log.err(e)
        shutdown(1)

    d = client.send_auth(str(secret), get_dictionary(), radius_addr,
                         authport=radius_auth_port, debug=True, **req)
    d.addCallbacks(onresp, onerr)
    reactor.callLater(radius_timeout, shutdown, 1)
    reactor.run()

Source: https://github.com/talkincode/txradius/blob/b86fdbc9be41183680b82b07d3a8e8ea10926e01/txradius/openvpn/user_pass_verify.py#L24-L78
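
A sketch of the contract OpenVPN fulfills when calling this verifier with `auth-user-pass-verify ... via-env`; the executable name, flag, and paths are illustrative, not taken from the code above:

```python
import os
import subprocess

# OpenVPN exports the credentials as environment variables, then runs the
# verifier; exit status 0 maps to Access-Accept, non-zero to a reject.
env = dict(os.environ, username='alice', password='s3cret')
rc = subprocess.call(['user_pass_verify', '--conf', '/etc/txradius.conf'],
                     env=env)
print('accepted' if rc == 0 else 'rejected')
```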
johnnoone/aioconsul | aioconsul/client/session_endpoint.py | SessionEndpoint.create | python | train

async def create(self, session, *, dc=None):
    """Creates a new session.

    Parameters:
        session (Object): Session definition
        dc (str): Specify datacenter that will be used.
            Defaults to the agent's local datacenter.
    Returns:
        Object: ID of the created session

    The create endpoint is used to initialize a new session.
    Sessions must be associated with a node and may be associated
    with any number of checks.

    The session object must look like::

        {
            "LockDelay": timedelta(seconds=15),
            "Name": "my-service-lock",
            "Node": "foobar",
            "Checks": ["a", "b", "c"],
            "Behavior": "release",
            "TTL": timedelta(seconds=0)
        }

    **LockDelay** can be specified as a duration string using a "s"
    suffix for seconds. The default is 15s.

    **Node** must refer to a node that is already registered, if
    specified. By default, the agent's own node name is used.

    **Name** can be used to provide a human-readable name for the session.

    **Checks** is used to provide a list of associated health checks.
    It is highly recommended that, if you override this list, you include
    the default "serfHealth".

    **Behavior** can be set to either ``release`` or ``delete``.
    This controls the behavior when a session is invalidated.
    By default, this is ``release``, causing any locks that are held to be
    released. Changing this to ``delete`` causes any locks that are held
    to be deleted. ``delete`` is useful for creating ephemeral key/value
    entries.

    **TTL** is a duration string, and like ``LockDelay`` it can use "s"
    as a suffix for seconds. If specified, it must currently be between
    10s and 86400s. When provided, the session is invalidated if it is
    not renewed before the TTL expires. The lowest practical TTL should be
    used to keep the number of managed sessions low.

    When locks are forcibly expired, such as during a leader election,
    sessions may not be reaped for up to double this TTL, so long TTL
    values (>1 hour) should be avoided.
    """
    response = await self._api.put(
        "/v1/session/create",
        data=session,
        params={"dc": dc})
    return response.body

Source: https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/session_endpoint.py#L18-L76
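
A create/destroy round-trip sketch; obtaining `client` (an aioconsul client exposing this session endpoint) is an assumption:

```python
from datetime import timedelta

async def demo(client):
    session = await client.session.create({
        "Name": "my-service-lock",
        "Behavior": "release",
        "TTL": timedelta(seconds=30),
    })
    print(session["ID"])                    # create() returns the ID object
    await client.session.destroy(session)  # accepts the object or a raw ID
```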
johnnoone/aioconsul | aioconsul/client/session_endpoint.py | SessionEndpoint.destroy | python | train

async def destroy(self, session, *, dc=None):
    """Destroys a given session.

    Parameters:
        session (ObjectID): Session ID
        dc (str): Specify datacenter that will be used.
            Defaults to the agent's local datacenter.
    Returns:
        bool: ``True`` on success
    """
    session_id = extract_attr(session, keys=["ID"])
    response = await self._api.put("/v1/session/destroy", session_id,
                                   params={"dc": dc})
    return response.body is True

Source: https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/session_endpoint.py#L78-L91
johnnoone/aioconsul | aioconsul/client/session_endpoint.py | SessionEndpoint.info | python | train

async def info(self, session, *, dc=None, watch=None, consistency=None):
    """Queries a given session.

    Parameters:
        session (ObjectID): Session ID
        dc (str): Specify datacenter that will be used.
            Defaults to the agent's local datacenter.
        watch (Blocking): Do a blocking query
        consistency (Consistency): Force consistency
    Returns:
        ObjectMeta: where value is the queried session
    Raises:
        NotFound: session is absent

    Returns the requested session information within a given datacenter.
    It returns a mapping like this::

        {
            "LockDelay": datetime.timedelta(0, 15),
            "Checks": [
                "serfHealth"
            ],
            "Node": "foobar",
            "ID": "adf4238a-882b-9ddc-4a9d-5b6758e4159e",
            "CreateIndex": 1086449
        }
    """
    session_id = extract_attr(session, keys=["ID"])
    response = await self._api.get("/v1/session/info", session_id,
                                   watch=watch,
                                   consistency=consistency,
                                   params={"dc": dc})
    try:
        result = response.body[0]
    except IndexError:
        meta = extract_meta(response.headers)
        raise NotFound("No session for %r" % session_id, meta=meta)
    return consul(result, meta=extract_meta(response.headers))

Source: https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/session_endpoint.py#L95-L133
johnnoone/aioconsul | aioconsul/client/session_endpoint.py | SessionEndpoint.renew | python | train

async def renew(self, session, *, dc=None):
    """Renews a TTL-based session.

    Parameters:
        session (ObjectID): Session ID
        dc (str): Specify datacenter that will be used.
            Defaults to the agent's local datacenter.
    Returns:
        ObjectMeta: where value is session
    Raises:
        NotFound: session is absent

    The response looks like this::

        {
            "LockDelay": datetime.timedelta(0, 15),
            "Checks": [
                "serfHealth"
            ],
            "Node": "foobar",
            "ID": "adf4238a-882b-9ddc-4a9d-5b6758e4159e",
            "CreateIndex": 1086449,
            "Behavior": "release",
            "TTL": datetime.timedelta(0, 15)
        }

    .. note:: Consul MAY return a TTL value higher than the one
              specified during session creation. This indicates
              the server is under high load and is requesting that
              clients renew less often.
    """
    session_id = extract_attr(session, keys=["ID"])
    response = await self._api.put("/v1/session/renew", session_id,
                                   params={"dc": dc})
    try:
        result = response.body[0]
    except IndexError:
        meta = extract_meta(response.headers)
        raise NotFound("No session for %r" % session_id, meta=meta)
    return consul(result, meta=extract_meta(response.headers))

Source: https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/session_endpoint.py#L198-L237
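
A heartbeat-loop sketch for TTL sessions; the asyncio wiring is illustrative:

```python
import asyncio

async def keep_alive(client, session):
    # Renew at half the TTL; if Consul raises the TTL under load (see the
    # note above), the loop automatically backs off with it.
    while True:
        renewed = await client.session.renew(session)
        await asyncio.sleep(renewed["TTL"].total_seconds() / 2)
```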
adonisnafeh/urlvalidator | urlvalidator/utils.py | _lazy_re_compile | python | train

def _lazy_re_compile(regex, flags=0):
    """Lazily compile a regex with flags."""
    def _compile():
        # Compile the regex if it was not passed pre-compiled.
        if isinstance(regex, str):
            return re.compile(regex, flags)
        else:
            assert not flags, "flags must be empty if regex is passed pre-compiled"
            return regex
    return SimpleLazyObject(_compile)

Source: https://github.com/adonisnafeh/urlvalidator/blob/008438365faa00b2c580e3991068ec6b161c3578/urlvalidator/utils.py#L95-L104
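
A usage sketch showing the laziness; nothing is compiled until the wrapper is first touched:

```python
digits = _lazy_re_compile(r'\d+')  # no re.compile() has run yet
match = digits.search('abc123')    # first use triggers _compile()
print(match.group())               # -> '123'
```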
adonisnafeh/urlvalidator | urlvalidator/utils.py | deconstructible | python | train

def deconstructible(*args, path=None):
    """
    Class decorator that allows the decorated class to be serialized
    by the migrations subsystem.

    The `path` kwarg specifies the import path.
    """
    def decorator(klass):
        def __new__(cls, *args, **kwargs):
            # We capture the arguments to make returning them trivial
            obj = super(klass, cls).__new__(cls)
            obj._constructor_args = (args, kwargs)
            return obj
        klass.__new__ = staticmethod(__new__)
        return klass

    if not args:
        return decorator
    return decorator(*args)

Source: https://github.com/adonisnafeh/urlvalidator/blob/008438365faa00b2c580e3991068ec6b161c3578/urlvalidator/utils.py#L107-L127
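
A usage sketch; the recorded arguments are exactly what the wrapped `__new__` above captures:

```python
@deconstructible
class MinLength:
    def __init__(self, limit):
        self.limit = limit

validator = MinLength(8)
print(validator._constructor_args)  # -> ((8,), {})
```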
bitesofcode/projexui | projexui/menus/xmenu.py | XSearchActionWidget.clear | python | train

def clear(self):
    """
    Clears the text from the search edit.
    """
    self._searchEdit.blockSignals(True)
    self._searchEdit.setText('')
    self._searchEdit.blockSignals(False)

Source: https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/menus/xmenu.py#L150-L156
bitesofcode/projexui | projexui/menus/xmenu.py | XMenu.clearAdvancedActions | python | train

def clearAdvancedActions(self):
    """
    Clears out the advanced action map.
    """
    self._advancedMap.clear()
    margins = list(self.getContentsMargins())
    margins[2] = 0
    self.setContentsMargins(*margins)

Source: https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/menus/xmenu.py#L389-L396
bitesofcode/projexui | projexui/menus/xmenu.py | XMenu.rebuildButtons | python | train

def rebuildButtons(self):
    """
    Rebuilds the buttons for the advanced actions.
    """
    # remove any previously created buttons
    for btn in self.findChildren(XAdvancedButton):
        btn.close()
        btn.setParent(None)
        btn.deleteLater()

    # place one advanced button next to each mapped action
    for standard, advanced in self._advancedMap.items():
        rect = self.actionGeometry(standard)
        btn = XAdvancedButton(self)
        btn.setFixedWidth(22)
        btn.setFixedHeight(rect.height())
        btn.setDefaultAction(advanced)
        btn.setAutoRaise(True)
        btn.move(rect.right() + 1, rect.top())
        btn.show()

        if btn.icon().isNull():
            btn.setIcon(QIcon(resources.find('img/advanced.png')))

        btn.clicked.connect(self.acceptAdvanced)

Source: https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/menus/xmenu.py#L440-L462
bitesofcode/projexui | projexui/widgets/xcalendarwidget/xcalendarscene.py | XCalendarScene.rebuild | python | train

def rebuild(self):
    """
    Rebuilds the information for this scene.
    """
    self._buildData.clear()
    self._dateGrid.clear()
    self._dateTimeGrid.clear()

    curr_min = self._minimumDate
    curr_max = self._maximumDate

    self._maximumDate = QDate()
    self._minimumDate = QDate()

    self.markForRebuild(False)

    # rebuild the month or day/week views
    if self.currentMode() == XCalendarScene.Mode.Month:
        self.rebuildMonth()
    elif self.currentMode() in (XCalendarScene.Mode.Week,
                                XCalendarScene.Mode.Day):
        self.rebuildDays()

    # rebuild the items in the scene
    items = sorted(self.items())
    for item in items:
        item.setPos(0, 0)
        item.hide()

    for item in items:
        if isinstance(item, XCalendarItem):
            item.rebuild()

    # notify listeners if the visible date range changed
    if curr_min != self._minimumDate or curr_max != self._maximumDate:
        parent = self.parent()
        if parent and not parent.signalsBlocked():
            parent.dateRangeChanged.emit(self._minimumDate,
                                         self._maximumDate)
self._maximumDate) | python | def rebuild( self ):
"""
Rebuilds the information for this scene.
"""
self._buildData.clear()
self._dateGrid.clear()
self._dateTimeGrid.clear()
curr_min = self._minimumDate
curr_max = self._maximumDate
self._maximumDate = QDate()
self._minimumDate = QDate()
self.markForRebuild(False)
# rebuilds the month view
if ( self.currentMode() == XCalendarScene.Mode.Month ):
self.rebuildMonth()
elif ( self.currentMode() in (XCalendarScene.Mode.Week,
XCalendarScene.Mode.Day)):
self.rebuildDays()
# rebuild the items in the scene
items = sorted(self.items())
for item in items:
item.setPos(0, 0)
item.hide()
for item in items:
if ( isinstance(item, XCalendarItem) ):
item.rebuild()
if ( curr_min != self._minimumDate or curr_max != self._maximumDate ):
parent = self.parent()
if ( parent and not parent.signalsBlocked() ):
parent.dateRangeChanged.emit(self._minimumDate,
self._maximumDate) | [
"def",
"rebuild",
"(",
"self",
")",
":",
"self",
".",
"_buildData",
".",
"clear",
"(",
")",
"self",
".",
"_dateGrid",
".",
"clear",
"(",
")",
"self",
".",
"_dateTimeGrid",
".",
"clear",
"(",
")",
"curr_min",
"=",
"self",
".",
"_minimumDate",
"curr_max",
"=",
"self",
".",
"_maximumDate",
"self",
".",
"_maximumDate",
"=",
"QDate",
"(",
")",
"self",
".",
"_minimumDate",
"=",
"QDate",
"(",
")",
"self",
".",
"markForRebuild",
"(",
"False",
")",
"# rebuilds the month view\r",
"if",
"(",
"self",
".",
"currentMode",
"(",
")",
"==",
"XCalendarScene",
".",
"Mode",
".",
"Month",
")",
":",
"self",
".",
"rebuildMonth",
"(",
")",
"elif",
"(",
"self",
".",
"currentMode",
"(",
")",
"in",
"(",
"XCalendarScene",
".",
"Mode",
".",
"Week",
",",
"XCalendarScene",
".",
"Mode",
".",
"Day",
")",
")",
":",
"self",
".",
"rebuildDays",
"(",
")",
"# rebuild the items in the scene\r",
"items",
"=",
"sorted",
"(",
"self",
".",
"items",
"(",
")",
")",
"for",
"item",
"in",
"items",
":",
"item",
".",
"setPos",
"(",
"0",
",",
"0",
")",
"item",
".",
"hide",
"(",
")",
"for",
"item",
"in",
"items",
":",
"if",
"(",
"isinstance",
"(",
"item",
",",
"XCalendarItem",
")",
")",
":",
"item",
".",
"rebuild",
"(",
")",
"if",
"(",
"curr_min",
"!=",
"self",
".",
"_minimumDate",
"or",
"curr_max",
"!=",
"self",
".",
"_maximumDate",
")",
":",
"parent",
"=",
"self",
".",
"parent",
"(",
")",
"if",
"(",
"parent",
"and",
"not",
"parent",
".",
"signalsBlocked",
"(",
")",
")",
":",
"parent",
".",
"dateRangeChanged",
".",
"emit",
"(",
"self",
".",
"_minimumDate",
",",
"self",
".",
"_maximumDate",
")"
] | Rebuilds the information for this scene. | [
"Rebuilds",
"the",
"information",
"for",
"this",
"scene",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xcalendarwidget/xcalendarscene.py#L253-L290 | train |
intelsdi-x/snap-plugin-lib-py | snap_plugin/v1/timestamp.py | Timestamp.set | def set(self, time):
"""Sets time in seconds since Epoch
Args:
time (:obj:`float`): time in seconds since Epoch (see time.time())
Returns:
None
"""
self._time = time
self._pb.sec = int(self._time)
self._pb.nsec = int((self._time - self._pb.sec) * 10 ** 9) | python | def set(self, time):
"""Sets time in seconds since Epoch
Args:
time (:obj:`float`): time in seconds since Epoch (see time.time())
Returns:
None
"""
self._time = time
self._pb.sec = int(self._time)
self._pb.nsec = int((self._time - self._pb.sec) * 10 ** 9) | [
"def",
"set",
"(",
"self",
",",
"time",
")",
":",
"self",
".",
"_time",
"=",
"time",
"self",
".",
"_pb",
".",
"sec",
"=",
"int",
"(",
"self",
".",
"_time",
")",
"self",
".",
"_pb",
".",
"nsec",
"=",
"int",
"(",
"(",
"self",
".",
"_time",
"-",
"self",
".",
"_pb",
".",
"sec",
")",
"*",
"10",
"**",
"9",
")"
] | Sets time in seconds since Epoch
Args:
time (:obj:`float`): time in seconds since Epoch (see time.time())
Returns:
None | [
"Sets",
"time",
"in",
"seconds",
"since",
"Epoch"
] | 8da5d00ac5f9d2b48a7239563ac7788209891ca4 | https://github.com/intelsdi-x/snap-plugin-lib-py/blob/8da5d00ac5f9d2b48a7239563ac7788209891ca4/snap_plugin/v1/timestamp.py#L56-L67 | train |
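A standalone check of the second/nanosecond split that Timestamp.set performs; only the arithmetic is reproduced here, the Timestamp class itself is not imported:

import time

now = time.time()                      # float seconds since the Epoch
sec = int(now)                         # whole seconds, as stored in pb.sec
nsec = int((now - sec) * 10 ** 9)      # fractional part in nanoseconds, as stored in pb.nsec
assert 0 <= nsec < 10 ** 9
assert abs((sec + nsec / 10 ** 9) - now) < 1e-6   # round-trips within float precision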
whiteclover/dbpy | db/query/expr.py | Expr.compile | def compile(self, db):
"""Building the sql expression
:param db: the database instance
"""
sql = self.expression
if self.alias:
sql += (' AS ' + db.quote_column(self.alias))
return sql | python | def compile(self, db):
"""Building the sql expression
:param db: the database instance
"""
sql = self.expression
if self.alias:
sql += (' AS ' + db.quote_column(self.alias))
return sql | [
"def",
"compile",
"(",
"self",
",",
"db",
")",
":",
"sql",
"=",
"self",
".",
"expression",
"if",
"self",
".",
"alias",
":",
"sql",
"+=",
"(",
"' AS '",
"+",
"db",
".",
"quote_column",
"(",
"self",
".",
"alias",
")",
")",
"return",
"sql"
] | Building the sql expression
:param db: the database instance | [
"Building",
"the",
"sql",
"expression"
] | 3d9ce85f55cfb39cced22081e525f79581b26b3a | https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/db/query/expr.py#L26-L34 | train |
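A self-contained sketch of the compile contract above; FakeDB stands in for the real dbpy database instance, and ExprSketch simply restates the logic of Expr.compile so the example runs without the package installed:

class FakeDB(object):
    def quote_column(self, name):          # dbpy quotes identifiers per driver; backticks mimic MySQL
        return '`%s`' % name

class ExprSketch(object):                  # hypothetical stand-in mirroring Expr above
    def __init__(self, expression, alias=None):
        self.expression = expression
        self.alias = alias

    def compile(self, db):
        sql = self.expression
        if self.alias:
            sql += ' AS ' + db.quote_column(self.alias)
        return sql

print(ExprSketch('COUNT(*)', 'total').compile(FakeDB()))   # COUNT(*) AS `total`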
johnnoone/aioconsul | aioconsul/client/checks_endpoint.py | ChecksEndpoint.register | async def register(self, check, *, token=None):
"""Registers a new local check
Parameters:
check (Object): Check definition
token (ObjectID): Token ID
Returns:
bool: ``True`` on success
The register endpoint is used to add a new check to the local agent.
Checks may be of script, HTTP, TCP, or TTL type. The agent is
responsible for managing the status of the check and keeping the
Catalog in sync.
The request body must look like::
{
"ID": "mem",
"Name": "Memory utilization",
"Notes": "Ensure we don't oversubscribe memory",
"DeregisterCriticalServiceAfter": "90m",
"Script": "/usr/local/bin/check_mem.py",
"DockerContainerID": "f972c95ebf0e",
"Shell": "/bin/bash",
"HTTP": "http://example.com",
"TCP": "example.com:22",
"Interval": timedelta(seconds=10),
"TTL": timedelta(seconds=15)
}
The **Name** field is mandatory, as is one of **Script**, **HTTP**,
**TCP** or **TTL**. **Script**, **TCP** and **HTTP** also require that
**Interval** be set.
If an **ID** is not provided, it is set to **Name**. You cannot have
duplicate **ID** entries per agent, so it may be necessary to provide
an **ID**.
The **Notes** field is not used internally by Consul and is meant to
be human-readable.
Checks that are associated with a service may also contain an optional
**DeregisterCriticalServiceAfter** field, which is a timeout in the
same duration format as **Interval** and **TTL**. If a check is in the
critical state for more than this configured value, then its
associated service (and all of its associated checks) will
automatically be deregistered. The minimum timeout is 1 minute, and
the process that reaps critical services runs every 30 seconds, so it
may take slightly longer than the configured timeout to trigger the
deregistration. This should generally be configured with a timeout
that's much, much longer than any expected recoverable outage for the
given service.
If a **Script** is provided, the check type is a script, and Consul
will evaluate the script every **Interval** to update the status.
If a **DockerContainerID** is provided, the check is a Docker check,
and Consul will evaluate the script every **Interval** in the given
container using the specified Shell. Note that Shell is currently only
supported for Docker checks.
An **HTTP** check will perform an HTTP GET request against the value of
**HTTP** (expected to be a URL) every **Interval**. If the response is
any 2xx code, the check is passing. If the response is
``429 Too Many Requests``, the check is **warning**.
Otherwise, the check is **critical**.
A **TCP** check will perform a TCP connection attempt against the
value of **TCP** (expected to be an IP/hostname and port combination)
every **Interval**. If the connection attempt is successful, the check
is **passing**. If the connection attempt is unsuccessful, the check
is **critical**. In the case of a hostname that resolves to both IPv4
and IPv6 addresses, an attempt will be made to both addresses, and the
first successful connection attempt will result in a successful check.
If a **TTL** type is used, then the TTL update endpoint must be used
periodically to update the state of the check.
The **ServiceID** field can be provided to associate the registered
check with an existing service provided by the agent.
The **Status** field can be provided to specify the initial state of
the health check.
"""
token_id = extract_attr(token, keys=["ID"])
params = {"token": token_id}
response = await self._api.put("/v1/agent/check/register",
params=params,
data=check)
return response.status == 200 | python | async def register(self, check, *, token=None):
"""Registers a new local check
Parameters:
check (Object): Check definition
token (ObjectID): Token ID
Returns:
bool: ``True`` on success
The register endpoint is used to add a new check to the local agent.
Checks may be of script, HTTP, TCP, or TTL type. The agent is
responsible for managing the status of the check and keeping the
Catalog in sync.
The request body must look like::
{
"ID": "mem",
"Name": "Memory utilization",
"Notes": "Ensure we don't oversubscribe memory",
"DeregisterCriticalServiceAfter": "90m",
"Script": "/usr/local/bin/check_mem.py",
"DockerContainerID": "f972c95ebf0e",
"Shell": "/bin/bash",
"HTTP": "http://example.com",
"TCP": "example.com:22",
"Interval": timedelta(seconds=10),
"TTL": timedelta(seconds=15)
}
The **Name** field is mandatory, as is one of **Script**, **HTTP**,
**TCP** or **TTL**. **Script**, **TCP** and **HTTP** also require that
**Interval** be set.
If an **ID** is not provided, it is set to **Name**. You cannot have
duplicate **ID** entries per agent, so it may be necessary to provide
an **ID**.
The **Notes** field is not used internally by Consul and is meant to
be human-readable.
Checks that are associated with a service may also contain an optional
**DeregisterCriticalServiceAfter** field, which is a timeout in the
same duration format as **Interval** and **TTL**. If a check is in the
critical state for more than this configured value, then its
associated service (and all of its associated checks) will
automatically be deregistered. The minimum timeout is 1 minute, and
the process that reaps critical services runs every 30 seconds, so it
may take slightly longer than the configured timeout to trigger the
deregistration. This should generally be configured with a timeout
that's much, much longer than any expected recoverable outage for the
given service.
If a **Script** is provided, the check type is a script, and Consul
will evaluate the script every **Interval** to update the status.
If a **DockerContainerID** is provided, the check is a Docker check,
and Consul will evaluate the script every **Interval** in the given
container using the specified Shell. Note that Shell is currently only
supported for Docker checks.
An **HTTP** check will perform an HTTP GET request against the value of
**HTTP** (expected to be a URL) every **Interval**. If the response is
any 2xx code, the check is passing. If the response is
``429 Too Many Requests``, the check is **warning**.
Otherwise, the check is **critical**.
A **TCP** check will perform a TCP connection attempt against the
value of **TCP** (expected to be an IP/hostname and port combination)
every **Interval**. If the connection attempt is successful, the check
is **passing**. If the connection attempt is unsuccessful, the check
is **critical**. In the case of a hostname that resolves to both IPv4
and IPv6 addresses, an attempt will be made to both addresses, and the
first successful connection attempt will result in a successful check.
If a **TTL** type is used, then the TTL update endpoint must be used
periodically to update the state of the check.
The **ServiceID** field can be provided to associate the registered
check with an existing service provided by the agent.
The **Status** field can be provided to specify the initial state of
the health check.
"""
token_id = extract_attr(token, keys=["ID"])
params = {"token": token_id}
response = await self._api.put("/v1/agent/check/register",
params=params,
data=check)
return response.status == 200 | [
"async",
"def",
"register",
"(",
"self",
",",
"check",
",",
"*",
",",
"token",
"=",
"None",
")",
":",
"token_id",
"=",
"extract_attr",
"(",
"token",
",",
"keys",
"=",
"[",
"\"ID\"",
"]",
")",
"params",
"=",
"{",
"\"token\"",
":",
"token_id",
"}",
"response",
"=",
"await",
"self",
".",
"_api",
".",
"put",
"(",
"\"/v1/agent/check/register\"",
",",
"params",
"=",
"params",
",",
"data",
"=",
"check",
")",
"return",
"response",
".",
"status",
"==",
"200"
] | Registers a new local check
Parameters:
check (Object): Check definition
token (ObjectID): Token ID
Returns:
bool: ``True`` on success
The register endpoint is used to add a new check to the local agent.
Checks may be of script, HTTP, TCP, or TTL type. The agent is
responsible for managing the status of the check and keeping the
Catalog in sync.
The request body must look like::
{
"ID": "mem",
"Name": "Memory utilization",
"Notes": "Ensure we don't oversubscribe memory",
"DeregisterCriticalServiceAfter": "90m",
"Script": "/usr/local/bin/check_mem.py",
"DockerContainerID": "f972c95ebf0e",
"Shell": "/bin/bash",
"HTTP": "http://example.com",
"TCP": "example.com:22",
"Interval": timedelta(seconds=10),
"TTL": timedelta(seconds=15)
}
The **Name** field is mandatory, as is one of **Script**, **HTTP**,
**TCP** or **TTL**. **Script**, **TCP** and **HTTP** also require that
**Interval** be set.
If an **ID** is not provided, it is set to **Name**. You cannot have
duplicate **ID** entries per agent, so it may be necessary to provide
an **ID**.
The **Notes** field is not used internally by Consul and is meant to
be human-readable.
Checks that are associated with a service may also contain an optional
**DeregisterCriticalServiceAfter** field, which is a timeout in the
same duration format as **Interval** and **TTL**. If a check is in the
critical state for more than this configured value, then its
associated service (and all of its associated checks) will
automatically be deregistered. The minimum timeout is 1 minute, and
the process that reaps critical services runs every 30 seconds, so it
may take slightly longer than the configured timeout to trigger the
deregistration. This should generally be configured with a timeout
that's much, much longer than any expected recoverable outage for the
given service.
If a **Script** is provided, the check type is a script, and Consul
will evaluate the script every **Interval** to update the status.
If a **DockerContainerID** is provided, the check is a Docker check,
and Consul will evaluate the script every **Interval** in the given
container using the specified Shell. Note that Shell is currently only
supported for Docker checks.
An **HTTP** check will perform an HTTP GET request against the value of
**HTTP** (expected to be a URL) every **Interval**. If the response is
any 2xx code, the check is passing. If the response is
``429 Too Many Requests``, the check is **warning**.
Otherwise, the check is **critical**.
A **TCP** check will perform a TCP connection attempt against the
value of **TCP** (expected to be an IP/hostname and port combination)
every **Interval**. If the connection attempt is successful, the check
is **passing**. If the connection attempt is unsuccessful, the check
is **critical**. In the case of a hostname that resolves to both IPv4
and IPv6 addresses, an attempt will be made to both addresses, and the
first successful connection attempt will result in a successful check.
If a **TTL** type is used, then the TTL update endpoint must be used
periodically to update the state of the check.
The **ServiceID** field can be provided to associate the registered
check with an existing service provided by the agent.
The **Status** field can be provided to specify the initial state of
the health check. | [
"Registers",
"a",
"new",
"local",
"check"
] | 02f7a529d7dc2e49bed942111067aa5faf320e90 | https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/checks_endpoint.py#L34-L123 | train |
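A hedged sketch of registering the TTL check described in the docstring above; it assumes an aioconsul client object whose checks attribute is this endpoint (that wiring is not shown in this record) and is written as a coroutine the caller would schedule:

from datetime import timedelta

MEM_CHECK = {
    "ID": "mem",
    "Name": "Memory utilization",
    "Notes": "Ensure we don't oversubscribe memory",
    "TTL": timedelta(seconds=15),
}

async def register_mem_check(client):
    # returns True on HTTP 200, per the endpoint above
    return await client.checks.register(MEM_CHECK)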
johnnoone/aioconsul | aioconsul/client/checks_endpoint.py | ChecksEndpoint.deregister | async def deregister(self, check):
"""Deregisters a local check
Parameters:
check (ObjectID): Check ID
Returns:
bool: ``True`` on success
The agent will take care of deregistering the check from the Catalog.
"""
check_id = extract_attr(check, keys=["CheckID", "ID"])
response = await self._api.get("/v1/agent/check/deregister", check_id)
return response.status == 200 | python | async def deregister(self, check):
"""Deregisters a local check
Parameters:
check (ObjectID): Check ID
Returns:
bool: ``True`` on success
The agent will take care of deregistering the check from the Catalog.
"""
check_id = extract_attr(check, keys=["CheckID", "ID"])
response = await self._api.get("/v1/agent/check/deregister", check_id)
return response.status == 200 | [
"async",
"def",
"deregister",
"(",
"self",
",",
"check",
")",
":",
"check_id",
"=",
"extract_attr",
"(",
"check",
",",
"keys",
"=",
"[",
"\"CheckID\"",
",",
"\"ID\"",
"]",
")",
"response",
"=",
"await",
"self",
".",
"_api",
".",
"get",
"(",
"\"/v1/agent/check/deregister\"",
",",
"check_id",
")",
"return",
"response",
".",
"status",
"==",
"200"
] | Deregisters a local check
Parameters:
check (ObjectID): Check ID
Returns:
bool: ``True`` on success
The agent will take care of deregistering the check from the Catalog. | [
"Deregisters",
"a",
"local",
"check"
] | 02f7a529d7dc2e49bed942111067aa5faf320e90 | https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/checks_endpoint.py#L125-L137 | train |
johnnoone/aioconsul | aioconsul/client/checks_endpoint.py | ChecksEndpoint.mark | async def mark(self, check, status, *, note=None):
"""Marks a local check as passing, warning or critical
"""
check_id = extract_attr(check, keys=["CheckID", "ID"])
data = {
"Status": status,
"Output": note
}
response = await self._api.put("/v1/agent/check/update", check_id,
data=data)
return response.status == 200 | python | async def mark(self, check, status, *, note=None):
"""Marks a local check as passing, warning or critical
"""
check_id = extract_attr(check, keys=["CheckID", "ID"])
data = {
"Status": status,
"Output": note
}
response = await self._api.put("/v1/agent/check/update", check_id,
data=data)
return response.status == 200 | [
"async",
"def",
"mark",
"(",
"self",
",",
"check",
",",
"status",
",",
"*",
",",
"note",
"=",
"None",
")",
":",
"check_id",
"=",
"extract_attr",
"(",
"check",
",",
"keys",
"=",
"[",
"\"CheckID\"",
",",
"\"ID\"",
"]",
")",
"data",
"=",
"{",
"\"Status\"",
":",
"status",
",",
"\"Output\"",
":",
"note",
"}",
"response",
"=",
"await",
"self",
".",
"_api",
".",
"put",
"(",
"\"/v1/agent/check/update\"",
",",
"check_id",
",",
"data",
"=",
"data",
")",
"return",
"response",
".",
"status",
"==",
"200"
] | Marks a local check as passing, warning or critical | [
"Marks",
"a",
"local",
"check",
"as",
"passing",
"warning",
"or",
"critical"
] | 02f7a529d7dc2e49bed942111067aa5faf320e90 | https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/checks_endpoint.py#L172-L182 | train |
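A companion sketch for mark: a TTL check like the one registered above must be refreshed before its TTL lapses or Consul flips it to critical. The status string comes from the docstring; the client object is assumed as before:

async def heartbeat(client):
    # refresh the "mem" TTL check and attach a human-readable note
    return await client.checks.mark("mem", "passing", note="memory at 42%")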
whiteclover/dbpy | samples/orm.py | UserMapper.find_by_username | def find_by_username(self, username):
"""Return user by username if find in database otherwise None"""
data = (db.select(self.table).select('username', 'email', 'real_name',
'password', 'bio', 'status', 'role', 'uid').
condition('username', username).execute()
)
if data:
return self.load(data[0], self.model) | python | def find_by_username(self, username):
"""Return user by username if find in database otherwise None"""
data = (db.select(self.table).select('username', 'email', 'real_name',
'password', 'bio', 'status', 'role', 'uid').
condition('username', username).execute()
)
if data:
return self.load(data[0], self.model) | [
"def",
"find_by_username",
"(",
"self",
",",
"username",
")",
":",
"data",
"=",
"(",
"db",
".",
"select",
"(",
"self",
".",
"table",
")",
".",
"select",
"(",
"'username'",
",",
"'email'",
",",
"'real_name'",
",",
"'password'",
",",
"'bio'",
",",
"'status'",
",",
"'role'",
",",
"'uid'",
")",
".",
"condition",
"(",
"'username'",
",",
"username",
")",
".",
"execute",
"(",
")",
")",
"if",
"data",
":",
"return",
"self",
".",
"load",
"(",
"data",
"[",
"0",
"]",
",",
"self",
".",
"model",
")"
] | Return user by username if found in database otherwise None | [
"Return",
"user",
"by",
"username",
"if",
"find",
"in",
"database",
"otherwise",
"None"
] | 3d9ce85f55cfb39cced22081e525f79581b26b3a | https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/samples/orm.py#L56-L63 | train |
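A hedged sketch of the dbpy query chain this mapper builds on; it runs only once db has been configured with a connection (dbpy's setup call is assumed, not shown in this record):

import db   # dbpy

rows = (db.select('users')
          .select('username', 'email')
          .condition('username', 'alice')
          .execute())
user = rows[0] if rows else None        # mirrors the mapper's "otherwise None" contract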
whiteclover/dbpy | samples/orm.py | UserMapper.search | def search(self, **kw):
"""Find the users match the condition in kw"""
q = db.select(self.table).condition('status', 'active')
for k, v in kw:
q.condition(k, v)
data = q.execute()
users = []
for user in data:
users.append(self.load(user, self.model))
return users | python | def search(self, **kw):
"""Find the users match the condition in kw"""
q = db.select(self.table).condition('status', 'active')
for k, v in kw.items():
q.condition(k, v)
data = q.execute()
users = []
for user in data:
users.append(self.load(user, self.model))
return users | [
"def",
"search",
"(",
"self",
",",
"*",
"*",
"kw",
")",
":",
"q",
"=",
"db",
".",
"select",
"(",
"self",
".",
"table",
")",
".",
"condition",
"(",
"'status'",
",",
"'active'",
")",
"for",
"k",
",",
"v",
"in",
"kw",
":",
"q",
".",
"condition",
"(",
"k",
",",
"v",
")",
"data",
"=",
"q",
".",
"execute",
"(",
")",
"users",
"=",
"[",
"]",
"for",
"user",
"in",
"data",
":",
"users",
".",
"append",
"(",
"self",
".",
"load",
"(",
"user",
",",
"self",
".",
"model",
")",
")",
"return",
"users"
] | Find the users matching the condition in kw | [
"Find",
"the",
"users",
"match",
"the",
"condition",
"in",
"kw"
] | 3d9ce85f55cfb39cced22081e525f79581b26b3a | https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/samples/orm.py#L70-L79 | train |
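The loop above iterates kw.items() rather than kw itself: iterating a dict directly yields only keys, so the k, v unpacking would fail. A short demonstration of the difference:

kw = {'role': 'admin'}
assert list(kw) == ['role']                      # keys only
assert list(kw.items()) == [('role', 'admin')]   # key/value pairs, safe to unpack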
whiteclover/dbpy | samples/orm.py | PostMapper.paginate | def paginate(self, page=1, perpage=10, category=None):
"""Paginate the posts"""
q = db.select(self.table).fields('title', 'slug', 'description', 'html', 'css', 'js',
'category', 'status', 'comments', 'author', 'created', 'pid')
if category:
q.condition('category', category)
results = (q.limit(perpage).offset((page - 1) * perpage)
.order_by('created', 'DESC').execute())
return [self.load(data, self.model) for data in results] | python | def paginate(self, page=1, perpage=10, category=None):
"""Paginate the posts"""
q = db.select(self.table).fields('title', 'slug', 'description', 'html', 'css', 'js',
'category', 'status', 'comments', 'author', 'created', 'pid')
if category:
q.condition('category', category)
results = (q.limit(perpage).offset((page - 1) * perpage)
.order_by('created', 'DESC').execute())
return [self.load(data, self.model) for data in results] | [
"def",
"paginate",
"(",
"self",
",",
"page",
"=",
"1",
",",
"perpage",
"=",
"10",
",",
"category",
"=",
"None",
")",
":",
"q",
"=",
"db",
".",
"select",
"(",
"self",
".",
"table",
")",
".",
"fields",
"(",
"'title'",
",",
"'slug'",
",",
"'description'",
",",
"'html'",
",",
"'css'",
",",
"'js'",
",",
"'category'",
",",
"'status'",
",",
"'comments'",
",",
"'author'",
",",
"'created'",
",",
"'pid'",
")",
"if",
"category",
":",
"q",
".",
"condition",
"(",
"'category'",
",",
"category",
")",
"results",
"=",
"(",
"q",
".",
"limit",
"(",
"perpage",
")",
".",
"offset",
"(",
"(",
"page",
"-",
"1",
")",
"*",
"perpage",
")",
".",
"order_by",
"(",
"'created'",
",",
"'DESC'",
")",
".",
"execute",
"(",
")",
")",
"return",
"[",
"self",
".",
"load",
"(",
"data",
",",
"self",
".",
"model",
")",
"for",
"data",
"in",
"results",
"]"
] | Paginate the posts | [
"Paginate",
"the",
"posts"
] | 3d9ce85f55cfb39cced22081e525f79581b26b3a | https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/samples/orm.py#L124-L132 | train |
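The window arithmetic in paginate is the usual 1-based page to 0-based offset mapping; a standalone check:

perpage = 10
for page in (1, 2, 3):
    offset = (page - 1) * perpage
    print(page, offset)    # page 1 -> rows 0..9, page 2 -> rows 10..19, newest first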
bitesofcode/projexui | projexui/widgets/xnodewidget/xnodescene.py | XNodeScene.clear | def clear( self ):
"""
Clears the current scene of all the items and layers.
"""
self.setCurrentLayer(None)
self._layers = []
self._cache.clear()
super(XNodeScene, self).clear() | python | def clear( self ):
"""
Clears the current scene of all the items and layers.
"""
self.setCurrentLayer(None)
self._layers = []
self._cache.clear()
super(XNodeScene, self).clear() | [
"def",
"clear",
"(",
"self",
")",
":",
"self",
".",
"setCurrentLayer",
"(",
"None",
")",
"self",
".",
"_layers",
"=",
"[",
"]",
"self",
".",
"_cache",
".",
"clear",
"(",
")",
"super",
"(",
"XNodeScene",
",",
"self",
")",
".",
"clear",
"(",
")"
] | Clears the current scene of all the items and layers. | [
"Clears",
"the",
"current",
"scene",
"of",
"all",
"the",
"items",
"and",
"layers",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xnodewidget/xnodescene.py#L535-L544 | train |
bitesofcode/projexui | projexui/widgets/xnodewidget/xnodescene.py | XNodeScene.rebuild | def rebuild( self ):
"""
Rebuilds the grid lines based on the current settings and \
scene width. This method is triggered automatically, and \
shouldn't need to be manually called.
"""
rect = self.sceneRect()
x = rect.left()
y = rect.top()
w = rect.width()
h = rect.height()
# calculate background gridlines
cx = x + (w / 2)
cy = y + (h / 2)
self._centerLines = [QLine(cx, rect.top(), cx, rect.bottom()),
QLine(rect.left(), cy, rect.right(), cy) ]
# create the horizontal grid lines
delta = self.cellHeight()
minor_lines = []
major_lines = []
count = 1
while delta < (h / 2):
pos_line = QLine(x, cy + delta, x + w, cy + delta)
neg_line = QLine(x, cy - delta, x + w, cy - delta)
# every 10th line will be a major line
if count == 10:
major_lines.append(pos_line)
major_lines.append(neg_line)
count = 1
else:
minor_lines.append(pos_line)
minor_lines.append(neg_line)
# update the current y location
delta += self.cellHeight()
count += 1
# create the vertical grid lines
delta = self.cellWidth()
count = 1
while delta < (w / 2):
pos_line = QLine(cx + delta, y, cx + delta, y + h)
neg_line = QLine(cx - delta, y, cx - delta, y + h)
# every 10th line will be a major line
if count == 10:
major_lines.append(pos_line)
major_lines.append(neg_line)
count = 1
else:
minor_lines.append(pos_line)
minor_lines.append(neg_line)
# update the current y location
delta += self.cellWidth()
count += 1
# set the line cache
self._majorLines = major_lines
self._minorLines = minor_lines
# unmark the scene as being dirty
self.setDirty(False) | python | def rebuild( self ):
"""
Rebuilds the grid lines based on the current settings and \
scene width. This method is triggered automatically, and \
shouldn't need to be manually called.
"""
rect = self.sceneRect()
x = rect.left()
y = rect.top()
w = rect.width()
h = rect.height()
# calculate background gridlines
cx = x + (w / 2)
cy = y + (h / 2)
self._centerLines = [QLine(cx, rect.top(), cx, rect.bottom()),
QLine(rect.left(), cy, rect.right(), cy) ]
# create the horizontal grid lines
delta = self.cellHeight()
minor_lines = []
major_lines = []
count = 1
while delta < (h / 2):
pos_line = QLine(x, cy + delta, x + w, cy + delta)
neg_line = QLine(x, cy - delta, x + w, cy - delta)
# every 10th line will be a major line
if count == 10:
major_lines.append(pos_line)
major_lines.append(neg_line)
count = 1
else:
minor_lines.append(pos_line)
minor_lines.append(neg_line)
# update the current y location
delta += self.cellHeight()
count += 1
# create the vertical grid lines
delta = self.cellWidth()
count = 1
while delta < (w / 2):
pos_line = QLine(cx + delta, y, cx + delta, y + h)
neg_line = QLine(cx - delta, y, cx - delta, y + h)
# every 10th line will be a major line
if count == 10:
major_lines.append(pos_line)
major_lines.append(neg_line)
count = 1
else:
minor_lines.append(pos_line)
minor_lines.append(neg_line)
# update the current y location
delta += self.cellWidth()
count += 1
# set the line cache
self._majorLines = major_lines
self._minorLines = minor_lines
# unmark the scene as being dirty
self.setDirty(False) | [
"def",
"rebuild",
"(",
"self",
")",
":",
"rect",
"=",
"self",
".",
"sceneRect",
"(",
")",
"x",
"=",
"rect",
".",
"left",
"(",
")",
"y",
"=",
"rect",
".",
"top",
"(",
")",
"w",
"=",
"rect",
".",
"width",
"(",
")",
"h",
"=",
"rect",
".",
"height",
"(",
")",
"# calculate background gridlines",
"cx",
"=",
"x",
"+",
"(",
"w",
"/",
"2",
")",
"cy",
"=",
"y",
"+",
"(",
"h",
"/",
"2",
")",
"self",
".",
"_centerLines",
"=",
"[",
"QLine",
"(",
"cx",
",",
"rect",
".",
"top",
"(",
")",
",",
"cx",
",",
"rect",
".",
"bottom",
"(",
")",
")",
",",
"QLine",
"(",
"rect",
".",
"left",
"(",
")",
",",
"cy",
",",
"rect",
".",
"right",
"(",
")",
",",
"cy",
")",
"]",
"# create the horizontal grid lines",
"delta",
"=",
"self",
".",
"cellHeight",
"(",
")",
"minor_lines",
"=",
"[",
"]",
"major_lines",
"=",
"[",
"]",
"count",
"=",
"1",
"while",
"delta",
"<",
"(",
"h",
"/",
"2",
")",
":",
"pos_line",
"=",
"QLine",
"(",
"x",
",",
"cy",
"+",
"delta",
",",
"x",
"+",
"w",
",",
"cy",
"+",
"delta",
")",
"neg_line",
"=",
"QLine",
"(",
"x",
",",
"cy",
"-",
"delta",
",",
"x",
"+",
"w",
",",
"cy",
"-",
"delta",
")",
"# every 10th line will be a major line",
"if",
"count",
"==",
"10",
":",
"major_lines",
".",
"append",
"(",
"pos_line",
")",
"major_lines",
".",
"append",
"(",
"neg_line",
")",
"count",
"=",
"1",
"else",
":",
"minor_lines",
".",
"append",
"(",
"pos_line",
")",
"minor_lines",
".",
"append",
"(",
"neg_line",
")",
"# update the current y location",
"delta",
"+=",
"self",
".",
"cellHeight",
"(",
")",
"count",
"+=",
"1",
"# create the vertical grid lines",
"delta",
"=",
"self",
".",
"cellWidth",
"(",
")",
"count",
"=",
"1",
"while",
"delta",
"<",
"(",
"w",
"/",
"2",
")",
":",
"pos_line",
"=",
"QLine",
"(",
"cx",
"+",
"delta",
",",
"y",
",",
"cx",
"+",
"delta",
",",
"y",
"+",
"h",
")",
"neg_line",
"=",
"QLine",
"(",
"cx",
"-",
"delta",
",",
"y",
",",
"cx",
"-",
"delta",
",",
"y",
"+",
"h",
")",
"# every 10th line will be a major line",
"if",
"count",
"==",
"10",
":",
"major_lines",
".",
"append",
"(",
"pos_line",
")",
"major_lines",
".",
"append",
"(",
"neg_line",
")",
"count",
"=",
"1",
"else",
":",
"minor_lines",
".",
"append",
"(",
"pos_line",
")",
"minor_lines",
".",
"append",
"(",
"neg_line",
")",
"# update the current y location",
"delta",
"+=",
"self",
".",
"cellWidth",
"(",
")",
"count",
"+=",
"1",
"# set the line cache",
"self",
".",
"_majorLines",
"=",
"major_lines",
"self",
".",
"_minorLines",
"=",
"minor_lines",
"# unmark the scene as being dirty",
"self",
".",
"setDirty",
"(",
"False",
")"
] | Rebuilds the grid lines based on the current settings and \
scene width. This method is triggered automatically, and \
shouldn't need to be manually called. | [
"Rebuilds",
"the",
"grid",
"lines",
"based",
"on",
"the",
"current",
"settings",
"and",
"\\",
"scene",
"width",
".",
"This",
"method",
"is",
"triggered",
"automatically",
"and",
"\\",
"shouldn",
"t",
"need",
"to",
"be",
"manually",
"called",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xnodewidget/xnodescene.py#L1127-L1193 | train |
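A plain-Python rendition of the gridline cadence rebuild uses, with hypothetical cell size and extent so it runs without Qt; note the counter is reset to 1 and then incremented at the end of the loop, so the first major line lands after nine minors and later majors after eight:

cell, half = 20, 400           # hypothetical cell size and half scene extent
major, minor = [], []
delta, count = cell, 1
while delta < half:
    if count == 10:
        major.extend((+delta, -delta))   # every 10th pair is a major line
        count = 1
    else:
        minor.extend((+delta, -delta))
    delta += cell
    count += 1
print(len(major), len(minor))  # 4 34 with these numbers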
bitesofcode/projexui | projexui/widgets/xnodewidget/xnodescene.py | XNodeScene.selectAll | def selectAll( self ):
"""
Selects all the items in the scene.
"""
currLayer = self._currentLayer
for item in self.items():
layer = item.layer()
if ( layer == currLayer or not layer ):
item.setSelected(True) | python | def selectAll( self ):
"""
Selects all the items in the scene.
"""
currLayer = self._currentLayer
for item in self.items():
layer = item.layer()
if ( layer == currLayer or not layer ):
item.setSelected(True) | [
"def",
"selectAll",
"(",
"self",
")",
":",
"currLayer",
"=",
"self",
".",
"_currentLayer",
"for",
"item",
"in",
"self",
".",
"items",
"(",
")",
":",
"layer",
"=",
"item",
".",
"layer",
"(",
")",
"if",
"(",
"layer",
"==",
"currLayer",
"or",
"not",
"layer",
")",
":",
"item",
".",
"setSelected",
"(",
"True",
")"
] | Selects all the items in the scene. | [
"Selects",
"all",
"the",
"items",
"in",
"the",
"scene",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xnodewidget/xnodescene.py#L1280-L1288 | train |
bitesofcode/projexui | projexui/widgets/xnodewidget/xnodescene.py | XNodeScene.selectInvert | def selectInvert( self ):
"""
Inverts the currently selected items in the scene.
"""
currLayer = self._currentLayer
for item in self.items():
layer = item.layer()
if ( layer == currLayer or not layer ):
item.setSelected(not item.isSelected()) | python | def selectInvert( self ):
"""
Inverts the currently selected items in the scene.
"""
currLayer = self._currentLayer
for item in self.items():
layer = item.layer()
if ( layer == currLayer or not layer ):
item.setSelected(not item.isSelected()) | [
"def",
"selectInvert",
"(",
"self",
")",
":",
"currLayer",
"=",
"self",
".",
"_currentLayer",
"for",
"item",
"in",
"self",
".",
"items",
"(",
")",
":",
"layer",
"=",
"item",
".",
"layer",
"(",
")",
"if",
"(",
"layer",
"==",
"currLayer",
"or",
"not",
"layer",
")",
":",
"item",
".",
"setSelected",
"(",
"not",
"item",
".",
"isSelected",
"(",
")",
")"
] | Inverts the currently selected items in the scene. | [
"Inverts",
"the",
"currently",
"selected",
"items",
"in",
"the",
"scene",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xnodewidget/xnodescene.py#L1290-L1298 | train |
bitesofcode/projexui | projexui/widgets/xnodewidget/xnodescene.py | XNodeScene.selectNone | def selectNone( self ):
"""
Deselects all the items in the scene.
"""
currLayer = self._currentLayer
for item in self.items():
layer = item.layer()
if ( layer == currLayer or not layer ):
item.setSelected(False) | python | def selectNone( self ):
"""
Deselects all the items in the scene.
"""
currLayer = self._currentLayer
for item in self.items():
layer = item.layer()
if ( layer == currLayer or not layer ):
item.setSelected(False) | [
"def",
"selectNone",
"(",
"self",
")",
":",
"currLayer",
"=",
"self",
".",
"_currentLayer",
"for",
"item",
"in",
"self",
".",
"items",
"(",
")",
":",
"layer",
"=",
"item",
".",
"layer",
"(",
")",
"if",
"(",
"layer",
"==",
"currLayer",
"or",
"not",
"layer",
")",
":",
"item",
".",
"setSelected",
"(",
"False",
")"
] | Deselects all the items in the scene. | [
"Deselects",
"all",
"the",
"items",
"in",
"the",
"scene",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xnodewidget/xnodescene.py#L1300-L1308 | train |
bitesofcode/projexui | projexui/widgets/xnodewidget/xnodescene.py | XNodeScene.setViewMode | def setViewMode( self, state = True ):
"""
Starts the view mode for moving around the scene.
"""
if self._viewMode == state:
return
self._viewMode = state
if state:
self._mainView.setDragMode( self._mainView.ScrollHandDrag )
else:
self._mainView.setDragMode( self._mainView.RubberBandDrag )
self.emitViewModeChanged() | python | def setViewMode( self, state = True ):
"""
Starts the view mode for moving around the scene.
"""
if self._viewMode == state:
return
self._viewMode = state
if state:
self._mainView.setDragMode( self._mainView.ScrollHandDrag )
else:
self._mainView.setDragMode( self._mainView.RubberBandDrag )
self.emitViewModeChanged() | [
"def",
"setViewMode",
"(",
"self",
",",
"state",
"=",
"True",
")",
":",
"if",
"self",
".",
"_viewMode",
"==",
"state",
":",
"return",
"self",
".",
"_viewMode",
"=",
"state",
"if",
"state",
":",
"self",
".",
"_mainView",
".",
"setDragMode",
"(",
"self",
".",
"_mainView",
".",
"ScrollHandDrag",
")",
"else",
":",
"self",
".",
"_mainView",
".",
"setDragMode",
"(",
"self",
".",
"_mainView",
".",
"RubberBandDrag",
")",
"self",
".",
"emitViewModeChanged",
"(",
")"
] | Starts the view mode for moving around the scene. | [
"Starts",
"the",
"view",
"mode",
"for",
"moving",
"around",
"the",
"scene",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xnodewidget/xnodescene.py#L1564-L1577 | train |
bitesofcode/projexui | projexui/widgets/xnodewidget/xnodescene.py | XNodeScene.updateIsolated | def updateIsolated( self, force = False ):
"""
Updates the visible state of nodes based on whether or not they are
isolated.
"""
if ( not (self.isolationMode() or force) ):
return
# make sure all nodes are not being hidden because of isolation
if ( not self.isolationMode() ):
for node in self.nodes():
node.setIsolateHidden(False)
return
# make sure all the nodes are visible or hidden based on the selection
selected_nodes = self.selectedNodes()
isolated_nodes = set(selected_nodes)
connections = self.connections()
for connection in connections:
in_node = connection.inputNode()
out_node = connection.outputNode()
if ( in_node in selected_nodes or out_node in selected_nodes ):
isolated_nodes.add(in_node)
isolated_nodes.add(out_node)
for node in self.nodes():
node.setIsolateHidden(not node in isolated_nodes) | python | def updateIsolated( self, force = False ):
"""
Updates the visible state of nodes based on whether or not they are
isolated.
"""
if ( not (self.isolationMode() or force) ):
return
# make sure all nodes are not being hidden because of isolation
if ( not self.isolationMode() ):
for node in self.nodes():
node.setIsolateHidden(False)
return
# make sure all the nodes are visible or hidden based on the selection
selected_nodes = self.selectedNodes()
isolated_nodes = set(selected_nodes)
connections = self.connections()
for connection in connections:
in_node = connection.inputNode()
out_node = connection.outputNode()
if ( in_node in selected_nodes or out_node in selected_nodes ):
isolated_nodes.add(in_node)
isolated_nodes.add(out_node)
for node in self.nodes():
node.setIsolateHidden(not node in isolated_nodes) | [
"def",
"updateIsolated",
"(",
"self",
",",
"force",
"=",
"False",
")",
":",
"if",
"(",
"not",
"(",
"self",
".",
"isolationMode",
"(",
")",
"or",
"force",
")",
")",
":",
"return",
"# make sure all nodes are not being hidden because of isolation",
"if",
"(",
"not",
"self",
".",
"isolationMode",
"(",
")",
")",
":",
"for",
"node",
"in",
"self",
".",
"nodes",
"(",
")",
":",
"node",
".",
"setIsolateHidden",
"(",
"False",
")",
"return",
"# make sure all the nodes are visible or hidden based on the selection",
"selected_nodes",
"=",
"self",
".",
"selectedNodes",
"(",
")",
"isolated_nodes",
"=",
"set",
"(",
"selected_nodes",
")",
"connections",
"=",
"self",
".",
"connections",
"(",
")",
"for",
"connection",
"in",
"connections",
":",
"in_node",
"=",
"connection",
".",
"inputNode",
"(",
")",
"out_node",
"=",
"connection",
".",
"outputNode",
"(",
")",
"if",
"(",
"in_node",
"in",
"selected_nodes",
"or",
"out_node",
"in",
"selected_nodes",
")",
":",
"isolated_nodes",
".",
"add",
"(",
"in_node",
")",
"isolated_nodes",
".",
"add",
"(",
"out_node",
")",
"for",
"node",
"in",
"self",
".",
"nodes",
"(",
")",
":",
"node",
".",
"setIsolateHidden",
"(",
"not",
"node",
"in",
"isolated_nodes",
")"
] | Updates the visible state of nodes based on whether or not they are
isolated. | [
"Updates",
"the",
"visible",
"state",
"of",
"nodes",
"based",
"on",
"whether",
"or",
"not",
"they",
"are",
"isolated",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xnodewidget/xnodescene.py#L1662-L1689 | train |
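The isolation pass above reduces to set arithmetic: keep the selected nodes plus every node that shares a connection with one, hide the rest. A plain-data rendition:

selected = {'a'}
connections = [('a', 'b'), ('b', 'c')]
isolated = set(selected)
for in_node, out_node in connections:
    if in_node in selected or out_node in selected:
        isolated.update((in_node, out_node))
hidden = {n for n in ('a', 'b', 'c') if n not in isolated}
print(sorted(isolated), sorted(hidden))    # ['a', 'b'] ['c']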
pmuller/versions | versions/constraints.py | merge | def merge(constraints):
"""Merge ``constraints``.
It removes duplicate, pruned and merged constraints.
:param constraints: Current constraints.
:type constraints: Iterable of :class:`.Constraint` objects.
:rtype: :func:`list` of :class:`.Constraint` objects.
:raises: :exc:`.ExclusiveConstraints`
"""
# Dictionary :class:`Operator`: set of :class:`Version`.
operators = defaultdict(set)
for constraint in constraints:
operators[constraint.operator].add(constraint.version)
# Get most recent version required by > constraints.
if gt in operators:
gt_ver = sorted(operators[gt])[-1]
else:
gt_ver = None
# Get most recent version required by >= constraints.
if ge in operators:
ge_ver = sorted(operators[ge])[-1]
else:
ge_ver = None
# Get least recent version required by < constraints.
if lt in operators:
lt_ver = sorted(operators[lt])[0]
else:
lt_ver = None
# Get least recent version required by <= constraints.
if le in operators:
le_ver = sorted(operators[le])[0]
else:
le_ver = None
# Most restrictive LT/LE constraint.
l_constraint = None
if le_ver:
if lt_ver:
le_constraint = Constraint(le, le_ver)
lt_constraint = Constraint(lt, lt_ver)
if le_ver < lt_ver:
# <= 1, < 2
l_constraint = le_constraint
l_less_restrictive_c = lt_constraint
else:
# <= 2, < 1
# <= 2, < 2
l_constraint = lt_constraint
l_less_restrictive_c = le_constraint
LOGGER.debug('Removed constraint %s because it is less '
'restrictive than %s', l_less_restrictive_c,
l_constraint)
else:
l_constraint = Constraint(le, le_ver)
elif lt_ver:
l_constraint = Constraint(lt, lt_ver)
# Most restrictive GT/GE constraint.
g_constraint = None
if ge_ver:
if gt_ver:
gt_constraint = Constraint(gt, gt_ver)
ge_constraint = Constraint(ge, ge_ver)
if ge_ver <= gt_ver:
# >= 1, > 2
# >= 2, > 2
g_constraint = gt_constraint
g_less_restrictive_c = ge_constraint
else:
# >= 2, > 1
g_constraint = ge_constraint
g_less_restrictive_c = gt_constraint
LOGGER.debug('Removed constraint %s because it is less '
'restrictive than %s', g_less_restrictive_c,
g_constraint)
else:
g_constraint = Constraint(ge, ge_ver)
elif gt_ver:
g_constraint = Constraint(gt, gt_ver)
# Check if g_constraint and l_constraint are conflicting
if g_constraint and l_constraint:
if g_constraint.version == l_constraint.version:
if g_constraint.operator == ge and l_constraint.operator == le:
# Merge >= and <= constraints on same version to a ==
# constraint
operators[eq].add(g_constraint.version)
LOGGER.debug('Merged constraints: %s and %s into ==%s',
l_constraint, g_constraint, g_constraint.version)
l_constraint, g_constraint = None, None
else:
raise ExclusiveConstraints(g_constraint, [l_constraint])
elif g_constraint.version > l_constraint.version:
raise ExclusiveConstraints(g_constraint, [l_constraint])
ne_constraints = [Constraint(ne, v) for v in operators[ne]]
eq_constraints = [Constraint(eq, v) for v in operators[eq]]
if eq_constraints:
eq_constraint = eq_constraints.pop()
# An eq constraint conflicts with other constraints
if g_constraint or l_constraint or ne_constraints or eq_constraints:
conflict_list = [c for c in (g_constraint, l_constraint) if c]
conflict_list.extend(ne_constraints)
conflict_list.extend(eq_constraints)
raise ExclusiveConstraints(eq_constraint, conflict_list)
return [eq_constraint]
else:
constraints = ne_constraints + [g_constraint, l_constraint]
return [c for c in constraints if c] | python | def merge(constraints):
"""Merge ``constraints``.
It removes duplicate, pruned and merged constraints.
:param constraints: Current constraints.
:type constraints: Iterable of :class:`.Constraint` objects.
:rtype: :func:`list` of :class:`.Constraint` objects.
:raises: :exc:`.ExclusiveConstraints`
"""
# Dictionary :class:`Operator`: set of :class:`Version`.
operators = defaultdict(set)
for constraint in constraints:
operators[constraint.operator].add(constraint.version)
# Get most recent version required by > constraints.
if gt in operators:
gt_ver = sorted(operators[gt])[-1]
else:
gt_ver = None
# Get most recent version required by >= constraints.
if ge in operators:
ge_ver = sorted(operators[ge])[-1]
else:
ge_ver = None
# Get least recent version required by < constraints.
if lt in operators:
lt_ver = sorted(operators[lt])[0]
else:
lt_ver = None
# Get least recent version required by <= constraints.
if le in operators:
le_ver = sorted(operators[le])[0]
else:
le_ver = None
# Most restrictive LT/LE constraint.
l_constraint = None
if le_ver:
if lt_ver:
le_constraint = Constraint(le, le_ver)
lt_constraint = Constraint(lt, lt_ver)
if le_ver < lt_ver:
# <= 1, < 2
l_constraint = le_constraint
l_less_restrictive_c = lt_constraint
else:
# <= 2, < 1
# <= 2, < 2
l_constraint = lt_constraint
l_less_restrictive_c = le_constraint
LOGGER.debug('Removed constraint %s because it is less '
'restrictive than %s', l_less_restrictive_c,
l_constraint)
else:
l_constraint = Constraint(le, le_ver)
elif lt_ver:
l_constraint = Constraint(lt, lt_ver)
# Most restrictive GT/GE constraint.
g_constraint = None
if ge_ver:
if gt_ver:
gt_constraint = Constraint(gt, gt_ver)
ge_constraint = Constraint(ge, ge_ver)
if ge_ver <= gt_ver:
# >= 1, > 2
# >= 2, > 2
g_constraint = gt_constraint
g_less_restrictive_c = ge_constraint
else:
# >= 2, > 1
g_constraint = ge_constraint
g_less_restrictive_c = gt_constraint
LOGGER.debug('Removed constraint %s because it is less '
'restrictive than %s', g_less_restrictive_c,
g_constraint)
else:
g_constraint = Constraint(ge, ge_ver)
elif gt_ver:
g_constraint = Constraint(gt, gt_ver)
# Check if g_constraint and l_constraint are conflicting
if g_constraint and l_constraint:
if g_constraint.version == l_constraint.version:
if g_constraint.operator == ge and l_constraint.operator == le:
# Merge >= and <= constraints on same version to a ==
# constraint
operators[eq].add(g_constraint.version)
LOGGER.debug('Merged constraints: %s and %s into ==%s',
l_constraint, g_constraint, g_constraint.version)
l_constraint, g_constraint = None, None
else:
raise ExclusiveConstraints(g_constraint, [l_constraint])
elif g_constraint.version > l_constraint.version:
raise ExclusiveConstraints(g_constraint, [l_constraint])
ne_constraints = [Constraint(ne, v) for v in operators[ne]]
eq_constraints = [Constraint(eq, v) for v in operators[eq]]
if eq_constraints:
eq_constraint = eq_constraints.pop()
# An eq constraint conflicts with other constraints
if g_constraint or l_constraint or ne_constraints or eq_constraints:
conflict_list = [c for c in (g_constraint, l_constraint) if c]
conflict_list.extend(ne_constraints)
conflict_list.extend(eq_constraints)
raise ExclusiveConstraints(eq_constraint, conflict_list)
return [eq_constraint]
else:
constraints = ne_constraints + [g_constraint, l_constraint]
return [c for c in constraints if c] | [
"def",
"merge",
"(",
"constraints",
")",
":",
"# Dictionary :class:`Operator`: set of :class:`Version`.",
"operators",
"=",
"defaultdict",
"(",
"set",
")",
"for",
"constraint",
"in",
"constraints",
":",
"operators",
"[",
"constraint",
".",
"operator",
"]",
".",
"add",
"(",
"constraint",
".",
"version",
")",
"# Get most recent version required by > constraints.",
"if",
"gt",
"in",
"operators",
":",
"gt_ver",
"=",
"sorted",
"(",
"operators",
"[",
"gt",
"]",
")",
"[",
"-",
"1",
"]",
"else",
":",
"gt_ver",
"=",
"None",
"# Get most recent version required by >= constraints.",
"if",
"ge",
"in",
"operators",
":",
"ge_ver",
"=",
"sorted",
"(",
"operators",
"[",
"ge",
"]",
")",
"[",
"-",
"1",
"]",
"else",
":",
"ge_ver",
"=",
"None",
"# Get least recent version required by < constraints.",
"if",
"lt",
"in",
"operators",
":",
"lt_ver",
"=",
"sorted",
"(",
"operators",
"[",
"lt",
"]",
")",
"[",
"0",
"]",
"else",
":",
"lt_ver",
"=",
"None",
"# Get least recent version required by <= constraints.",
"if",
"le",
"in",
"operators",
":",
"le_ver",
"=",
"sorted",
"(",
"operators",
"[",
"le",
"]",
")",
"[",
"0",
"]",
"else",
":",
"le_ver",
"=",
"None",
"# Most restrictive LT/LE constraint.",
"l_constraint",
"=",
"None",
"if",
"le_ver",
":",
"if",
"lt_ver",
":",
"le_constraint",
"=",
"Constraint",
"(",
"le",
",",
"le_ver",
")",
"lt_constraint",
"=",
"Constraint",
"(",
"lt",
",",
"lt_ver",
")",
"if",
"le_ver",
"<",
"lt_ver",
":",
"# <= 1, < 2",
"l_constraint",
"=",
"le_constraint",
"l_less_restrictive_c",
"=",
"lt_constraint",
"else",
":",
"# <= 2, < 1",
"# <= 2, < 2",
"l_constraint",
"=",
"lt_constraint",
"l_less_restrictive_c",
"=",
"le_constraint",
"LOGGER",
".",
"debug",
"(",
"'Removed constraint %s because it is less '",
"'restrictive than %s'",
",",
"l_less_restrictive_c",
",",
"l_constraint",
")",
"else",
":",
"l_constraint",
"=",
"Constraint",
"(",
"le",
",",
"le_ver",
")",
"elif",
"lt_ver",
":",
"l_constraint",
"=",
"Constraint",
"(",
"lt",
",",
"lt_ver",
")",
"# Most restrictive GT/GE constraint.",
"g_constraint",
"=",
"None",
"if",
"ge_ver",
":",
"if",
"gt_ver",
":",
"gt_constraint",
"=",
"Constraint",
"(",
"gt",
",",
"gt_ver",
")",
"ge_constraint",
"=",
"Constraint",
"(",
"ge",
",",
"ge_ver",
")",
"if",
"ge_ver",
"<=",
"gt_ver",
":",
"# >= 1, > 2",
"# >= 2, > 2",
"g_constraint",
"=",
"gt_constraint",
"g_less_restrictive_c",
"=",
"ge_constraint",
"else",
":",
"# >= 2, > 1",
"g_constraint",
"=",
"ge_constraint",
"g_less_restrictive_c",
"=",
"gt_constraint",
"LOGGER",
".",
"debug",
"(",
"'Removed constraint %s because it is less '",
"'restrictive than %s'",
",",
"g_less_restrictive_c",
",",
"g_constraint",
")",
"else",
":",
"g_constraint",
"=",
"Constraint",
"(",
"ge",
",",
"ge_ver",
")",
"elif",
"gt_ver",
":",
"g_constraint",
"=",
"Constraint",
"(",
"gt",
",",
"gt_ver",
")",
"# Check if g_constraint and l_constraint are conflicting",
"if",
"g_constraint",
"and",
"l_constraint",
":",
"if",
"g_constraint",
".",
"version",
"==",
"l_constraint",
".",
"version",
":",
"if",
"g_constraint",
".",
"operator",
"==",
"ge",
"and",
"l_constraint",
".",
"operator",
"==",
"le",
":",
"# Merge >= and <= constraints on same version to a ==",
"# constraint",
"operators",
"[",
"eq",
"]",
".",
"add",
"(",
"g_constraint",
".",
"version",
")",
"LOGGER",
".",
"debug",
"(",
"'Merged constraints: %s and %s into ==%s'",
",",
"l_constraint",
",",
"g_constraint",
",",
"g_constraint",
".",
"version",
")",
"l_constraint",
",",
"g_constraint",
"=",
"None",
",",
"None",
"else",
":",
"raise",
"ExclusiveConstraints",
"(",
"g_constraint",
",",
"[",
"l_constraint",
"]",
")",
"elif",
"g_constraint",
".",
"version",
">",
"l_constraint",
".",
"version",
":",
"raise",
"ExclusiveConstraints",
"(",
"g_constraint",
",",
"[",
"l_constraint",
"]",
")",
"ne_constraints",
"=",
"[",
"Constraint",
"(",
"ne",
",",
"v",
")",
"for",
"v",
"in",
"operators",
"[",
"ne",
"]",
"]",
"eq_constraints",
"=",
"[",
"Constraint",
"(",
"eq",
",",
"v",
")",
"for",
"v",
"in",
"operators",
"[",
"eq",
"]",
"]",
"if",
"eq_constraints",
":",
"eq_constraint",
"=",
"eq_constraints",
".",
"pop",
"(",
")",
"# An eq constraint conflicts with other constraints",
"if",
"g_constraint",
"or",
"l_constraint",
"or",
"ne_constraints",
"or",
"eq_constraints",
":",
"conflict_list",
"=",
"[",
"c",
"for",
"c",
"in",
"(",
"g_constraint",
",",
"l_constraint",
")",
"if",
"c",
"]",
"conflict_list",
".",
"extend",
"(",
"ne_constraints",
")",
"conflict_list",
".",
"extend",
"(",
"eq_constraints",
")",
"raise",
"ExclusiveConstraints",
"(",
"eq_constraint",
",",
"conflict_list",
")",
"return",
"[",
"eq_constraint",
"]",
"else",
":",
"constraints",
"=",
"ne_constraints",
"+",
"[",
"g_constraint",
",",
"l_constraint",
"]",
"return",
"[",
"c",
"for",
"c",
"in",
"constraints",
"if",
"c",
"]"
] | Merge ``constraints``.
It removes duplicate, pruned and merged constraints.
:param constraints: Current constraints.
:type constraints: Iterable of :class:`.Constraint` objects.
:rtype: :func:`list` of :class:`.Constraint` objects.
:raises: :exc:`.ExclusiveConstraints` | [
"Merge",
"constraints",
"."
] | 951bc3fd99b6a675190f11ee0752af1d7ff5b440 | https://github.com/pmuller/versions/blob/951bc3fd99b6a675190f11ee0752af1d7ff5b440/versions/constraints.py#L109-L224 | train |
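A standalone illustration of the lower-bound pruning merge applies (keep only the most restrictive of the >/>= constraints); plain tuples stand in for Constraint objects, and the >-versus->= tie-break at an equal version, which merge handles explicitly, is ignored here:

constraints = [('>=', (1, 0)), ('>', (0, 5)), ('<', (2, 0))]
lower_bounds = [c for c in constraints if c[0] in ('>', '>=')]
survivor = max(lower_bounds, key=lambda c: c[1])
print(survivor)    # ('>=', (1, 0)) -- >0.5 is dropped as less restrictive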
pmuller/versions | versions/constraints.py | Constraints.match | def match(self, version):
"""Match ``version`` with this collection of constraints.
:param version: Version to match against the constraint.
:type version: :ref:`version expression <version-expressions>` or \
:class:`.Version`
:rtype: ``True`` if ``version`` satisfies the constraint, \
``False`` if it doesn't.
"""
return all(constraint.match(version)
for constraint in self.constraints) | python | def match(self, version):
"""Match ``version`` with this collection of constraints.
:param version: Version to match against the constraint.
:type version: :ref:`version expression <version-expressions>` or \
:class:`.Version`
:rtype: ``True`` if ``version`` satisfies the constraint, \
``False`` if it doesn't.
"""
return all(constraint.match(version)
for constraint in self.constraints) | [
"def",
"match",
"(",
"self",
",",
"version",
")",
":",
"return",
"all",
"(",
"constraint",
".",
"match",
"(",
"version",
")",
"for",
"constraint",
"in",
"self",
".",
"constraints",
")"
] | Match ``version`` with this collection of constraints.
:param version: Version to match against the constraint.
:type version: :ref:`version expression <version-expressions>` or \
:class:`.Version`
:rtype: ``True`` if ``version`` satisfies the constraint, \
``False`` if it doesn't. | [
"Match",
"version",
"with",
"this",
"collection",
"of",
"constraints",
"."
] | 951bc3fd99b6a675190f11ee0752af1d7ff5b440 | https://github.com/pmuller/versions/blob/951bc3fd99b6a675190f11ee0752af1d7ff5b440/versions/constraints.py#L46-L56 | train |
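A hedged usage sketch for match; the parse entry point and import path are assumptions, since this record only shows the matching loop itself:

from versions import Constraints                # hypothetical import path

constraints = Constraints.parse('>=1.0,<2.0')   # hypothetical parser for version expressions
print(constraints.match('1.4'))                 # True: both constraints are satisfied
print(constraints.match('2.1'))                 # False: fails the <2.0 constraint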
evansd/django-envsettings | envsettings/cache.py | CacheSettings.set_memcached_backend | def set_memcached_backend(self, config):
"""
Select the most suitable Memcached backend based on the config and
on what's installed
"""
# This is the preferred backend as it is the fastest and most fully
# featured, so we use this by default
config['BACKEND'] = 'django_pylibmc.memcached.PyLibMCCache'
if is_importable(config['BACKEND']):
return
# Otherwise, binary connections can use this pure Python implementation
if config.get('BINARY') and is_importable('django_bmemcached'):
config['BACKEND'] = 'django_bmemcached.memcached.BMemcached'
return
# For text-based connections without any authentication we can fall
# back to Django's core backends if the supporting libraries are
# installed
if not any([config.get(key) for key in ('BINARY', 'USERNAME', 'PASSWORD')]):
if is_importable('pylibmc'):
config['BACKEND'] = \
'django.core.cache.backends.memcached.PyLibMCCache'
elif is_importable('memcached'):
config['BACKEND'] = \
'django.core.cache.backends.memcached.MemcachedCache' | python | def set_memcached_backend(self, config):
"""
Select the most suitable Memcached backend based on the config and
on what's installed
"""
# This is the preferred backend as it is the fastest and most fully
# featured, so we use this by default
config['BACKEND'] = 'django_pylibmc.memcached.PyLibMCCache'
if is_importable(config['BACKEND']):
return
# Otherwise, binary connections can use this pure Python implementation
if config.get('BINARY') and is_importable('django_bmemcached'):
config['BACKEND'] = 'django_bmemcached.memcached.BMemcached'
return
# For text-based connections without any authentication we can fall
# back to Django's core backends if the supporting libraries are
# installed
if not any([config.get(key) for key in ('BINARY', 'USERNAME', 'PASSWORD')]):
if is_importable('pylibmc'):
config['BACKEND'] = \
'django.core.cache.backends.memcached.PyLibMCCache'
elif is_importable('memcached'):
config['BACKEND'] = \
'django.core.cache.backends.memcached.MemcachedCache' | [
"def",
"set_memcached_backend",
"(",
"self",
",",
"config",
")",
":",
"# This is the preferred backend as it is the fastest and most fully",
"# featured, so we use this by default",
"config",
"[",
"'BACKEND'",
"]",
"=",
"'django_pylibmc.memcached.PyLibMCCache'",
"if",
"is_importable",
"(",
"config",
"[",
"'BACKEND'",
"]",
")",
":",
"return",
"# Otherwise, binary connections can use this pure Python implementation",
"if",
"config",
".",
"get",
"(",
"'BINARY'",
")",
"and",
"is_importable",
"(",
"'django_bmemcached'",
")",
":",
"config",
"[",
"'BACKEND'",
"]",
"=",
"'django_bmemcached.memcached.BMemcached'",
"return",
"# For text-based connections without any authentication we can fall",
"# back to Django's core backends if the supporting libraries are",
"# installed",
"if",
"not",
"any",
"(",
"[",
"config",
".",
"get",
"(",
"key",
")",
"for",
"key",
"in",
"(",
"'BINARY'",
",",
"'USERNAME'",
",",
"'PASSWORD'",
")",
"]",
")",
":",
"if",
"is_importable",
"(",
"'pylibmc'",
")",
":",
"config",
"[",
"'BACKEND'",
"]",
"=",
"'django.core.cache.backends.memcached.PyLibMCCache'",
"elif",
"is_importable",
"(",
"'memcached'",
")",
":",
"config",
"[",
"'BACKEND'",
"]",
"=",
"'django.core.cache.backends.memcached.MemcachedCache'"
] | Select the most suitable Memcached backend based on the config and
on what's installed | [
"Select",
"the",
"most",
"suitable",
"Memcached",
"backend",
"based",
"on",
"the",
"config",
"and",
"on",
"what",
"s",
"installed"
] | 541932af261d5369f211f836a238dc020ee316e8 | https://github.com/evansd/django-envsettings/blob/541932af261d5369f211f836a238dc020ee316e8/envsettings/cache.py#L63-L86 | train |
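The fallback chain above hinges on an is_importable helper that is not shown in this record. A minimal sketch of such a helper, under the assumption that it only needs to test whether the top-level package can be imported:

import importlib

def is_importable(module_path):
    # Import only the top-level package; the dotted backend path is
    # resolved later by Django itself.
    try:
        importlib.import_module(module_path.split('.')[0])
        return True
    except ImportError:
        return False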
bitesofcode/projexui | projexui/widgets/xorbquerywidget/xorbqueryentrywidget.py | XOrbQueryEntryWidget.addEntry | def addEntry(self):
"""
This will either add a new widget or switch the joiner based on the
state of the entry
"""
joiner = self.joiner()
curr_joiner = self._containerWidget.currentJoiner()
# update the joining option if it is modified
if joiner != curr_joiner:
if not self._last:
self.updateJoin()
return
self._containerWidget.setCurrentJoiner(joiner)
# otherwise, add a new entry
self._containerWidget.addEntry(entry=self) | python | def addEntry(self):
"""
This will either add a new widget or switch the joiner based on the
state of the entry
"""
joiner = self.joiner()
curr_joiner = self._containerWidget.currentJoiner()
# update the joining option if it is modified
if joiner != curr_joiner:
if not self._last:
self.updateJoin()
return
self._containerWidget.setCurrentJoiner(joiner)
# otherwise, add a new entry
self._containerWidget.addEntry(entry=self) | [
"def",
"addEntry",
"(",
"self",
")",
":",
"joiner",
"=",
"self",
".",
"joiner",
"(",
")",
"curr_joiner",
"=",
"self",
".",
"_containerWidget",
".",
"currentJoiner",
"(",
")",
"# update the joining option if it is modified\r",
"if",
"joiner",
"!=",
"curr_joiner",
":",
"if",
"not",
"self",
".",
"_last",
":",
"self",
".",
"updateJoin",
"(",
")",
"return",
"self",
".",
"_containerWidget",
".",
"setCurrentJoiner",
"(",
"joiner",
")",
"# otherwise, add a new entry\r",
"self",
".",
"_containerWidget",
".",
"addEntry",
"(",
"entry",
"=",
"self",
")"
] | This will either add a new widget or switch the joiner based on the
state of the entry | [
"This",
"will",
"either",
"add",
"a",
"new",
"widget",
"or",
"switch",
"the",
"joiner",
"based",
"on",
"the",
"state",
"of",
"the",
"entry"
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xorbquerywidget/xorbqueryentrywidget.py#L68-L85 | train |
bitesofcode/projexui | projexui/widgets/xorbquerywidget/xorbqueryentrywidget.py | XOrbQueryEntryWidget.assignPlugin | def assignPlugin(self):
"""
Assigns an editor based on the current column for this schema.
"""
self.uiOperatorDDL.blockSignals(True)
self.uiOperatorDDL.clear()
plugin = self.currentPlugin()
if plugin:
flags = 0
if not self.queryWidget().showReferencePlugins():
flags |= plugin.Flags.ReferenceRequired
self.uiOperatorDDL.addItems(plugin.operators(ignore=flags))
self.uiOperatorDDL.blockSignals(False)
self.assignEditor() | python | def assignPlugin(self):
"""
Assigns an editor based on the current column for this schema.
"""
self.uiOperatorDDL.blockSignals(True)
self.uiOperatorDDL.clear()
plugin = self.currentPlugin()
if plugin:
flags = 0
if not self.queryWidget().showReferencePlugins():
flags |= plugin.Flags.ReferenceRequired
self.uiOperatorDDL.addItems(plugin.operators(ignore=flags))
self.uiOperatorDDL.blockSignals(False)
self.assignEditor() | [
"def",
"assignPlugin",
"(",
"self",
")",
":",
"self",
".",
"uiOperatorDDL",
".",
"blockSignals",
"(",
"True",
")",
"self",
".",
"uiOperatorDDL",
".",
"clear",
"(",
")",
"plugin",
"=",
"self",
".",
"currentPlugin",
"(",
")",
"if",
"plugin",
":",
"flags",
"=",
"0",
"if",
"not",
"self",
".",
"queryWidget",
"(",
")",
".",
"showReferencePlugins",
"(",
")",
":",
"flags",
"|=",
"plugin",
".",
"Flags",
".",
"ReferenceRequired",
"self",
".",
"uiOperatorDDL",
".",
"addItems",
"(",
"plugin",
".",
"operators",
"(",
"ignore",
"=",
"flags",
")",
")",
"self",
".",
"uiOperatorDDL",
".",
"blockSignals",
"(",
"False",
")",
"self",
".",
"assignEditor",
"(",
")"
] | Assigns an editor based on the current column for this schema. | [
"Assigns",
"an",
"editor",
"based",
"on",
"the",
"current",
"column",
"for",
"this",
"schema",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xorbquerywidget/xorbqueryentrywidget.py#L88-L104 | train |
bitesofcode/projexui | projexui/widgets/xorbquerywidget/xorbqueryentrywidget.py | XOrbQueryEntryWidget.assignEditor | def assignEditor(self):
"""
Assigns the editor for this entry based on the plugin.
"""
plugin = self.currentPlugin()
column = self.currentColumn()
value = self.currentValue()
if not plugin:
self.setEditor(None)
return
self.setUpdatesEnabled(False)
self.blockSignals(True)
op = self.uiOperatorDDL.currentText()
self.setEditor(plugin.createEditor(self, column, op, value))
self.setUpdatesEnabled(True)
self.blockSignals(False) | python | def assignEditor(self):
"""
Assigns the editor for this entry based on the plugin.
"""
plugin = self.currentPlugin()
column = self.currentColumn()
value = self.currentValue()
if not plugin:
self.setEditor(None)
return
self.setUpdatesEnabled(False)
self.blockSignals(True)
op = self.uiOperatorDDL.currentText()
self.setEditor(plugin.createEditor(self, column, op, value))
self.setUpdatesEnabled(True)
self.blockSignals(False) | [
"def",
"assignEditor",
"(",
"self",
")",
":",
"plugin",
"=",
"self",
".",
"currentPlugin",
"(",
")",
"column",
"=",
"self",
".",
"currentColumn",
"(",
")",
"value",
"=",
"self",
".",
"currentValue",
"(",
")",
"if",
"not",
"plugin",
":",
"self",
".",
"setEditor",
"(",
"None",
")",
"return",
"self",
".",
"setUpdatesEnabled",
"(",
"False",
")",
"self",
".",
"blockSignals",
"(",
"True",
")",
"op",
"=",
"self",
".",
"uiOperatorDDL",
".",
"currentText",
"(",
")",
"self",
".",
"setEditor",
"(",
"plugin",
".",
"createEditor",
"(",
"self",
",",
"column",
",",
"op",
",",
"value",
")",
")",
"self",
".",
"setUpdatesEnabled",
"(",
"True",
")",
"self",
".",
"blockSignals",
"(",
"False",
")"
] | Assigns the editor for this entry based on the plugin. | [
"Assigns",
"the",
"editor",
"for",
"this",
"entry",
"based",
"on",
"the",
"plugin",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xorbquerywidget/xorbqueryentrywidget.py#L107-L124 | train |
bitesofcode/projexui | projexui/widgets/xorbquerywidget/xorbqueryentrywidget.py | XOrbQueryEntryWidget.refreshButtons | def refreshButtons(self):
"""
Refreshes the buttons for building this sql query.
"""
last = self._last
first = self._first
joiner = self._containerWidget.currentJoiner()
# the first button set can contain the toggle options
if first:
self.uiJoinSBTN.setActionTexts(['AND', 'OR'])
elif joiner == QueryCompound.Op.And:
self.uiJoinSBTN.setActionTexts(['AND'])
else:
self.uiJoinSBTN.setActionTexts(['OR'])
# the last option should not show an active action
if last:
self.uiJoinSBTN.setCurrentAction(None)
# otherwise, highlight the proper action
else:
act = self.uiJoinSBTN.findAction(QueryCompound.Op[joiner].upper())
self.uiJoinSBTN.setCurrentAction(act)
enable = QueryCompound.typecheck(self._query) or self.isChecked()
self.uiEnterBTN.setEnabled(enable) | python | def refreshButtons(self):
"""
Refreshes the buttons for building this sql query.
"""
last = self._last
first = self._first
joiner = self._containerWidget.currentJoiner()
# the first button set can contain the toggle options
if first:
self.uiJoinSBTN.setActionTexts(['AND', 'OR'])
elif joiner == QueryCompound.Op.And:
self.uiJoinSBTN.setActionTexts(['AND'])
else:
self.uiJoinSBTN.setActionTexts(['OR'])
# the last option should not show an active action
if last:
self.uiJoinSBTN.setCurrentAction(None)
# otherwise, highlight the proper action
else:
act = self.uiJoinSBTN.findAction(QueryCompound.Op[joiner].upper())
self.uiJoinSBTN.setCurrentAction(act)
enable = QueryCompound.typecheck(self._query) or self.isChecked()
self.uiEnterBTN.setEnabled(enable) | [
"def",
"refreshButtons",
"(",
"self",
")",
":",
"last",
"=",
"self",
".",
"_last",
"first",
"=",
"self",
".",
"_first",
"joiner",
"=",
"self",
".",
"_containerWidget",
".",
"currentJoiner",
"(",
")",
"# the first button set can contain the toggle options\r",
"if",
"first",
":",
"self",
".",
"uiJoinSBTN",
".",
"setActionTexts",
"(",
"[",
"'AND'",
",",
"'OR'",
"]",
")",
"elif",
"joiner",
"==",
"QueryCompound",
".",
"Op",
".",
"And",
":",
"self",
".",
"uiJoinSBTN",
".",
"setActionTexts",
"(",
"[",
"'AND'",
"]",
")",
"else",
":",
"self",
".",
"uiJoinSBTN",
".",
"setActionTexts",
"(",
"[",
"'OR'",
"]",
")",
"# the last option should not show an active action\r",
"if",
"last",
":",
"self",
".",
"uiJoinSBTN",
".",
"setCurrentAction",
"(",
"None",
")",
"# otherwise, highlight the proper action\r",
"else",
":",
"act",
"=",
"self",
".",
"uiJoinSBTN",
".",
"findAction",
"(",
"QueryCompound",
".",
"Op",
"[",
"joiner",
"]",
".",
"upper",
"(",
")",
")",
"self",
".",
"uiJoinSBTN",
".",
"setCurrentAction",
"(",
"act",
")",
"enable",
"=",
"QueryCompound",
".",
"typecheck",
"(",
"self",
".",
"_query",
")",
"or",
"self",
".",
"isChecked",
"(",
")",
"self",
".",
"uiEnterBTN",
".",
"setEnabled",
"(",
"enable",
")"
] | Refreshes the buttons for building this sql query. | [
"Refreshes",
"the",
"buttons",
"for",
"building",
"this",
"sql",
"query",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xorbquerywidget/xorbqueryentrywidget.py#L223-L250 | train |
bitesofcode/projexui | projexui/widgets/xorbquerywidget/xorbqueryentrywidget.py | XOrbQueryEntryWidget.updateJoin | def updateJoin(self):
"""
Updates the joining method used by the system.
"""
text = self.uiJoinSBTN.currentAction().text()
if text == 'AND':
joiner = QueryCompound.Op.And
else:
joiner = QueryCompound.Op.Or
self._containerWidget.setCurrentJoiner(joiner) | python | def updateJoin(self):
"""
Updates the joining method used by the system.
"""
text = self.uiJoinSBTN.currentAction().text()
if text == 'AND':
joiner = QueryCompound.Op.And
else:
joiner = QueryCompound.Op.Or
self._containerWidget.setCurrentJoiner(joiner) | [
"def",
"updateJoin",
"(",
"self",
")",
":",
"text",
"=",
"self",
".",
"uiJoinSBTN",
".",
"currentAction",
"(",
")",
".",
"text",
"(",
")",
"if",
"text",
"==",
"'AND'",
":",
"joiner",
"=",
"QueryCompound",
".",
"Op",
".",
"And",
"else",
":",
"joiner",
"=",
"QueryCompound",
".",
"Op",
".",
"Or",
"self",
".",
"_containerWidget",
".",
"setCurrentJoiner",
"(",
"self",
".",
"joiner",
"(",
")",
")"
] | Updates the joining method used by the system. | [
"Updates",
"the",
"joining",
"method",
"used",
"by",
"the",
"system",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xorbquerywidget/xorbqueryentrywidget.py#L350-L360 | train |
lingpy/sinopy | src/sinopy/sinopy.py | is_chinese | def is_chinese(name):
"""
Check if a symbol is a Chinese character.
Note
----
Taken from http://stackoverflow.com/questions/16441633/python-2-7-test-if-characters-in-a-string-are-all-chinese-characters
"""
if not name:
return False
for ch in name:
ordch = ord(ch)
if not (0x3400 <= ordch <= 0x9fff) and not (0x20000 <= ordch <= 0x2ceaf) \
and not (0xf900 <= ordch <= 0xfaff) and not (0x2f800 <= ordch <= 0x2fa1f):
return False
return True | python | def is_chinese(name):
"""
Check if a symbol is a Chinese character.
Note
----
Taken from http://stackoverflow.com/questions/16441633/python-2-7-test-if-characters-in-a-string-are-all-chinese-characters
"""
if not name:
return False
for ch in name:
ordch = ord(ch)
if not (0x3400 <= ordch <= 0x9fff) and not (0x20000 <= ordch <= 0x2ceaf) \
and not (0xf900 <= ordch <= 0xfaff) and not (0x2f800 <= ordch <= 0x2fa1f):
return False
return True | [
"def",
"is_chinese",
"(",
"name",
")",
":",
"if",
"not",
"name",
":",
"return",
"False",
"for",
"ch",
"in",
"name",
":",
"ordch",
"=",
"ord",
"(",
"ch",
")",
"if",
"not",
"(",
"0x3400",
"<=",
"ordch",
"<=",
"0x9fff",
")",
"and",
"not",
"(",
"0x20000",
"<=",
"ordch",
"<=",
"0x2ceaf",
")",
"and",
"not",
"(",
"0xf900",
"<=",
"ordch",
"<=",
"ordch",
")",
"and",
"not",
"(",
"0x2f800",
"<=",
"ordch",
"<=",
"0x2fa1f",
")",
":",
"return",
"False",
"return",
"True"
] | Check if a symbol is a Chinese character.
Note
----
Taken from http://stackoverflow.com/questions/16441633/python-2-7-test-if-characters-in-a-string-are-all-chinese-characters | [
"Check",
"if",
"a",
"symbol",
"is",
"a",
"Chinese",
"character",
"."
] | 59a47fcdfae3e0000ac6d2b3d7919bf875ec2056 | https://github.com/lingpy/sinopy/blob/59a47fcdfae3e0000ac6d2b3d7919bf875ec2056/src/sinopy/sinopy.py#L10-L26 | train |
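A quick usage sketch for is_chinese; it needs no external data, only the Unicode ranges checked above:

print(is_chinese('北京'))     # True: both code points fall in the CJK ranges
print(is_chinese('北京abc'))  # False: Latin letters fail the range test
print(is_chinese(''))         # False: empty input is rejected up front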
lingpy/sinopy | src/sinopy/sinopy.py | pinyin | def pinyin(char, variant='mandarin', sep=' ', out='tones'):
"""
Retrieve Pinyin of a character.
"""
if len(char) > 1:
return sep.join([pinyin(c, variant=variant, sep=sep, out=out) for c in char])
if not is_chinese(char):
return char
if char in _cd.GBK:
char = gbk2big5(char)
out_char = _cd.UNIHAN.get(char, {variant: '?({0})'.format(char)}).get(variant, '!({0})'.format(char))
if out != 'tones':
out_char = ''.join([tone_converter.get(x, x) for x in out_char])
return out_char | python | def pinyin(char, variant='mandarin', sep=' ', out='tones'):
"""
Retrieve Pinyin of a character.
"""
if len(char) > 1:
return sep.join([pinyin(c, variant=variant, sep=sep, out=out) for c in char])
if not is_chinese(char):
return char
if char in _cd.GBK:
char = gbk2big5(char)
out_char = _cd.UNIHAN.get(char, {variant: '?({0})'.format(char)}).get(variant, '!({0})'.format(char))
if out != 'tones':
out_char = ''.join([tone_converter.get(x, x) for x in out_char])
return out_char | [
"def",
"pinyin",
"(",
"char",
",",
"variant",
"=",
"'mandarin'",
",",
"sep",
"=",
"' '",
",",
"out",
"=",
"'tones'",
")",
":",
"if",
"len",
"(",
"char",
")",
">",
"1",
":",
"return",
"sep",
".",
"join",
"(",
"[",
"pinyin",
"(",
"c",
",",
"variant",
"=",
"variant",
",",
"sep",
"=",
"sep",
",",
"out",
"=",
"out",
")",
"for",
"c",
"in",
"char",
"]",
")",
"if",
"not",
"is_chinese",
"(",
"char",
")",
":",
"return",
"char",
"if",
"char",
"in",
"_cd",
".",
"GBK",
":",
"char",
"=",
"gbk2big5",
"(",
"char",
")",
"out_char",
"=",
"_cd",
".",
"UNIHAN",
".",
"get",
"(",
"char",
",",
"{",
"variant",
":",
"'?({0}'",
".",
"format",
"(",
"char",
")",
"}",
")",
".",
"get",
"(",
"variant",
",",
"'!({0})'",
".",
"format",
"(",
"char",
")",
")",
"if",
"out",
"!=",
"'tones'",
":",
"out_char",
"=",
"''",
".",
"join",
"(",
"[",
"tone_converter",
".",
"get",
"(",
"x",
",",
"x",
")",
"for",
"x",
"in",
"out_char",
"]",
")",
"return",
"out_char"
] | Retrieve Pinyin of a character. | [
"Retrieve",
"Pinyin",
"of",
"a",
"character",
"."
] | 59a47fcdfae3e0000ac6d2b3d7919bf875ec2056 | https://github.com/lingpy/sinopy/blob/59a47fcdfae3e0000ac6d2b3d7919bf875ec2056/src/sinopy/sinopy.py#L29-L47 | train |
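A hedged usage sketch for pinyin. The concrete readings depend on the bundled UNIHAN data (_cd.UNIHAN) and on tone_converter, so the outputs in the comments are illustrative assumptions:

print(pinyin('北京'))             # e.g. 'běi jīng' with tone marks
print(pinyin('北京', sep='-'))    # same syllables joined by '-'
print(pinyin('北京', out='raw'))  # any value other than 'tones' routes the
                                  # result through tone_converter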
lingpy/sinopy | src/sinopy/sinopy.py | parse_baxter | def parse_baxter(reading):
"""
Parse a Baxter string and render it with all its contents, namely
initial, medial, final, and tone.
"""
initial = ''
medial = ''
final = ''
tone = ''
# determine environments
inienv = True
medienv = False
finenv = False
tonenv = False
inichars = "pbmrtdnkgnsyhzl'x"
chars = list(reading)
for char in chars:
# switch environments
if char in 'jw' and not finenv:
inienv,medienv,finenv,tonenv = False,True,False,False
elif char not in inichars or finenv:
if char in 'XH':
inienv,medienv,finenv,tonenv = False,False,False,True
else:
inienv,medienv,finenv,tonenv = False,False,True,False
# fill in slots
if inienv:
initial += char
if medienv:
medial += char
if finenv:
final += char
if tonenv:
tone += char
# post-parse tone
if not tone and final[-1] in 'ptk':
tone = 'R'
elif not tone:
tone = 'P'
# post-parse medial
if 'j' not in medial and 'y' in initial:
medial += 'j'
# post-parse labial
if final[0] in 'u' and 'w' not in medial:
medial = 'w' + medial
return initial,medial,final,tone | python | def parse_baxter(reading):
"""
Parse a Baxter string and render it with all its contents, namely
initial, medial, final, and tone.
"""
initial = ''
medial = ''
final = ''
tone = ''
# determine environments
inienv = True
medienv = False
finenv = False
tonenv = False
inichars = "pbmrtdnkgnsyhzl'x"
chars = list(reading)
for char in chars:
# switch environments
if char in 'jw' and not finenv:
inienv,medienv,finenv,tonenv = False,True,False,False
elif char not in inichars or finenv:
if char in 'XH':
inienv,medienv,finenv,tonenv = False,False,False,True
else:
inienv,medienv,finenv,tonenv = False,False,True,False
# fill in slots
if inienv:
initial += char
if medienv:
medial += char
if finenv:
final += char
if tonenv:
tone += char
# post-parse tone
if not tone and final[-1] in 'ptk':
tone = 'R'
elif not tone:
tone = 'P'
# post-parse medial
if 'j' not in medial and 'y' in initial:
medial += 'j'
# post-parse labial
if final[0] in 'u' and 'w' not in medial:
medial = 'w' + medial
return initial,medial,final,tone | [
"def",
"parse_baxter",
"(",
"reading",
")",
":",
"initial",
"=",
"''",
"medial",
"=",
"''",
"final",
"=",
"''",
"tone",
"=",
"''",
"# determine environments",
"inienv",
"=",
"True",
"medienv",
"=",
"False",
"finenv",
"=",
"False",
"tonenv",
"=",
"False",
"inichars",
"=",
"\"pbmrtdnkgnsyhzl'x\"",
"chars",
"=",
"list",
"(",
"reading",
")",
"for",
"char",
"in",
"chars",
":",
"# switch environments",
"if",
"char",
"in",
"'jw'",
"and",
"not",
"finenv",
":",
"inienv",
",",
"medienv",
",",
"finenv",
",",
"tonenv",
"=",
"False",
",",
"True",
",",
"False",
",",
"False",
"elif",
"char",
"not",
"in",
"inichars",
"or",
"finenv",
":",
"if",
"char",
"in",
"'XH'",
":",
"inienv",
",",
"medienv",
",",
"finenv",
",",
"tonenv",
"=",
"False",
",",
"False",
",",
"False",
",",
"True",
"else",
":",
"inienv",
",",
"medienv",
",",
"finenv",
",",
"tonenv",
"=",
"False",
",",
"False",
",",
"True",
",",
"False",
"# fill in slots",
"if",
"inienv",
":",
"initial",
"+=",
"char",
"if",
"medienv",
":",
"medial",
"+=",
"char",
"if",
"finenv",
":",
"final",
"+=",
"char",
"if",
"tonenv",
":",
"tone",
"+=",
"char",
"# post-parse tone",
"if",
"not",
"tone",
"and",
"final",
"[",
"-",
"1",
"]",
"in",
"'ptk'",
":",
"tone",
"=",
"'R'",
"elif",
"not",
"tone",
":",
"tone",
"=",
"'P'",
"# post-parse medial",
"if",
"'j'",
"not",
"in",
"medial",
"and",
"'y'",
"in",
"initial",
":",
"medial",
"+=",
"'j'",
"# post-parse labial",
"if",
"final",
"[",
"0",
"]",
"in",
"'u'",
"and",
"'w'",
"not",
"in",
"medial",
":",
"medial",
"=",
"'w'",
"+",
"medial",
"return",
"initial",
",",
"medial",
",",
"final",
",",
"tone"
] | Parse a Baxter string and render it with all its contents, namely
initial, medial, final, and tone. | [
"Parse",
"a",
"Baxter",
"string",
"and",
"render",
"it",
"with",
"all",
"its",
"contents",
"namely",
"initial",
"medial",
"final",
"and",
"tone",
"."
] | 59a47fcdfae3e0000ac6d2b3d7919bf875ec2056 | https://github.com/lingpy/sinopy/blob/59a47fcdfae3e0000ac6d2b3d7919bf875ec2056/src/sinopy/sinopy.py#L69-L128 | train |
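A worked example traced directly from the parser logic above: for the Baxter reading 'tsyangX', 't', 's' and 'y' stay in the initial, 'angX' splits into final plus tone, and the post-parse step adds a 'j' medial because 'y' appears in the initial:

print(parse_baxter('tsyangX'))  # ('tsy', 'j', 'ang', 'X')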
lingpy/sinopy | src/sinopy/sinopy.py | chars2gloss | def chars2gloss(chars):
"""
Get the TLS basic gloss for characters.
"""
out = []
chars = gbk2big5(chars)
for char in chars:
tmp = []
if char in _cd.TLS:
for entry in _cd.TLS[char]:
baxter = _cd.TLS[char][entry]['UNIHAN_GLOSS']
if baxter != '?':
tmp += [baxter]
out += [','.join(tmp)]
return out | python | def chars2gloss(chars):
"""
Get the TLS basic gloss for characters.
"""
out = []
chars = gbk2big5(chars)
for char in chars:
tmp = []
if char in _cd.TLS:
for entry in _cd.TLS[char]:
baxter = _cd.TLS[char][entry]['UNIHAN_GLOSS']
if baxter != '?':
tmp += [baxter]
out += [','.join(tmp)]
return out | [
"def",
"chars2gloss",
"(",
"chars",
")",
":",
"out",
"=",
"[",
"]",
"chars",
"=",
"gbk2big5",
"(",
"chars",
")",
"for",
"char",
"in",
"chars",
":",
"tmp",
"=",
"[",
"]",
"if",
"char",
"in",
"_cd",
".",
"TLS",
":",
"for",
"entry",
"in",
"_cd",
".",
"TLS",
"[",
"char",
"]",
":",
"baxter",
"=",
"_cd",
".",
"TLS",
"[",
"char",
"]",
"[",
"entry",
"]",
"[",
"'UNIHAN_GLOSS'",
"]",
"if",
"baxter",
"!=",
"'?'",
":",
"tmp",
"+=",
"[",
"baxter",
"]",
"out",
"+=",
"[",
"','",
".",
"join",
"(",
"tmp",
")",
"]",
"return",
"out"
] | Get the TLS basic gloss for characters. | [
"Get",
"the",
"TLS",
"basic",
"gloss",
"for",
"a",
"characters",
"."
] | 59a47fcdfae3e0000ac6d2b3d7919bf875ec2056 | https://github.com/lingpy/sinopy/blob/59a47fcdfae3e0000ac6d2b3d7919bf875ec2056/src/sinopy/sinopy.py#L150-L164 | train |
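A usage sketch for chars2gloss; the gloss text depends entirely on the bundled TLS data (_cd.TLS), so the sample output is an assumption:

glosses = chars2gloss('人')
print(glosses)  # e.g. ['man,person'], one comma-joined string per character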
lingpy/sinopy | src/sinopy/sinopy.py | baxter2ipa | def baxter2ipa(mch, segmented=False):
"""
Very simple but convenient-enough conversion from baxter MCH to IPA MCH.
this is also more or less already implemented in MiddleChinese
"""
out = mch
if out[-1] in 'ptk':
out += 'R'
elif out[-1] not in 'XHP':
out += 'P'
for s,t in _cd.GY['ipa']:
out = out.replace(s,t)
if segmented:
return parse_chinese_morphemes(out)
return out | python | def baxter2ipa(mch, segmented=False):
"""
Very simple but convenient-enough conversion from baxter MCH to IPA MCH.
this is also more or less already implemented in MiddleChinese
"""
out = mch
if out[-1] in 'ptk':
out += 'R'
elif out[-1] not in 'XHP':
out += 'P'
for s,t in _cd.GY['ipa']:
out = out.replace(s,t)
if segmented:
return parse_chinese_morphemes(out)
return out | [
"def",
"baxter2ipa",
"(",
"mch",
",",
"segmented",
"=",
"False",
")",
":",
"out",
"=",
"mch",
"if",
"out",
"[",
"-",
"1",
"]",
"in",
"'ptk'",
":",
"out",
"+=",
"'R'",
"elif",
"out",
"[",
"-",
"1",
"]",
"not",
"in",
"'XHP'",
":",
"out",
"+=",
"'P'",
"for",
"s",
",",
"t",
"in",
"_cd",
".",
"GY",
"[",
"'ipa'",
"]",
":",
"out",
"=",
"out",
".",
"replace",
"(",
"s",
",",
"t",
")",
"if",
"segmented",
":",
"return",
"parse_chinese_morphemes",
"(",
"out",
")",
"return",
"out"
] | Very simple but convenient-enough conversion from baxter MCH to IPA MCH.
this is also more or less already implemented in MiddleChinese | [
"Very",
"simple",
"aber",
"convient",
"-",
"enough",
"conversion",
"from",
"baxter",
"MCH",
"to",
"IPA",
"MCH",
".",
"this",
"is",
"also",
"more",
"or",
"less",
"already",
"implemented",
"in",
"MiddleChinese"
] | 59a47fcdfae3e0000ac6d2b3d7919bf875ec2056 | https://github.com/lingpy/sinopy/blob/59a47fcdfae3e0000ac6d2b3d7919bf875ec2056/src/sinopy/sinopy.py#L329-L345 | train |
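A usage sketch for baxter2ipa; the exact IPA string depends on the _cd.GY['ipa'] replacement table, so no concrete output is asserted here:

ipa = baxter2ipa('tsyangX')                    # IPA string with tone letter appended
parts = baxter2ipa('tsyangX', segmented=True)  # routed through parse_chinese_morphemes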
lingpy/sinopy | src/sinopy/sinopy.py | gbk2big5 | def gbk2big5(chars):
"""
Convert from gbk format to big5 representation of chars.
"""
out = ''
for char in chars:
if char in _cd.GBK:
out += _cd.BIG5[_cd.GBK.index(char)]
else:
out += char
return out | python | def gbk2big5(chars):
"""
Convert from gbk format to big5 representation of chars.
"""
out = ''
for char in chars:
if char in _cd.GBK:
out += _cd.BIG5[_cd.GBK.index(char)]
else:
out += char
return out | [
"def",
"gbk2big5",
"(",
"chars",
")",
":",
"out",
"=",
"''",
"for",
"char",
"in",
"chars",
":",
"if",
"char",
"in",
"_cd",
".",
"GBK",
":",
"out",
"+=",
"_cd",
".",
"BIG5",
"[",
"_cd",
".",
"GBK",
".",
"index",
"(",
"char",
")",
"]",
"else",
":",
"out",
"+=",
"char",
"return",
"out"
] | Convert from gbk format to big5 representation of chars. | [
"Convert",
"from",
"gbk",
"format",
"to",
"big5",
"representation",
"of",
"chars",
"."
] | 59a47fcdfae3e0000ac6d2b3d7919bf875ec2056 | https://github.com/lingpy/sinopy/blob/59a47fcdfae3e0000ac6d2b3d7919bf875ec2056/src/sinopy/sinopy.py#L348-L358 | train |
lingpy/sinopy | src/sinopy/sinopy.py | big52gbk | def big52gbk(chars):
"""
Convert from Big5 chars to GBK chars.
"""
out = ''
for char in chars:
if char in _cd.BIG5:
out += _cd.GBK[_cd.BIG5.index(char)]
else:
out += char
return out | python | def big52gbk(chars):
"""
Convert from Big5 chars to GBK chars.
"""
out = ''
for char in chars:
if char in _cd.BIG5:
out += _cd.GBK[_cd.BIG5.index(char)]
else:
out += char
return out | [
"def",
"big52gbk",
"(",
"chars",
")",
":",
"out",
"=",
"''",
"for",
"char",
"in",
"chars",
":",
"if",
"char",
"in",
"_cd",
".",
"BIG5",
":",
"out",
"+=",
"_cd",
".",
"GBK",
"[",
"_cd",
".",
"BIG5",
".",
"index",
"(",
"char",
")",
"]",
"else",
":",
"out",
"+=",
"char",
"return",
"out"
] | Convert from Big5 chars to GBK chars. | [
"Convert",
"from",
"long",
"chars",
"to",
"short",
"chars",
"."
] | 59a47fcdfae3e0000ac6d2b3d7919bf875ec2056 | https://github.com/lingpy/sinopy/blob/59a47fcdfae3e0000ac6d2b3d7919bf875ec2056/src/sinopy/sinopy.py#L361-L371 | train |
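Because _cd.GBK and _cd.BIG5 are parallel lists indexed the same way, the two converters above are inverses for any character present in both tables:

simplified = '汉语'
traditional = gbk2big5(simplified)
# Round-trips for characters found in both tables; others pass through unchanged.
assert big52gbk(traditional) == simplified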
cts2/pyjxslt | pyjxslt-python/src/pyjxslt/XSLTGateway.py | Gateway.add_transform | def add_transform(self, key, xslt):
""" Add or update a transform.
@param key: Transform key to use when executing transformations
@param xslt: Text or file name of an xslt transform
"""
self._remove_converter(key)
self._xsltLibrary[key] = xslt
self._add_converter(key) | python | def add_transform(self, key, xslt):
""" Add or update a transform.
@param key: Transform key to use when executing transformations
@param xslt: Text or file name of an xslt transform
"""
self._remove_converter(key)
self._xsltLibrary[key] = xslt
self._add_converter(key) | [
"def",
"add_transform",
"(",
"self",
",",
"key",
",",
"xslt",
")",
":",
"self",
".",
"_remove_converter",
"(",
"key",
")",
"self",
".",
"_xsltLibrary",
"[",
"key",
"]",
"=",
"xslt",
"self",
".",
"_add_converter",
"(",
"key",
")"
] | Add or update a transform.
@param key: Transform key to use when executing transformations
@param xslt: Text or file name of an xslt transform | [
"Add",
"or",
"update",
"a",
"transform",
"."
] | 66cd9233186cf5000d32e3a5b572e0002a8361c4 | https://github.com/cts2/pyjxslt/blob/66cd9233186cf5000d32e3a5b572e0002a8361c4/pyjxslt-python/src/pyjxslt/XSLTGateway.py#L92-L101 | train |
cts2/pyjxslt | pyjxslt-python/src/pyjxslt/XSLTGateway.py | Gateway._refresh_converters | def _refresh_converters(self):
""" Refresh all of the converters in the py4j library
@return: True if all converters were successfully updated
"""
self._converters.clear()
return reduce(lambda a, b: a and b, [self._add_converter(k) for k in list(self._xsltLibrary.keys())], True) | python | def _refresh_converters(self):
""" Refresh all of the converters in the py4j library
@return: True if all converters were successfully updated
"""
self._converters.clear()
return reduce(lambda a, b: a and b, [self._add_converter(k) for k in list(self._xsltLibrary.keys())], True) | [
"def",
"_refresh_converters",
"(",
"self",
")",
":",
"self",
".",
"_converters",
".",
"clear",
"(",
")",
"return",
"reduce",
"(",
"lambda",
"a",
",",
"b",
":",
"a",
"and",
"b",
",",
"[",
"self",
".",
"_add_converter",
"(",
"k",
")",
"for",
"k",
"in",
"list",
"(",
"self",
".",
"_xsltLibrary",
".",
"keys",
"(",
")",
")",
"]",
",",
"True",
")"
] | Refresh all of the converters in the py4j library
@return: True if all converters were successfully updated | [
"Refresh",
"all",
"of",
"the",
"converters",
"in",
"the",
"py4j",
"library"
] | 66cd9233186cf5000d32e3a5b572e0002a8361c4 | https://github.com/cts2/pyjxslt/blob/66cd9233186cf5000d32e3a5b572e0002a8361c4/pyjxslt-python/src/pyjxslt/XSLTGateway.py#L110-L115 | train |
cts2/pyjxslt | pyjxslt-python/src/pyjxslt/XSLTGateway.py | Gateway.transform | def transform(self, key, xml, **kwargs):
"""
Transform the supplied XML using the transform identified by key
@param key: name of the transform to apply
@param xml: XML to transform
@param kwargs: XSLT parameters
@return: Transform output or None if transform failed
"""
if key in self._xsltLibrary and self.gateway_connected() and key in self._converters:
return self._converters[key].transform(xml, self._parms(**kwargs))
return None | python | def transform(self, key, xml, **kwargs):
"""
Transform the supplied XML using the transform identified by key
@param key: name of the transform to apply
@param xml: XML to transform
@param kwargs: XSLT parameters
@return: Transform output or None if transform failed
"""
if key in self._xsltLibrary and self.gateway_connected() and key in self._converters:
return self._converters[key].transform(xml, self._parms(**kwargs))
return None | [
"def",
"transform",
"(",
"self",
",",
"key",
",",
"xml",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"key",
"in",
"self",
".",
"_xsltLibrary",
"and",
"self",
".",
"gateway_connected",
"(",
")",
"and",
"key",
"in",
"self",
".",
"_converters",
":",
"return",
"self",
".",
"_converters",
"[",
"key",
"]",
".",
"transform",
"(",
"xml",
",",
"self",
".",
"_parms",
"(",
"*",
"*",
"kwargs",
")",
")",
"return",
"None"
] | Transform the supplied XML using the transform identified by key
@param key: name of the transform to apply
@param xml: XML to transform
@param kwargs: XSLT parameters
@return: Transform output or None if transform failed | [
"Transform",
"the",
"supplied",
"XML",
"using",
"the",
"transform",
"identified",
"by",
"key"
] | 66cd9233186cf5000d32e3a5b572e0002a8361c4 | https://github.com/cts2/pyjxslt/blob/66cd9233186cf5000d32e3a5b572e0002a8361c4/pyjxslt-python/src/pyjxslt/XSLTGateway.py#L139-L149 | train |
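A combined usage sketch for add_transform and transform. The Gateway constructor arguments are omitted as an assumption, and a running pyjxslt Java gateway is required for transform to return anything but None:

identity_xslt = (
    '<xsl:stylesheet version="1.0" '
    'xmlns:xsl="http://www.w3.org/1999/XSL/Transform">'
    '<xsl:template match="@*|node()"><xsl:copy>'
    '<xsl:apply-templates select="@*|node()"/>'
    '</xsl:copy></xsl:template></xsl:stylesheet>')

gw = Gateway()  # construction details assumed
gw.add_transform('identity', identity_xslt)
print(gw.transform('identity', '<doc/>'))  # None when the gateway is down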
johnnoone/aioconsul | aioconsul/client/query_endpoint.py | QueryEndpoint.items | async def items(self, *, dc=None, watch=None, consistency=None):
"""Provides a listing of all prepared queries
Parameters:
dc (str): Specify datacenter that will be used.
Defaults to the agent's local datacenter.
watch (Blocking): Do a blocking query
consistency (Consistency): Force consistency
Returns:
Collection: List of prepared queries
This returns a list of prepared queries, which looks like::
[
{
"ID": "8f246b77-f3e1-ff88-5b48-8ec93abf3e05",
"Name": "my-query",
"Session": "adf4238a-882b-9ddc-4a9d-5b6758e4159e",
"Token": "<hidden>",
"Service": {
"Service": "redis",
"Failover": {
"NearestN": 3,
"Datacenters": ["dc1", "dc2"]
},
"OnlyPassing": False,
"Tags": ["master", "!experimental"]
},
"DNS": {
"TTL": timedelta(seconds=10)
},
"RaftIndex": {
"CreateIndex": 23,
"ModifyIndex": 42
}
}
]
"""
response = await self._api.get("/v1/query", params={"dc": dc})
return response.body | python | async def items(self, *, dc=None, watch=None, consistency=None):
"""Provides a listing of all prepared queries
Parameters:
dc (str): Specify datacenter that will be used.
Defaults to the agent's local datacenter.
watch (Blocking): Do a blocking query
consistency (Consistency): Force consistency
Returns:
Collection: List of prepared queries
This returns a list of prepared queries, which looks like::
[
{
"ID": "8f246b77-f3e1-ff88-5b48-8ec93abf3e05",
"Name": "my-query",
"Session": "adf4238a-882b-9ddc-4a9d-5b6758e4159e",
"Token": "<hidden>",
"Service": {
"Service": "redis",
"Failover": {
"NearestN": 3,
"Datacenters": ["dc1", "dc2"]
},
"OnlyPassing": False,
"Tags": ["master", "!experimental"]
},
"DNS": {
"TTL": timedelta(seconds=10)
},
"RaftIndex": {
"CreateIndex": 23,
"ModifyIndex": 42
}
}
]
"""
response = await self._api.get("/v1/query", params={"dc": dc})
return response.body | [
"async",
"def",
"items",
"(",
"self",
",",
"*",
",",
"dc",
"=",
"None",
",",
"watch",
"=",
"None",
",",
"consistency",
"=",
"None",
")",
":",
"response",
"=",
"await",
"self",
".",
"_api",
".",
"get",
"(",
"\"/v1/query\"",
",",
"params",
"=",
"{",
"\"dc\"",
":",
"dc",
"}",
")",
"return",
"response",
".",
"body"
] | Provides a listing of all prepared queries
Parameters:
dc (str): Specify datacenter that will be used.
Defaults to the agent's local datacenter.
watch (Blocking): Do a blocking query
consistency (Consistency): Force consistency
Returns:
Collection: List of prepared queries
This returns a list of prepared queries, which looks like::
[
{
"ID": "8f246b77-f3e1-ff88-5b48-8ec93abf3e05",
"Name": "my-query",
"Session": "adf4238a-882b-9ddc-4a9d-5b6758e4159e",
"Token": "<hidden>",
"Service": {
"Service": "redis",
"Failover": {
"NearestN": 3,
"Datacenters": ["dc1", "dc2"]
},
"OnlyPassing": False,
"Tags": ["master", "!experimental"]
},
"DNS": {
"TTL": timedelta(seconds=10)
},
"RaftIndex": {
"CreateIndex": 23,
"ModifyIndex": 42
}
}
] | [
"Provides",
"a",
"listing",
"of",
"all",
"prepared",
"queries"
] | 02f7a529d7dc2e49bed942111067aa5faf320e90 | https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/query_endpoint.py#L15-L54 | train |
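An async usage sketch for items. The Consul client constructor and the client.query attribute name are assumptions about how this endpoint is wired into the aioconsul client:

import asyncio
from aioconsul import Consul  # assumed entry point

async def list_queries():
    client = Consul()  # connection details assumed
    for query in await client.query.items(dc='dc1'):
        print(query['ID'], query['Name'])

asyncio.get_event_loop().run_until_complete(list_queries())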
johnnoone/aioconsul | aioconsul/client/query_endpoint.py | QueryEndpoint.create | async def create(self, query, *, dc=None):
"""Creates a new prepared query
Parameters:
Query (Object): Query definition
dc (str): Specify datacenter that will be used.
Defaults to the agent's local datacenter.
Returns:
Object: New query ID
The create operation expects a body that defines the prepared query,
like this example::
{
"Name": "my-query",
"Session": "adf4238a-882b-9ddc-4a9d-5b6758e4159e",
"Token": "",
"Near": "node1",
"Service": {
"Service": "redis",
"Failover": {
"NearestN": 3,
"Datacenters": ["dc1", "dc2"]
},
"OnlyPassing": False,
"Tags": ["master", "!experimental"]
},
"DNS": {
"TTL": timedelta(seconds=10)
}
}
Only the **Service** field inside the **Service** structure is
mandatory, all other fields will take their default values if they
are not included.
**Name** is an optional friendly name that can be used to execute a
query instead of using its ID.
**Session** provides a way to automatically remove a prepared query
when the given session is invalidated. This is optional, and if not
given the prepared query must be manually removed when no longer
needed.
**Token**, if specified, is a captured ACL Token that is reused as the
ACL Token every time the query is executed. This allows queries to be
executed by clients with lesser or even no ACL Token, so this should
be used with care. The token itself can only be seen by clients with a
management token. If the **Token** field is left blank or omitted, the
client's ACL Token will be used to determine if they have access to the
service being queried. If the client does not supply an ACL Token, the
anonymous token will be used.
**Near** allows specifying a particular node to sort near based on
distance sorting using Network Coordinates. The nearest instance to
the specified node will be returned first, and subsequent nodes in the
response will be sorted in ascending order of estimated round-trip
times. If the node given does not exist, the nodes in the response
will be shuffled. Using the magic **_agent** value is supported, and
will automatically return results nearest the agent servicing the
request. If unspecified, the response will be shuffled by default.
The set of fields inside the **Service** structure define the
query's behavior.
**Service** is the name of the service to query. This is required.
**Failover** contains two fields, both of which are optional, and
determine what happens if no healthy nodes are available in the local
datacenter when the query is executed. It allows the use of nodes in
other datacenters with very little configuration.
If **NearestN** is set to a value greater than zero, then the query
will be forwarded to up to **NearestN** other datacenters based on
their estimated network round trip time using Network Coordinates from
the WAN gossip pool. The median round trip time from the server
handling the query to the servers in the remote datacenter is used to
determine the priority. The default value is zero. All Consul servers
must be running version 0.6.0 or above in order for this feature to
work correctly. If any servers are not running the required version of
Consul they will be considered last since they won't have any
available network coordinate information.
**Datacenters** contains a fixed list of remote datacenters to forward
the query to if there are no healthy nodes in the local datacenter.
Datacenters are queried in the order given in the list. If this option
is combined with **NearestN**, then the **NearestN** queries will be
performed first, followed by the list given by **Datacenters**. A
given datacenter will only be queried one time during a failover, even
if it is selected by both **NearestN** and is listed in
**Datacenters**. The default value is an empty list.
**OnlyPassing** controls the behavior of the query's health check
filtering. If this is set to false, the results will include nodes
with checks in the passing as well as the warning states. If this is
set to true, only nodes with checks in the passing state will be
returned. The default value is False.
**Tags** provides a list of service tags to filter the query results.
For a service to pass the tag filter it must have all of the required
tags, and none of the excluded tags (prefixed with ``!``).
The default value is an empty list, which does no tag filtering.
**TTL** in the **DNS** structure is a duration string that can use "s"
as a suffix for seconds. It controls how the TTL is set when query
results are served over DNS. If this isn't specified, then the Consul
agent configuration for the given service will be used
(see DNS Caching). If this is specified, it will take precedence over
any Consul agent-specific configuration. If no TTL is specified here
or at the Consul agent level, then the TTL will default to 0.
It returns the ID of the created query::
{
"ID": "8f246b77-f3e1-ff88-5b48-8ec93abf3e05"
}
"""
if "Token" in query:
# in case of a full token object...
query["Token"] = extract_attr(query["Token"], keys=["ID"])
response = await self._api.post("/v1/query",
params={"dc": dc}, data=query)
return response.body | python | async def create(self, query, *, dc=None):
"""Creates a new prepared query
Parameters:
Query (Object): Query definition
dc (str): Specify datacenter that will be used.
Defaults to the agent's local datacenter.
Returns:
Object: New query ID
The create operation expects a body that defines the prepared query,
like this example::
{
"Name": "my-query",
"Session": "adf4238a-882b-9ddc-4a9d-5b6758e4159e",
"Token": "",
"Near": "node1",
"Service": {
"Service": "redis",
"Failover": {
"NearestN": 3,
"Datacenters": ["dc1", "dc2"]
},
"OnlyPassing": False,
"Tags": ["master", "!experimental"]
},
"DNS": {
"TTL": timedelta(seconds=10)
}
}
Only the **Service** field inside the **Service** structure is
mandatory, all other fields will take their default values if they
are not included.
**Name** is an optional friendly name that can be used to execute a
query instead of using its ID.
**Session** provides a way to automatically remove a prepared query
when the given session is invalidated. This is optional, and if not
given the prepared query must be manually removed when no longer
needed.
**Token**, if specified, is a captured ACL Token that is reused as the
ACL Token every time the query is executed. This allows queries to be
executed by clients with lesser or even no ACL Token, so this should
be used with care. The token itself can only be seen by clients with a
management token. If the **Token** field is left blank or omitted, the
client's ACL Token will be used to determine if they have access to the
service being queried. If the client does not supply an ACL Token, the
anonymous token will be used.
**Near** allows specifying a particular node to sort near based on
distance sorting using Network Coordinates. The nearest instance to
the specified node will be returned first, and subsequent nodes in the
response will be sorted in ascending order of estimated round-trip
times. If the node given does not exist, the nodes in the response
will be shuffled. Using the magic **_agent** value is supported, and
will automatically return results nearest the agent servicing the
request. If unspecified, the response will be shuffled by default.
The set of fields inside the **Service** structure define the
query's behavior.
**Service** is the name of the service to query. This is required.
**Failover** contains two fields, both of which are optional, and
determine what happens if no healthy nodes are available in the local
datacenter when the query is executed. It allows the use of nodes in
other datacenters with very little configuration.
If **NearestN** is set to a value greater than zero, then the query
will be forwarded to up to **NearestN** other datacenters based on
their estimated network round trip time using Network Coordinates from
the WAN gossip pool. The median round trip time from the server
handling the query to the servers in the remote datacenter is used to
determine the priority. The default value is zero. All Consul servers
must be running version 0.6.0 or above in order for this feature to
work correctly. If any servers are not running the required version of
Consul they will be considered last since they won't have any
available network coordinate information.
**Datacenters** contains a fixed list of remote datacenters to forward
the query to if there are no healthy nodes in the local datacenter.
Datacenters are queried in the order given in the list. If this option
is combined with **NearestN**, then the **NearestN** queries will be
performed first, followed by the list given by **Datacenters**. A
given datacenter will only be queried one time during a failover, even
if it is selected by both **NearestN** and is listed in
**Datacenters**. The default value is an empty list.
**OnlyPassing** controls the behavior of the query's health check
filtering. If this is set to false, the results will include nodes
with checks in the passing as well as the warning states. If this is
set to true, only nodes with checks in the passing state will be
returned. The default value is False.
**Tags** provides a list of service tags to filter the query results.
For a service to pass the tag filter it must have all of the required
tags, and none of the excluded tags (prefixed with ``!``).
The default value is an empty list, which does no tag filtering.
**TTL** in the **DNS** structure is a duration string that can use "s"
as a suffix for seconds. It controls how the TTL is set when query
results are served over DNS. If this isn't specified, then the Consul
agent configuration for the given service will be used
(see DNS Caching). If this is specified, it will take precedence over
any Consul agent-specific configuration. If no TTL is specified here
or at the Consul agent level, then the TTL will default to 0.
It returns the ID of the created query::
{
"ID": "8f246b77-f3e1-ff88-5b48-8ec93abf3e05"
}
"""
if "Token" in query:
# in case of a full token object...
query["Token"] = extract_attr(query["Token"], keys=["ID"])
response = await self._api.post("/v1/query",
params={"dc": dc}, data=query)
return response.body | [
"async",
"def",
"create",
"(",
"self",
",",
"query",
",",
"*",
",",
"dc",
"=",
"None",
")",
":",
"if",
"\"Token\"",
"in",
"query",
":",
"# in case of a full token object...",
"query",
"[",
"\"Token\"",
"]",
"=",
"extract_attr",
"(",
"query",
"[",
"\"Token\"",
"]",
",",
"keys",
"=",
"[",
"\"ID\"",
"]",
")",
"response",
"=",
"await",
"self",
".",
"_api",
".",
"post",
"(",
"\"/v1/query\"",
",",
"params",
"=",
"{",
"\"dc\"",
":",
"dc",
"}",
",",
"data",
"=",
"query",
")",
"return",
"response",
".",
"body"
] | Creates a new prepared query
Parameters:
Query (Object): Query definition
dc (str): Specify datacenter that will be used.
Defaults to the agent's local datacenter.
Returns:
Object: New query ID
The create operation expects a body that defines the prepared query,
like this example::
{
"Name": "my-query",
"Session": "adf4238a-882b-9ddc-4a9d-5b6758e4159e",
"Token": "",
"Near": "node1",
"Service": {
"Service": "redis",
"Failover": {
"NearestN": 3,
"Datacenters": ["dc1", "dc2"]
},
"OnlyPassing": False,
"Tags": ["master", "!experimental"]
},
"DNS": {
"TTL": timedelta(seconds=10)
}
}
Only the **Service** field inside the **Service** structure is
mandatory, all other fields will take their default values if they
are not included.
**Name** is an optional friendly name that can be used to execute a
query instead of using its ID.
**Session** provides a way to automatically remove a prepared query
when the given session is invalidated. This is optional, and if not
given the prepared query must be manually removed when no longer
needed.
**Token**, if specified, is a captured ACL Token that is reused as the
ACL Token every time the query is executed. This allows queries to be
executed by clients with lesser or even no ACL Token, so this should
be used with care. The token itself can only be seen by clients with a
management token. If the **Token** field is left blank or omitted, the
client's ACL Token will be used to determine if they have access to the
service being queried. If the client does not supply an ACL Token, the
anonymous token will be used.
**Near** allows specifying a particular node to sort near based on
distance sorting using Network Coordinates. The nearest instance to
the specified node will be returned first, and subsequent nodes in the
response will be sorted in ascending order of estimated round-trip
times. If the node given does not exist, the nodes in the response
will be shuffled. Using the magic **_agent** value is supported, and
will automatically return results nearest the agent servicing the
request. If unspecified, the response will be shuffled by default.
The set of fields inside the **Service** structure define the
query's behavior.
**Service** is the name of the service to query. This is required.
**Failover** contains two fields, both of which are optional, and
determine what happens if no healthy nodes are available in the local
datacenter when the query is executed. It allows the use of nodes in
other datacenters with very little configuration.
If **NearestN** is set to a value greater than zero, then the query
will be forwarded to up to **NearestN** other datacenters based on
their estimated network round trip time using Network Coordinates from
the WAN gossip pool. The median round trip time from the server
handling the query to the servers in the remote datacenter is used to
determine the priority. The default value is zero. All Consul servers
must be running version 0.6.0 or above in order for this feature to
work correctly. If any servers are not running the required version of
Consul they will be considered last since they won't have any
available network coordinate information.
**Datacenters** contains a fixed list of remote datacenters to forward
the query to if there are no healthy nodes in the local datacenter.
Datacenters are queried in the order given in the list. If this option
is combined with **NearestN**, then the **NearestN** queries will be
performed first, followed by the list given by **Datacenters**. A
given datacenter will only be queried one time during a failover, even
if it is selected by both **NearestN** and is listed in
**Datacenters**. The default value is an empty list.
**OnlyPassing** controls the behavior of the query's health check
filtering. If this is set to false, the results will include nodes
with checks in the passing as well as the warning states. If this is
set to true, only nodes with checks in the passing state will be
returned. The default value is False.
**Tags** provides a list of service tags to filter the query results.
For a service to pass the tag filter it must have all of the required
tags, and none of the excluded tags (prefixed with ``!``).
The default value is an empty list, which does no tag filtering.
**TTL** in the **DNS** structure is a duration string that can use "s"
as a suffix for seconds. It controls how the TTL is set when query
results are served over DNS. If this isn't specified, then the Consul
agent configuration for the given service will be used
(see DNS Caching). If this is specified, it will take precedence over
any Consul agent-specific configuration. If no TTL is specified here
or at the Consul agent level, then the TTL will default to 0.
It returns the ID of the created query::
{
"ID": "8f246b77-f3e1-ff88-5b48-8ec93abf3e05"
} | [
"Creates",
"a",
"new",
"prepared",
"query"
] | 02f7a529d7dc2e49bed942111067aa5faf320e90 | https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/query_endpoint.py#L56-L180 | train |
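A usage sketch for create, reusing the docstring's own example payload; the client.query wiring is assumed as above and the call must run inside a coroutine:

from datetime import timedelta

query = {
    "Name": "my-query",
    "Service": {
        "Service": "redis",
        "Failover": {"NearestN": 3, "Datacenters": ["dc1", "dc2"]},
        "OnlyPassing": False,
        "Tags": ["master", "!experimental"],
    },
    "DNS": {"TTL": timedelta(seconds=10)},
}
result = await client.query.create(query)  # inside a coroutine
query_id = result["ID"]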
johnnoone/aioconsul | aioconsul/client/query_endpoint.py | QueryEndpoint.update | async def update(self, query, *, dc=None):
"""Updates existing prepared query
Parameters:
Query (Object): Query definition
dc (str): Specify datacenter that will be used.
Defaults to the agent's local datacenter.
Returns:
bool: ``True`` on success
"""
query_id = extract_attr(query, keys=["ID"])
response = await self._api.put("/v1/query", query_id,
params={"dc": dc}, data=query)
return response.status == 200 | python | async def update(self, query, *, dc=None):
"""Updates existing prepared query
Parameters:
Query (Object): Query definition
dc (str): Specify datacenter that will be used.
Defaults to the agent's local datacenter.
Returns:
bool: ``True`` on success
"""
query_id = extract_attr(query, keys=["ID"])
response = await self._api.put("/v1/query", query_id,
params={"dc": dc}, data=query)
return response.status == 200 | [
"async",
"def",
"update",
"(",
"self",
",",
"query",
",",
"*",
",",
"dc",
"=",
"None",
")",
":",
"query_id",
"=",
"extract_attr",
"(",
"query",
",",
"keys",
"=",
"[",
"\"ID\"",
"]",
")",
"response",
"=",
"await",
"self",
".",
"_api",
".",
"put",
"(",
"\"/v1/query\"",
",",
"query_id",
",",
"params",
"=",
"{",
"\"dc\"",
":",
"dc",
"}",
",",
"data",
"=",
"query",
")",
"return",
"response",
".",
"status",
"==",
"200"
] | Updates existing prepared query
Parameters:
Query (Object): Query definition
dc (str): Specify datacenter that will be used.
Defaults to the agent's local datacenter.
Returns:
bool: ``True`` on success | [
"Updates",
"existing",
"prepared",
"query"
] | 02f7a529d7dc2e49bed942111067aa5faf320e90 | https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/query_endpoint.py#L203-L216 | train |
johnnoone/aioconsul | aioconsul/client/query_endpoint.py | QueryEndpoint.delete | async def delete(self, query, *, dc=None):
"""Delete existing prepared query
Parameters:
query (ObjectID): Query ID
dc (str): Specify datacenter that will be used.
Defaults to the agent's local datacenter.
Results:
bool: ``True`` on success
"""
query_id = extract_attr(query, keys=["ID"])
response = await self._api.delete("/v1/query", query_id,
params={"dc": dc})
return response.status == 200 | python | async def delete(self, query, *, dc=None):
"""Delete existing prepared query
Parameters:
query (ObjectID): Query ID
dc (str): Specify datacenter that will be used.
Defaults to the agent's local datacenter.
Results:
bool: ``True`` on success
"""
query_id = extract_attr(query, keys=["ID"])
response = await self._api.delete("/v1/query", query_id,
params={"dc": dc})
return response.status == 200 | [
"async",
"def",
"delete",
"(",
"self",
",",
"query",
",",
"*",
",",
"dc",
"=",
"None",
")",
":",
"query_id",
"=",
"extract_attr",
"(",
"query",
",",
"keys",
"=",
"[",
"\"ID\"",
"]",
")",
"response",
"=",
"await",
"self",
".",
"_api",
".",
"delete",
"(",
"\"/v1/query\"",
",",
"query_id",
",",
"params",
"=",
"{",
"\"dc\"",
":",
"dc",
"}",
")",
"return",
"response",
".",
"status",
"==",
"200"
] | Delete existing prepared query
Parameters:
query (ObjectID): Query ID
dc (str): Specify datacenter that will be used.
Defaults to the agent's local datacenter.
Results:
bool: ``True`` on success | [
"Delete",
"existing",
"prepared",
"query"
] | 02f7a529d7dc2e49bed942111067aa5faf320e90 | https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/query_endpoint.py#L218-L231 | train |
johnnoone/aioconsul | aioconsul/client/query_endpoint.py | QueryEndpoint.execute | async def execute(self, query, *,
dc=None, near=None, limit=None, consistency=None):
"""Executes a prepared query
Parameters:
query (ObjectID): Query ID
dc (str): Specify datacenter that will be used.
Defaults to the agent's local datacenter.
near (str): Sort the resulting list in ascending order based on
the estimated round trip time from that node
limit (int): Limit the list's size to the given number of nodes
consistency (Consistency): Force consistency
Returns:
Object:
Raises:
NotFound: the query does not exist
Returns a body like this::
{
"Service": "redis",
"Nodes": [
{
"Node": {
"Node": "foobar",
"Address": "10.1.10.12",
"TaggedAddresses": {
"lan": "10.1.10.12",
"wan": "10.1.10.12"
}
},
"Service": {
"ID": "redis",
"Service": "redis",
"Tags": None,
"Port": 8000
},
"Checks": [
{
"Node": "foobar",
"CheckID": "service:redis",
"Name": "Service 'redis' check",
"Status": "passing",
"Notes": "",
"Output": "",
"ServiceID": "redis",
"ServiceName": "redis"
},
{
"Node": "foobar",
"CheckID": "serfHealth",
"Name": "Serf Health Status",
"Status": "passing",
"Notes": "",
"Output": "",
"ServiceID": "",
"ServiceName": ""
}
],
"DNS": {
"TTL": timedelta(seconds=10)
},
"Datacenter": "dc3",
"Failovers": 2
}
]
}
The **Nodes** section contains the list of healthy nodes providing
the given service, as specified by the constraints of the prepared
query.
**Service** has the service name that the query was selecting. This is
useful for context in case an empty list of nodes is returned.
**DNS** has information used when serving the results over DNS. This
is just a copy of the structure given when the prepared query was
created.
**Datacenter** has the datacenter that ultimately provided the list of
nodes and **Failovers** has the number of remote datacenters that were
queried while executing the query. This provides some insight into
where the data came from. This will be zero during non-failover
operations where there were healthy nodes found in the local
datacenter.
"""
query_id = extract_attr(query, keys=["ID"])
response = await self._api.get(
"/v1/query/%s/execute" % query_id,
params={"dc": dc, "near": near, "limit": limit},
consistency=consistency)
return response.body | python | async def execute(self, query, *,
dc=None, near=None, limit=None, consistency=None):
"""Executes a prepared query
Parameters:
query (ObjectID): Query ID
dc (str): Specify datacenter that will be used.
Defaults to the agent's local datacenter.
near (str): Sort the resulting list in ascending order based on
the estimated round trip time from that node
limit (int): Limit the list's size to the given number of nodes
consistency (Consistency): Force consistency
Returns:
Object:
Raises:
NotFound: the query does not exist
Returns a body like this::
{
"Service": "redis",
"Nodes": [
{
"Node": {
"Node": "foobar",
"Address": "10.1.10.12",
"TaggedAddresses": {
"lan": "10.1.10.12",
"wan": "10.1.10.12"
}
},
"Service": {
"ID": "redis",
"Service": "redis",
"Tags": None,
"Port": 8000
},
"Checks": [
{
"Node": "foobar",
"CheckID": "service:redis",
"Name": "Service 'redis' check",
"Status": "passing",
"Notes": "",
"Output": "",
"ServiceID": "redis",
"ServiceName": "redis"
},
{
"Node": "foobar",
"CheckID": "serfHealth",
"Name": "Serf Health Status",
"Status": "passing",
"Notes": "",
"Output": "",
"ServiceID": "",
"ServiceName": ""
}
],
"DNS": {
"TTL": timedelta(seconds=10)
},
"Datacenter": "dc3",
"Failovers": 2
}
]
}
The **Nodes** section contains the list of healthy nodes providing
the given service, as specified by the constraints of the prepared
query.
**Service** has the service name that the query was selecting. This is
useful for context in case an empty list of nodes is returned.
**DNS** has information used when serving the results over DNS. This
is just a copy of the structure given when the prepared query was
created.
**Datacenter** has the datacenter that ultimately provided the list of
nodes and **Failovers** has the number of remote datacenters that were
queried while executing the query. This provides some insight into
where the data came from. This will be zero during non-failover
operations where there were healthy nodes found in the local
datacenter.
"""
query_id = extract_attr(query, keys=["ID"])
response = await self._api.get(
"/v1/query/%s/execute" % query_id,
params={"dc": dc, "near": near, "limit": limit},
consistency=consistency)
return response.body | [
"async",
"def",
"execute",
"(",
"self",
",",
"query",
",",
"*",
",",
"dc",
"=",
"None",
",",
"near",
"=",
"None",
",",
"limit",
"=",
"None",
",",
"consistency",
"=",
"None",
")",
":",
"query_id",
"=",
"extract_attr",
"(",
"query",
",",
"keys",
"=",
"[",
"\"ID\"",
"]",
")",
"response",
"=",
"await",
"self",
".",
"_api",
".",
"get",
"(",
"\"/v1/query/%s/execute\"",
"%",
"query_id",
",",
"params",
"=",
"{",
"\"dc\"",
":",
"dc",
",",
"\"near\"",
":",
"near",
",",
"\"limit\"",
":",
"limit",
"}",
",",
"consistency",
"=",
"consistency",
")",
"return",
"response",
".",
"body"
] | Executes a prepared query
Parameters:
query (ObjectID): Query ID
dc (str): Specify datacenter that will be used.
Defaults to the agent's local datacenter.
near (str): Sort the resulting list in ascending order based on
the estimated round trip time from that node
limit (int): Limit the list's size to the given number of nodes
consistency (Consistency): Force consistency
Returns:
Object:
Raises:
NotFound: the query does not exist
Returns a body like this::
{
"Service": "redis",
"Nodes": [
{
"Node": {
"Node": "foobar",
"Address": "10.1.10.12",
"TaggedAddresses": {
"lan": "10.1.10.12",
"wan": "10.1.10.12"
}
},
"Service": {
"ID": "redis",
"Service": "redis",
"Tags": None,
"Port": 8000
},
"Checks": [
{
"Node": "foobar",
"CheckID": "service:redis",
"Name": "Service 'redis' check",
"Status": "passing",
"Notes": "",
"Output": "",
"ServiceID": "redis",
"ServiceName": "redis"
},
{
"Node": "foobar",
"CheckID": "serfHealth",
"Name": "Serf Health Status",
"Status": "passing",
"Notes": "",
"Output": "",
"ServiceID": "",
"ServiceName": ""
}
],
"DNS": {
"TTL": timedelta(seconds=10)
},
"Datacenter": "dc3",
"Failovers": 2
}
]
}
The **Nodes** section contains the list of healthy nodes providing
the given service, as specified by the constraints of the prepared
query.
**Service** has the service name that the query was selecting. This is
useful for context in case an empty list of nodes is returned.
**DNS** has information used when serving the results over DNS. This
is just a copy of the structure given when the prepared query was
created.
**Datacenter** has the datacenter that ultimately provided the list of
nodes and **Failovers** has the number of remote datacenters that were
queried while executing the query. This provides some insight into
where the data came from. This will be zero during non-failover
operations where there were healthy nodes found in the local
datacenter. | [
"Executes",
"a",
"prepared",
"query"
] | 02f7a529d7dc2e49bed942111067aa5faf320e90 | https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/query_endpoint.py#L233-L324 | train |
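A minimal usage sketch for the method above. The client setup and the `client.query` attribute are assumptions based on the module path (`aioconsul/client/query_endpoint.py`), and the query ID is a placeholder:

import asyncio

from aioconsul import Consul

async def main():
    client = Consul()  # assumes a local agent on the default address
    # execute a prepared query by ID, capping the result at 3 nodes
    result = await client.query.execute(
        "8f246b77-f3e1-ff88-5b48-8ec93abf3e05", limit=3)
    for node in result["Nodes"]:
        print(node["Node"]["Node"], node["Service"]["Port"])

asyncio.get_event_loop().run_until_complete(main())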
bitesofcode/projexui | projexui/widgets/xorbtreewidget/xorbgroupitem.py | XOrbGroupItem.load | def load(self):
"""
Loads the records from the query set linked with this item.
"""
if self._loaded:
return
rset = self.recordSet()
QApplication.setOverrideCursor(Qt.WaitCursor)
self.loadRecords(rset)
QApplication.restoreOverrideCursor() | python | def load(self):
"""
Loads the records from the query set linked with this item.
"""
if self._loaded:
return
rset = self.recordSet()
QApplication.setOverrideCursor(Qt.WaitCursor)
self.loadRecords(rset)
QApplication.restoreOverrideCursor() | [
"def",
"load",
"(",
"self",
")",
":",
"if",
"self",
".",
"_loaded",
":",
"return",
"rset",
"=",
"self",
".",
"recordSet",
"(",
")",
"QApplication",
".",
"setOverrideCursor",
"(",
"Qt",
".",
"WaitCursor",
")",
"self",
".",
"loadRecords",
"(",
"rset",
")",
"QApplication",
".",
"restoreOverrideCursor",
"(",
")"
] | Loads the records from the query set linked with this item. | [
"Loads",
"the",
"records",
"from",
"the",
"query",
"set",
"linked",
"with",
"this",
"item",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xorbtreewidget/xorbgroupitem.py#L117-L128 | train |
intelsdi-x/snap-plugin-lib-py | snap_plugin/v1/plugin.py | _tabulate | def _tabulate(rows, headers, spacing=5):
"""Prepare simple table with spacing based on content"""
if len(rows) == 0:
return "None\n"
assert len(rows[0]) == len(headers)
count = len(rows[0])
widths = [0 for _ in range(count)]
rows = [headers] + rows
for row in rows:
for index, field in enumerate(row):
if len(str(field)) > widths[index]:
widths[index] = len(str(field))
output = ""
for row in rows:
for index, field in enumerate(row):
field = str(field)
output += field + (widths[index] - len(field) + spacing) * " "
output += "\n"
return output | python | def _tabulate(rows, headers, spacing=5):
"""Prepare simple table with spacing based on content"""
if len(rows) == 0:
return "None\n"
assert len(rows[0]) == len(headers)
count = len(rows[0])
widths = [0 for _ in range(count)]
rows = [headers] + rows
for row in rows:
for index, field in enumerate(row):
if len(str(field)) > widths[index]:
widths[index] = len(str(field))
output = ""
for row in rows:
for index, field in enumerate(row):
field = str(field)
output += field + (widths[index] - len(field) + spacing) * " "
output += "\n"
return output | [
"def",
"_tabulate",
"(",
"rows",
",",
"headers",
",",
"spacing",
"=",
"5",
")",
":",
"if",
"len",
"(",
"rows",
")",
"==",
"0",
":",
"return",
"\"None\\n\"",
"assert",
"len",
"(",
"rows",
"[",
"0",
"]",
")",
"==",
"len",
"(",
"headers",
")",
"count",
"=",
"len",
"(",
"rows",
"[",
"0",
"]",
")",
"widths",
"=",
"[",
"0",
"for",
"_",
"in",
"range",
"(",
"count",
")",
"]",
"rows",
"=",
"[",
"headers",
"]",
"+",
"rows",
"for",
"row",
"in",
"rows",
":",
"for",
"index",
",",
"field",
"in",
"enumerate",
"(",
"row",
")",
":",
"if",
"len",
"(",
"str",
"(",
"field",
")",
")",
">",
"widths",
"[",
"index",
"]",
":",
"widths",
"[",
"index",
"]",
"=",
"len",
"(",
"str",
"(",
"field",
")",
")",
"output",
"=",
"\"\"",
"for",
"row",
"in",
"rows",
":",
"for",
"index",
",",
"field",
"in",
"enumerate",
"(",
"row",
")",
":",
"field",
"=",
"str",
"(",
"field",
")",
"output",
"+=",
"field",
"+",
"(",
"widths",
"[",
"index",
"]",
"-",
"len",
"(",
"field",
")",
"+",
"spacing",
")",
"*",
"\" \"",
"output",
"+=",
"\"\\n\"",
"return",
"output"
] | Prepare simple table with spacing based on content | [
"Prepare",
"simple",
"table",
"with",
"spacing",
"based",
"on",
"content"
] | 8da5d00ac5f9d2b48a7239563ac7788209891ca4 | https://github.com/intelsdi-x/snap-plugin-lib-py/blob/8da5d00ac5f9d2b48a7239563ac7788209891ca4/snap_plugin/v1/plugin.py#L786-L806 | train |
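A quick sketch of how `_tabulate` lays a table out (the function is module-private, so importing it here is for illustration only):

from snap_plugin.v1.plugin import _tabulate

headers = ["NAME", "TYPE"]
rows = [["cpu", "collector"], ["mem-usage", "collector"]]
# Every column is padded to its widest entry plus 5 trailing spaces.
print(_tabulate(rows, headers))
# An empty row list short-circuits to the string "None\n".
print(_tabulate([], headers))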
intelsdi-x/snap-plugin-lib-py | snap_plugin/v1/plugin.py | _Flags.add_item | def add_item(self, item):
"""Add single command line flag
Arguments:
item (Flag): Flag definition to register. Its fields are:
name (:obj:`str`): Name of flag used in command line
flag_type (:py:class:`snap_plugin.v1.plugin.FlagType`):
Indication if flag should store value or is simple bool flag
description (:obj:`str`): Flag description used in command line
default (:obj:`object`, optional): Optional default value for flag
Raises:
TypeError: Raised when a field is missing or of the wrong type
"""
if not(isinstance(item.name, basestring) and isinstance(item.description, basestring)):
raise TypeError("Name and description should be strings, are of type {} and {}"
.format(type(item.name), type(item.description)))
if not(isinstance(item.flag_type, FlagType)):
raise TypeError("Flag type should be of type FlagType, is of {}".format(type(item.flag_type)))
if item.name not in self._flags:
if item.default is not None:
if item.default is not False:
item.description = item.description + " (default: %(default)s)"
self._flags[item.name] = item
else:
self._flags[item.name] = item | python | def add_item(self, item):
"""Add single command line flag
Arguments:
item (Flag): Flag definition to register. Its fields are:
name (:obj:`str`): Name of flag used in command line
flag_type (:py:class:`snap_plugin.v1.plugin.FlagType`):
Indication if flag should store value or is simple bool flag
description (:obj:`str`): Flag description used in command line
default (:obj:`object`, optional): Optional default value for flag
Raises:
TypeError: Raised when a field is missing or of the wrong type
"""
if not(isinstance(item.name, basestring) and isinstance(item.description, basestring)):
raise TypeError("Name and description should be strings, are of type {} and {}"
.format(type(item.name), type(item.description)))
if not(isinstance(item.flag_type, FlagType)):
raise TypeError("Flag type should be of type FlagType, is of {}".format(type(item.flag_type)))
if item.name not in self._flags:
if item.default is not None:
if item.default is not False:
item.description = item.description + " (default: %(default)s)"
self._flags[item.name] = item
else:
self._flags[item.name] = item | [
"def",
"add_item",
"(",
"self",
",",
"item",
")",
":",
"if",
"not",
"(",
"isinstance",
"(",
"item",
".",
"name",
",",
"basestring",
")",
"and",
"isinstance",
"(",
"item",
".",
"description",
",",
"basestring",
")",
")",
":",
"raise",
"TypeError",
"(",
"\"Name and description should be strings, are of type {} and {}\"",
".",
"format",
"(",
"type",
"(",
"item",
".",
"name",
")",
",",
"type",
"(",
"item",
".",
"description",
")",
")",
")",
"if",
"not",
"(",
"isinstance",
"(",
"item",
".",
"flag_type",
",",
"FlagType",
")",
")",
":",
"raise",
"TypeError",
"(",
"\"Flag type should be of type FlagType, is of {}\"",
".",
"format",
"(",
"type",
"(",
"item",
".",
"flag_type",
")",
")",
")",
"if",
"item",
".",
"name",
"not",
"in",
"self",
".",
"_flags",
":",
"if",
"item",
".",
"default",
"is",
"not",
"None",
":",
"if",
"item",
".",
"default",
"is",
"not",
"False",
":",
"item",
".",
"description",
"=",
"item",
".",
"description",
"+",
"\" (default: %(default)s)\"",
"self",
".",
"_flags",
"[",
"item",
".",
"name",
"]",
"=",
"item",
"else",
":",
"self",
".",
"_flags",
"[",
"item",
".",
"name",
"]",
"=",
"item"
] | Add single command line flag
Arguments:
item (Flag): Flag definition to register. Its fields are:
name (:obj:`str`): Name of flag used in command line
flag_type (:py:class:`snap_plugin.v1.plugin.FlagType`):
Indication if flag should store value or is simple bool flag
description (:obj:`str`): Flag description used in command line
default (:obj:`object`, optional): Optional default value for flag
Raises:
TypeError: Raised when a field is missing or of the wrong type | [
"Add",
"single",
"command",
"line",
"flag"
] | 8da5d00ac5f9d2b48a7239563ac7788209891ca4 | https://github.com/intelsdi-x/snap-plugin-lib-py/blob/8da5d00ac5f9d2b48a7239563ac7788209891ca4/snap_plugin/v1/plugin.py#L107-L133 | train |
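A sketch of registering a single flag. `Flag` is assumed to be constructible from the positional fields named in the docstring, and the enum member `FlagType.value` is an assumption about `FlagType`:

from snap_plugin.v1.plugin import Flag, FlagType, _Flags

flags = _Flags()  # private class, instantiated here only to illustrate
# a value-storing flag; "(default: %(default)s)" gets appended to the help
flags.add_item(Flag("port", FlagType.value, "Port to listen on", 8181))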
intelsdi-x/snap-plugin-lib-py | snap_plugin/v1/plugin.py | _Flags.add_multiple | def add_multiple(self, flags):
"""Add multiple command line flags
Arguments:
flags (:obj:`list`): List of Flag objects and/or tuples
of the form (name, flag_type, description[, default])
Raises:
TypeError: Raised when a flag is not a Flag object or a valid tuple
"""
if not isinstance(flags, list):
raise TypeError("Expected list of flags, got object of type {}".format(type(flags)))
for flag in flags:
if isinstance(flag, Flag):
self.add_item(flag)
elif isinstance(flag, tuple):
try:
item = Flag(*flag)
self.add_item(item)
except TypeError as e:
raise TypeError("Invalid arguments to initialize a flag definition, expect ({0} [, {1}]) but got {2}"
.format(", ".join(Flag.REQUIRED_FIELDS),
", ".join(Flag.OPTIONAL_FIELDS), flag)) | python | def add_multiple(self, flags):
"""Add multiple command line flags
Arguments:
flags (:obj:`list`): List of Flag objects and/or tuples
of the form (name, flag_type, description[, default])
Raises:
TypeError: Raised when a flag is not a Flag object or a valid tuple
"""
if not isinstance(flags, list):
raise TypeError("Expected list of flags, got object of type {}".format(type(flags)))
for flag in flags:
if isinstance(flag, Flag):
self.add_item(flag)
elif isinstance(flag, tuple):
try:
item = Flag(*flag)
self.add_item(item)
except TypeError as e:
raise TypeError("Invalid arguments to initialize a flag definition, expect ({0} [, {1}]) but got {2}"
.format(", ".join(Flag.REQUIRED_FIELDS),
", ".join(Flag.OPTIONAL_FIELDS), flag)) | [
"def",
"add_multiple",
"(",
"self",
",",
"flags",
")",
":",
"if",
"not",
"isinstance",
"(",
"flags",
",",
"list",
")",
":",
"raise",
"TypeError",
"(",
"\"Expected list of flags, got object of type{}\"",
".",
"format",
"(",
"type",
"(",
"flags",
")",
")",
")",
"for",
"flag",
"in",
"flags",
":",
"if",
"isinstance",
"(",
"flag",
",",
"Flag",
")",
":",
"self",
".",
"add_item",
"(",
"flag",
")",
"elif",
"isinstance",
"(",
"flag",
",",
"tuple",
")",
":",
"try",
":",
"item",
"=",
"Flag",
"(",
"*",
"flag",
")",
"self",
".",
"add_item",
"(",
"item",
")",
"except",
"TypeError",
"as",
"e",
":",
"raise",
"TypeError",
"(",
"\"Invalid arguments to initialize a flag definition, expect ({0} [, {1}]) but got {3}\"",
".",
"format",
"(",
"\", \"",
".",
"join",
"(",
"Flag",
".",
"REQUIRED_FIELDS",
")",
",",
"\", \"",
".",
"join",
"(",
"Flag",
".",
"OPTIONAL_FIELDS",
")",
",",
"flag",
")",
")"
] | Add multiple command line flags
Arguments:
flags (:obj:`list`): List of Flag objects and/or tuples
of the form (name, flag_type, description[, default])
Raises:
TypeError: Raised when a flag is not a Flag object or a valid tuple | [
"Add",
"multiple",
"command",
"line",
"flags"
] | 8da5d00ac5f9d2b48a7239563ac7788209891ca4 | https://github.com/intelsdi-x/snap-plugin-lib-py/blob/8da5d00ac5f9d2b48a7239563ac7788209891ca4/snap_plugin/v1/plugin.py#L138-L160 | train |
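The tuple form accepted by `add_multiple`, continuing the sketch from the previous record (the member `FlagType.toggle` is likewise an assumption about the enum):

flags.add_multiple([
    ("verbose", FlagType.toggle, "Enable verbose logging"),
    ("interval", FlagType.value, "Collection interval in seconds", 60),
])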
bitesofcode/projexui | projexui/widgets/xcalendarwidget/xcalendarwidget.py | XCalendarWidget.gotoNext | def gotoNext( self ):
"""
Goes to the next date based on the current mode and date.
"""
scene = self.scene()
date = scene.currentDate()
# go forward a day
if ( scene.currentMode() == scene.Mode.Day ):
scene.setCurrentDate(date.addDays(1))
# go forward a week
elif ( scene.currentMode() == scene.Mode.Week ):
scene.setCurrentDate(date.addDays(7))
# go forward a month
elif ( scene.currentMode() == scene.Mode.Month ):
scene.setCurrentDate(date.addMonths(1)) | python | def gotoNext( self ):
"""
Goes to the next date based on the current mode and date.
"""
scene = self.scene()
date = scene.currentDate()
# go forward a day
if ( scene.currentMode() == scene.Mode.Day ):
scene.setCurrentDate(date.addDays(1))
# go forward a week
elif ( scene.currentMode() == scene.Mode.Week ):
scene.setCurrentDate(date.addDays(7))
# go forward a month
elif ( scene.currentMode() == scene.Mode.Month ):
scene.setCurrentDate(date.addMonths(1)) | [
"def",
"gotoNext",
"(",
"self",
")",
":",
"scene",
"=",
"self",
".",
"scene",
"(",
")",
"date",
"=",
"scene",
".",
"currentDate",
"(",
")",
"# go forward a day\r",
"if",
"(",
"scene",
".",
"currentMode",
"(",
")",
"==",
"scene",
".",
"Mode",
".",
"Day",
")",
":",
"scene",
".",
"setCurrentDate",
"(",
"date",
".",
"addDays",
"(",
"1",
")",
")",
"# go forward a week\r",
"elif",
"(",
"scene",
".",
"currentMode",
"(",
")",
"==",
"scene",
".",
"Mode",
".",
"Week",
")",
":",
"scene",
".",
"setCurrentDate",
"(",
"date",
".",
"addDays",
"(",
"7",
")",
")",
"# go forward a month\r",
"elif",
"(",
"scene",
".",
"currentMode",
"(",
")",
"==",
"scene",
".",
"Mode",
".",
"Month",
")",
":",
"scene",
".",
"setCurrentDate",
"(",
"date",
".",
"addMonths",
"(",
"1",
")",
")"
] | Goes to the next date based on the current mode and date. | [
"Goes",
"to",
"the",
"next",
"date",
"based",
"on",
"the",
"current",
"mode",
"and",
"date",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xcalendarwidget/xcalendarwidget.py#L168-L185 | train |
bitesofcode/projexui | projexui/widgets/xnodewidget/xnodewidget.py | XNodeWidget.zoomExtents | def zoomExtents(self):
"""
Fits all the nodes in the view.
"""
rect = self.scene().visibleItemsBoundingRect()
vrect = self.viewportRect()
if rect.width():
changed = False
scene_rect = self.scene().sceneRect()
if scene_rect.width() < rect.width():
scene_rect.setWidth(rect.width() + 150)
scene_rect.setX(-scene_rect.width() / 2.0)
changed = True
if scene_rect.height() < rect.height():
scene_rect.setHeight(rect.height() + 150)
scene_rect.setY(-scene_rect.height() / 2.0)
changed = True
if changed:
self.scene().setSceneRect(scene_rect)
self.fitInView(rect, Qt.KeepAspectRatio)
if not self.signalsBlocked():
self.zoomAmountChanged.emit(self.zoomAmount()) | python | def zoomExtents(self):
"""
Fits all the nodes in the view.
"""
rect = self.scene().visibleItemsBoundingRect()
vrect = self.viewportRect()
if rect.width():
changed = False
scene_rect = self.scene().sceneRect()
if scene_rect.width() < rect.width():
scene_rect.setWidth(rect.width() + 150)
scene_rect.setX(-scene_rect.width() / 2.0)
changed = True
if scene_rect.height() < rect.height():
scene_rect.setHeight(rect.height() + 150)
scene_rect.setY(-scene_rect.height() / 2.0)
changed = True
if changed:
self.scene().setSceneRect(scene_rect)
self.fitInView(rect, Qt.KeepAspectRatio)
if not self.signalsBlocked():
self.zoomAmountChanged.emit(self.zoomAmount()) | [
"def",
"zoomExtents",
"(",
"self",
")",
":",
"rect",
"=",
"self",
".",
"scene",
"(",
")",
".",
"visibleItemsBoundingRect",
"(",
")",
"vrect",
"=",
"self",
".",
"viewportRect",
"(",
")",
"if",
"rect",
".",
"width",
"(",
")",
":",
"changed",
"=",
"False",
"scene_rect",
"=",
"self",
".",
"scene",
"(",
")",
".",
"sceneRect",
"(",
")",
"if",
"scene_rect",
".",
"width",
"(",
")",
"<",
"rect",
".",
"width",
"(",
")",
":",
"scene_rect",
".",
"setWidth",
"(",
"rect",
".",
"width",
"(",
")",
"+",
"150",
")",
"scene_rect",
".",
"setX",
"(",
"-",
"scene_rect",
".",
"width",
"(",
")",
"/",
"2.0",
")",
"changed",
"=",
"True",
"if",
"scene_rect",
".",
"height",
"(",
")",
"<",
"rect",
".",
"height",
"(",
")",
":",
"scene_rect",
".",
"setHeight",
"(",
"rect",
".",
"height",
"(",
")",
"+",
"150",
")",
"scene_rect",
".",
"setY",
"(",
"-",
"scene_rect",
".",
"height",
"(",
")",
"/",
"2.0",
")",
"changed",
"=",
"True",
"if",
"changed",
":",
"self",
".",
"scene",
"(",
")",
".",
"setSceneRect",
"(",
"scene_rect",
")",
"self",
".",
"fitInView",
"(",
"rect",
",",
"Qt",
".",
"KeepAspectRatio",
")",
"if",
"not",
"self",
".",
"signalsBlocked",
"(",
")",
":",
"self",
".",
"zoomAmountChanged",
".",
"emit",
"(",
"self",
".",
"zoomAmount",
"(",
")",
")"
] | Fits all the nodes in the view. | [
"Fits",
"all",
"the",
"nodes",
"in",
"the",
"view",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xnodewidget/xnodewidget.py#L287-L314 | train |
FNNDSC/pfurl | pfurl/pfurl.py | zipdir | def zipdir(path, ziph, **kwargs):
"""
Zip up a directory.
:param path:
:param ziph:
:param kwargs:
:return:
"""
str_arcroot = ""
for k, v in kwargs.items():
if k == 'arcroot': str_arcroot = v
for root, dirs, files in os.walk(path):
for file in files:
str_arcfile = os.path.join(root, file)
if len(str_arcroot):
str_arcname = str_arcroot.split('/')[-1] + str_arcfile.split(str_arcroot)[1]
else:
str_arcname = str_arcfile
try:
ziph.write(str_arcfile, arcname = str_arcname)
except:
print("Skipping %s" % str_arcfile) | python | def zipdir(path, ziph, **kwargs):
"""
Zip up a directory.
:param path:
:param ziph:
:param kwargs:
:return:
"""
str_arcroot = ""
for k, v in kwargs.items():
if k == 'arcroot': str_arcroot = v
for root, dirs, files in os.walk(path):
for file in files:
str_arcfile = os.path.join(root, file)
if len(str_arcroot):
str_arcname = str_arcroot.split('/')[-1] + str_arcfile.split(str_arcroot)[1]
else:
str_arcname = str_arcfile
try:
ziph.write(str_arcfile, arcname = str_arcname)
except:
print("Skipping %s" % str_arcfile) | [
"def",
"zipdir",
"(",
"path",
",",
"ziph",
",",
"*",
"*",
"kwargs",
")",
":",
"str_arcroot",
"=",
"\"\"",
"for",
"k",
",",
"v",
"in",
"kwargs",
".",
"items",
"(",
")",
":",
"if",
"k",
"==",
"'arcroot'",
":",
"str_arcroot",
"=",
"v",
"for",
"root",
",",
"dirs",
",",
"files",
"in",
"os",
".",
"walk",
"(",
"path",
")",
":",
"for",
"file",
"in",
"files",
":",
"str_arcfile",
"=",
"os",
".",
"path",
".",
"join",
"(",
"root",
",",
"file",
")",
"if",
"len",
"(",
"str_arcroot",
")",
":",
"str_arcname",
"=",
"str_arcroot",
".",
"split",
"(",
"'/'",
")",
"[",
"-",
"1",
"]",
"+",
"str_arcfile",
".",
"split",
"(",
"str_arcroot",
")",
"[",
"1",
"]",
"else",
":",
"str_arcname",
"=",
"str_arcfile",
"try",
":",
"ziph",
".",
"write",
"(",
"str_arcfile",
",",
"arcname",
"=",
"str_arcname",
")",
"except",
":",
"print",
"(",
"\"Skipping %s\"",
"%",
"str_arcfile",
")"
] | Zip up a directory.
:param path:
:param ziph:
:param kwargs:
:return: | [
"Zip",
"up",
"a",
"directory",
"."
] | 572f634ab582b7b7b7a3fbfd5bf12aadc1ba7958 | https://github.com/FNNDSC/pfurl/blob/572f634ab582b7b7b7a3fbfd5bf12aadc1ba7958/pfurl/pfurl.py#L1350-L1373 | train |
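A minimal sketch of driving `zipdir` with a standard `zipfile` handle; passing `arcroot` re-roots the archive names at the zipped directory. The import path mirrors the file location and is an assumption:

import zipfile

from pfurl.pfurl import zipdir

with zipfile.ZipFile("data.zip", "w", zipfile.ZIP_DEFLATED) as ziph:
    # entries are archived as "data/..." rather than "/tmp/data/..."
    zipdir("/tmp/data", ziph, arcroot="/tmp/data")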
FNNDSC/pfurl | pfurl/pfurl.py | zip_process | def zip_process(**kwargs):
"""
Process zip operations.
:param kwargs:
:return:
"""
str_localPath = ""
str_zipFileName = ""
str_action = "zip"
str_arcroot = ""
for k,v in kwargs.items():
if k == 'path': str_localPath = v
if k == 'action': str_action = v
if k == 'payloadFile': str_zipFileName = v
if k == 'arcroot': str_arcroot = v
if str_action == 'zip':
str_mode = 'w'
str_zipFileName = '%s.zip' % uuid.uuid4()
else:
str_mode = 'r'
try:
ziphandler = zipfile.ZipFile(str_zipFileName, str_mode, zipfile.ZIP_DEFLATED)
if str_mode == 'w':
if os.path.isdir(str_localPath):
zipdir(str_localPath, ziphandler, arcroot = str_arcroot)
else:
if len(str_arcroot):
str_arcname = str_arcroot.split('/')[-1] + str_localPath.split(str_arcroot)[1]
else:
str_arcname = str_localPath
try:
ziphandler.write(str_localPath, arcname = str_arcname)
except:
ziphandler.close()
os.remove(str_zipFileName)
return {
'msg': json.dumps({"msg": "No file or directory found for '%s'" % str_localPath}),
'status': False
}
if str_mode == 'r':
ziphandler.extractall(str_localPath)
ziphandler.close()
str_msg = '%s operation successful' % str_action
b_status = True
except:
str_msg = '%s operation failed' % str_action
b_status = False
return {
'msg': str_msg,
'fileProcessed': str_zipFileName,
'status': b_status,
'path': str_localPath,
'zipmode': str_mode,
'filesize': "{:,}".format(os.stat(str_zipFileName).st_size),
'timestamp': '%s' % datetime.datetime.now()
} | python | def zip_process(**kwargs):
"""
Process zip operations.
:param kwargs:
:return:
"""
str_localPath = ""
str_zipFileName = ""
str_action = "zip"
str_arcroot = ""
for k,v in kwargs.items():
if k == 'path': str_localPath = v
if k == 'action': str_action = v
if k == 'payloadFile': str_zipFileName = v
if k == 'arcroot': str_arcroot = v
if str_action == 'zip':
str_mode = 'w'
str_zipFileName = '%s.zip' % uuid.uuid4()
else:
str_mode = 'r'
try:
ziphandler = zipfile.ZipFile(str_zipFileName, str_mode, zipfile.ZIP_DEFLATED)
if str_mode == 'w':
if os.path.isdir(str_localPath):
zipdir(str_localPath, ziphandler, arcroot = str_arcroot)
else:
if len(str_arcroot):
str_arcname = str_arcroot.split('/')[-1] + str_localPath.split(str_arcroot)[1]
else:
str_arcname = str_localPath
try:
ziphandler.write(str_localPath, arcname = str_arcname)
except:
ziphandler.close()
os.remove(str_zipFileName)
return {
'msg': json.dumps({"msg": "No file or directory found for '%s'" % str_localPath}),
'status': False
}
if str_mode == 'r':
ziphandler.extractall(str_localPath)
ziphandler.close()
str_msg = '%s operation successful' % str_action
b_status = True
except:
str_msg = '%s operation failed' % str_action
b_status = False
return {
'msg': str_msg,
'fileProcessed': str_zipFileName,
'status': b_status,
'path': str_localPath,
'zipmode': str_mode,
'filesize': "{:,}".format(os.stat(str_zipFileName).st_size),
'timestamp': '%s' % datetime.datetime.now()
} | [
"def",
"zip_process",
"(",
"*",
"*",
"kwargs",
")",
":",
"str_localPath",
"=",
"\"\"",
"str_zipFileName",
"=",
"\"\"",
"str_action",
"=",
"\"zip\"",
"str_arcroot",
"=",
"\"\"",
"for",
"k",
",",
"v",
"in",
"kwargs",
".",
"items",
"(",
")",
":",
"if",
"k",
"==",
"'path'",
":",
"str_localPath",
"=",
"v",
"if",
"k",
"==",
"'action'",
":",
"str_action",
"=",
"v",
"if",
"k",
"==",
"'payloadFile'",
":",
"str_zipFileName",
"=",
"v",
"if",
"k",
"==",
"'arcroot'",
":",
"str_arcroot",
"=",
"v",
"if",
"str_action",
"==",
"'zip'",
":",
"str_mode",
"=",
"'w'",
"str_zipFileName",
"=",
"'%s.zip'",
"%",
"uuid",
".",
"uuid4",
"(",
")",
"else",
":",
"str_mode",
"=",
"'r'",
"try",
":",
"ziphandler",
"=",
"zipfile",
".",
"ZipFile",
"(",
"str_zipFileName",
",",
"str_mode",
",",
"zipfile",
".",
"ZIP_DEFLATED",
")",
"if",
"str_mode",
"==",
"'w'",
":",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"str_localPath",
")",
":",
"zipdir",
"(",
"str_localPath",
",",
"ziphandler",
",",
"arcroot",
"=",
"str_arcroot",
")",
"else",
":",
"if",
"len",
"(",
"str_arcroot",
")",
":",
"str_arcname",
"=",
"str_arcroot",
".",
"split",
"(",
"'/'",
")",
"[",
"-",
"1",
"]",
"+",
"str_localPath",
".",
"split",
"(",
"str_arcroot",
")",
"[",
"1",
"]",
"else",
":",
"str_arcname",
"=",
"str_localPath",
"try",
":",
"ziphandler",
".",
"write",
"(",
"str_localPath",
",",
"arcname",
"=",
"str_arcname",
")",
"except",
":",
"ziphandler",
".",
"close",
"(",
")",
"os",
".",
"remove",
"(",
"str_zipFileName",
")",
"return",
"{",
"'msg'",
":",
"json",
".",
"dumps",
"(",
"{",
"\"msg\"",
":",
"\"No file or directory found for '%s'\"",
"%",
"str_localPath",
"}",
")",
",",
"'status'",
":",
"False",
"}",
"if",
"str_mode",
"==",
"'r'",
":",
"ziphandler",
".",
"extractall",
"(",
"str_localPath",
")",
"ziphandler",
".",
"close",
"(",
")",
"str_msg",
"=",
"'%s operation successful'",
"%",
"str_action",
"b_status",
"=",
"True",
"except",
":",
"str_msg",
"=",
"'%s operation failed'",
"%",
"str_action",
"b_status",
"=",
"False",
"return",
"{",
"'msg'",
":",
"str_msg",
",",
"'fileProcessed'",
":",
"str_zipFileName",
",",
"'status'",
":",
"b_status",
",",
"'path'",
":",
"str_localPath",
",",
"'zipmode'",
":",
"str_mode",
",",
"'filesize'",
":",
"\"{:,}\"",
".",
"format",
"(",
"os",
".",
"stat",
"(",
"str_zipFileName",
")",
".",
"st_size",
")",
",",
"'timestamp'",
":",
"'%s'",
"%",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"}"
] | Process zip operations.
:param kwargs:
:return: | [
"Process",
"zip",
"operations",
"."
] | 572f634ab582b7b7b7a3fbfd5bf12aadc1ba7958 | https://github.com/FNNDSC/pfurl/blob/572f634ab582b7b7b7a3fbfd5bf12aadc1ba7958/pfurl/pfurl.py#L1376-L1435 | train |
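A hedged round-trip sketch: any `action` other than 'zip' opens the named archive in read mode and extracts it into `path` (import path assumed as above):

from pfurl.pfurl import zip_process

d_zip = zip_process(action="zip", path="/tmp/data", arcroot="/tmp/data")
print(d_zip["fileProcessed"], d_zip["filesize"])

d_unzip = zip_process(action="unzip", path="/tmp/restored",
                      payloadFile=d_zip["fileProcessed"])
print(d_unzip["status"])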
FNNDSC/pfurl | pfurl/pfurl.py | base64_process | def base64_process(**kwargs):
"""
Process base64 file io
"""
str_fileToSave = ""
str_fileToRead = ""
str_action = "encode"
data = None
for k,v in kwargs.items():
if k == 'action': str_action = v
if k == 'payloadBytes': data = v
if k == 'payloadFile': str_fileToRead = v
if k == 'saveToFile': str_fileToSave = v
# if k == 'sourcePath': str_sourcePath = v
if str_action == "encode":
# Encode the contents of the file at targetPath as ASCII for transmission
if len(str_fileToRead):
with open(str_fileToRead, 'rb') as f:
data = f.read()
f.close()
data_b64 = base64.b64encode(data)
with open(str_fileToSave, 'wb') as f:
f.write(data_b64)
f.close()
return {
'msg': 'Encode successful',
'fileProcessed': str_fileToSave,
'status': True
# 'encodedBytes': data_b64
}
if str_action == "decode":
# if len(data) % 4:
# not a multiple of 4, add padding:
# data += '=' * (4 - len(data) % 4)
# adding 3 padding = will never succumb to the TypeError and will always produce the same result.
# https://gist.github.com/perrygeo/ee7c65bb1541ff6ac770
bytes_decoded = base64.b64decode(data + "===")
with open(str_fileToSave, 'wb') as f:
f.write(bytes_decoded)
f.close()
return {
'msg': 'Decode successful',
'fileProcessed': str_fileToSave,
'status': True
# 'decodedBytes': bytes_decoded
} | python | def base64_process(**kwargs):
"""
Process base64 file io
"""
str_fileToSave = ""
str_fileToRead = ""
str_action = "encode"
data = None
for k,v in kwargs.items():
if k == 'action': str_action = v
if k == 'payloadBytes': data = v
if k == 'payloadFile': str_fileToRead = v
if k == 'saveToFile': str_fileToSave = v
# if k == 'sourcePath': str_sourcePath = v
if str_action == "encode":
# Encode the contents of the file at targetPath as ASCII for transmission
if len(str_fileToRead):
with open(str_fileToRead, 'rb') as f:
data = f.read()
f.close()
data_b64 = base64.b64encode(data)
with open(str_fileToSave, 'wb') as f:
f.write(data_b64)
f.close()
return {
'msg': 'Encode successful',
'fileProcessed': str_fileToSave,
'status': True
# 'encodedBytes': data_b64
}
if str_action == "decode":
# if len(data) % 4:
# not a multiple of 4, add padding:
# data += '=' * (4 - len(data) % 4)
# adding 3 padding = will never succumb to the TypeError and will always produce the same result.
# https://gist.github.com/perrygeo/ee7c65bb1541ff6ac770
bytes_decoded = base64.b64decode(data + "===")
with open(str_fileToSave, 'wb') as f:
f.write(bytes_decoded)
f.close()
return {
'msg': 'Decode successful',
'fileProcessed': str_fileToSave,
'status': True
# 'decodedBytes': bytes_decoded
} | [
"def",
"base64_process",
"(",
"*",
"*",
"kwargs",
")",
":",
"str_fileToSave",
"=",
"\"\"",
"str_fileToRead",
"=",
"\"\"",
"str_action",
"=",
"\"encode\"",
"data",
"=",
"None",
"for",
"k",
",",
"v",
"in",
"kwargs",
".",
"items",
"(",
")",
":",
"if",
"k",
"==",
"'action'",
":",
"str_action",
"=",
"v",
"if",
"k",
"==",
"'payloadBytes'",
":",
"data",
"=",
"v",
"if",
"k",
"==",
"'payloadFile'",
":",
"str_fileToRead",
"=",
"v",
"if",
"k",
"==",
"'saveToFile'",
":",
"str_fileToSave",
"=",
"v",
"# if k == 'sourcePath': str_sourcePath = v",
"if",
"str_action",
"==",
"\"encode\"",
":",
"# Encode the contents of the file at targetPath as ASCII for transmission",
"if",
"len",
"(",
"str_fileToRead",
")",
":",
"with",
"open",
"(",
"str_fileToRead",
",",
"'rb'",
")",
"as",
"f",
":",
"data",
"=",
"f",
".",
"read",
"(",
")",
"f",
".",
"close",
"(",
")",
"data_b64",
"=",
"base64",
".",
"b64encode",
"(",
"data",
")",
"with",
"open",
"(",
"str_fileToSave",
",",
"'wb'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"data_b64",
")",
"f",
".",
"close",
"(",
")",
"return",
"{",
"'msg'",
":",
"'Encode successful'",
",",
"'fileProcessed'",
":",
"str_fileToSave",
",",
"'status'",
":",
"True",
"# 'encodedBytes': data_b64",
"}",
"if",
"str_action",
"==",
"\"decode\"",
":",
"# if len(data) % 4:",
"# not a multiple of 4, add padding:",
"# data += '=' * (4 - len(data) % 4)",
"# adding 3 padding = will never succumb to the TypeError and will always produce the same result.",
"# https://gist.github.com/perrygeo/ee7c65bb1541ff6ac770",
"bytes_decoded",
"=",
"base64",
".",
"b64decode",
"(",
"data",
"+",
"\"===\"",
")",
"with",
"open",
"(",
"str_fileToSave",
",",
"'wb'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"bytes_decoded",
")",
"f",
".",
"close",
"(",
")",
"return",
"{",
"'msg'",
":",
"'Decode successful'",
",",
"'fileProcessed'",
":",
"str_fileToSave",
",",
"'status'",
":",
"True",
"# 'decodedBytes': bytes_decoded",
"}"
] | Process base64 file io | [
"Process",
"base64",
"file",
"io"
] | 572f634ab582b7b7b7a3fbfd5bf12aadc1ba7958 | https://github.com/FNNDSC/pfurl/blob/572f634ab582b7b7b7a3fbfd5bf12aadc1ba7958/pfurl/pfurl.py#L1438-L1488 | train |
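An encode/decode sketch; note that the decoder concatenates '===' padding onto the payload as a string, so the intermediate file is read back in text mode (import path assumed as above):

from pfurl.pfurl import base64_process

base64_process(action="encode", payloadFile="image.dcm",
               saveToFile="image.b64")
with open("image.b64") as f:
    payload = f.read()
base64_process(action="decode", payloadBytes=payload,
               saveToFile="image-copy.dcm")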
FNNDSC/pfurl | pfurl/pfurl.py | Pfurl.storage_resolveBasedOnKey | def storage_resolveBasedOnKey(self, *args, **kwargs):
"""
Call the remote service and ask for the storage location based on the key.
:param args:
:param kwargs:
:return:
"""
global Gd_internalvar
d_msg = {
'action': 'internalctl',
'meta': {
'var': 'key2address',
'compute': '<key>'
}
}
str_key = ""
b_status = False
for k,v in kwargs.items():
if k == 'key': str_key = v
d_msg['meta']['key'] = str_key
#
d_ret = self.pullPath_core(d_msg = d_msg)
# 'str_internalLocation' was left undefined in the original; resolve
# the status and path from the service reply (assumed to be a dict).
b_status = d_ret.get('status', False)
str_internalLocation = d_ret.get('path', '')
return {
'status': b_status,
'path': str_internalLocation
} | python | def storage_resolveBasedOnKey(self, *args, **kwargs):
"""
Call the remote service and ask for the storage location based on the key.
:param args:
:param kwargs:
:return:
"""
global Gd_internalvar
d_msg = {
'action': 'internalctl',
'meta': {
'var': 'key2address',
'compute': '<key>'
}
}
str_key = ""
b_status = False
for k,v in kwargs.items():
if k == 'key': str_key = v
d_msg['meta']['key'] = str_key
#
d_ret = self.pullPath_core(d_msg = d_msg)
# 'str_internalLocation' was left undefined in the original; resolve
# the status and path from the service reply (assumed to be a dict).
b_status = d_ret.get('status', False)
str_internalLocation = d_ret.get('path', '')
return {
'status': b_status,
'path': str_internalLocation
} | [
"def",
"storage_resolveBasedOnKey",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"global",
"Gd_internalvar",
"d_msg",
"=",
"{",
"'action'",
":",
"'internalctl'",
",",
"'meta'",
":",
"{",
"'var'",
":",
"'key2address'",
",",
"'compute'",
":",
"'<key>'",
"}",
"}",
"str_key",
"=",
"\"\"",
"b_status",
"=",
"False",
"for",
"k",
",",
"v",
"in",
"kwargs",
".",
"items",
"(",
")",
":",
"if",
"k",
"==",
"'key'",
":",
"str_key",
"=",
"v",
"d_msg",
"[",
"'meta'",
"]",
"[",
"'key'",
"]",
"=",
"str_key",
"# ",
"d_ret",
"=",
"self",
".",
"pullPath_core",
"(",
"d_msg",
"=",
"d_msg",
")",
"return",
"{",
"'status'",
":",
"b_status",
",",
"'path'",
":",
"str_internalLocation",
"}"
] | Call the remote service and ask for the storage location based on the key.
:param args:
:param kwargs:
:return: | [
"Call",
"the",
"remote",
"service",
"and",
"ask",
"for",
"the",
"storage",
"location",
"based",
"on",
"the",
"key",
"."
] | 572f634ab582b7b7b7a3fbfd5bf12aadc1ba7958 | https://github.com/FNNDSC/pfurl/blob/572f634ab582b7b7b7a3fbfd5bf12aadc1ba7958/pfurl/pfurl.py#L190-L221 | train |
FNNDSC/pfurl | pfurl/pfurl.py | Pfurl.remoteLocation_resolveSimple | def remoteLocation_resolveSimple(self, d_remote):
"""
Resolve the remote "path" location by returning either the
'path' or 'key' parameter in the 'remote' JSON record.
:param d_remote:
:return:
"""
b_status = False
str_remotePath = ""
if 'path' in d_remote.keys():
str_remotePath = d_remote['path']
b_status = True
if 'key' in d_remote.keys():
str_remotePath = d_remote['key']
b_status = True
return {
'status': b_status,
'path': str_remotePath
} | python | def remoteLocation_resolveSimple(self, d_remote):
"""
Resolve the remote "path" location by returning either the
'path' or 'key' parameter in the 'remote' JSON record.
:param d_remote:
:return:
"""
b_status = False
str_remotePath = ""
if 'path' in d_remote.keys():
str_remotePath = d_remote['path']
b_status = True
if 'key' in d_remote.keys():
str_remotePath = d_remote['key']
b_status = True
return {
'status': b_status,
'path': str_remotePath
} | [
"def",
"remoteLocation_resolveSimple",
"(",
"self",
",",
"d_remote",
")",
":",
"b_status",
"=",
"False",
"str_remotePath",
"=",
"\"\"",
"if",
"'path'",
"in",
"d_remote",
".",
"keys",
"(",
")",
":",
"str_remotePath",
"=",
"d_remote",
"[",
"'path'",
"]",
"b_status",
"=",
"True",
"if",
"'key'",
"in",
"d_remote",
".",
"keys",
"(",
")",
":",
"str_remotePath",
"=",
"d_remote",
"[",
"'key'",
"]",
"b_status",
"=",
"True",
"return",
"{",
"'status'",
":",
"b_status",
",",
"'path'",
":",
"str_remotePath",
"}"
] | Resolve the remote "path" location by returning either the
'path' or 'key' parameter in the 'remote' JSON record.
:param d_remote:
:return: | [
"Resolve",
"the",
"remote",
"path",
"location",
"by",
"returning",
"either",
"the",
"path",
"or",
"key",
"parameter",
"in",
"the",
"remote",
"JSON",
"record",
"."
] | 572f634ab582b7b7b7a3fbfd5bf12aadc1ba7958 | https://github.com/FNNDSC/pfurl/blob/572f634ab582b7b7b7a3fbfd5bf12aadc1ba7958/pfurl/pfurl.py#L223-L242 | train |
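A behavior sketch: 'key' wins over 'path' when both are present, because it is checked last. The method only inspects its argument, so `pfurl_instance` below stands for any hypothetical, already-constructed Pfurl object:

d_remote = {"path": "/incoming/data", "key": "study-1234"}
print(pfurl_instance.remoteLocation_resolveSimple(d_remote))
# -> {'status': True, 'path': 'study-1234'}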
FNNDSC/pfurl | pfurl/pfurl.py | Pfurl.remoteLocation_resolve | def remoteLocation_resolve(self, d_remote):
"""
Resolve the remote path location
:param d_remote: the "remote" specification
:return: a string representation of the remote path
"""
b_status = False
str_remotePath = ""
if 'path' in d_remote.keys():
str_remotePath = d_remote['path']
b_status = True
if 'key' in d_remote.keys():
d_ret = self.storage_resolveBasedOnKey(key = d_remote['key'])
if d_ret['status']:
b_status = True
str_remotePath = d_ret['path']
return {
'status': b_status,
'path': str_remotePath
} | python | def remoteLocation_resolve(self, d_remote):
"""
Resolve the remote path location
:param d_remote: the "remote" specification
:return: a string representation of the remote path
"""
b_status = False
str_remotePath = ""
if 'path' in d_remote.keys():
str_remotePath = d_remote['path']
b_status = True
if 'key' in d_remote.keys():
d_ret = self.storage_resolveBasedOnKey(key = d_remote['key'])
if d_ret['status']:
b_status = True
str_remotePath = d_ret['path']
return {
'status': b_status,
'path': str_remotePath
} | [
"def",
"remoteLocation_resolve",
"(",
"self",
",",
"d_remote",
")",
":",
"b_status",
"=",
"False",
"str_remotePath",
"=",
"\"\"",
"if",
"'path'",
"in",
"d_remote",
".",
"keys",
"(",
")",
":",
"str_remotePath",
"=",
"d_remote",
"[",
"'path'",
"]",
"b_status",
"=",
"True",
"if",
"'key'",
"in",
"d_remote",
".",
"keys",
"(",
")",
":",
"d_ret",
"=",
"self",
".",
"storage_resolveBasedOnKey",
"(",
"key",
"=",
"d_remote",
"[",
"'key'",
"]",
")",
"if",
"d_ret",
"[",
"'status'",
"]",
":",
"b_status",
"=",
"True",
"str_remotePath",
"=",
"d_ret",
"[",
"'path'",
"]",
"return",
"{",
"'status'",
":",
"b_status",
",",
"'path'",
":",
"str_remotePath",
"}"
] | Resolve the remote path location
:param d_remote: the "remote" specification
:return: a string representation of the remote path | [
"Resolve",
"the",
"remote",
"path",
"location"
] | 572f634ab582b7b7b7a3fbfd5bf12aadc1ba7958 | https://github.com/FNNDSC/pfurl/blob/572f634ab582b7b7b7a3fbfd5bf12aadc1ba7958/pfurl/pfurl.py#L244-L264 | train |
FNNDSC/pfurl | pfurl/pfurl.py | Pfurl.path_localLocationCheck | def path_localLocationCheck(self, d_msg, **kwargs):
"""
Check if a path exists on the local filesystem
:param self:
:param kwargs:
:return:
"""
b_pull = False
d_meta = d_msg['meta']
if 'do' in d_meta:
if d_meta['do'] == 'pull':
b_pull = True
if 'local' in d_meta:
d_local = d_meta['local']
if 'to' in d_meta:
d_local = d_meta['to']
str_localPathFull = d_local['path']
str_localPath, str_unpack = os.path.split(str_localPathFull)
str_msg = ''
str_checkedDir = str_localPathFull
b_isFile = os.path.isfile(str_localPathFull)
b_isDir = os.path.isdir(str_localPathFull)
b_exists = os.path.exists(str_localPathFull)
if 'pull' in d_msg['action'] or b_pull:
# If we are "pulling" data to local, then we assume the local
# directory does not exist. If it does, and if 'createDir' is 'true',
# we remove the localPath and re-create it, thus assuring it will
# only contain the info pulled from the remote source.
# If 'writeInExisting' is 'true', then execution continues, but
# may fail if the pulled target exists in the localPath.
str_checkedDir = str_localPath
b_isFile = os.path.isfile(str_localPath)
b_isDir = os.path.isdir(str_localPath)
b_exists = os.path.exists(str_localPath)
if 'createDir' in d_local.keys():
if d_local['createDir']:
if os.path.isdir(str_localPathFull):
self.dp.qprint('Removing local path %s...' % str_localPathFull)
shutil.rmtree(str_localPathFull)
str_msg = 'Removed existing local path... '
self.dp.qprint('Creating empty local path %s...' % str_localPathFull)
os.makedirs(str_localPathFull)
b_exists = True
str_msg += 'Created new local path'
else:
str_msg = 'local path already exists!'
if 'writeInExisting' in d_local.keys():
if not d_local['writeInExisting']:
if b_isDir: b_exists = False
else:
if b_isDir: b_exists = False
d_ret = {
'action': d_msg['action'],
'dir': str_checkedDir,
'status': b_exists,
'isfile': b_isFile,
'isdir': b_isDir,
'msg': str_msg
}
return {'check': d_ret,
'status': d_ret['status'],
'timestamp': '%s' % datetime.datetime.now()} | python | def path_localLocationCheck(self, d_msg, **kwargs):
"""
Check if a path exists on the local filesystem
:param self:
:param kwargs:
:return:
"""
b_pull = False
d_meta = d_msg['meta']
if 'do' in d_meta:
if d_meta['do'] == 'pull':
b_pull = True
if 'local' in d_meta:
d_local = d_meta['local']
if 'to' in d_meta:
d_local = d_meta['to']
str_localPathFull = d_local['path']
str_localPath, str_unpack = os.path.split(str_localPathFull)
str_msg = ''
str_checkedDir = str_localPathFull
b_isFile = os.path.isfile(str_localPathFull)
b_isDir = os.path.isdir(str_localPathFull)
b_exists = os.path.exists(str_localPathFull)
if 'pull' in d_msg['action'] or b_pull:
# If we are "pulling" data to local, then we assume the local
# directory does not exist. If it does, and if 'createDir' is 'true',
# we remove the localPath and re-create it, thus assuring it will
# only contain the info pulled from the remote source.
# If 'writeInExisting' is 'true', then execution continues, but
# may fail if the pulled target exists in the localPath.
str_checkedDir = str_localPath
b_isFile = os.path.isfile(str_localPath)
b_isDir = os.path.isdir(str_localPath)
b_exists = os.path.exists(str_localPath)
if 'createDir' in d_local.keys():
if d_local['createDir']:
if os.path.isdir(str_localPathFull):
self.dp.qprint('Removing local path %s...' % str_localPathFull)
shutil.rmtree(str_localPathFull)
str_msg = 'Removed existing local path... '
self.dp.qprint('Creating empty local path %s...' % str_localPathFull)
os.makedirs(str_localPathFull)
b_exists = True
str_msg += 'Created new local path'
else:
str_msg = 'local path already exists!'
if 'writeInExisting' in d_local.keys():
if not d_local['writeInExisting']:
if b_isDir: b_exists = False
else:
if b_isDir: b_exists = False
d_ret = {
'action': d_msg['action'],
'dir': str_checkedDir,
'status': b_exists,
'isfile': b_isFile,
'isdir': b_isDir,
'msg': str_msg
}
return {'check': d_ret,
'status': d_ret['status'],
'timestamp': '%s' % datetime.datetime.now()} | [
"def",
"path_localLocationCheck",
"(",
"self",
",",
"d_msg",
",",
"*",
"*",
"kwargs",
")",
":",
"b_pull",
"=",
"False",
"d_meta",
"=",
"d_msg",
"[",
"'meta'",
"]",
"if",
"'do'",
"in",
"d_meta",
":",
"if",
"d_meta",
"[",
"'do'",
"]",
"==",
"'pull'",
":",
"b_pull",
"=",
"True",
"if",
"'local'",
"in",
"d_meta",
":",
"d_local",
"=",
"d_meta",
"[",
"'local'",
"]",
"if",
"'to'",
"in",
"d_meta",
":",
"d_local",
"=",
"d_meta",
"[",
"'to'",
"]",
"str_localPathFull",
"=",
"d_local",
"[",
"'path'",
"]",
"str_localPath",
",",
"str_unpack",
"=",
"os",
".",
"path",
".",
"split",
"(",
"str_localPathFull",
")",
"str_msg",
"=",
"''",
"str_checkedDir",
"=",
"str_localPathFull",
"b_isFile",
"=",
"os",
".",
"path",
".",
"isfile",
"(",
"str_localPathFull",
")",
"b_isDir",
"=",
"os",
".",
"path",
".",
"isdir",
"(",
"str_localPathFull",
")",
"b_exists",
"=",
"os",
".",
"path",
".",
"exists",
"(",
"str_localPathFull",
")",
"if",
"'pull'",
"in",
"d_msg",
"[",
"'action'",
"]",
"or",
"b_pull",
":",
"# If we are \"pulling\" data to local, then we assume the local",
"# directory does not exist. If it does, and if 'createDir' is 'true', ",
"# we remove the localPath and re-create it, thus assuring it will ",
"# only contain the info pulled from the remote source.",
"# If 'writeInExisting' is 'true', then execution continues, but",
"# may fail if the pulled target exists in the localPath.",
"str_checkedDir",
"=",
"str_localPath",
"b_isFile",
"=",
"os",
".",
"path",
".",
"isfile",
"(",
"str_localPath",
")",
"b_isDir",
"=",
"os",
".",
"path",
".",
"isdir",
"(",
"str_localPath",
")",
"b_exists",
"=",
"os",
".",
"path",
".",
"exists",
"(",
"str_localPath",
")",
"if",
"'createDir'",
"in",
"d_local",
".",
"keys",
"(",
")",
":",
"if",
"d_local",
"[",
"'createDir'",
"]",
":",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"str_localPathFull",
")",
":",
"self",
".",
"dp",
".",
"qprint",
"(",
"'Removing local path %s...'",
"%",
"str_localPathFull",
")",
"shutil",
".",
"rmtree",
"(",
"str_localPathFull",
")",
"str_msg",
"=",
"'Removed existing local path... '",
"self",
".",
"dp",
".",
"qprint",
"(",
"'Creating empty local path %s...'",
"%",
"str_localPathFull",
")",
"os",
".",
"makedirs",
"(",
"str_localPathFull",
")",
"b_exists",
"=",
"True",
"str_msg",
"+=",
"'Created new local path'",
"else",
":",
"str_msg",
"=",
"'local path already exists!'",
"if",
"'writeInExisting'",
"in",
"d_local",
".",
"keys",
"(",
")",
":",
"if",
"not",
"d_local",
"[",
"'writeInExisting'",
"]",
":",
"if",
"b_isDir",
":",
"b_exists",
"=",
"False",
"else",
":",
"if",
"b_isDir",
":",
"b_exists",
"=",
"False",
"d_ret",
"=",
"{",
"'action'",
":",
"d_msg",
"[",
"'action'",
"]",
",",
"'dir'",
":",
"str_checkedDir",
",",
"'status'",
":",
"b_exists",
",",
"'isfile'",
":",
"b_isFile",
",",
"'isdir'",
":",
"b_isDir",
",",
"'msg'",
":",
"str_msg",
"}",
"return",
"{",
"'check'",
":",
"d_ret",
",",
"'status'",
":",
"d_ret",
"[",
"'status'",
"]",
",",
"'timestamp'",
":",
"'%s'",
"%",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"}"
] | Check if a path exists on the local filesystem
:param self:
:param kwargs:
:return: | [
"Check",
"if",
"a",
"path",
"exists",
"on",
"the",
"local",
"filesystem"
] | 572f634ab582b7b7b7a3fbfd5bf12aadc1ba7958 | https://github.com/FNNDSC/pfurl/blob/572f634ab582b7b7b7a3fbfd5bf12aadc1ba7958/pfurl/pfurl.py#L810-L879 | train |
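A sketch of the message shape this check consumes for a pull, with `createDir` forcing a fresh local directory; `pfurl_instance` again stands for a hypothetical, already-constructed Pfurl object:

d_msg = {
    "action": "pullPath",
    "meta": {
        "local": {"path": "/tmp/incoming", "createDir": True}
    }
}
d_check = pfurl_instance.path_localLocationCheck(d_msg)
print(d_check["status"], d_check["check"]["msg"])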
SkullTech/webdriver-start | wdstart/helper.py | find_executable | def find_executable(name):
"""
Returns the path of an executable file.
Searches for an executable with the given name, first in the `PATH`,
then in the current directory (recursively). Upon finding the file,
returns the full filepath of it.
Parameters
----------
name : str
The name of the executable. This is platform-independent so
you don't have to include any platform-specific file extension
(such as `.exe`).
Returns
-------
str
The path of the executable file. In case of multiple hits, it
only returns the first one.
"""
if sys.platform.startswith('win') or os.name.startswith('os2'):
name = name + '.exe'
executable_path = find_file(name, deep=True)
return executable_path | python | def find_executable(name):
"""
Returns the path of an executable file.
Searches for an executable with the given name, first in the `PATH`,
then in the current directory (recursively). Upon finding the file,
returns the full filepath of it.
Parameters
----------
name : str
The name of the executable. This is platform-independent so
you don't have to include any platform-specific file extension
(such as `.exe`).
Returns
-------
str
The path of the executable file. In case of multiple hits, it
only returns the first one.
"""
if sys.platform.startswith('win') or os.name.startswith('os2'):
name = name + '.exe'
executable_path = find_file(name, deep=True)
return executable_path | [
"def",
"find_executable",
"(",
"name",
")",
":",
"if",
"sys",
".",
"platform",
".",
"startswith",
"(",
"'win'",
")",
"or",
"os",
".",
"name",
".",
"startswith",
"(",
"'os2'",
")",
":",
"name",
"=",
"name",
"+",
"'.exe'",
"executable_path",
"=",
"find_file",
"(",
"name",
",",
"deep",
"=",
"True",
")",
"return",
"executable_path"
] | Returns the path of an executable file.
Searches for an executable with the given name, first in the `PATH`,
then in the current directory (recursively). Upon finding the file,
returns the full filepath of it.
Parameters
----------
name : str
The name of the executable. This is platform-independent so
you don't have to include any platform-specific file extension
(such as `.exe`).
Returns
-------
str
The path of the executable file. In case of multiple hits, it
only returns the first one. | [
"Returns",
"the",
"path",
"of",
"an",
"executable",
"file",
"."
] | 26285fd84c4deaf8906828e0ec0758a650b7ba49 | https://github.com/SkullTech/webdriver-start/blob/26285fd84c4deaf8906828e0ec0758a650b7ba49/wdstart/helper.py#L91-L117 | train |
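Typical use, locating a browser driver by its platform-independent name (assuming `find_file` reports a miss by returning None):

from wdstart.helper import find_executable

driver_path = find_executable("chromedriver")  # resolves "chromedriver.exe" on Windows
if driver_path is None:
    raise RuntimeError("chromedriver not found on PATH or under the current directory")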
starling-lab/rnlp | rnlp/corpus.py | readCorpus | def readCorpus(location):
"""
Returns the contents of a file or a group of files as a string.
:param location: .txt file or a directory to read files from.
:type location: str.
:returns: A string of all contents joined together.
:rtype: str.
.. note::
This function takes a ``location`` on disk as a parameter. Location is
assumed to be a string representing a text file or a directory. A text
file is further assumed to contain ``.txt`` as a file extension while
a directory may be a path.
Example:
.. code-block:: python
from rnlp.corpus import readCorpus
# If you have a text file:
doi = readCorpus('files/doi.txt')
# If you have multiple files to read from:
corpus = readCorpus('files')
"""
print("Reading corpus from file(s)...")
corpus = ''
if '.txt' in location:
with open(location) as fp:
corpus = fp.read()
else:
dirFiles = listdir(location)
nFiles = len(dirFiles)
for f in tqdm(dirFiles):
with open(location+"/"+f) as fp:
corpus += fp.read()
return corpus | python | def readCorpus(location):
"""
Returns the contents of a file or a group of files as a string.
:param location: .txt file or a directory to read files from.
:type location: str.
:returns: A string of all contents joined together.
:rtype: str.
.. note::
This function takes a ``location`` on disk as a parameter. Location is
assumed to be a string representing a text file or a directory. A text
file is further assumed to contain ``.txt`` as a file extension while
a directory may be a path.
Example:
.. code-block:: python
from rnlp.corpus import readCorpus
# If you have a text file:
doi = readCorpus('files/doi.txt')
# If you have multiple files to read from:
corpus = readCorpus('files')
"""
print("Reading corpus from file(s)...")
corpus = ''
if '.txt' in location:
with open(location) as fp:
corpus = fp.read()
else:
dirFiles = listdir(location)
nFiles = len(dirFiles)
for f in tqdm(dirFiles):
with open(location+"/"+f) as fp:
corpus += fp.read()
return corpus | [
"def",
"readCorpus",
"(",
"location",
")",
":",
"print",
"(",
"\"Reading corpus from file(s)...\"",
")",
"corpus",
"=",
"''",
"if",
"'.txt'",
"in",
"location",
":",
"with",
"open",
"(",
"location",
")",
"as",
"fp",
":",
"corpus",
"=",
"fp",
".",
"read",
"(",
")",
"else",
":",
"dirFiles",
"=",
"listdir",
"(",
"location",
")",
"nFiles",
"=",
"len",
"(",
"dirFiles",
")",
"for",
"f",
"in",
"tqdm",
"(",
"dirFiles",
")",
":",
"with",
"open",
"(",
"location",
"+",
"\"/\"",
"+",
"f",
")",
"as",
"fp",
":",
"corpus",
"+=",
"fp",
".",
"read",
"(",
")",
"return",
"corpus"
] | Returns the contents of a file or a group of files as a string.
:param location: .txt file or a directory to read files from.
:type location: str.
:returns: A string of all contents joined together.
:rtype: str.
.. note::
This function takes a ``location`` on disk as a parameter. Location is
assumed to be a string representing a text file or a directory. A text
file is further assumed to contain ``.txt`` as a file extension while
a directory may be a path.
Example:
.. code-block:: python
from rnlp.corpus import readCorpus
# If you have a text file:
doi = readCorpus('files/doi.txt')
# If you have multiple files to read from:
corpus = readCorpus('files') | [
"Returns",
"the",
"contents",
"of",
"a",
"file",
"or",
"a",
"group",
"of",
"files",
"as",
"a",
"string",
"."
] | 72054cc2c0cbaea1d281bf3d56b271d4da29fc4a | https://github.com/starling-lab/rnlp/blob/72054cc2c0cbaea1d281bf3d56b271d4da29fc4a/rnlp/corpus.py#L33-L78 | train |
bearyinnovative/bearychat.py | bearychat/incoming.py | validate | def validate(data):
"""Validates incoming data
Args:
data(dict): the incoming data
Returns:
True if the data is valid
Raises:
ValueError: the data is not valid
"""
text = data.get('text')
if not isinstance(text, _string_types) or len(text) == 0:
raise ValueError('text field is required and should not be empty')
if 'markdown' in data and not type(data['markdown']) is bool:
raise ValueError('markdown field should be bool')
if 'attachments' in data:
if not isinstance(data['attachments'], (list, tuple)):
raise ValueError('attachments field should be list or tuple')
for attachment in data['attachments']:
if 'text' not in attachment and 'title' not in attachment:
raise ValueError('text or title is required in attachment')
return True | python | def validate(data):
"""Validates incoming data
Args:
data(dict): the incoming data
Returns:
True if the data is valid
Raises:
ValueError: the data is not valid
"""
text = data.get('text')
if not isinstance(text, _string_types) or len(text) == 0:
raise ValueError('text field is required and should not be empty')
if 'markdown' in data and not type(data['markdown']) is bool:
raise ValueError('markdown field should be bool')
if 'attachments' in data:
if not isinstance(data['attachments'], (list, tuple)):
raise ValueError('attachments field should be list or tuple')
for attachment in data['attachments']:
if 'text' not in attachment and 'title' not in attachment:
raise ValueError('text or title is required in attachment')
return True | [
"def",
"validate",
"(",
"data",
")",
":",
"text",
"=",
"data",
".",
"get",
"(",
"'text'",
")",
"if",
"not",
"isinstance",
"(",
"text",
",",
"_string_types",
")",
"or",
"len",
"(",
"text",
")",
"==",
"0",
":",
"raise",
"ValueError",
"(",
"'text field is required and should not be empty'",
")",
"if",
"'markdown'",
"in",
"data",
"and",
"not",
"type",
"(",
"data",
"[",
"'markdown'",
"]",
")",
"is",
"bool",
":",
"raise",
"ValueError",
"(",
"'markdown field should be bool'",
")",
"if",
"'attachments'",
"in",
"data",
":",
"if",
"not",
"isinstance",
"(",
"data",
"[",
"'attachments'",
"]",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"raise",
"ValueError",
"(",
"'attachments field should be list or tuple'",
")",
"for",
"attachment",
"in",
"data",
"[",
"'attachments'",
"]",
":",
"if",
"'text'",
"not",
"in",
"attachment",
"and",
"'title'",
"not",
"in",
"attachment",
":",
"raise",
"ValueError",
"(",
"'text or title is required in attachment'",
")",
"return",
"True"
] | Validates incoming data
Args:
data(dict): the incoming data
Returns:
True if the data is valid
Raises:
ValueError: the data is not valid | [
"Validates",
"incoming",
"data"
] | 6c7af2d215c2ff7135bb5af66ca333d0ea1089fd | https://github.com/bearyinnovative/bearychat.py/blob/6c7af2d215c2ff7135bb5af66ca333d0ea1089fd/bearychat/incoming.py#L13-L40 | train |
bearyinnovative/bearychat.py | bearychat/incoming.py | send | def send(url, data):
"""Sends an incoming message
Args:
url(str): the incoming hook url
data(dict): the sending data
Returns:
requests.Response
"""
validate(data)
return requests.post(url, json=data) | python | def send(url, data):
"""Sends an incoming message
Args:
url(str): the incoming hook url
data(dict): the sending data
Returns:
requests.Response
"""
validate(data)
return requests.post(url, json=data) | [
"def",
"send",
"(",
"url",
",",
"data",
")",
":",
"validate",
"(",
"data",
")",
"return",
"requests",
".",
"post",
"(",
"url",
",",
"json",
"=",
"data",
")"
] | Sends an incoming message
Args:
url(str): the incoming hook url
data(dict): the sending data
Returns:
requests.Response | [
"Sends",
"an",
"incoming",
"message"
] | 6c7af2d215c2ff7135bb5af66ca333d0ea1089fd | https://github.com/bearyinnovative/bearychat.py/blob/6c7af2d215c2ff7135bb5af66ca333d0ea1089fd/bearychat/incoming.py#L43-L55 | train |
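A minimal sketch of posting through an incoming hook; `send` runs `validate` first, so a missing or empty `text` raises ValueError before any HTTP request is made. The hook URL is a placeholder:

from bearychat import incoming

data = {
    "text": "Deploy finished",
    "markdown": True,
    "attachments": [{"title": "build #42"}],
}
resp = incoming.send("https://hook.bearychat.com/=bw52O/incoming/<token>", data)
print(resp.status_code)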
pnegahdar/inenv | inenv/cli.py | switch_or_run | def switch_or_run(cmd, venv_name=None):
"""Switch or run in this env"""
if cmd:
return _run(venv_name, cmd)
inenv = InenvManager()
if not os.getenv(INENV_ENV_VAR):
activator_warn(inenv)
return
else:
venv = inenv.get_prepped_venv(venv_name)
inenv.clear_extra_source_file()
inenv.write_extra_source_file("source {}".format(venv.activate_shell_file))
inenv.write_extra_source_file(override_envars_and_deactivate(inenv.get_envvars(venv_name)))
if autojump_enabled():
directory = inenv.guess_contents_dir(venv_name)
inenv.write_extra_source_file('cd {}'.format(directory))
click.secho("Jumping to {}".format(directory), fg='green')
sys.exit(EVAL_EXIT_CODE) | python | def switch_or_run(cmd, venv_name=None):
"""Switch or run in this env"""
if cmd:
return _run(venv_name, cmd)
inenv = InenvManager()
if not os.getenv(INENV_ENV_VAR):
activator_warn(inenv)
return
else:
venv = inenv.get_prepped_venv(venv_name)
inenv.clear_extra_source_file()
inenv.write_extra_source_file("source {}".format(venv.activate_shell_file))
inenv.write_extra_source_file(override_envars_and_deactivate(inenv.get_envvars(venv_name)))
if autojump_enabled():
directory = inenv.guess_contents_dir(venv_name)
inenv.write_extra_source_file('cd {}'.format(directory))
click.secho("Jumping to {}".format(directory), fg='green')
sys.exit(EVAL_EXIT_CODE) | [
"def",
"switch_or_run",
"(",
"cmd",
",",
"venv_name",
"=",
"None",
")",
":",
"if",
"cmd",
":",
"return",
"_run",
"(",
"venv_name",
",",
"cmd",
")",
"inenv",
"=",
"InenvManager",
"(",
")",
"if",
"not",
"os",
".",
"getenv",
"(",
"INENV_ENV_VAR",
")",
":",
"activator_warn",
"(",
"inenv",
")",
"return",
"else",
":",
"venv",
"=",
"inenv",
".",
"get_prepped_venv",
"(",
"venv_name",
")",
"inenv",
".",
"clear_extra_source_file",
"(",
")",
"inenv",
".",
"write_extra_source_file",
"(",
"\"source {}\"",
".",
"format",
"(",
"venv",
".",
"activate_shell_file",
")",
")",
"inenv",
".",
"write_extra_source_file",
"(",
"override_envars_and_deactivate",
"(",
"inenv",
".",
"get_envvars",
"(",
"venv_name",
")",
")",
")",
"if",
"autojump_enabled",
"(",
")",
":",
"directory",
"=",
"inenv",
".",
"guess_contents_dir",
"(",
"venv_name",
")",
"inenv",
".",
"write_extra_source_file",
"(",
"'cd {}'",
".",
"format",
"(",
"directory",
")",
")",
"click",
".",
"secho",
"(",
"\"Jumping to {}\"",
".",
"format",
"(",
"directory",
")",
",",
"fg",
"=",
"'green'",
")",
"sys",
".",
"exit",
"(",
"EVAL_EXIT_CODE",
")"
] | Switch or run in this env | [
"Switch",
"or",
"run",
"in",
"this",
"env"
] | 8f484e520892bf9eb59f91b4b5c92df9fbd9a4e6 | https://github.com/pnegahdar/inenv/blob/8f484e520892bf9eb59f91b4b5c92df9fbd9a4e6/inenv/cli.py#L91-L108 | train |
pnegahdar/inenv | inenv/cli.py | rm | def rm(venv_name):
""" Removes the venv by name """
inenv = InenvManager()
venv = inenv.get_venv(venv_name)
# abort=True makes a negative answer cancel the deletion; the original
# ignored the confirmation result
click.confirm("Delete dir {}".format(venv.path), abort=True)
shutil.rmtree(venv.path) | python | def rm(venv_name):
""" Removes the venv by name """
inenv = InenvManager()
venv = inenv.get_venv(venv_name)
click.confirm("Delete dir {}".format(venv.path))
shutil.rmtree(venv.path) | [
"def",
"rm",
"(",
"venv_name",
")",
":",
"inenv",
"=",
"InenvManager",
"(",
")",
"venv",
"=",
"inenv",
".",
"get_venv",
"(",
"venv_name",
")",
"click",
".",
"confirm",
"(",
"\"Delete dir {}\"",
".",
"format",
"(",
"venv",
".",
"path",
")",
")",
"shutil",
".",
"rmtree",
"(",
"venv",
".",
"path",
")"
] | Removes the venv by name | [
"Removes",
"the",
"venv",
"by",
"name"
] | 8f484e520892bf9eb59f91b4b5c92df9fbd9a4e6 | https://github.com/pnegahdar/inenv/blob/8f484e520892bf9eb59f91b4b5c92df9fbd9a4e6/inenv/cli.py#L113-L118 | train |
pnegahdar/inenv | inenv/cli.py | root | def root(venv_name):
"""Print the root directory of a virtualenv"""
inenv = InenvManager()
inenv.get_venv(venv_name)
venv = inenv.registered_venvs[venv_name]
click.secho(venv['root']) | python | def root(venv_name):
"""Print the root directory of a virtualenv"""
inenv = InenvManager()
inenv.get_venv(venv_name)
venv = inenv.registered_venvs[venv_name]
click.secho(venv['root']) | [
"def",
"root",
"(",
"venv_name",
")",
":",
"inenv",
"=",
"InenvManager",
"(",
")",
"inenv",
".",
"get_venv",
"(",
"venv_name",
")",
"venv",
"=",
"inenv",
".",
"registered_venvs",
"[",
"venv_name",
"]",
"click",
".",
"secho",
"(",
"venv",
"[",
"'root'",
"]",
")"
] | Print the root directory of a virtualenv | [
"Print",
"the",
"root",
"directory",
"of",
"a",
"virtualenv"
] | 8f484e520892bf9eb59f91b4b5c92df9fbd9a4e6 | https://github.com/pnegahdar/inenv/blob/8f484e520892bf9eb59f91b4b5c92df9fbd9a4e6/inenv/cli.py#L123-L128 | train |
pnegahdar/inenv | inenv/cli.py | init | def init(venv_name):
"""Initializez a virtualenv"""
inenv = InenvManager()
inenv.get_prepped_venv(venv_name, skip_cached=False)
if not os.getenv(INENV_ENV_VAR):
activator_warn(inenv)
click.secho("Your venv is ready. Enjoy!", fg='green') | python | def init(venv_name):
"""Initializez a virtualenv"""
inenv = InenvManager()
inenv.get_prepped_venv(venv_name, skip_cached=False)
if not os.getenv(INENV_ENV_VAR):
activator_warn(inenv)
click.secho("Your venv is ready. Enjoy!", fg='green') | [
"def",
"init",
"(",
"venv_name",
")",
":",
"inenv",
"=",
"InenvManager",
"(",
")",
"inenv",
".",
"get_prepped_venv",
"(",
"venv_name",
",",
"skip_cached",
"=",
"False",
")",
"if",
"not",
"os",
".",
"getenv",
"(",
"INENV_ENV_VAR",
")",
":",
"activator_warn",
"(",
"inenv",
")",
"click",
".",
"secho",
"(",
"\"Your venv is ready. Enjoy!\"",
",",
"fg",
"=",
"'green'",
")"
] | Initializes a virtualenv | [
"Initializez",
"a",
"virtualenv"
] | 8f484e520892bf9eb59f91b4b5c92df9fbd9a4e6 | https://github.com/pnegahdar/inenv/blob/8f484e520892bf9eb59f91b4b5c92df9fbd9a4e6/inenv/cli.py#L133-L139 | train |
pnegahdar/inenv | inenv/cli.py | autojump | def autojump():
"""Initializes a virtualenv"""
currently_enabled = autojump_enabled()
toggle_autojump()
if not currently_enabled:
click.secho("Autojump enabled", fg='green')
else:
click.secho("Autojump disabled", fg='red') | python | def autojump():
"""Initializes a virtualenv"""
currently_enabled = autojump_enabled()
toggle_autojump()
if not currently_enabled:
click.secho("Autojump enabled", fg='green')
else:
click.secho("Autojump disabled", fg='red') | [
"def",
"autojump",
"(",
")",
":",
"currently_enabled",
"=",
"autojump_enabled",
"(",
")",
"toggle_autojump",
"(",
")",
"if",
"not",
"currently_enabled",
":",
"click",
".",
"secho",
"(",
"\"Autojump enabled\"",
",",
"fg",
"=",
"'green'",
")",
"else",
":",
"click",
".",
"secho",
"(",
"\"Autojump disabled\"",
",",
"fg",
"=",
"'red'",
")"
] | Toggles autojump on or off | [
"Initializes",
"a",
"virtualenv"
] | 8f484e520892bf9eb59f91b4b5c92df9fbd9a4e6 | https://github.com/pnegahdar/inenv/blob/8f484e520892bf9eb59f91b4b5c92df9fbd9a4e6/inenv/cli.py#L143-L150 | train |
bitesofcode/projexui | projexui/widgets/xchartwidget/xchartruler.py | XChartRuler.clear | def clear( self ):
"""
Clears all the cached information about this ruler.
"""
self._minimum = None
self._maximum = None
self._step = None
self._notches = None
self._format = None
self._formatter = None
self._padEnd = 0
self._padStart = 0 | python | def clear( self ):
"""
Clears all the cached information about this ruler.
"""
self._minimum = None
self._maximum = None
self._step = None
self._notches = None
self._format = None
self._formatter = None
self._padEnd = 0
self._padStart = 0 | [
"def",
"clear",
"(",
"self",
")",
":",
"self",
".",
"_minimum",
"=",
"None",
"self",
".",
"_maximum",
"=",
"None",
"self",
".",
"_step",
"=",
"None",
"self",
".",
"_notches",
"=",
"None",
"self",
".",
"_format",
"=",
"None",
"self",
".",
"_formatter",
"=",
"None",
"self",
".",
"_padEnd",
"=",
"0",
"self",
".",
"_padStart",
"=",
"0"
] | Clears all the cached information about this ruler. | [
"Clears",
"all",
"the",
"cached",
"information",
"about",
"this",
"ruler",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xchartwidget/xchartruler.py#L91-L102 | train |
bitesofcode/projexui | projexui/widgets/xorbquerywidget/xorbquickfilterwidget.py | XOrbQuickFilterWidget.keyPressEvent | def keyPressEvent(self, event):
"""
Listens for the enter event to check if the query is setup.
"""
if event.key() in (Qt.Key_Enter, Qt.Key_Return):
self.queryEntered.emit(self.query())
super(XOrbQuickFilterWidget, self).keyPressEvent(event) | python | def keyPressEvent(self, event):
"""
Listens for the enter event to check if the query is setup.
"""
if event.key() in (Qt.Key_Enter, Qt.Key_Return):
self.queryEntered.emit(self.query())
super(XOrbQuickFilterWidget, self).keyPressEvent(event) | [
"def",
"keyPressEvent",
"(",
"self",
",",
"event",
")",
":",
"if",
"event",
".",
"key",
"(",
")",
"in",
"(",
"Qt",
".",
"Key_Enter",
",",
"Qt",
".",
"Key_Return",
")",
":",
"self",
".",
"queryEntered",
".",
"emit",
"(",
"self",
".",
"query",
"(",
")",
")",
"super",
"(",
"XOrbQuickFilterWidget",
",",
"self",
")",
".",
"keyPressEvent",
"(",
"event",
")"
] | Listens for the enter event to check if the query is setup. | [
"Listens",
"for",
"the",
"enter",
"event",
"to",
"check",
"if",
"the",
"query",
"is",
"setup",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xorbquerywidget/xorbquickfilterwidget.py#L96-L103 | train |
bitesofcode/projexui | projexui/widgets/xorbquerywidget/xorbquickfilterwidget.py | XOrbQuickFilterWidget.rebuild | def rebuild(self):
"""
Rebuilds the data associated with this filter widget.
"""
table = self.tableType()
form = nativestring(self.filterFormat())
if not table and form:
if self.layout().count() == 0:
self.layout().addWidget(QLabel(form, self))
else:
self.layout().itemAt(0).widget().setText(form)
return
elif not form:
return
for child in self.findChildren(QWidget):
child.close()
child.setParent(None)
child.deleteLater()
self.setUpdatesEnabled(False)
schema = table.schema()
vlayout = self.layout()
for i in range(vlayout.count()):
vlayout.takeAt(0)
self._plugins = []
for line in form.split('\n'):
row = QHBoxLayout()
row.setContentsMargins(0, 0, 0, 0)
row.setSpacing(0)
for label, lookup in FORMAT_SPLITTER.findall(line):
# create the label
lbl = QLabel(label, self)
row.addWidget(lbl)
# create the query plugin
opts = lookup.split(':')
if len(opts) == 1:
opts.append('is')
column = schema.column(opts[0])
if not column:
continue
plugin = self.pluginFactory().plugin(column)
if not plugin:
continue
editor = plugin.createEditor(self, column, opts[1], None)
if editor:
editor.setObjectName(opts[0])
row.addWidget(editor)
self._plugins.append((opts[0], opts[1], plugin, editor))
row.addStretch(1)
vlayout.addLayout(row)
self.setUpdatesEnabled(True)
self.adjustSize() | python | def rebuild(self):
"""
Rebuilds the data associated with this filter widget.
"""
table = self.tableType()
form = nativestring(self.filterFormat())
if not table and form:
if self.layout().count() == 0:
self.layout().addWidget(QLabel(form, self))
else:
self.layout().itemAt(0).widget().setText(form)
return
elif not form:
return
for child in self.findChildren(QWidget):
child.close()
child.setParent(None)
child.deleteLater()
self.setUpdatesEnabled(False)
schema = table.schema()
vlayout = self.layout()
for i in range(vlayout.count()):
vlayout.takeAt(0)
self._plugins = []
for line in form.split('\n'):
row = QHBoxLayout()
row.setContentsMargins(0, 0, 0, 0)
row.setSpacing(0)
for label, lookup in FORMAT_SPLITTER.findall(line):
# create the label
lbl = QLabel(label, self)
row.addWidget(lbl)
# create the query plugin
opts = lookup.split(':')
if len(opts) == 1:
opts.append('is')
column = schema.column(opts[0])
if not column:
continue
plugin = self.pluginFactory().plugin(column)
if not plugin:
continue
editor = plugin.createEditor(self, column, opts[1], None)
if editor:
editor.setObjectName(opts[0])
row.addWidget(editor)
self._plugins.append((opts[0], opts[1], plugin, editor))
row.addStretch(1)
vlayout.addLayout(row)
self.setUpdatesEnabled(True)
self.adjustSize() | [
"def",
"rebuild",
"(",
"self",
")",
":",
"table",
"=",
"self",
".",
"tableType",
"(",
")",
"form",
"=",
"nativestring",
"(",
"self",
".",
"filterFormat",
"(",
")",
")",
"if",
"not",
"table",
"and",
"form",
":",
"if",
"self",
".",
"layout",
"(",
")",
".",
"count",
"(",
")",
"==",
"0",
":",
"self",
".",
"layout",
"(",
")",
".",
"addWidget",
"(",
"QLabel",
"(",
"form",
",",
"self",
")",
")",
"else",
":",
"self",
".",
"layout",
"(",
")",
".",
"itemAt",
"(",
"0",
")",
".",
"widget",
"(",
")",
".",
"setText",
"(",
"form",
")",
"return",
"elif",
"not",
"form",
":",
"return",
"for",
"child",
"in",
"self",
".",
"findChildren",
"(",
"QWidget",
")",
":",
"child",
".",
"close",
"(",
")",
"child",
".",
"setParent",
"(",
"None",
")",
"child",
".",
"deleteLater",
"(",
")",
"self",
".",
"setUpdatesEnabled",
"(",
"False",
")",
"schema",
"=",
"table",
".",
"schema",
"(",
")",
"vlayout",
"=",
"self",
".",
"layout",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"vlayout",
".",
"count",
"(",
")",
")",
":",
"vlayout",
".",
"takeAt",
"(",
"0",
")",
"self",
".",
"_plugins",
"=",
"[",
"]",
"for",
"line",
"in",
"form",
".",
"split",
"(",
"'\\n'",
")",
":",
"row",
"=",
"QHBoxLayout",
"(",
")",
"row",
".",
"setContentsMargins",
"(",
"0",
",",
"0",
",",
"0",
",",
"0",
")",
"row",
".",
"setSpacing",
"(",
"0",
")",
"for",
"label",
",",
"lookup",
"in",
"FORMAT_SPLITTER",
".",
"findall",
"(",
"line",
")",
":",
"# create the label\r",
"lbl",
"=",
"QLabel",
"(",
"label",
",",
"self",
")",
"row",
".",
"addWidget",
"(",
"lbl",
")",
"# create the query plugin\r",
"opts",
"=",
"lookup",
".",
"split",
"(",
"':'",
")",
"if",
"len",
"(",
"opts",
")",
"==",
"1",
":",
"opts",
".",
"append",
"(",
"'is'",
")",
"column",
"=",
"schema",
".",
"column",
"(",
"opts",
"[",
"0",
"]",
")",
"if",
"not",
"column",
":",
"continue",
"plugin",
"=",
"self",
".",
"pluginFactory",
"(",
")",
".",
"plugin",
"(",
"column",
")",
"if",
"not",
"plugin",
":",
"continue",
"editor",
"=",
"plugin",
".",
"createEditor",
"(",
"self",
",",
"column",
",",
"opts",
"[",
"1",
"]",
",",
"None",
")",
"if",
"editor",
":",
"editor",
".",
"setObjectName",
"(",
"opts",
"[",
"0",
"]",
")",
"row",
".",
"addWidget",
"(",
"editor",
")",
"self",
".",
"_plugins",
".",
"append",
"(",
"(",
"opts",
"[",
"0",
"]",
",",
"opts",
"[",
"1",
"]",
",",
"plugin",
",",
"editor",
")",
")",
"row",
".",
"addStretch",
"(",
"1",
")",
"vlayout",
".",
"addLayout",
"(",
"row",
")",
"self",
".",
"setUpdatesEnabled",
"(",
"True",
")",
"self",
".",
"adjustSize",
"(",
")"
] | Rebuilds the data associated with this filter widget. | [
"Rebuilds",
"the",
"data",
"associated",
"with",
"this",
"filter",
"widget",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xorbquerywidget/xorbquickfilterwidget.py#L105-L168 | train |
bitesofcode/projexui | projexui/widgets/xorbquerywidget/xorbquickfilterwidget.py | XOrbQuickFilterWidget.showMenu | def showMenu(self, point):
"""
Displays the menu for this filter widget.
"""
menu = QMenu(self)
acts = {}
acts['edit'] = menu.addAction('Edit quick filter...')
trigger = menu.exec_(self.mapToGlobal(point))
if trigger == acts['edit']:
text, accepted = XTextEdit.getText(self.window(),
'Edit Format',
'Format:',
self.filterFormat(),
wrapped=False)
if accepted:
self.setFilterFormat(text) | python | def showMenu(self, point):
"""
Displays the menu for this filter widget.
"""
menu = QMenu(self)
acts = {}
acts['edit'] = menu.addAction('Edit quick filter...')
trigger = menu.exec_(self.mapToGlobal(point))
if trigger == acts['edit']:
text, accepted = XTextEdit.getText(self.window(),
'Edit Format',
'Format:',
self.filterFormat(),
wrapped=False)
if accepted:
self.setFilterFormat(text) | [
"def",
"showMenu",
"(",
"self",
",",
"point",
")",
":",
"menu",
"=",
"QMenu",
"(",
"self",
")",
"acts",
"=",
"{",
"}",
"acts",
"[",
"'edit'",
"]",
"=",
"menu",
".",
"addAction",
"(",
"'Edit quick filter...'",
")",
"trigger",
"=",
"menu",
".",
"exec_",
"(",
"self",
".",
"mapToGlobal",
"(",
"point",
")",
")",
"if",
"trigger",
"==",
"acts",
"[",
"'edit'",
"]",
":",
"text",
",",
"accepted",
"=",
"XTextEdit",
".",
"getText",
"(",
"self",
".",
"window",
"(",
")",
",",
"'Edit Format'",
",",
"'Format:'",
",",
"self",
".",
"filterFormat",
"(",
")",
",",
"wrapped",
"=",
"False",
")",
"if",
"accepted",
":",
"self",
".",
"setFilterFormat",
"(",
"text",
")"
] | Displays the menu for this filter widget. | [
"Displays",
"the",
"menu",
"for",
"this",
"filter",
"widget",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xorbquerywidget/xorbquickfilterwidget.py#L170-L188 | train |
tueda/python-form | form/ioutil.py | set_nonblock | def set_nonblock(fd):
# type: (int) -> None
"""Set the given file descriptor to non-blocking mode."""
fcntl.fcntl(fd,
fcntl.F_SETFL,
fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK) | python | def set_nonblock(fd):
# type: (int) -> None
"""Set the given file descriptor to non-blocking mode."""
fcntl.fcntl(fd,
fcntl.F_SETFL,
fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK) | [
"def",
"set_nonblock",
"(",
"fd",
")",
":",
"# type: (int) -> None",
"fcntl",
".",
"fcntl",
"(",
"fd",
",",
"fcntl",
".",
"F_SETFL",
",",
"fcntl",
".",
"fcntl",
"(",
"fd",
",",
"fcntl",
".",
"F_GETFL",
")",
"|",
"os",
".",
"O_NONBLOCK",
")"
] | Set the given file descriptor to non-blocking mode. | [
"Set",
"the",
"given",
"file",
"descriptor",
"to",
"non",
"-",
"blocking",
"mode",
"."
] | 1e5a8464f7a7a6cbbb32411fc2ea3615fd48334b | https://github.com/tueda/python-form/blob/1e5a8464f7a7a6cbbb32411fc2ea3615fd48334b/form/ioutil.py#L10-L15 | train |
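A short sketch of `set_nonblock` in action: once the flag is set, a read on an empty descriptor fails immediately instead of blocking.

    import os
    from form.ioutil import set_nonblock

    r, w = os.pipe()
    set_nonblock(r)
    try:
        os.read(r, 1024)  # nothing has been written, so no data is buffered
    except BlockingIOError:
        print("read would block; try again later")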
tueda/python-form | form/ioutil.py | PushbackReader.read | def read(self):
# type: () -> str
"""Read data from the stream."""
s = self._buf + self._raw.read()
self._buf = ''
return s | python | def read(self):
# type: () -> str
"""Read data from the stream."""
s = self._buf + self._raw.read()
self._buf = ''
return s | [
"def",
"read",
"(",
"self",
")",
":",
"# type: () -> str",
"s",
"=",
"self",
".",
"_buf",
"+",
"self",
".",
"_raw",
".",
"read",
"(",
")",
"self",
".",
"_buf",
"=",
"''",
"return",
"s"
] | Read data from the stream. | [
"Read",
"data",
"from",
"the",
"stream",
"."
] | 1e5a8464f7a7a6cbbb32411fc2ea3615fd48334b | https://github.com/tueda/python-form/blob/1e5a8464f7a7a6cbbb32411fc2ea3615fd48334b/form/ioutil.py#L37-L42 | train |
bearyinnovative/bearychat.py | examples/rtm_loop.py | RTMLoop.on_open | def on_open(self, ws):
"""Websocket on_open event handler"""
def keep_alive(interval):
while True:
time.sleep(interval)
self.ping()
start_new_thread(keep_alive, (self.keep_alive_interval, )) | python | def on_open(self, ws):
"""Websocket on_open event handler"""
def keep_alive(interval):
while True:
time.sleep(interval)
self.ping()
start_new_thread(keep_alive, (self.keep_alive_interval, )) | [
"def",
"on_open",
"(",
"self",
",",
"ws",
")",
":",
"def",
"keep_alive",
"(",
"interval",
")",
":",
"while",
"True",
":",
"time",
".",
"sleep",
"(",
"interval",
")",
"self",
".",
"ping",
"(",
")",
"start_new_thread",
"(",
"keep_alive",
",",
"(",
"self",
".",
"keep_alive_interval",
",",
")",
")"
] | Websocket on_open event handler | [
"Websocket",
"on_open",
"event",
"handler"
] | 6c7af2d215c2ff7135bb5af66ca333d0ea1089fd | https://github.com/bearyinnovative/bearychat.py/blob/6c7af2d215c2ff7135bb5af66ca333d0ea1089fd/examples/rtm_loop.py#L50-L57 | train |
bearyinnovative/bearychat.py | examples/rtm_loop.py | RTMLoop.on_message | def on_message(self, ws, message):
"""Websocket on_message event handler
Saves message as RTMMessage in self._inbox
"""
try:
data = json.loads(message)
except Exception:
self._set_error(message, "decode message failed")
else:
self._inbox.put(RTMMessage(data)) | python | def on_message(self, ws, message):
"""Websocket on_message event handler
Saves message as RTMMessage in self._inbox
"""
try:
data = json.loads(message)
except Exception:
self._set_error(message, "decode message failed")
else:
self._inbox.put(RTMMessage(data)) | [
"def",
"on_message",
"(",
"self",
",",
"ws",
",",
"message",
")",
":",
"try",
":",
"data",
"=",
"json",
".",
"loads",
"(",
"message",
")",
"except",
"Exception",
":",
"self",
".",
"_set_error",
"(",
"message",
",",
"\"decode message failed\"",
")",
"else",
":",
"self",
".",
"_inbox",
".",
"put",
"(",
"RTMMessage",
"(",
"data",
")",
")"
] | Websocket on_message event handler
Saves message as RTMMessage in self._inbox | [
"Websocket",
"on_message",
"event",
"handler"
] | 6c7af2d215c2ff7135bb5af66ca333d0ea1089fd | https://github.com/bearyinnovative/bearychat.py/blob/6c7af2d215c2ff7135bb5af66ca333d0ea1089fd/examples/rtm_loop.py#L59-L69 | train |
bearyinnovative/bearychat.py | examples/rtm_loop.py | RTMLoop.send | def send(self, message):
"""Sends a RTMMessage
Should be called after starting the loop
Args:
message(RTMMessage): the message to send
Raises:
WebSocketConnectionClosedException: if the loop is closed
"""
if "call_id" not in message:
message["call_id"] = self.gen_call_id()
self._ws.send(message.to_json()) | python | def send(self, message):
"""Sends a RTMMessage
Should be called after starting the loop
Args:
message(RTMMessage): the message to send
Raises:
WebSocketConnectionClosedException: if the loop is closed
"""
if "call_id" not in message:
message["call_id"] = self.gen_call_id()
self._ws.send(message.to_json()) | [
"def",
"send",
"(",
"self",
",",
"message",
")",
":",
"if",
"\"call_id\"",
"not",
"in",
"message",
":",
"message",
"[",
"\"call_id\"",
"]",
"=",
"self",
".",
"gen_call_id",
"(",
")",
"self",
".",
"_ws",
".",
"send",
"(",
"message",
".",
"to_json",
"(",
")",
")"
] | Sends a RTMMessage
Should be called after starting the loop
Args:
message(RTMMessage): the message to send
Raises:
WebSocketConnectionClosedException: if the loop is closed | [
"Sends",
"a",
"RTMMessage",
"Should",
"be",
"called",
"after",
"starting",
"the",
"loop"
] | 6c7af2d215c2ff7135bb5af66ca333d0ea1089fd | https://github.com/bearyinnovative/bearychat.py/blob/6c7af2d215c2ff7135bb5af66ca333d0ea1089fd/examples/rtm_loop.py#L120-L133 | train |
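A hedged sketch of driving the loop; `RTMLoop`'s constructor and start-up live elsewhere in rtm_loop.py, so the websocket URL argument and the `start()` call here are assumptions:

    # hypothetical setup; the RTM websocket URL comes from the server
    loop = RTMLoop(ws_url)
    loop.start()

    # send() fills in "call_id" automatically when the message lacks one
    loop.send(RTMMessage({"type": "ping"}))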
bearyinnovative/bearychat.py | examples/rtm_loop.py | RTMLoop.get_message | def get_message(self, block=False, timeout=None):
"""Removes and returns a RTMMessage from self._inbox
Args:
block(bool): if True block until a RTMMessage is available,
else it will return None when self._inbox is empty
timeout(int): it blocks at most timeout seconds
Returns:
RTMMessage if self._inbox is not empty, else None
"""
try:
message = self._inbox.get(block=block, timeout=timeout)
return message
except Exception:
return None | python | def get_message(self, block=False, timeout=None):
"""Removes and returns a RTMMessage from self._inbox
Args:
block(bool): if True block until a RTMMessage is available,
else it will return None when self._inbox is empty
timeout(int): it blocks at most timeout seconds
Returns:
RTMMessage if self._inbox is not empty, else None
"""
try:
message = self._inbox.get(block=block, timeout=timeout)
return message
except Exception:
return None | [
"def",
"get_message",
"(",
"self",
",",
"block",
"=",
"False",
",",
"timeout",
"=",
"None",
")",
":",
"try",
":",
"message",
"=",
"self",
".",
"_inbox",
".",
"get",
"(",
"block",
"=",
"block",
",",
"timeout",
"=",
"timeout",
")",
"return",
"message",
"except",
"Exception",
":",
"return",
"None"
] | Removes and returns a RTMMessage from self._inbox
Args:
block(bool): if True block until a RTMMessage is available,
else it will return None when self._inbox is empty
timeout(int): it blocks at most timeout seconds
Returns:
RTMMessage if self._inbox is not empty, else None | [
"Removes",
"and",
"returns",
"a",
"RTMMessage",
"from",
"self",
".",
"_inbox"
] | 6c7af2d215c2ff7135bb5af66ca333d0ea1089fd | https://github.com/bearyinnovative/bearychat.py/blob/6c7af2d215c2ff7135bb5af66ca333d0ea1089fd/examples/rtm_loop.py#L135-L150 | train |
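Continuing the sketch above, `get_message` drains the inbox either without blocking or with a bounded wait:

    msg = loop.get_message()                       # None when the inbox is empty
    msg = loop.get_message(block=True, timeout=5)  # wait at most 5 seconds
    if msg is not None:
        print(msg["type"])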
bearyinnovative/bearychat.py | examples/rtm_loop.py | RTMLoop.get_error | def get_error(self, block=False, timeout=None):
"""Removes and returns an error from self._errors
Args:
block(bool): if True block until an error is available,
else it will return None when self._errors is empty
timeout(int): it blocks at most timeout seconds
Returns:
error if self._errors is not empty, else None
"""
try:
error = self._errors.get(block=block, timeout=timeout)
return error
except Exception:
return None | python | def get_error(self, block=False, timeout=None):
"""Removes and returns an error from self._errors
Args:
block(bool): if True block until an error is available,
else it will return None when self._errors is empty
timeout(int): it blocks at most timeout seconds
Returns:
error if self._errors is not empty, else None
"""
try:
error = self._errors.get(block=block, timeout=timeout)
return error
except Exception:
return None | [
"def",
"get_error",
"(",
"self",
",",
"block",
"=",
"False",
",",
"timeout",
"=",
"None",
")",
":",
"try",
":",
"error",
"=",
"self",
".",
"_errors",
".",
"get",
"(",
"block",
"=",
"block",
",",
"timeout",
"=",
"timeout",
")",
"return",
"error",
"except",
"Exception",
":",
"return",
"None"
] | Removes and returns an error from self._errors
Args:
block(bool): if True block until an error is available,
else it will return None when self._errors is empty
timeout(int): it blocks at most timeout seconds
Returns:
error if self._errors is not empty, else None | [
"Removes",
"and",
"returns",
"an",
"error",
"from",
"self",
".",
"_errors"
] | 6c7af2d215c2ff7135bb5af66ca333d0ea1089fd | https://github.com/bearyinnovative/bearychat.py/blob/6c7af2d215c2ff7135bb5af66ca333d0ea1089fd/examples/rtm_loop.py#L152-L167 | train |
bitesofcode/projexui | projexui/widgets/xorbquerywidget/plugins.py | EnumPlugin.createEditor | def createEditor(self, parent, column, operator, value):
"""
Creates a new editor for the system.
"""
editor = super(EnumPlugin, self).createEditor(parent,
column,
operator,
value)
editor.setEnum(column.enum())
if operator in ('contains', 'does not contain'):
editor.setCheckable(True)
editor.setCurrentValue(value)
return editor | python | def createEditor(self, parent, column, operator, value):
"""
Creates a new editor for the system.
"""
editor = super(EnumPlugin, self).createEditor(parent,
column,
operator,
value)
editor.setEnum(column.enum())
if operator in ('contains', 'does not contain'):
editor.setCheckable(True)
editor.setCurrentValue(value)
return editor | [
"def",
"createEditor",
"(",
"self",
",",
"parent",
",",
"column",
",",
"operator",
",",
"value",
")",
":",
"editor",
"=",
"super",
"(",
"EnumPlugin",
",",
"self",
")",
".",
"createEditor",
"(",
"parent",
",",
"column",
",",
"operator",
",",
"value",
")",
"editor",
".",
"setEnum",
"(",
"column",
".",
"enum",
"(",
")",
")",
"if",
"operator",
"in",
"(",
"'contains'",
",",
"'does not contain'",
")",
":",
"editor",
".",
"setCheckable",
"(",
"True",
")",
"editor",
".",
"setCurrentValue",
"(",
"value",
")",
"return",
"editor"
] | Creates a new editor for the system. | [
"Creates",
"a",
"new",
"editor",
"for",
"the",
"system",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xorbquerywidget/plugins.py#L127-L141 | train |
bitesofcode/projexui | projexui/widgets/xorbquerywidget/plugins.py | ForeignKeyPlugin.setupQuery | def setupQuery(self, query, op, editor):
"""
Sets up the query for this editor.
"""
if editor is not None:
value = editor.currentRecord()
if value is None:
return False
return super(ForeignKeyPlugin, self).setupQuery(query, op, editor) | python | def setupQuery(self, query, op, editor):
"""
Sets up the query for this editor.
"""
if editor is not None:
value = editor.currentRecord()
if value is None:
return False
return super(ForeignKeyPlugin, self).setupQuery(query, op, editor) | [
"def",
"setupQuery",
"(",
"self",
",",
"query",
",",
"op",
",",
"editor",
")",
":",
"if",
"editor",
"is",
"not",
"None",
":",
"value",
"=",
"editor",
".",
"currentRecord",
"(",
")",
"if",
"value",
"is",
"None",
":",
"return",
"False",
"return",
"super",
"(",
"ForeignKeyPlugin",
",",
"self",
")",
".",
"setupQuery",
"(",
"query",
",",
"op",
",",
"editor",
")"
] | Sets up the query for this editor. | [
"Sets",
"up",
"the",
"query",
"for",
"this",
"editor",
"."
] | f18a73bec84df90b034ca69b9deea118dbedfc4d | https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xorbquerywidget/plugins.py#L189-L198 | train |
hover2pi/svo_filters | svo_filters/svo.py | color_gen | def color_gen(colormap='viridis', key=None, n=15):
"""Color generator for Bokeh plots
Parameters
----------
colormap: str, sequence
The name of the color map
key: str, int
The key for dict-valued bokeh palettes (the first key is used if None)
n: int
The number of colors requested from callable palettes
Returns
-------
generator
A generator for the color palette
"""
if colormap in dir(bpal):
palette = getattr(bpal, colormap)
if isinstance(palette, dict):
if key is None:
key = list(palette.keys())[0]
palette = palette[key]
elif callable(palette):
palette = palette(n)
else:
raise TypeError("pallette must be a bokeh palette name or a sequence of color hex values.")
elif isinstance(colormap, (list, tuple)):
palette = colormap
else:
raise TypeError("pallette must be a bokeh palette name or a sequence of color hex values.")
yield from itertools.cycle(palette) | python | def color_gen(colormap='viridis', key=None, n=15):
"""Color generator for Bokeh plots
Parameters
----------
colormap: str, sequence
The name of the color map
key: str, int
The key for dict-valued bokeh palettes (the first key is used if None)
n: int
The number of colors requested from callable palettes
Returns
-------
generator
A generator for the color palette
"""
if colormap in dir(bpal):
palette = getattr(bpal, colormap)
if isinstance(palette, dict):
if key is None:
key = list(palette.keys())[0]
palette = palette[key]
elif callable(palette):
palette = palette(n)
else:
raise TypeError("pallette must be a bokeh palette name or a sequence of color hex values.")
elif isinstance(colormap, (list, tuple)):
palette = colormap
else:
raise TypeError("pallette must be a bokeh palette name or a sequence of color hex values.")
yield from itertools.cycle(palette) | [
"def",
"color_gen",
"(",
"colormap",
"=",
"'viridis'",
",",
"key",
"=",
"None",
",",
"n",
"=",
"15",
")",
":",
"if",
"colormap",
"in",
"dir",
"(",
"bpal",
")",
":",
"palette",
"=",
"getattr",
"(",
"bpal",
",",
"colormap",
")",
"if",
"isinstance",
"(",
"palette",
",",
"dict",
")",
":",
"if",
"key",
"is",
"None",
":",
"key",
"=",
"list",
"(",
"palette",
".",
"keys",
"(",
")",
")",
"[",
"0",
"]",
"palette",
"=",
"palette",
"[",
"key",
"]",
"elif",
"callable",
"(",
"palette",
")",
":",
"palette",
"=",
"palette",
"(",
"n",
")",
"else",
":",
"raise",
"TypeError",
"(",
"\"pallette must be a bokeh palette name or a sequence of color hex values.\"",
")",
"elif",
"isinstance",
"(",
"colormap",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"palette",
"=",
"colormap",
"else",
":",
"raise",
"TypeError",
"(",
"\"pallette must be a bokeh palette name or a sequence of color hex values.\"",
")",
"yield",
"from",
"itertools",
".",
"cycle",
"(",
"palette",
")"
] | Color generator for Bokeh plots
Parameters
----------
colormap: str, sequence
The name of the color map
key: str, int
The key for dict-valued bokeh palettes (the first key is used if None)
n: int
The number of colors requested from callable palettes
Returns
-------
generator
A generator for the color palette | [
"Color",
"generator",
"for",
"Bokeh",
"plots"
] | f0587c4908baf636d4bdf030fa95029e8f31b975 | https://github.com/hover2pi/svo_filters/blob/f0587c4908baf636d4bdf030fa95029e8f31b975/svo_filters/svo.py#L763-L796 | train |
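A quick sketch of the three palette paths `color_gen` accepts; the palette names are standard bokeh ones, so only the color counts here are arbitrary:

    # callable palette: bokeh's viridis(n) builds an n-color list
    colors = color_gen('viridis', n=5)
    print([next(colors) for _ in range(3)])

    # dict palette: Category10 is keyed by size, so pass a key
    cat = color_gen('Category10', key=10)

    # an explicit sequence of hex values simply cycles forever
    pair = color_gen(['#ff0000', '#0000ff'])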
hover2pi/svo_filters | svo_filters/svo.py | filters | def filters(filter_directory=None, update=False, fmt='table', **kwargs):
"""
Get a list of the available filters
Parameters
----------
filter_directory: str
The directory containing the filter relative spectral response curves
update: bool
Check the filter directory for new filters and regenerate the cached table pickle
fmt: str
The format for the returned table
Returns
-------
astropy.table.Table or dict
The filter metadata for each band
"""
if filter_directory is None:
filter_directory = resource_filename('svo_filters', 'data/filters/')
# Get the pickle path and make sure file exists
p_path = os.path.join(filter_directory, 'filter_list.p')
updated = False
if not os.path.isfile(p_path):
os.system('touch {}'.format(p_path))
if update:
print('Loading filters into table...')
# Get all the filters (except the pickle)
files = glob(filter_directory+'*')
files = [f for f in files if not f.endswith('.p')]
bands = [os.path.basename(b) for b in files]
tables = []
for band in bands:
# Load the filter
band = band.replace('.txt', '')
filt = Filter(band, **kwargs)
filt.Band = band
# Put metadata into table with correct dtypes
info = filt.info(True)
vals = [float(i) if i.replace('.', '').replace('-', '')
.replace('+', '').isnumeric() else i
for i in info['Values']]
dtypes = np.array([type(i) for i in vals])
table = at.Table(np.array([vals]), names=info['Attributes'],
dtype=dtypes)
tables.append(table)
del filt, info, table
# Write to the pickle
with open(p_path, 'wb') as file:
pickle.dump(at.vstack(tables), file)
# Load the saved pickle
data = {}
if os.path.isfile(p_path):
with open(p_path, 'rb') as file:
data = pickle.load(file)
# Return the data
if data:
if fmt == 'dict':
data = {r[0]: {k: r[k].value if hasattr(r[k], 'unit') else r[k]
for k in data.keys()[1:]} for r in data}
else:
# Add Band as index
data.add_index('Band')
return data
# Or try to generate it once
else:
if not updated:
updated = True
filters(update=True)
else:
print('No filters found in', filter_directory) | python | def filters(filter_directory=None, update=False, fmt='table', **kwargs):
"""
Get a list of the available filters
Parameters
----------
filter_directory: str
The directory containing the filter relative spectral response curves
update: bool
Check the filter directory for new filters and regenerate the cached table pickle
fmt: str
The format for the returned table
Returns
-------
astropy.table.Table or dict
The filter metadata for each band
"""
if filter_directory is None:
filter_directory = resource_filename('svo_filters', 'data/filters/')
# Get the pickle path and make sure file exists
p_path = os.path.join(filter_directory, 'filter_list.p')
updated = False
if not os.path.isfile(p_path):
os.system('touch {}'.format(p_path))
if update:
print('Loading filters into table...')
# Get all the filters (except the pickle)
files = glob(filter_directory+'*')
files = [f for f in files if not f.endswith('.p')]
bands = [os.path.basename(b) for b in files]
tables = []
for band in bands:
# Load the filter
band = band.replace('.txt', '')
filt = Filter(band, **kwargs)
filt.Band = band
# Put metadata into table with correct dtypes
info = filt.info(True)
vals = [float(i) if i.replace('.', '').replace('-', '')
.replace('+', '').isnumeric() else i
for i in info['Values']]
dtypes = np.array([type(i) for i in vals])
table = at.Table(np.array([vals]), names=info['Attributes'],
dtype=dtypes)
tables.append(table)
del filt, info, table
# Write to the pickle
with open(p_path, 'wb') as file:
pickle.dump(at.vstack(tables), file)
# Load the saved pickle
data = {}
if os.path.isfile(p_path):
with open(p_path, 'rb') as file:
data = pickle.load(file)
# Return the data
if data:
if fmt == 'dict':
data = {r[0]: {k: r[k].value if hasattr(r[k], 'unit') else r[k]
for k in data.keys()[1:]} for r in data}
else:
# Add Band as index
data.add_index('Band')
return data
# Or try to generate it once
else:
if not updated:
updated = True
filters(update=True)
else:
print('No filters found in', filter_directory) | [
"def",
"filters",
"(",
"filter_directory",
"=",
"None",
",",
"update",
"=",
"False",
",",
"fmt",
"=",
"'table'",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"filter_directory",
"is",
"None",
":",
"filter_directory",
"=",
"resource_filename",
"(",
"'svo_filters'",
",",
"'data/filters/'",
")",
"# Get the pickle path and make sure file exists",
"p_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"filter_directory",
",",
"'filter_list.p'",
")",
"updated",
"=",
"False",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"p_path",
")",
":",
"os",
".",
"system",
"(",
"'touch {}'",
".",
"format",
"(",
"p_path",
")",
")",
"if",
"update",
":",
"print",
"(",
"'Loading filters into table...'",
")",
"# Get all the filters (except the pickle)",
"files",
"=",
"glob",
"(",
"filter_directory",
"+",
"'*'",
")",
"files",
"=",
"[",
"f",
"for",
"f",
"in",
"files",
"if",
"not",
"f",
".",
"endswith",
"(",
"'.p'",
")",
"]",
"bands",
"=",
"[",
"os",
".",
"path",
".",
"basename",
"(",
"b",
")",
"for",
"b",
"in",
"files",
"]",
"tables",
"=",
"[",
"]",
"for",
"band",
"in",
"bands",
":",
"# Load the filter",
"band",
"=",
"band",
".",
"replace",
"(",
"'.txt'",
",",
"''",
")",
"filt",
"=",
"Filter",
"(",
"band",
",",
"*",
"*",
"kwargs",
")",
"filt",
".",
"Band",
"=",
"band",
"# Put metadata into table with correct dtypes",
"info",
"=",
"filt",
".",
"info",
"(",
"True",
")",
"vals",
"=",
"[",
"float",
"(",
"i",
")",
"if",
"i",
".",
"replace",
"(",
"'.'",
",",
"''",
")",
".",
"replace",
"(",
"'-'",
",",
"''",
")",
".",
"replace",
"(",
"'+'",
",",
"''",
")",
".",
"isnumeric",
"(",
")",
"else",
"i",
"for",
"i",
"in",
"info",
"[",
"'Values'",
"]",
"]",
"dtypes",
"=",
"np",
".",
"array",
"(",
"[",
"type",
"(",
"i",
")",
"for",
"i",
"in",
"vals",
"]",
")",
"table",
"=",
"at",
".",
"Table",
"(",
"np",
".",
"array",
"(",
"[",
"vals",
"]",
")",
",",
"names",
"=",
"info",
"[",
"'Attributes'",
"]",
",",
"dtype",
"=",
"dtypes",
")",
"tables",
".",
"append",
"(",
"table",
")",
"del",
"filt",
",",
"info",
",",
"table",
"# Write to the pickle",
"with",
"open",
"(",
"p_path",
",",
"'wb'",
")",
"as",
"file",
":",
"pickle",
".",
"dump",
"(",
"at",
".",
"vstack",
"(",
"tables",
")",
",",
"file",
")",
"# Load the saved pickle",
"data",
"=",
"{",
"}",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"p_path",
")",
":",
"with",
"open",
"(",
"p_path",
",",
"'rb'",
")",
"as",
"file",
":",
"data",
"=",
"pickle",
".",
"load",
"(",
"file",
")",
"# Return the data",
"if",
"data",
":",
"if",
"fmt",
"==",
"'dict'",
":",
"data",
"=",
"{",
"r",
"[",
"0",
"]",
":",
"{",
"k",
":",
"r",
"[",
"k",
"]",
".",
"value",
"if",
"hasattr",
"(",
"r",
"[",
"k",
"]",
",",
"'unit'",
")",
"else",
"r",
"[",
"k",
"]",
"for",
"k",
"in",
"data",
".",
"keys",
"(",
")",
"[",
"1",
":",
"]",
"}",
"for",
"r",
"in",
"data",
"}",
"else",
":",
"# Add Band as index",
"data",
".",
"add_index",
"(",
"'Band'",
")",
"return",
"data",
"# Or try to generate it once",
"else",
":",
"if",
"not",
"updated",
":",
"updated",
"=",
"True",
"filters",
"(",
"update",
"=",
"True",
")",
"else",
":",
"print",
"(",
"'No filters found in'",
",",
"filter_directory",
")"
] | Get a list of the available filters
Parameters
----------
filter_directory: str
The directory containing the filter relative spectral response curves
update: bool
Check the filter directory for new filters and regenerate the cached table pickle
fmt: str
The format for the returned table
Returns
-------
astropy.table.Table or dict
The filter metadata for each band | [
"Get",
"a",
"list",
"of",
"the",
"available",
"filters"
] | f0587c4908baf636d4bdf030fa95029e8f31b975 | https://github.com/hover2pi/svo_filters/blob/f0587c4908baf636d4bdf030fa95029e8f31b975/svo_filters/svo.py#L799-L886 | train |
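A hedged usage sketch for `filters`; it assumes the packaged response curves are present. Note that, as written, a first call that has to build the cache hands its result to a recursive call and may return None, so building the cache explicitly first is the reliable path:

    from svo_filters.svo import filters

    filters(update=True)           # build the cached table once
    table = filters()              # astropy table, indexed by 'Band'
    as_dict = filters(fmt='dict')  # the same metadata keyed by band name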
hover2pi/svo_filters | svo_filters/svo.py | rebin_spec | def rebin_spec(spec, wavnew, oversamp=100, plot=False):
"""
Rebin a spectrum to a new wavelength array while preserving
the total flux
Parameters
----------
spec: array-like
The wavelength and flux to be binned
wavnew: array-like
The new wavelength array
oversamp: int
The oversampling factor for the intermediate interpolation grid
Returns
-------
np.ndarray
The rebinned flux
"""
wave, flux = spec
nlam = len(wave)
x0 = np.arange(nlam, dtype=float)
x0int = np.arange((nlam-1.) * oversamp + 1., dtype=float)/oversamp
w0int = np.interp(x0int, x0, wave)
spec0int = np.interp(w0int, wave, flux)/oversamp
# Set up the bin edges for down-binning
maxdiffw1 = np.diff(wavnew).max()
w1bins = np.concatenate(([wavnew[0]-maxdiffw1],
.5*(wavnew[1::]+wavnew[0: -1]),
[wavnew[-1]+maxdiffw1]))
# Bin down the interpolated spectrum:
w1bins = np.sort(w1bins)
nbins = len(w1bins)-1
specnew = np.zeros(nbins)
inds2 = [[w0int.searchsorted(w1bins[ii], side='left'),
w0int.searchsorted(w1bins[ii+1], side='left')]
for ii in range(nbins)]
for ii in range(nbins):
specnew[ii] = np.sum(spec0int[inds2[ii][0]: inds2[ii][1]])
return specnew | python | def rebin_spec(spec, wavnew, oversamp=100, plot=False):
"""
Rebin a spectrum to a new wavelength array while preserving
the total flux
Parameters
----------
spec: array-like
The wavelength and flux to be binned
wavnew: array-like
The new wavelength array
oversamp: int
The oversampling factor for the intermediate interpolation grid
Returns
-------
np.ndarray
The rebinned flux
"""
wave, flux = spec
nlam = len(wave)
x0 = np.arange(nlam, dtype=float)
x0int = np.arange((nlam-1.) * oversamp + 1., dtype=float)/oversamp
w0int = np.interp(x0int, x0, wave)
spec0int = np.interp(w0int, wave, flux)/oversamp
# Set up the bin edges for down-binning
maxdiffw1 = np.diff(wavnew).max()
w1bins = np.concatenate(([wavnew[0]-maxdiffw1],
.5*(wavnew[1::]+wavnew[0: -1]),
[wavnew[-1]+maxdiffw1]))
# Bin down the interpolated spectrum:
w1bins = np.sort(w1bins)
nbins = len(w1bins)-1
specnew = np.zeros(nbins)
inds2 = [[w0int.searchsorted(w1bins[ii], side='left'),
w0int.searchsorted(w1bins[ii+1], side='left')]
for ii in range(nbins)]
for ii in range(nbins):
specnew[ii] = np.sum(spec0int[inds2[ii][0]: inds2[ii][1]])
return specnew | [
"def",
"rebin_spec",
"(",
"spec",
",",
"wavnew",
",",
"oversamp",
"=",
"100",
",",
"plot",
"=",
"False",
")",
":",
"wave",
",",
"flux",
"=",
"spec",
"nlam",
"=",
"len",
"(",
"wave",
")",
"x0",
"=",
"np",
".",
"arange",
"(",
"nlam",
",",
"dtype",
"=",
"float",
")",
"x0int",
"=",
"np",
".",
"arange",
"(",
"(",
"nlam",
"-",
"1.",
")",
"*",
"oversamp",
"+",
"1.",
",",
"dtype",
"=",
"float",
")",
"/",
"oversamp",
"w0int",
"=",
"np",
".",
"interp",
"(",
"x0int",
",",
"x0",
",",
"wave",
")",
"spec0int",
"=",
"np",
".",
"interp",
"(",
"w0int",
",",
"wave",
",",
"flux",
")",
"/",
"oversamp",
"# Set up the bin edges for down-binning",
"maxdiffw1",
"=",
"np",
".",
"diff",
"(",
"wavnew",
")",
".",
"max",
"(",
")",
"w1bins",
"=",
"np",
".",
"concatenate",
"(",
"(",
"[",
"wavnew",
"[",
"0",
"]",
"-",
"maxdiffw1",
"]",
",",
".5",
"*",
"(",
"wavnew",
"[",
"1",
":",
":",
"]",
"+",
"wavnew",
"[",
"0",
":",
"-",
"1",
"]",
")",
",",
"[",
"wavnew",
"[",
"-",
"1",
"]",
"+",
"maxdiffw1",
"]",
")",
")",
"# Bin down the interpolated spectrum:",
"w1bins",
"=",
"np",
".",
"sort",
"(",
"w1bins",
")",
"nbins",
"=",
"len",
"(",
"w1bins",
")",
"-",
"1",
"specnew",
"=",
"np",
".",
"zeros",
"(",
"nbins",
")",
"inds2",
"=",
"[",
"[",
"w0int",
".",
"searchsorted",
"(",
"w1bins",
"[",
"ii",
"]",
",",
"side",
"=",
"'left'",
")",
",",
"w0int",
".",
"searchsorted",
"(",
"w1bins",
"[",
"ii",
"+",
"1",
"]",
",",
"side",
"=",
"'left'",
")",
"]",
"for",
"ii",
"in",
"range",
"(",
"nbins",
")",
"]",
"for",
"ii",
"in",
"range",
"(",
"nbins",
")",
":",
"specnew",
"[",
"ii",
"]",
"=",
"np",
".",
"sum",
"(",
"spec0int",
"[",
"inds2",
"[",
"ii",
"]",
"[",
"0",
"]",
":",
"inds2",
"[",
"ii",
"]",
"[",
"1",
"]",
"]",
")",
"return",
"specnew"
] | Rebin a spectrum to a new wavelength array while preserving
the total flux
Parameters
----------
spec: array-like
The wavelength and flux to be binned
wavnew: array-like
The new wavelength array
oversamp: int
The oversampling factor for the intermediate interpolation grid
Returns
-------
np.ndarray
The rebinned flux | [
"Rebin",
"a",
"spectrum",
"to",
"a",
"new",
"wavelength",
"array",
"while",
"preserving",
"the",
"total",
"flux"
] | f0587c4908baf636d4bdf030fa95029e8f31b975 | https://github.com/hover2pi/svo_filters/blob/f0587c4908baf636d4bdf030fa95029e8f31b975/svo_filters/svo.py#L889-L931 | train |
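A toy check of `rebin_spec`: a flat spectrum rebinned onto a coarser grid should keep (approximately) the same total flux.

    import numpy as np
    from svo_filters.svo import rebin_spec

    wave = np.linspace(1.0, 2.0, 1000)
    flux = np.ones_like(wave)
    wavnew = np.linspace(1.0, 2.0, 100)

    flux_new = rebin_spec([wave, flux], wavnew)
    print(flux_new.shape)                                     # (100,)
    print(np.isclose(flux_new.sum(), flux.sum(), rtol=0.05))  # flux preserved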