| column | type |
|---|---|
| repo | string (length 7–55) |
| path | string (length 4–127) |
| func_name | string (length 1–88) |
| original_string | string (length 75–19.8k) |
| language | string (1 class) |
| code | string (length 75–19.8k) |
| code_tokens | list |
| docstring | string (length 3–17.3k) |
| docstring_tokens | list |
| sha | string (length 40) |
| url | string (length 87–242) |
| partition | string (1 class) |

bram85/topydo | topydo/lib/TodoListBase.py | TodoListBase.todo | python | train
sha b59fcfca5361869a6b78d4c9808c7c6cd0a18b58 | https://github.com/bram85/topydo/blob/b59fcfca5361869a6b78d4c9808c7c6cd0a18b58/topydo/lib/TodoListBase.py#L64-L138

```python
def todo(self, p_identifier):
    """
    The _todos list has the same order as in the backend store (usually
    a todo.txt file). The user refers to the first task as number 1, so use
    index 0, etc.

    Alternative ways to identify a todo are a hashed version based on the
    todo's text, or a regexp that matches the todo's source. The regexp
    match is a fallback.

    Returns None when the todo couldn't be found.
    """
    result = None

    def todo_by_uid(p_identifier):
        """ Returns the todo that corresponds to the unique ID. """
        result = None

        if config().identifiers() == 'text':
            try:
                result = self._id_todo_map[p_identifier]
            except KeyError:
                pass  # we'll try something else

        return result

    def todo_by_linenumber(p_identifier):
        """
        Attempts to find the todo on the given line number.

        When the identifier is a number but has leading zeros, the result
        will be None.
        """
        result = None

        if config().identifiers() != 'text':
            try:
                if re.match(r'[1-9]\d*', p_identifier):
                    # the expression is a string without leading zeroes,
                    # treat it as an integer
                    raise TypeError
            except TypeError as te:
                try:
                    result = self._todos[int(p_identifier) - 1]
                except (ValueError, IndexError):
                    raise InvalidTodoException from te

        return result

    def todo_by_regexp(p_identifier):
        """
        Returns the todo that is (uniquely) identified by the given regexp.
        If the regexp matches more than one item, no result is returned.
        """
        result = None

        candidates = Filter.GrepFilter(p_identifier).filter(self._todos)

        if len(candidates) == 1:
            result = candidates[0]
        else:
            raise InvalidTodoException

        return result

    result = todo_by_uid(p_identifier)

    if not result:
        result = todo_by_linenumber(p_identifier)

    if not result:
        # convert integer to text so we pass on a valid regex
        result = todo_by_regexp(str(p_identifier))

    return result
```

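A minimal lookup sketch for the three strategies above. It assumes topydo is installed, that `TodoList` (the concrete subclass of `TodoListBase`) accepts a list of todo.txt lines, and that the default `linenumber` identifier scheme is in effect; the todo texts are made up for illustration.

```python
from topydo.lib.TodoList import TodoList

todolist = TodoList([
    "(A) Water the plants @home",
    "Write report +work due:2019-12-31",
])

# Integer identifiers resolve through todo_by_linenumber() (1-based).
print(todolist.todo(1).source())         # (A) Water the plants @home

# Other strings fall back to todo_by_regexp(); the match must be unique.
print(todolist.todo("report").source())  # Write report +work due:2019-12-31
```
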
bram85/topydo | topydo/lib/TodoListBase.py | TodoListBase.add | python | train
sha b59fcfca5361869a6b78d4c9808c7c6cd0a18b58 | https://github.com/bram85/topydo/blob/b59fcfca5361869a6b78d4c9808c7c6cd0a18b58/topydo/lib/TodoListBase.py#L140-L146

```python
def add(self, p_src):
    """
    Given a todo string, parse it and put it to the end of the list.
    """
    todos = self.add_list([p_src])

    return todos[0] if len(todos) else None
```

bram85/topydo | topydo/lib/TodoListBase.py | TodoListBase.replace | python | train
sha b59fcfca5361869a6b78d4c9808c7c6cd0a18b58 | https://github.com/bram85/topydo/blob/b59fcfca5361869a6b78d4c9808c7c6cd0a18b58/topydo/lib/TodoListBase.py#L181-L185

```python
def replace(self, p_todos):
    """ Replaces whole todolist with todo objects supplied as p_todos. """
    self.erase()
    self.add_todos(p_todos)
    self.dirty = True
```

bram85/topydo | topydo/lib/TodoListBase.py | TodoListBase.append | python | train
sha b59fcfca5361869a6b78d4c9808c7c6cd0a18b58 | https://github.com/bram85/topydo/blob/b59fcfca5361869a6b78d4c9808c7c6cd0a18b58/topydo/lib/TodoListBase.py#L191-L201

```python
def append(self, p_todo, p_string):
    """
    Appends a text to the todo, specified by its number.

    The todo will be parsed again, such that tags and projects in the
    appended string are processed.
    """
    if len(p_string) > 0:
        new_text = p_todo.source() + ' ' + p_string
        p_todo.set_source_text(new_text)
        self._update_todo_ids()
        self.dirty = True
```

bram85/topydo | topydo/lib/TodoListBase.py | TodoListBase.projects | python | train
sha b59fcfca5361869a6b78d4c9808c7c6cd0a18b58 | https://github.com/bram85/topydo/blob/b59fcfca5361869a6b78d4c9808c7c6cd0a18b58/topydo/lib/TodoListBase.py#L203-L210

```python
def projects(self):
    """ Returns a set of all projects in this list. """
    result = set()
    for todo in self._todos:
        projects = todo.projects()
        result = result.union(projects)

    return result
```

bram85/topydo | topydo/lib/TodoListBase.py | TodoListBase.contexts | python | train
sha b59fcfca5361869a6b78d4c9808c7c6cd0a18b58 | https://github.com/bram85/topydo/blob/b59fcfca5361869a6b78d4c9808c7c6cd0a18b58/topydo/lib/TodoListBase.py#L212-L219

```python
def contexts(self):
    """ Returns a set of all contexts in this list. """
    result = set()
    for todo in self._todos:
        contexts = todo.contexts()
        result = result.union(contexts)

    return result
```

bram85/topydo | topydo/lib/TodoListBase.py | TodoListBase.linenumber | python | train
sha b59fcfca5361869a6b78d4c9808c7c6cd0a18b58 | https://github.com/bram85/topydo/blob/b59fcfca5361869a6b78d4c9808c7c6cd0a18b58/topydo/lib/TodoListBase.py#L251-L258

```python
def linenumber(self, p_todo):
    """
    Returns the line number of the todo item.
    """
    try:
        return self._todos.index(p_todo) + 1
    except ValueError as ex:
        raise InvalidTodoException from ex
```

bram85/topydo | topydo/lib/TodoListBase.py | TodoListBase.uid | python | train
sha b59fcfca5361869a6b78d4c9808c7c6cd0a18b58 | https://github.com/bram85/topydo/blob/b59fcfca5361869a6b78d4c9808c7c6cd0a18b58/topydo/lib/TodoListBase.py#L260-L267

```python
def uid(self, p_todo):
    """
    Returns the unique text-based ID for a todo item.
    """
    try:
        return self._todo_id_map[p_todo]
    except KeyError as ex:
        raise InvalidTodoException from ex
```

bram85/topydo | topydo/lib/TodoListBase.py | TodoListBase.number | python | train
sha b59fcfca5361869a6b78d4c9808c7c6cd0a18b58 | https://github.com/bram85/topydo/blob/b59fcfca5361869a6b78d4c9808c7c6cd0a18b58/topydo/lib/TodoListBase.py#L269-L277

```python
def number(self, p_todo):
    """
    Returns the line number or text ID of a todo (depends on the
    configuration).
    """
    if config().identifiers() == "text":
        return self.uid(p_todo)
    else:
        return self.linenumber(p_todo)
```

bram85/topydo | topydo/lib/TodoListBase.py | TodoListBase.max_id_length | python | train
sha b59fcfca5361869a6b78d4c9808c7c6cd0a18b58 | https://github.com/bram85/topydo/blob/b59fcfca5361869a6b78d4c9808c7c6cd0a18b58/topydo/lib/TodoListBase.py#L279-L289

```python
def max_id_length(self):
    """
    Returns the maximum length of a todo ID, used for formatting purposes.
    """
    if config().identifiers() == "text":
        return max_id_length(len(self._todos))
    else:
        try:
            return math.ceil(math.log(len(self._todos), 10))
        except ValueError:
            return 0
```

bram85/topydo | topydo/lib/TodoListBase.py | TodoListBase.ids | python | train
sha b59fcfca5361869a6b78d4c9808c7c6cd0a18b58 | https://github.com/bram85/topydo/blob/b59fcfca5361869a6b78d4c9808c7c6cd0a18b58/topydo/lib/TodoListBase.py#L313-L319

```python
def ids(self):
    """ Returns set with all todo IDs. """
    if config().identifiers() == 'text':
        ids = self._id_todo_map.keys()
    else:
        ids = [str(i + 1) for i in range(self.count())]

    return set(ids)
```

bram85/topydo | topydo/ui/columns/Transaction.py | Transaction.prepare | python | train
sha b59fcfca5361869a6b78d4c9808c7c6cd0a18b58 | https://github.com/bram85/topydo/blob/b59fcfca5361869a6b78d4c9808c7c6cd0a18b58/topydo/ui/columns/Transaction.py#L34-L55

```python
def prepare(self, p_args):
    """
    Prepares the list of operations to execute, based on p_args, the todo
    items contained in the _todo_ids attribute and the _subcommand
    attribute.
    """
    if self._todo_ids:
        id_position = p_args.index('{}')

        # Not using MultiCommand abilities would make EditCommand awkward
        if self._multi:
            p_args[id_position:id_position + 1] = self._todo_ids
            self._operations.append(p_args)
        else:
            for todo_id in self._todo_ids:
                operation_args = p_args[:]
                operation_args[id_position] = todo_id
                self._operations.append(operation_args)
    else:
        self._operations.append(p_args)

    self._create_label()
```

bram85/topydo | topydo/ui/columns/Transaction.py | Transaction.execute | python | train
sha b59fcfca5361869a6b78d4c9808c7c6cd0a18b58 | https://github.com/bram85/topydo/blob/b59fcfca5361869a6b78d4c9808c7c6cd0a18b58/topydo/ui/columns/Transaction.py#L66-L81

```python
def execute(self):
    """
    Executes each operation from the _operations attribute.
    """
    last_operation = len(self._operations) - 1

    for i, operation in enumerate(self._operations):
        command = self._cmd(operation)

        if command.execute() is False:
            return False
        else:
            action = command.execute_post_archive_actions
            self._post_archive_actions.append(action)

        if i == last_operation:
            return True
```

bram85/topydo | topydo/lib/RelativeDate.py | _add_months | python | train
sha b59fcfca5361869a6b78d4c9808c7c6cd0a18b58 | https://github.com/bram85/topydo/blob/b59fcfca5361869a6b78d4c9808c7c6cd0a18b58/topydo/lib/RelativeDate.py#L24-L37

```python
def _add_months(p_sourcedate, p_months):
    """
    Adds a number of months to the source date.

    Takes into account shorter months and leap years and such.

    https://stackoverflow.com/questions/4130922/how-to-increment-datetime-month-in-python
    """
    month = p_sourcedate.month - 1 + p_months
    year = p_sourcedate.year + month // 12
    month = month % 12 + 1
    day = min(p_sourcedate.day, calendar.monthrange(year, month)[1])

    return date(year, month, day)
```

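A quick, self-contained check of the day-clamping behaviour described above; the function body is copied verbatim so the snippet runs without topydo installed.

```python
import calendar
from datetime import date

def _add_months(p_sourcedate, p_months):
    # Same arithmetic as above, repeated here so the example is standalone.
    month = p_sourcedate.month - 1 + p_months
    year = p_sourcedate.year + month // 12
    month = month % 12 + 1
    day = min(p_sourcedate.day, calendar.monthrange(year, month)[1])
    return date(year, month, day)

print(_add_months(date(2020, 1, 31), 1))   # 2020-02-29: day clamped to leap-year February
print(_add_months(date(2020, 1, 31), -2))  # 2019-11-30: negative offsets work too
```
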
bram85/topydo | topydo/lib/RelativeDate.py | _add_business_days | python | train
sha b59fcfca5361869a6b78d4c9808c7c6cd0a18b58 | https://github.com/bram85/topydo/blob/b59fcfca5361869a6b78d4c9808c7c6cd0a18b58/topydo/lib/RelativeDate.py#L40-L54

```python
def _add_business_days(p_sourcedate, p_bdays):
    """ Adds a number of business days to the source date. """
    result = p_sourcedate
    delta = 1 if p_bdays > 0 else -1

    while abs(p_bdays) > 0:
        result += timedelta(delta)
        weekday = result.weekday()
        if weekday >= 5:
            continue

        p_bdays = p_bdays - 1 if delta > 0 else p_bdays + 1

    return result
```

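A self-contained check that weekends are skipped in both directions; again the body is copied so the snippet runs on its own.

```python
from datetime import date, timedelta

def _add_business_days(p_sourcedate, p_bdays):
    # Same loop as above, repeated here so the example is standalone.
    result = p_sourcedate
    delta = 1 if p_bdays > 0 else -1
    while abs(p_bdays) > 0:
        result += timedelta(delta)
        if result.weekday() >= 5:   # Saturday/Sunday don't count
            continue
        p_bdays = p_bdays - 1 if delta > 0 else p_bdays + 1
    return result

print(_add_business_days(date(2021, 1, 1), 1))   # 2021-01-04: Friday + 1 business day skips the weekend
print(_add_business_days(date(2021, 1, 4), -1))  # 2021-01-01: and backwards again
```
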
bram85/topydo | topydo/lib/RelativeDate.py | _convert_weekday_pattern | python | train
sha b59fcfca5361869a6b78d4c9808c7c6cd0a18b58 | https://github.com/bram85/topydo/blob/b59fcfca5361869a6b78d4c9808c7c6cd0a18b58/topydo/lib/RelativeDate.py#L81-L103

```python
def _convert_weekday_pattern(p_weekday):
    """
    Converts a weekday name to an absolute date.

    When today's day of the week is entered, it will return next week's date.
    """
    day_value = {
        'mo': 0,
        'tu': 1,
        'we': 2,
        'th': 3,
        'fr': 4,
        'sa': 5,
        'su': 6
    }

    target_day_string = p_weekday[:2].lower()
    target_day = day_value[target_day_string]

    day = date.today().weekday()
    shift = 7 - (day - target_day) % 7

    return date.today() + timedelta(shift)
```

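The shift expression `7 - (day - target_day) % 7` always lands in the range 1..7, which is why entering today's weekday yields next week's date rather than today. A small standalone table makes that concrete, assuming today is a Wednesday (weekday 2):

```python
day = 2  # pretend today is Wednesday
for name, target_day in [('mo', 0), ('tu', 1), ('we', 2), ('th', 3),
                         ('fr', 4), ('sa', 5), ('su', 6)]:
    shift = 7 - (day - target_day) % 7
    print(name, shift)
# mo 5, tu 6, we 7, th 1, fr 2, sa 3, su 4
# Asking for "we" on a Wednesday gives a shift of 7 days, never 0.
```
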
bram85/topydo | topydo/lib/RelativeDate.py | relative_date_to_date | python | train
sha b59fcfca5361869a6b78d4c9808c7c6cd0a18b58 | https://github.com/bram85/topydo/blob/b59fcfca5361869a6b78d4c9808c7c6cd0a18b58/topydo/lib/RelativeDate.py#L106-L152

```python
def relative_date_to_date(p_date, p_offset=None):
    """
    Transforms a relative date into a date object.

    The following formats are understood:

    * [0-9][dwmy]
    * 'yesterday', 'today' or 'tomorrow'
    * days of the week (in full or abbreviated)
    """
    result = None
    p_date = p_date.lower()
    p_offset = p_offset or date.today()

    relative = re.match('(?P<length>-?[0-9]+)(?P<period>[dwmyb])$',
                        p_date, re.I)

    monday = 'mo(n(day)?)?$'
    tuesday = 'tu(e(sday)?)?$'
    wednesday = 'we(d(nesday)?)?$'
    thursday = 'th(u(rsday)?)?$'
    friday = 'fr(i(day)?)?$'
    saturday = 'sa(t(urday)?)?$'
    sunday = 'su(n(day)?)?$'

    weekday = re.match('|'.join(
        [monday, tuesday, wednesday, thursday, friday, saturday, sunday]),
        p_date)

    if relative:
        length = relative.group('length')
        period = relative.group('period')
        result = _convert_pattern(length, period, p_offset)
    elif weekday:
        result = _convert_weekday_pattern(weekday.group(0))
    elif re.match('tod(ay)?$', p_date):
        result = _convert_pattern('0', 'd')
    elif re.match('tom(orrow)?$', p_date):
        result = _convert_pattern('1', 'd')
    elif re.match('yes(terday)?$', p_date):
        result = _convert_pattern('-1', 'd')

    return result
```

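A hedged usage sketch, assuming topydo is installed and that the `_convert_pattern` helper (not shown in this record) applies the parsed length/period offset to the reference date as its name suggests:

```python
from datetime import date, timedelta
from topydo.lib.RelativeDate import relative_date_to_date

today = date.today()

print(relative_date_to_date('today') == today)                       # True
print(relative_date_to_date('yesterday') == today - timedelta(1))    # True
print(relative_date_to_date('2w'))     # expected: today plus two weeks
print(relative_date_to_date('fri'))    # expected: the upcoming Friday

# Unrecognized input yields None rather than raising.
print(relative_date_to_date('someday'))  # None
```
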
bram85/topydo | topydo/lib/prettyprinters/Numbers.py | PrettyPrinterNumbers.filter | python | train
sha b59fcfca5361869a6b78d4c9808c7c6cd0a18b58 | https://github.com/bram85/topydo/blob/b59fcfca5361869a6b78d4c9808c7c6cd0a18b58/topydo/lib/prettyprinters/Numbers.py#L29-L31

```python
def filter(self, p_todo_str, p_todo):
    """ Prepends the number to the todo string. """
    return "|{:>3}| {}".format(self.todolist.number(p_todo), p_todo_str)
```

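The `"|{:>3}| "` prefix right-aligns the todo number in a three-character field. A standalone illustration of the format string itself (the todo texts are made up):

```python
# Mimics the prefix produced by PrettyPrinterNumbers.filter() above.
print("|{:>3}| {}".format(2, "(A) Water the plants @home"))
print("|{:>3}| {}".format(42, "Write report +work"))
# |  2| (A) Water the plants @home
# | 42| Write report +work
```
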
bram85/topydo | topydo/lib/WriteCommand.py | WriteCommand.postprocess_input_todo | python | train
sha b59fcfca5361869a6b78d4c9808c7c6cd0a18b58 | https://github.com/bram85/topydo/blob/b59fcfca5361869a6b78d4c9808c7c6cd0a18b58/topydo/lib/WriteCommand.py#L22-L77

```python
def postprocess_input_todo(self, p_todo):
    """
    Post-processes a parsed todo when adding it to the list.

    * It converts relative dates to absolute ones.
    * Automatically inserts a creation date if not present.
    * Handles more user-friendly dependencies with before:, partof: and
      after: tags
    """
    def convert_date(p_tag):
        value = p_todo.tag_value(p_tag)

        if value:
            dateobj = relative_date_to_date(value)
            if dateobj:
                p_todo.set_tag(p_tag, dateobj.isoformat())

    def add_dependencies(p_tag):
        for value in p_todo.tag_values(p_tag):
            try:
                dep = self.todolist.todo(value)

                if p_tag == 'after':
                    self.todolist.add_dependency(p_todo, dep)
                elif p_tag == 'before' or p_tag == 'partof':
                    self.todolist.add_dependency(dep, p_todo)
                elif p_tag.startswith('parent'):
                    for parent in self.todolist.parents(dep):
                        self.todolist.add_dependency(parent, p_todo)
                elif p_tag.startswith('child'):
                    for child in self.todolist.children(dep):
                        self.todolist.add_dependency(p_todo, child)
            except InvalidTodoException:
                pass

            p_todo.remove_tag(p_tag, value)

    convert_date(config().tag_start())
    convert_date(config().tag_due())

    keywords = [
        'after',
        'before',
        'child-of',
        'childof',
        'children-of',
        'childrenof',
        'parent-of',
        'parentof',
        'parents-of',
        'parentsof',
        'partof',
    ]

    for keyword in keywords:
        add_dependencies(keyword)
```

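For reference, the dependency direction each basic keyword implies, as encoded in `add_dependencies()` above; the `parent*`/`child*` variants additionally fan out over the parents or children of the referenced todo. A small runnable illustration:

```python
# Illustration only: 'dep' is the todo referenced by the tag value,
# 'new' is the todo currently being post-processed.
direction = {
    'after':  'new depends on dep  -> add_dependency(new, dep)',
    'before': 'dep depends on new  -> add_dependency(dep, new)',
    'partof': 'dep depends on new  -> add_dependency(dep, new)',
}
for tag, meaning in direction.items():
    print('{:<8}{}'.format(tag + ':', meaning))
```
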
bram85/topydo | topydo/ui/columns/ColumnLayout.py | columns | python | train
sha b59fcfca5361869a6b78d4c9808c7c6cd0a18b58 | https://github.com/bram85/topydo/blob/b59fcfca5361869a6b78d4c9808c7c6cd0a18b58/topydo/ui/columns/ColumnLayout.py#L23-L73

```python
def columns(p_alt_layout_path=None):
    """
    Returns a list with complete column configuration dicts.
    """
    def _get_column_dict(p_cp, p_column):
        column_dict = dict()

        filterexpr = p_cp.get(p_column, 'filterexpr')

        try:
            title = p_cp.get(p_column, 'title')
        except NoOptionError:
            title = filterexpr

        column_dict['title'] = title or 'Yet another column'
        column_dict['filterexpr'] = filterexpr
        column_dict['sortexpr'] = p_cp.get(p_column, 'sortexpr')
        column_dict['groupexpr'] = p_cp.get(p_column, 'groupexpr')
        column_dict['show_all'] = p_cp.getboolean(p_column, 'show_all')

        return column_dict

    defaults = {
        'filterexpr': '',
        'sortexpr': config().sort_string(),
        'groupexpr': config().group_string(),
        'show_all': '0',
    }

    cp = RawConfigParser(defaults, strict=False)
    files = [
        "topydo_columns.ini",
        "topydo_columns.conf",
        ".topydo_columns",
        home_config_path('.topydo_columns'),
        home_config_path('.config/topydo/columns'),
        "/etc/topydo_columns.conf",
    ]

    if p_alt_layout_path is not None:
        files.insert(0, expanduser(p_alt_layout_path))

    for filename in files:
        if cp.read(filename):
            break

    column_list = []

    for column in cp.sections():
        column_list.append(_get_column_dict(cp, column))

    return column_list
```

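A sketch of a column layout in the INI shape `columns()` reads. Only the keys (`title`, `filterexpr`, `sortexpr`, `groupexpr`, `show_all`) are taken from the code above; the section name and expression values are illustrative. Parsing it with the same `RawConfigParser` setup shows how defaults are merged in:

```python
from configparser import RawConfigParser

layout = """
[Work]
title = Due this week
filterexpr = +work due:1w
sortexpr = desc:prio
groupexpr = project
show_all = 1
"""

cp = RawConfigParser({'filterexpr': '', 'sortexpr': '', 'groupexpr': '',
                      'show_all': '0'}, strict=False)
cp.read_string(layout)

for section in cp.sections():
    # Section values plus the defaults, roughly what _get_column_dict() sees.
    print(section, dict(cp[section]))
```
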
bram85/topydo | topydo/lib/DCommand.py | DCommand._active_todos | python | train
sha b59fcfca5361869a6b78d4c9808c7c6cd0a18b58 | https://github.com/bram85/topydo/blob/b59fcfca5361869a6b78d4c9808c7c6cd0a18b58/topydo/lib/DCommand.py#L84-L95

```python
def _active_todos(self):
    """
    Returns a list of active todos, taking uncompleted subtodos into
    account.

    The stored length of the todolist is taken into account, to prevent new
    todos created by recurrence from popping up as newly activated tasks.
    Since these todos pop up at the end of the list, we cut off the list
    just before that point.
    """
    return [todo for todo in self.todolist.todos()
            if not self._uncompleted_children(todo) and todo.is_active()]
```

indygreg/python-zstandard | zstandard/cffi.py | ZstdDecompressionReader._decompress_into_buffer | python | train
sha 74fa5904c3e7df67a4260344bf919356a181487e | https://github.com/indygreg/python-zstandard/blob/74fa5904c3e7df67a4260344bf919356a181487e/zstandard/cffi.py#L1864-L1890

```python
def _decompress_into_buffer(self, out_buffer):
    """Decompress available input into an output buffer.

    Returns True if data in output buffer should be emitted.
    """
    zresult = lib.ZSTD_decompressStream(self._decompressor._dctx,
                                        out_buffer, self._in_buffer)

    if self._in_buffer.pos == self._in_buffer.size:
        self._in_buffer.src = ffi.NULL
        self._in_buffer.pos = 0
        self._in_buffer.size = 0
        self._source_buffer = None

        if not hasattr(self._source, 'read'):
            self._finished_input = True

    if lib.ZSTD_isError(zresult):
        raise ZstdError('zstd decompress error: %s' %
                        _zstd_error(zresult))

    # Emit data if there is data AND either:
    # a) output buffer is full (read amount is satisfied)
    # b) we're at end of a frame and not in frame spanning mode
    return (out_buffer.pos and
            (out_buffer.pos == out_buffer.size or
             zresult == 0 and not self._read_across_frames))
```

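This internal helper backs the `read()` path of the decompression reader. A hedged round-trip sketch through the public API it supports, assuming the `zstandard` package is installed:

```python
import io
import zstandard as zstd

data = b'x' * 100_000
compressed = zstd.ZstdCompressor().compress(data)

dctx = zstd.ZstdDecompressor()
chunks = []
with dctx.stream_reader(io.BytesIO(compressed)) as reader:
    while True:
        # Each read() call is ultimately served by _decompress_into_buffer().
        chunk = reader.read(65536)
        if not chunk:
            break
        chunks.append(chunk)

print(b''.join(chunks) == data)  # True
```
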
indygreg/python-zstandard | setup_zstd.py | get_c_extension | def get_c_extension(support_legacy=False, system_zstd=False, name='zstd',
warnings_as_errors=False, root=None):
"""Obtain a distutils.extension.Extension for the C extension.
``support_legacy`` controls whether to compile in legacy zstd format support.
``system_zstd`` controls whether to compile against the system zstd library.
For this to work, the system zstd library and headers must match what
python-zstandard is coded against exactly.
``name`` is the module name of the C extension to produce.
``warnings_as_errors`` controls whether compiler warnings are turned into
compiler errors.
``root`` defines a root path that source should be computed as relative
to. This should be the directory with the main ``setup.py`` that is
being invoked. If not defined, paths will be relative to this file.
"""
actual_root = os.path.abspath(os.path.dirname(__file__))
root = root or actual_root
sources = set([os.path.join(actual_root, p) for p in ext_sources])
if not system_zstd:
sources.update([os.path.join(actual_root, p) for p in zstd_sources])
if support_legacy:
sources.update([os.path.join(actual_root, p)
for p in zstd_sources_legacy])
sources = list(sources)
include_dirs = set([os.path.join(actual_root, d) for d in ext_includes])
if not system_zstd:
include_dirs.update([os.path.join(actual_root, d)
for d in zstd_includes])
if support_legacy:
include_dirs.update([os.path.join(actual_root, d)
for d in zstd_includes_legacy])
include_dirs = list(include_dirs)
depends = [os.path.join(actual_root, p) for p in zstd_depends]
compiler = distutils.ccompiler.new_compiler()
# Needed for MSVC.
if hasattr(compiler, 'initialize'):
compiler.initialize()
if compiler.compiler_type == 'unix':
compiler_type = 'unix'
elif compiler.compiler_type == 'msvc':
compiler_type = 'msvc'
elif compiler.compiler_type == 'mingw32':
compiler_type = 'mingw32'
else:
raise Exception('unhandled compiler type: %s' %
compiler.compiler_type)
extra_args = ['-DZSTD_MULTITHREAD']
if not system_zstd:
extra_args.append('-DZSTDLIB_VISIBILITY=')
extra_args.append('-DZDICTLIB_VISIBILITY=')
extra_args.append('-DZSTDERRORLIB_VISIBILITY=')
if compiler_type == 'unix':
extra_args.append('-fvisibility=hidden')
if not system_zstd and support_legacy:
extra_args.append('-DZSTD_LEGACY_SUPPORT=1')
if warnings_as_errors:
if compiler_type in ('unix', 'mingw32'):
extra_args.append('-Werror')
elif compiler_type == 'msvc':
extra_args.append('/WX')
else:
assert False
libraries = ['zstd'] if system_zstd else []
# Python 3.7 doesn't like absolute paths. So normalize to relative.
sources = [os.path.relpath(p, root) for p in sources]
include_dirs = [os.path.relpath(p, root) for p in include_dirs]
depends = [os.path.relpath(p, root) for p in depends]
# TODO compile with optimizations.
return Extension(name, sources,
include_dirs=include_dirs,
depends=depends,
extra_compile_args=extra_args,
libraries=libraries) | python | def get_c_extension(support_legacy=False, system_zstd=False, name='zstd',
warnings_as_errors=False, root=None):
"""Obtain a distutils.extension.Extension for the C extension.
``support_legacy`` controls whether to compile in legacy zstd format support.
``system_zstd`` controls whether to compile against the system zstd library.
For this to work, the system zstd library and headers must match what
python-zstandard is coded against exactly.
``name`` is the module name of the C extension to produce.
``warnings_as_errors`` controls whether compiler warnings are turned into
compiler errors.
``root`` defines a root path that source should be computed as relative
to. This should be the directory with the main ``setup.py`` that is
being invoked. If not defined, paths will be relative to this file.
"""
actual_root = os.path.abspath(os.path.dirname(__file__))
root = root or actual_root
sources = set([os.path.join(actual_root, p) for p in ext_sources])
if not system_zstd:
sources.update([os.path.join(actual_root, p) for p in zstd_sources])
if support_legacy:
sources.update([os.path.join(actual_root, p)
for p in zstd_sources_legacy])
sources = list(sources)
include_dirs = set([os.path.join(actual_root, d) for d in ext_includes])
if not system_zstd:
include_dirs.update([os.path.join(actual_root, d)
for d in zstd_includes])
if support_legacy:
include_dirs.update([os.path.join(actual_root, d)
for d in zstd_includes_legacy])
include_dirs = list(include_dirs)
depends = [os.path.join(actual_root, p) for p in zstd_depends]
compiler = distutils.ccompiler.new_compiler()
# Needed for MSVC.
if hasattr(compiler, 'initialize'):
compiler.initialize()
if compiler.compiler_type == 'unix':
compiler_type = 'unix'
elif compiler.compiler_type == 'msvc':
compiler_type = 'msvc'
elif compiler.compiler_type == 'mingw32':
compiler_type = 'mingw32'
else:
raise Exception('unhandled compiler type: %s' %
compiler.compiler_type)
extra_args = ['-DZSTD_MULTITHREAD']
if not system_zstd:
extra_args.append('-DZSTDLIB_VISIBILITY=')
extra_args.append('-DZDICTLIB_VISIBILITY=')
extra_args.append('-DZSTDERRORLIB_VISIBILITY=')
if compiler_type == 'unix':
extra_args.append('-fvisibility=hidden')
if not system_zstd and support_legacy:
extra_args.append('-DZSTD_LEGACY_SUPPORT=1')
if warnings_as_errors:
if compiler_type in ('unix', 'mingw32'):
extra_args.append('-Werror')
elif compiler_type == 'msvc':
extra_args.append('/WX')
else:
assert False
libraries = ['zstd'] if system_zstd else []
# Python 3.7 doesn't like absolute paths. So normalize to relative.
sources = [os.path.relpath(p, root) for p in sources]
include_dirs = [os.path.relpath(p, root) for p in include_dirs]
depends = [os.path.relpath(p, root) for p in depends]
# TODO compile with optimizations.
return Extension(name, sources,
include_dirs=include_dirs,
depends=depends,
extra_compile_args=extra_args,
libraries=libraries) | [
"def",
"get_c_extension",
"(",
"support_legacy",
"=",
"False",
",",
"system_zstd",
"=",
"False",
",",
"name",
"=",
"'zstd'",
",",
"warnings_as_errors",
"=",
"False",
",",
"root",
"=",
"None",
")",
":",
"actual_root",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
")",
"root",
"=",
"root",
"or",
"actual_root",
"sources",
"=",
"set",
"(",
"[",
"os",
".",
"path",
".",
"join",
"(",
"actual_root",
",",
"p",
")",
"for",
"p",
"in",
"ext_sources",
"]",
")",
"if",
"not",
"system_zstd",
":",
"sources",
".",
"update",
"(",
"[",
"os",
".",
"path",
".",
"join",
"(",
"actual_root",
",",
"p",
")",
"for",
"p",
"in",
"zstd_sources",
"]",
")",
"if",
"support_legacy",
":",
"sources",
".",
"update",
"(",
"[",
"os",
".",
"path",
".",
"join",
"(",
"actual_root",
",",
"p",
")",
"for",
"p",
"in",
"zstd_sources_legacy",
"]",
")",
"sources",
"=",
"list",
"(",
"sources",
")",
"include_dirs",
"=",
"set",
"(",
"[",
"os",
".",
"path",
".",
"join",
"(",
"actual_root",
",",
"d",
")",
"for",
"d",
"in",
"ext_includes",
"]",
")",
"if",
"not",
"system_zstd",
":",
"include_dirs",
".",
"update",
"(",
"[",
"os",
".",
"path",
".",
"join",
"(",
"actual_root",
",",
"d",
")",
"for",
"d",
"in",
"zstd_includes",
"]",
")",
"if",
"support_legacy",
":",
"include_dirs",
".",
"update",
"(",
"[",
"os",
".",
"path",
".",
"join",
"(",
"actual_root",
",",
"d",
")",
"for",
"d",
"in",
"zstd_includes_legacy",
"]",
")",
"include_dirs",
"=",
"list",
"(",
"include_dirs",
")",
"depends",
"=",
"[",
"os",
".",
"path",
".",
"join",
"(",
"actual_root",
",",
"p",
")",
"for",
"p",
"in",
"zstd_depends",
"]",
"compiler",
"=",
"distutils",
".",
"ccompiler",
".",
"new_compiler",
"(",
")",
"# Needed for MSVC.",
"if",
"hasattr",
"(",
"compiler",
",",
"'initialize'",
")",
":",
"compiler",
".",
"initialize",
"(",
")",
"if",
"compiler",
".",
"compiler_type",
"==",
"'unix'",
":",
"compiler_type",
"=",
"'unix'",
"elif",
"compiler",
".",
"compiler_type",
"==",
"'msvc'",
":",
"compiler_type",
"=",
"'msvc'",
"elif",
"compiler",
".",
"compiler_type",
"==",
"'mingw32'",
":",
"compiler_type",
"=",
"'mingw32'",
"else",
":",
"raise",
"Exception",
"(",
"'unhandled compiler type: %s'",
"%",
"compiler",
".",
"compiler_type",
")",
"extra_args",
"=",
"[",
"'-DZSTD_MULTITHREAD'",
"]",
"if",
"not",
"system_zstd",
":",
"extra_args",
".",
"append",
"(",
"'-DZSTDLIB_VISIBILITY='",
")",
"extra_args",
".",
"append",
"(",
"'-DZDICTLIB_VISIBILITY='",
")",
"extra_args",
".",
"append",
"(",
"'-DZSTDERRORLIB_VISIBILITY='",
")",
"if",
"compiler_type",
"==",
"'unix'",
":",
"extra_args",
".",
"append",
"(",
"'-fvisibility=hidden'",
")",
"if",
"not",
"system_zstd",
"and",
"support_legacy",
":",
"extra_args",
".",
"append",
"(",
"'-DZSTD_LEGACY_SUPPORT=1'",
")",
"if",
"warnings_as_errors",
":",
"if",
"compiler_type",
"in",
"(",
"'unix'",
",",
"'mingw32'",
")",
":",
"extra_args",
".",
"append",
"(",
"'-Werror'",
")",
"elif",
"compiler_type",
"==",
"'msvc'",
":",
"extra_args",
".",
"append",
"(",
"'/WX'",
")",
"else",
":",
"assert",
"False",
"libraries",
"=",
"[",
"'zstd'",
"]",
"if",
"system_zstd",
"else",
"[",
"]",
"# Python 3.7 doesn't like absolute paths. So normalize to relative.",
"sources",
"=",
"[",
"os",
".",
"path",
".",
"relpath",
"(",
"p",
",",
"root",
")",
"for",
"p",
"in",
"sources",
"]",
"include_dirs",
"=",
"[",
"os",
".",
"path",
".",
"relpath",
"(",
"p",
",",
"root",
")",
"for",
"p",
"in",
"include_dirs",
"]",
"depends",
"=",
"[",
"os",
".",
"path",
".",
"relpath",
"(",
"p",
",",
"root",
")",
"for",
"p",
"in",
"depends",
"]",
"# TODO compile with optimizations.",
"return",
"Extension",
"(",
"name",
",",
"sources",
",",
"include_dirs",
"=",
"include_dirs",
",",
"depends",
"=",
"depends",
",",
"extra_compile_args",
"=",
"extra_args",
",",
"libraries",
"=",
"libraries",
")"
] | Obtain a distutils.extension.Extension for the C extension.
``support_legacy`` controls whether to compile in legacy zstd format support.
``system_zstd`` controls whether to compile against the system zstd library.
For this to work, the system zstd library and headers must match what
python-zstandard is coded against exactly.
``name`` is the module name of the C extension to produce.
``warnings_as_errors`` controls whether compiler warnings are turned into
compiler errors.
``root`` defines a root path that source should be computed as relative
to. This should be the directory with the main ``setup.py`` that is
being invoked. If not defined, paths will be relative to this file. | [
"Obtain",
"a",
"distutils",
".",
"extension",
".",
"Extension",
"for",
"the",
"C",
"extension",
"."
] | 74fa5904c3e7df67a4260344bf919356a181487e | https://github.com/indygreg/python-zstandard/blob/74fa5904c3e7df67a4260344bf919356a181487e/setup_zstd.py#L100-L190 | train |
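The entry above documents get_c_extension, which builds a distutils Extension for the zstd C binding. Below is a minimal sketch of how it might be driven from a setup.py sitting next to setup_zstd.py; the module name setup_zstd and the package metadata are assumptions, and only the keyword arguments come from the documented signature.

import os
from setuptools import setup
import setup_zstd  # assumed importable because setup_zstd.py sits beside this setup.py

ext = setup_zstd.get_c_extension(
    support_legacy=True,       # compile in legacy zstd format support
    system_zstd=False,         # build the bundled zstd sources instead of linking the system library
    name='zstd',
    warnings_as_errors=False,
    root=os.path.dirname(os.path.abspath(__file__)),  # directory holding this setup.py
)

setup(name='example-zstd-binding', version='0.1', ext_modules=[ext])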
sffjunkie/astral | src/astral.py | Location.timezone | def timezone(self):
"""The name of the time zone for the location.
A list of time zone names can be obtained from pytz. For example.
>>> from pytz import all_timezones
>>> for timezone in all_timezones:
... print(timezone)
"""
if not self._timezone_group and not self._timezone_location:
return None
if self._timezone_location != "":
return "%s/%s" % (self._timezone_group, self._timezone_location)
else:
return self._timezone_group | python | def timezone(self):
"""The name of the time zone for the location.
A list of time zone names can be obtained from pytz. For example.
>>> from pytz import all_timezones
>>> for timezone in all_timezones:
... print(timezone)
"""
if not self._timezone_group and not self._timezone_location:
return None
if self._timezone_location != "":
return "%s/%s" % (self._timezone_group, self._timezone_location)
else:
return self._timezone_group | [
"def",
"timezone",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_timezone_group",
"and",
"not",
"self",
".",
"_timezone_location",
":",
"return",
"None",
"if",
"self",
".",
"_timezone_location",
"!=",
"\"\"",
":",
"return",
"\"%s/%s\"",
"%",
"(",
"self",
".",
"_timezone_group",
",",
"self",
".",
"_timezone_location",
")",
"else",
":",
"return",
"self",
".",
"_timezone_group"
] | The name of the time zone for the location.
A list of time zone names can be obtained from pytz. For example.
>>> from pytz import all_timezones
>>> for timezone in all_timezones:
... print(timezone) | [
"The",
"name",
"of",
"the",
"time",
"zone",
"for",
"the",
"location",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L681-L697 | train |
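A small sketch of reading and changing the timezone property documented above. Constructing Location with no arguments and assigning to the property are assumptions based on the surrounding class; the printed value is simply the group/location pair joined with a slash.

from astral import Location

loc = Location()                 # assumed to default to a built-in location
loc.timezone = 'Europe/Paris'    # setter assumed to split this into group and location
print(loc.timezone)              # -> 'Europe/Paris'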
sffjunkie/astral | src/astral.py | Location.tz | def tz(self):
"""Time zone information."""
if self.timezone is None:
return None
try:
tz = pytz.timezone(self.timezone)
return tz
except pytz.UnknownTimeZoneError:
raise AstralError("Unknown timezone '%s'" % self.timezone) | python | def tz(self):
"""Time zone information."""
if self.timezone is None:
return None
try:
tz = pytz.timezone(self.timezone)
return tz
except pytz.UnknownTimeZoneError:
raise AstralError("Unknown timezone '%s'" % self.timezone) | [
"def",
"tz",
"(",
"self",
")",
":",
"if",
"self",
".",
"timezone",
"is",
"None",
":",
"return",
"None",
"try",
":",
"tz",
"=",
"pytz",
".",
"timezone",
"(",
"self",
".",
"timezone",
")",
"return",
"tz",
"except",
"pytz",
".",
"UnknownTimeZoneError",
":",
"raise",
"AstralError",
"(",
"\"Unknown timezone '%s'\"",
"%",
"self",
".",
"timezone",
")"
] | Time zone information. | [
"Time",
"zone",
"information",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L716-L726 | train |
sffjunkie/astral | src/astral.py | Location.sun | def sun(self, date=None, local=True, use_elevation=True):
"""Returns dawn, sunrise, noon, sunset and dusk as a dictionary.
:param date: The date for which to calculate the times.
If no date is specified then the current date will be used.
:type date: :class:`~datetime.date`
:param local: True = Time to be returned in location's time zone;
False = Time to be returned in UTC.
If not specified then the time will be returned in local time
:type local: bool
:param use_elevation: True = Return times that allow for the location's elevation;
False = Return times that don't use elevation.
If not specified then times will take elevation into account.
:type use_elevation: bool
:returns: Dictionary with keys ``dawn``, ``sunrise``, ``noon``,
``sunset`` and ``dusk`` whose values are the results of the
corresponding methods.
:rtype: dict
"""
if local and self.timezone is None:
raise ValueError("Local time requested but Location has no timezone set.")
if self.astral is None:
self.astral = Astral()
if date is None:
date = datetime.date.today()
elevation = self.elevation if use_elevation else 0
sun = self.astral.sun_utc(date, self.latitude, self.longitude, observer_elevation=elevation)
if local:
for key, dt in sun.items():
sun[key] = dt.astimezone(self.tz)
return sun | python | def sun(self, date=None, local=True, use_elevation=True):
"""Returns dawn, sunrise, noon, sunset and dusk as a dictionary.
:param date: The date for which to calculate the times.
If no date is specified then the current date will be used.
:type date: :class:`~datetime.date`
:param local: True = Time to be returned in location's time zone;
False = Time to be returned in UTC.
If not specified then the time will be returned in local time
:type local: bool
:param use_elevation: True = Return times that allow for the location's elevation;
False = Return times that don't use elevation.
If not specified then times will take elevation into account.
:type use_elevation: bool
:returns: Dictionary with keys ``dawn``, ``sunrise``, ``noon``,
``sunset`` and ``dusk`` whose values are the results of the
corresponding methods.
:rtype: dict
"""
if local and self.timezone is None:
raise ValueError("Local time requested but Location has no timezone set.")
if self.astral is None:
self.astral = Astral()
if date is None:
date = datetime.date.today()
elevation = self.elevation if use_elevation else 0
sun = self.astral.sun_utc(date, self.latitude, self.longitude, observer_elevation=elevation)
if local:
for key, dt in sun.items():
sun[key] = dt.astimezone(self.tz)
return sun | [
"def",
"sun",
"(",
"self",
",",
"date",
"=",
"None",
",",
"local",
"=",
"True",
",",
"use_elevation",
"=",
"True",
")",
":",
"if",
"local",
"and",
"self",
".",
"timezone",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"Local time requested but Location has no timezone set.\"",
")",
"if",
"self",
".",
"astral",
"is",
"None",
":",
"self",
".",
"astral",
"=",
"Astral",
"(",
")",
"if",
"date",
"is",
"None",
":",
"date",
"=",
"datetime",
".",
"date",
".",
"today",
"(",
")",
"elevation",
"=",
"self",
".",
"elevation",
"if",
"use_elevation",
"else",
"0",
"sun",
"=",
"self",
".",
"astral",
".",
"sun_utc",
"(",
"date",
",",
"self",
".",
"latitude",
",",
"self",
".",
"longitude",
",",
"observer_elevation",
"=",
"elevation",
")",
"if",
"local",
":",
"for",
"key",
",",
"dt",
"in",
"sun",
".",
"items",
"(",
")",
":",
"sun",
"[",
"key",
"]",
"=",
"dt",
".",
"astimezone",
"(",
"self",
".",
"tz",
")",
"return",
"sun"
] | Returns dawn, sunrise, noon, sunset and dusk as a dictionary.
:param date: The date for which to calculate the times.
If no date is specified then the current date will be used.
:type date: :class:`~datetime.date`
:param local: True = Time to be returned in location's time zone;
False = Time to be returned in UTC.
If not specified then the time will be returned in local time
:type local: bool
:param use_elevation: True = Return times that allow for the location's elevation;
False = Return times that don't use elevation.
If not specified then times will take elevation into account.
:type use_elevation: bool
:returns: Dictionary with keys ``dawn``, ``sunrise``, ``noon``,
``sunset`` and ``dusk`` whose values are the results of the
corresponding methods.
:rtype: dict | [
"Returns",
"dawn",
"sunrise",
"noon",
"sunset",
"and",
"dusk",
"as",
"a",
"dictionary",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L756-L795 | train |
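A hedged usage sketch for Location.sun(). The tuple order passed to the Location constructor (name, region, latitude, longitude, timezone name, elevation) is an assumption taken from the geocoder entries later in the module; the dictionary keys come straight from the docstring and the coordinates are only illustrative.

from datetime import date
from astral import Location

loc = Location(("London", "England", 51.5074, -0.1278, "Europe/London", 24))
s = loc.sun(date=date(2019, 6, 21), local=True)      # times in Europe/London
for key in ("dawn", "sunrise", "noon", "sunset", "dusk"):
    print(key, s[key].isoformat())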
sffjunkie/astral | src/astral.py | Location.sunrise | def sunrise(self, date=None, local=True, use_elevation=True):
"""Return sunrise time.
Calculates the time in the morning when the sun is 0.833 degrees
below the horizon. This is to account for refraction.
:param date: The date for which to calculate the sunrise time.
If no date is specified then the current date will be used.
:type date: :class:`~datetime.date`
:param local: True = Time to be returned in location's time zone;
False = Time to be returned in UTC.
If not specified then the time will be returned in local time
:type local: bool
:param use_elevation: True = Return times that allow for the location's elevation;
False = Return times that don't use elevation.
If not specified then times will take elevation into account.
:type use_elevation: bool
:returns: The date and time at which sunrise occurs.
:rtype: :class:`~datetime.datetime`
"""
if local and self.timezone is None:
raise ValueError("Local time requested but Location has no timezone set.")
if self.astral is None:
self.astral = Astral()
if date is None:
date = datetime.date.today()
elevation = self.elevation if use_elevation else 0
sunrise = self.astral.sunrise_utc(date, self.latitude, self.longitude, elevation)
if local:
return sunrise.astimezone(self.tz)
else:
return sunrise | python | def sunrise(self, date=None, local=True, use_elevation=True):
"""Return sunrise time.
Calculates the time in the morning when the sun is 0.833 degrees
below the horizon. This is to account for refraction.
:param date: The date for which to calculate the sunrise time.
If no date is specified then the current date will be used.
:type date: :class:`~datetime.date`
:param local: True = Time to be returned in location's time zone;
False = Time to be returned in UTC.
If not specified then the time will be returned in local time
:type local: bool
:param use_elevation: True = Return times that allow for the location's elevation;
False = Return times that don't use elevation.
If not specified then times will take elevation into account.
:type use_elevation: bool
:returns: The date and time at which sunrise occurs.
:rtype: :class:`~datetime.datetime`
"""
if local and self.timezone is None:
raise ValueError("Local time requested but Location has no timezone set.")
if self.astral is None:
self.astral = Astral()
if date is None:
date = datetime.date.today()
elevation = self.elevation if use_elevation else 0
sunrise = self.astral.sunrise_utc(date, self.latitude, self.longitude, elevation)
if local:
return sunrise.astimezone(self.tz)
else:
return sunrise | [
"def",
"sunrise",
"(",
"self",
",",
"date",
"=",
"None",
",",
"local",
"=",
"True",
",",
"use_elevation",
"=",
"True",
")",
":",
"if",
"local",
"and",
"self",
".",
"timezone",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"Local time requested but Location has no timezone set.\"",
")",
"if",
"self",
".",
"astral",
"is",
"None",
":",
"self",
".",
"astral",
"=",
"Astral",
"(",
")",
"if",
"date",
"is",
"None",
":",
"date",
"=",
"datetime",
".",
"date",
".",
"today",
"(",
")",
"elevation",
"=",
"self",
".",
"elevation",
"if",
"use_elevation",
"else",
"0",
"sunrise",
"=",
"self",
".",
"astral",
".",
"sunrise_utc",
"(",
"date",
",",
"self",
".",
"latitude",
",",
"self",
".",
"longitude",
",",
"elevation",
")",
"if",
"local",
":",
"return",
"sunrise",
".",
"astimezone",
"(",
"self",
".",
"tz",
")",
"else",
":",
"return",
"sunrise"
] | Return sunrise time.
Calculates the time in the morning when the sun is 0.833 degrees
below the horizon. This is to account for refraction.
:param date: The date for which to calculate the sunrise time.
If no date is specified then the current date will be used.
:type date: :class:`~datetime.date`
:param local: True = Time to be returned in location's time zone;
False = Time to be returned in UTC.
If not specified then the time will be returned in local time
:type local: bool
:param use_elevation: True = Return times that allow for the location's elevation;
False = Return times that don't use elevation.
If not specified then times will take elevation into account.
:type use_elevation: bool
:returns: The date and time at which sunrise occurs.
:rtype: :class:`~datetime.datetime` | [
"Return",
"sunrise",
"time",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L837-L876 | train |
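Sketch of Location.sunrise() showing the local/UTC and elevation switches from the docstring. The constructor tuple order is again an assumption and the coordinates are only illustrative.

from datetime import date
from astral import Location

loc = Location(("Quito", "Ecuador", -0.18, -78.47, "America/Guayaquil", 2850))
print(loc.sunrise(date=date(2019, 3, 1)))                                    # local time, elevation-corrected
print(loc.sunrise(date=date(2019, 3, 1), local=False, use_elevation=False))  # UTC, sea-level calculation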
sffjunkie/astral | src/astral.py | Location.time_at_elevation | def time_at_elevation(self, elevation, direction=SUN_RISING, date=None, local=True):
"""Calculate the time when the sun is at the specified elevation.
Note:
This method uses positive elevations for those above the horizon.
Elevations greater than 90 degrees are converted to a setting sun
i.e. an elevation of 110 will calculate a setting sun at 70 degrees.
:param elevation: Elevation in degrees above the horizon to calculate for.
:type elevation: float
:param direction: Determines whether the time is for the sun rising or setting.
Use ``astral.SUN_RISING`` or ``astral.SUN_SETTING``. Default is rising.
:type direction: int
:param date: The date for which to calculate the elevation time.
If no date is specified then the current date will be used.
:type date: :class:`~datetime.date`
:param local: True = Time to be returned in location's time zone;
False = Time to be returned in UTC.
If not specified then the time will be returned in local time
:type local: bool
:returns: The date and time at which the sun is at the specified elevation.
:rtype: :class:`~datetime.datetime`
"""
if local and self.timezone is None:
raise ValueError("Local time requested but Location has no timezone set.")
if self.astral is None:
self.astral = Astral()
if date is None:
date = datetime.date.today()
if elevation > 90.0:
elevation = 180.0 - elevation
direction = SUN_SETTING
time_ = self.astral.time_at_elevation_utc(
elevation, direction, date, self.latitude, self.longitude
)
if local:
return time_.astimezone(self.tz)
else:
return time_ | python | def time_at_elevation(self, elevation, direction=SUN_RISING, date=None, local=True):
"""Calculate the time when the sun is at the specified elevation.
Note:
This method uses positive elevations for those above the horizon.
Elevations greater than 90 degrees are converted to a setting sun
i.e. an elevation of 110 will calculate a setting sun at 70 degrees.
:param elevation: Elevation in degrees above the horizon to calculate for.
:type elevation: float
:param direction: Determines whether the time is for the sun rising or setting.
Use ``astral.SUN_RISING`` or ``astral.SUN_SETTING``. Default is rising.
:type direction: int
:param date: The date for which to calculate the elevation time.
If no date is specified then the current date will be used.
:type date: :class:`~datetime.date`
:param local: True = Time to be returned in location's time zone;
False = Time to be returned in UTC.
If not specified then the time will be returned in local time
:type local: bool
:returns: The date and time at which the sun is at the specified elevation.
:rtype: :class:`~datetime.datetime`
"""
if local and self.timezone is None:
raise ValueError("Local time requested but Location has no timezone set.")
if self.astral is None:
self.astral = Astral()
if date is None:
date = datetime.date.today()
if elevation > 90.0:
elevation = 180.0 - elevation
direction = SUN_SETTING
time_ = self.astral.time_at_elevation_utc(
elevation, direction, date, self.latitude, self.longitude
)
if local:
return time_.astimezone(self.tz)
else:
return time_ | [
"def",
"time_at_elevation",
"(",
"self",
",",
"elevation",
",",
"direction",
"=",
"SUN_RISING",
",",
"date",
"=",
"None",
",",
"local",
"=",
"True",
")",
":",
"if",
"local",
"and",
"self",
".",
"timezone",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"Local time requested but Location has no timezone set.\"",
")",
"if",
"self",
".",
"astral",
"is",
"None",
":",
"self",
".",
"astral",
"=",
"Astral",
"(",
")",
"if",
"date",
"is",
"None",
":",
"date",
"=",
"datetime",
".",
"date",
".",
"today",
"(",
")",
"if",
"elevation",
">",
"90.0",
":",
"elevation",
"=",
"180.0",
"-",
"elevation",
"direction",
"=",
"SUN_SETTING",
"time_",
"=",
"self",
".",
"astral",
".",
"time_at_elevation_utc",
"(",
"elevation",
",",
"direction",
",",
"date",
",",
"self",
".",
"latitude",
",",
"self",
".",
"longitude",
")",
"if",
"local",
":",
"return",
"time_",
".",
"astimezone",
"(",
"self",
".",
"tz",
")",
"else",
":",
"return",
"time_"
] | Calculate the time when the sun is at the specified elevation.
Note:
This method uses positive elevations for those above the horizon.
Elevations greater than 90 degrees are converted to a setting sun
i.e. an elevation of 110 will calculate a setting sun at 70 degrees.
:param elevation: Elevation in degrees above the horizon to calculate for.
:type elevation: float
:param direction: Determines whether the time is for the sun rising or setting.
Use ``astral.SUN_RISING`` or ``astral.SUN_SETTING``. Default is rising.
:type direction: int
:param date: The date for which to calculate the elevation time.
If no date is specified then the current date will be used.
:type date: :class:`~datetime.date`
:param local: True = Time to be returned in location's time zone;
False = Time to be returned in UTC.
If not specified then the time will be returned in local time
:type local: bool
:returns: The date and time at which the sun is at the specified elevation.
:rtype: :class:`~datetime.datetime` | [
"Calculate",
"the",
"time",
"when",
"the",
"sun",
"is",
"at",
"the",
"specified",
"elevation",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L1145-L1194 | train |
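Sketch of time_at_elevation(), which here takes a solar elevation in degrees rather than metres. SUN_SETTING is assumed to be importable from the astral module, since it is used above as a module-level constant.

from datetime import date
from astral import Location, SUN_SETTING

loc = Location(("Madrid", "Spain", 40.42, -3.70, "Europe/Madrid", 667))
# Morning time at which the sun first reaches 10 degrees above the horizon.
print(loc.time_at_elevation(10, date=date(2019, 8, 1)))
# Same elevation while the sun is setting; an elevation of 170 would be folded to this as well.
print(loc.time_at_elevation(10, direction=SUN_SETTING, date=date(2019, 8, 1)))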
sffjunkie/astral | src/astral.py | Location.blue_hour | def blue_hour(self, direction=SUN_RISING, date=None, local=True, use_elevation=True):
"""Returns the start and end times of the Blue Hour when the sun is traversing
in the specified direction.
This method uses the definition from PhotoPills i.e. the
blue hour is when the sun is between 6 and 4 degrees below the horizon.
:param direction: Determines whether the time is for the sun rising or setting.
Use ``astral.SUN_RISING`` or ``astral.SUN_SETTING``. Default is rising.
:type direction: int
:param date: The date for which to calculate the times.
If no date is specified then the current date will be used.
:type date: :class:`~datetime.date`
:param local: True = Times to be returned in location's time zone;
False = Times to be returned in UTC.
If not specified then the time will be returned in local time
:type local: bool
:param use_elevation: True = Return times that allow for the location's elevation;
False = Return times that don't use elevation.
If not specified then times will take elevation into account.
:type use_elevation: bool
:return: A tuple of the date and time at which the Blue Hour starts and ends.
:rtype: (:class:`~datetime.datetime`, :class:`~datetime.datetime`)
"""
if local and self.timezone is None:
raise ValueError("Local time requested but Location has no timezone set.")
if self.astral is None:
self.astral = Astral()
if date is None:
date = datetime.date.today()
elevation = self.elevation if use_elevation else 0
start, end = self.astral.blue_hour_utc(
direction, date, self.latitude, self.longitude, elevation
)
if local:
start = start.astimezone(self.tz)
end = end.astimezone(self.tz)
return start, end | python | def blue_hour(self, direction=SUN_RISING, date=None, local=True, use_elevation=True):
"""Returns the start and end times of the Blue Hour when the sun is traversing
in the specified direction.
This method uses the definition from PhotoPills i.e. the
blue hour is when the sun is between 6 and 4 degrees below the horizon.
:param direction: Determines whether the time is for the sun rising or setting.
Use ``astral.SUN_RISING`` or ``astral.SUN_SETTING``. Default is rising.
:type direction: int
:param date: The date for which to calculate the times.
If no date is specified then the current date will be used.
:type date: :class:`~datetime.date`
:param local: True = Times to be returned in location's time zone;
False = Times to be returned in UTC.
If not specified then the time will be returned in local time
:type local: bool
:param use_elevation: True = Return times that allow for the location's elevation;
False = Return times that don't use elevation.
If not specified then times will take elevation into account.
:type use_elevation: bool
:return: A tuple of the date and time at which the Blue Hour starts and ends.
:rtype: (:class:`~datetime.datetime`, :class:`~datetime.datetime`)
"""
if local and self.timezone is None:
raise ValueError("Local time requested but Location has no timezone set.")
if self.astral is None:
self.astral = Astral()
if date is None:
date = datetime.date.today()
elevation = self.elevation if use_elevation else 0
start, end = self.astral.blue_hour_utc(
direction, date, self.latitude, self.longitude, elevation
)
if local:
start = start.astimezone(self.tz)
end = end.astimezone(self.tz)
return start, end | [
"def",
"blue_hour",
"(",
"self",
",",
"direction",
"=",
"SUN_RISING",
",",
"date",
"=",
"None",
",",
"local",
"=",
"True",
",",
"use_elevation",
"=",
"True",
")",
":",
"if",
"local",
"and",
"self",
".",
"timezone",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"Local time requested but Location has no timezone set.\"",
")",
"if",
"self",
".",
"astral",
"is",
"None",
":",
"self",
".",
"astral",
"=",
"Astral",
"(",
")",
"if",
"date",
"is",
"None",
":",
"date",
"=",
"datetime",
".",
"date",
".",
"today",
"(",
")",
"elevation",
"=",
"self",
".",
"elevation",
"if",
"use_elevation",
"else",
"0",
"start",
",",
"end",
"=",
"self",
".",
"astral",
".",
"blue_hour_utc",
"(",
"direction",
",",
"date",
",",
"self",
".",
"latitude",
",",
"self",
".",
"longitude",
",",
"elevation",
")",
"if",
"local",
":",
"start",
"=",
"start",
".",
"astimezone",
"(",
"self",
".",
"tz",
")",
"end",
"=",
"end",
".",
"astimezone",
"(",
"self",
".",
"tz",
")",
"return",
"start",
",",
"end"
] | Returns the start and end times of the Blue Hour when the sun is traversing
in the specified direction.
This method uses the definition from PhotoPills i.e. the
blue hour is when the sun is between 6 and 4 degrees below the horizon.
:param direction: Determines whether the time is for the sun rising or setting.
Use ``astral.SUN_RISING`` or ``astral.SUN_SETTING``. Default is rising.
:type direction: int
:param date: The date for which to calculate the times.
If no date is specified then the current date will be used.
:type date: :class:`~datetime.date`
:param local: True = Times to be returned in location's time zone;
False = Times to be returned in UTC.
If not specified then the time will be returned in local time
:type local: bool
:param use_elevation: True = Return times that allow for the location's elevation;
False = Return times that don't use elevation.
If not specified then times will take elevation into account.
:type use_elevation: bool
:return: A tuple of the date and time at which the Blue Hour starts and ends.
:rtype: (:class:`~datetime.datetime`, :class:`~datetime.datetime`) | [
"Returns",
"the",
"start",
"and",
"end",
"times",
"of",
"the",
"Blue",
"Hour",
"when",
"the",
"sun",
"is",
"traversing",
"in",
"the",
"specified",
"direction",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L1285-L1332 | train |
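Sketch of blue_hour(), which returns a (start, end) pair per the docstring; SUN_RISING is the module-level constant used as the default direction, and the Location tuple is illustrative.

from datetime import date
from astral import Location, SUN_RISING

loc = Location(("Reykjavik", "Iceland", 64.15, -21.94, "Atlantic/Reykjavik", 0))
start, end = loc.blue_hour(direction=SUN_RISING, date=date(2019, 9, 15))
print("morning blue hour:", start.isoformat(), "->", end.isoformat())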
sffjunkie/astral | src/astral.py | Location.moon_phase | def moon_phase(self, date=None, rtype=int):
"""Calculates the moon phase for a specific date.
:param date: The date to calculate the phase for.
If omitted, the current date is used.
:type date: :class:`datetime.date`
:returns:
A number designating the phase
| 0 = New moon
| 7 = First quarter
| 14 = Full moon
| 21 = Last quarter
"""
if self.astral is None:
self.astral = Astral()
if date is None:
date = datetime.date.today()
return self.astral.moon_phase(date, rtype) | python | def moon_phase(self, date=None, rtype=int):
"""Calculates the moon phase for a specific date.
:param date: The date to calculate the phase for.
If omitted, the current date is used.
:type date: :class:`datetime.date`
:returns:
A number designating the phase
| 0 = New moon
| 7 = First quarter
| 14 = Full moon
| 21 = Last quarter
"""
if self.astral is None:
self.astral = Astral()
if date is None:
date = datetime.date.today()
return self.astral.moon_phase(date, rtype) | [
"def",
"moon_phase",
"(",
"self",
",",
"date",
"=",
"None",
",",
"rtype",
"=",
"int",
")",
":",
"if",
"self",
".",
"astral",
"is",
"None",
":",
"self",
".",
"astral",
"=",
"Astral",
"(",
")",
"if",
"date",
"is",
"None",
":",
"date",
"=",
"datetime",
".",
"date",
".",
"today",
"(",
")",
"return",
"self",
".",
"astral",
".",
"moon_phase",
"(",
"date",
",",
"rtype",
")"
] | Calculates the moon phase for a specific date.
:param date: The date to calculate the phase for.
If omitted, the current date is used.
:type date: :class:`datetime.date`
:returns:
A number designating the phase
| 0 = New moon
| 7 = First quarter
| 14 = Full moon
| 21 = Last quarter | [
"Calculates",
"the",
"moon",
"phase",
"for",
"a",
"specific",
"date",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L1390-L1412 | train |
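Sketch of moon_phase(); the integer scale is from the docstring, and passing rtype=float for a fractional phase value is an assumption based on the rtype parameter.

from datetime import date
from astral import Location

loc = Location(("Sydney", "Australia", -33.87, 151.21, "Australia/Sydney", 19))
# 0 = new moon, 7 = first quarter, 14 = full moon, 21 = last quarter
print(loc.moon_phase(date=date(2019, 1, 21)))
print(loc.moon_phase(date=date(2019, 1, 21), rtype=float))  # fractional value (assumption)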
sffjunkie/astral | src/astral.py | AstralGeocoder.add_locations | def add_locations(self, locations):
"""Add extra locations to AstralGeocoder.
Extra locations can be
* A single string containing one or more locations separated by a newline.
* A list of strings
* A list of lists/tuples that are passed to a :class:`Location` constructor
"""
if isinstance(locations, (str, ustr)):
self._add_from_str(locations)
elif isinstance(locations, (list, tuple)):
self._add_from_list(locations) | python | def add_locations(self, locations):
"""Add extra locations to AstralGeocoder.
Extra locations can be
* A single string containing one or more locations separated by a newline.
* A list of strings
* A list of lists/tuples that are passed to a :class:`Location` constructor
"""
if isinstance(locations, (str, ustr)):
self._add_from_str(locations)
elif isinstance(locations, (list, tuple)):
self._add_from_list(locations) | [
"def",
"add_locations",
"(",
"self",
",",
"locations",
")",
":",
"if",
"isinstance",
"(",
"locations",
",",
"(",
"str",
",",
"ustr",
")",
")",
":",
"self",
".",
"_add_from_str",
"(",
"locations",
")",
"elif",
"isinstance",
"(",
"locations",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"self",
".",
"_add_from_list",
"(",
"locations",
")"
] | Add extra locations to AstralGeocoder.
Extra locations can be
* A single string containing one or more locations separated by a newline.
* A list of strings
* A list of lists/tuples that are passed to a :class:`Location` constructor | [
"Add",
"extra",
"locations",
"to",
"AstralGeocoder",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L1512-L1525 | train |
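Sketch of extending the bundled geocoder with add_locations(). The comma-separated field order in the string form and the dictionary-style lookup on the geocoder are assumptions inferred from _add_from_str/_add_from_list and the rest of the module.

from astral import AstralGeocoder

geocoder = AstralGeocoder()
# String form: one location per line (assumed field order: name,region,latitude,longitude,timezone,elevation).
geocoder.add_locations("Hobbiton,Middle Earth,-37.872,175.683,Pacific/Auckland,210")
# List-of-tuples form: each tuple is handed to the Location constructor.
geocoder.add_locations([("Rivendell", "Middle Earth", 47.25, 11.33, "Europe/Vienna", 600)])
print(geocoder["Hobbiton"].timezone)   # lookup by name (assumed __getitem__ support)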
sffjunkie/astral | src/astral.py | AstralGeocoder._add_from_str | def _add_from_str(self, s):
"""Add locations from a string"""
if sys.version_info[0] < 3 and isinstance(s, str):
s = s.decode('utf-8')
for line in s.split("\n"):
self._parse_line(line) | python | def _add_from_str(self, s):
"""Add locations from a string"""
if sys.version_info[0] < 3 and isinstance(s, str):
s = s.decode('utf-8')
for line in s.split("\n"):
self._parse_line(line) | [
"def",
"_add_from_str",
"(",
"self",
",",
"s",
")",
":",
"if",
"sys",
".",
"version_info",
"[",
"0",
"]",
"<",
"3",
"and",
"isinstance",
"(",
"s",
",",
"str",
")",
":",
"s",
"=",
"s",
".",
"decode",
"(",
"'utf-8'",
")",
"for",
"line",
"in",
"s",
".",
"split",
"(",
"\"\\n\"",
")",
":",
"self",
".",
"_parse_line",
"(",
"line",
")"
] | Add locations from a string | [
"Add",
"locations",
"from",
"a",
"string"
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L1527-L1534 | train |
sffjunkie/astral | src/astral.py | AstralGeocoder._add_from_list | def _add_from_list(self, l):
"""Add locations from a list of either strings or lists or tuples.
Lists of lists and tuples are passed to the Location constructor
"""
for item in l:
if isinstance(item, (str, ustr)):
self._add_from_str(item)
elif isinstance(item, (list, tuple)):
location = Location(item)
self._add_location(location) | python | def _add_from_list(self, l):
"""Add locations from a list of either strings or lists or tuples.
Lists of lists and tuples are passed to the Location constructor
"""
for item in l:
if isinstance(item, (str, ustr)):
self._add_from_str(item)
elif isinstance(item, (list, tuple)):
location = Location(item)
self._add_location(location) | [
"def",
"_add_from_list",
"(",
"self",
",",
"l",
")",
":",
"for",
"item",
"in",
"l",
":",
"if",
"isinstance",
"(",
"item",
",",
"(",
"str",
",",
"ustr",
")",
")",
":",
"self",
".",
"_add_from_str",
"(",
"item",
")",
"elif",
"isinstance",
"(",
"item",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"location",
"=",
"Location",
"(",
"item",
")",
"self",
".",
"_add_location",
"(",
"location",
")"
] | Add locations from a list of either strings or lists or tuples.
Lists of lists and tuples are passed to the Location constructor | [
"Add",
"locations",
"from",
"a",
"list",
"of",
"either",
"strings",
"or",
"lists",
"or",
"tuples",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L1536-L1547 | train |
sffjunkie/astral | src/astral.py | GoogleGeocoder._get_geocoding | def _get_geocoding(self, key, location):
"""Lookup the Google geocoding API information for `key`"""
url = self._location_query_base % quote_plus(key)
if self.api_key:
url += "&key=%s" % self.api_key
data = self._read_from_url(url)
response = json.loads(data)
if response["status"] == "OK":
formatted_address = response["results"][0]["formatted_address"]
pos = formatted_address.find(",")
if pos == -1:
location.name = formatted_address
location.region = ""
else:
location.name = formatted_address[:pos].strip()
location.region = formatted_address[pos + 1 :].strip()
geo_location = response["results"][0]["geometry"]["location"]
location.latitude = float(geo_location["lat"])
location.longitude = float(geo_location["lng"])
else:
raise AstralError("GoogleGeocoder: Unable to locate %s. Server Response=%s" %
(key, response["status"])) | python | def _get_geocoding(self, key, location):
"""Lookup the Google geocoding API information for `key`"""
url = self._location_query_base % quote_plus(key)
if self.api_key:
url += "&key=%s" % self.api_key
data = self._read_from_url(url)
response = json.loads(data)
if response["status"] == "OK":
formatted_address = response["results"][0]["formatted_address"]
pos = formatted_address.find(",")
if pos == -1:
location.name = formatted_address
location.region = ""
else:
location.name = formatted_address[:pos].strip()
location.region = formatted_address[pos + 1 :].strip()
geo_location = response["results"][0]["geometry"]["location"]
location.latitude = float(geo_location["lat"])
location.longitude = float(geo_location["lng"])
else:
raise AstralError("GoogleGeocoder: Unable to locate %s. Server Response=%s" %
(key, response["status"])) | [
"def",
"_get_geocoding",
"(",
"self",
",",
"key",
",",
"location",
")",
":",
"url",
"=",
"self",
".",
"_location_query_base",
"%",
"quote_plus",
"(",
"key",
")",
"if",
"self",
".",
"api_key",
":",
"url",
"+=",
"\"&key=%s\"",
"%",
"self",
".",
"api_key",
"data",
"=",
"self",
".",
"_read_from_url",
"(",
"url",
")",
"response",
"=",
"json",
".",
"loads",
"(",
"data",
")",
"if",
"response",
"[",
"\"status\"",
"]",
"==",
"\"OK\"",
":",
"formatted_address",
"=",
"response",
"[",
"\"results\"",
"]",
"[",
"0",
"]",
"[",
"\"formatted_address\"",
"]",
"pos",
"=",
"formatted_address",
".",
"find",
"(",
"\",\"",
")",
"if",
"pos",
"==",
"-",
"1",
":",
"location",
".",
"name",
"=",
"formatted_address",
"location",
".",
"region",
"=",
"\"\"",
"else",
":",
"location",
".",
"name",
"=",
"formatted_address",
"[",
":",
"pos",
"]",
".",
"strip",
"(",
")",
"location",
".",
"region",
"=",
"formatted_address",
"[",
"pos",
"+",
"1",
":",
"]",
".",
"strip",
"(",
")",
"geo_location",
"=",
"response",
"[",
"\"results\"",
"]",
"[",
"0",
"]",
"[",
"\"geometry\"",
"]",
"[",
"\"location\"",
"]",
"location",
".",
"latitude",
"=",
"float",
"(",
"geo_location",
"[",
"\"lat\"",
"]",
")",
"location",
".",
"longitude",
"=",
"float",
"(",
"geo_location",
"[",
"\"lng\"",
"]",
")",
"else",
":",
"raise",
"AstralError",
"(",
"\"GoogleGeocoder: Unable to locate %s. Server Response=%s\"",
"%",
"(",
"key",
",",
"response",
"[",
"\"status\"",
"]",
")",
")"
] | Lookup the Google geocoding API information for `key` | [
"Lookup",
"the",
"Google",
"geocoding",
"API",
"information",
"for",
"key"
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L1684-L1707 | train |
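The _get_geocoding helper above (together with _get_timezone and _get_elevation below) populates a Location from Google's web APIs. A heavily hedged sketch of driving them through the public lookup interface follows; the api_key keyword argument and the dictionary-style lookup are assumptions not confirmed by this excerpt, and a valid Google API key plus network access are required.

from astral import GoogleGeocoder

geocoder = GoogleGeocoder(api_key="YOUR_API_KEY")   # hypothetical key; the keyword is an assumption
location = geocoder["Eiffel Tower"]                  # assumed to trigger the _get_* helpers above
print(location.name, location.latitude, location.longitude)
print(location.timezone, location.elevation)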
sffjunkie/astral | src/astral.py | GoogleGeocoder._get_timezone | def _get_timezone(self, location):
"""Query the timezone information with the latitude and longitude of
the specified `location`.
This function assumes the timezone of the location has always been
the same as it is now by using time() in the query string.
"""
url = self._timezone_query_base % (
location.latitude,
location.longitude,
int(time()),
)
if self.api_key != "":
url += "&key=%s" % self.api_key
data = self._read_from_url(url)
response = json.loads(data)
if response["status"] == "OK":
location.timezone = response["timeZoneId"]
else:
location.timezone = "UTC" | python | def _get_timezone(self, location):
"""Query the timezone information with the latitude and longitude of
the specified `location`.
This function assumes the timezone of the location has always been
the same as it is now by using time() in the query string.
"""
url = self._timezone_query_base % (
location.latitude,
location.longitude,
int(time()),
)
if self.api_key != "":
url += "&key=%s" % self.api_key
data = self._read_from_url(url)
response = json.loads(data)
if response["status"] == "OK":
location.timezone = response["timeZoneId"]
else:
location.timezone = "UTC" | [
"def",
"_get_timezone",
"(",
"self",
",",
"location",
")",
":",
"url",
"=",
"self",
".",
"_timezone_query_base",
"%",
"(",
"location",
".",
"latitude",
",",
"location",
".",
"longitude",
",",
"int",
"(",
"time",
"(",
")",
")",
",",
")",
"if",
"self",
".",
"api_key",
"!=",
"\"\"",
":",
"url",
"+=",
"\"&key=%s\"",
"%",
"self",
".",
"api_key",
"data",
"=",
"self",
".",
"_read_from_url",
"(",
"url",
")",
"response",
"=",
"json",
".",
"loads",
"(",
"data",
")",
"if",
"response",
"[",
"\"status\"",
"]",
"==",
"\"OK\"",
":",
"location",
".",
"timezone",
"=",
"response",
"[",
"\"timeZoneId\"",
"]",
"else",
":",
"location",
".",
"timezone",
"=",
"\"UTC\""
] | Query the timezone information with the latitude and longitude of
the specified `location`.
This function assumes the timezone of the location has always been
the same as it is now by using time() in the query string. | [
"Query",
"the",
"timezone",
"information",
"with",
"the",
"latitude",
"and",
"longitude",
"of",
"the",
"specified",
"location",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L1709-L1729 | train |
sffjunkie/astral | src/astral.py | GoogleGeocoder._get_elevation | def _get_elevation(self, location):
"""Query the elevation information with the latitude and longitude of
the specified `location`.
"""
url = self._elevation_query_base % (location.latitude, location.longitude)
if self.api_key != "":
url += "&key=%s" % self.api_key
data = self._read_from_url(url)
response = json.loads(data)
if response["status"] == "OK":
location.elevation = int(float(response["results"][0]["elevation"]))
else:
location.elevation = 0 | python | def _get_elevation(self, location):
"""Query the elevation information with the latitude and longitude of
the specified `location`.
"""
url = self._elevation_query_base % (location.latitude, location.longitude)
if self.api_key != "":
url += "&key=%s" % self.api_key
data = self._read_from_url(url)
response = json.loads(data)
if response["status"] == "OK":
location.elevation = int(float(response["results"][0]["elevation"]))
else:
location.elevation = 0 | [
"def",
"_get_elevation",
"(",
"self",
",",
"location",
")",
":",
"url",
"=",
"self",
".",
"_elevation_query_base",
"%",
"(",
"location",
".",
"latitude",
",",
"location",
".",
"longitude",
")",
"if",
"self",
".",
"api_key",
"!=",
"\"\"",
":",
"url",
"+=",
"\"&key=%s\"",
"%",
"self",
".",
"api_key",
"data",
"=",
"self",
".",
"_read_from_url",
"(",
"url",
")",
"response",
"=",
"json",
".",
"loads",
"(",
"data",
")",
"if",
"response",
"[",
"\"status\"",
"]",
"==",
"\"OK\"",
":",
"location",
".",
"elevation",
"=",
"int",
"(",
"float",
"(",
"response",
"[",
"\"results\"",
"]",
"[",
"0",
"]",
"[",
"\"elevation\"",
"]",
")",
")",
"else",
":",
"location",
".",
"elevation",
"=",
"0"
] | Query the elevation information with the latitude and longitude of
the specified `location`. | [
"Query",
"the",
"elevation",
"information",
"with",
"the",
"latitude",
"and",
"longitude",
"of",
"the",
"specified",
"location",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L1731-L1744 | train |
sffjunkie/astral | src/astral.py | Astral.sun_utc | def sun_utc(self, date, latitude, longitude, observer_elevation=0):
"""Calculate all the info for the sun at once.
All times are returned in the UTC timezone.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param observer_elevation: Elevation in metres to calculate sun for
:type observer_elevation: int
:returns: Dictionary with keys ``dawn``, ``sunrise``, ``noon``,
``sunset`` and ``dusk`` whose values are the results of the
corresponding `_utc` methods.
:rtype: dict
"""
dawn = self.dawn_utc(date, latitude, longitude, observer_elevation=observer_elevation)
sunrise = self.sunrise_utc(date, latitude, longitude, observer_elevation=observer_elevation)
noon = self.solar_noon_utc(date, longitude)
sunset = self.sunset_utc(date, latitude, longitude, observer_elevation=observer_elevation)
dusk = self.dusk_utc(date, latitude, longitude, observer_elevation=observer_elevation)
return {
"dawn": dawn,
"sunrise": sunrise,
"noon": noon,
"sunset": sunset,
"dusk": dusk,
} | python | def sun_utc(self, date, latitude, longitude, observer_elevation=0):
"""Calculate all the info for the sun at once.
All times are returned in the UTC timezone.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param observer_elevation: Elevation in metres to calculate sun for
:type observer_elevation: int
:returns: Dictionary with keys ``dawn``, ``sunrise``, ``noon``,
``sunset`` and ``dusk`` whose values are the results of the
corresponding `_utc` methods.
:rtype: dict
"""
dawn = self.dawn_utc(date, latitude, longitude, observer_elevation=observer_elevation)
sunrise = self.sunrise_utc(date, latitude, longitude, observer_elevation=observer_elevation)
noon = self.solar_noon_utc(date, longitude)
sunset = self.sunset_utc(date, latitude, longitude, observer_elevation=observer_elevation)
dusk = self.dusk_utc(date, latitude, longitude, observer_elevation=observer_elevation)
return {
"dawn": dawn,
"sunrise": sunrise,
"noon": noon,
"sunset": sunset,
"dusk": dusk,
} | [
"def",
"sun_utc",
"(",
"self",
",",
"date",
",",
"latitude",
",",
"longitude",
",",
"observer_elevation",
"=",
"0",
")",
":",
"dawn",
"=",
"self",
".",
"dawn_utc",
"(",
"date",
",",
"latitude",
",",
"longitude",
",",
"observer_elevation",
"=",
"observer_elevation",
")",
"sunrise",
"=",
"self",
".",
"sunrise_utc",
"(",
"date",
",",
"latitude",
",",
"longitude",
",",
"observer_elevation",
"=",
"observer_elevation",
")",
"noon",
"=",
"self",
".",
"solar_noon_utc",
"(",
"date",
",",
"longitude",
")",
"sunset",
"=",
"self",
".",
"sunset_utc",
"(",
"date",
",",
"latitude",
",",
"longitude",
",",
"observer_elevation",
"=",
"observer_elevation",
")",
"dusk",
"=",
"self",
".",
"dusk_utc",
"(",
"date",
",",
"latitude",
",",
"longitude",
",",
"observer_elevation",
"=",
"observer_elevation",
")",
"return",
"{",
"\"dawn\"",
":",
"dawn",
",",
"\"sunrise\"",
":",
"sunrise",
",",
"\"noon\"",
":",
"noon",
",",
"\"sunset\"",
":",
"sunset",
",",
"\"dusk\"",
":",
"dusk",
",",
"}"
] | Calculate all the info for the sun at once.
All times are returned in the UTC timezone.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param observer_elevation: Elevation in metres to calculate sun for
:type observer_elevation: int
:returns: Dictionary with keys ``dawn``, ``sunrise``, ``noon``,
``sunset`` and ``dusk`` whose values are the results of the
corresponding `_utc` methods.
:rtype: dict | [
"Calculate",
"all",
"the",
"info",
"for",
"the",
"sun",
"at",
"once",
".",
"All",
"times",
"are",
"returned",
"in",
"the",
"UTC",
"timezone",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L1805-L1836 | train |
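Sketch of calling Astral.sun_utc() directly, bypassing Location; every call here appears in the entry above, so only the coordinates are illustrative.

from datetime import date
from astral import Astral

a = Astral()
s = a.sun_utc(date(2019, 6, 21), 51.5074, -0.1278, observer_elevation=24)
print(s["dawn"].isoformat())   # timezone-aware UTC datetimes
print(s["dusk"].isoformat())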
sffjunkie/astral | src/astral.py | Astral.dawn_utc | def dawn_utc(self, date, latitude, longitude, depression=0, observer_elevation=0):
"""Calculate dawn time in the UTC timezone.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param depression: Override the depression used
:type depression: float
:param observer_elevation: Elevation in metres to calculate dawn for
:type observer_elevation: int
:return: The UTC date and time at which dawn occurs.
:rtype: :class:`~datetime.datetime`
"""
if depression == 0:
depression = self._depression
depression += 90
try:
return self._calc_time(depression, SUN_RISING, date, latitude, longitude, observer_elevation)
except ValueError as exc:
if exc.args[0] == "math domain error":
raise AstralError(
(
"Sun never reaches %d degrees below the horizon, "
"at this location."
)
% (depression - 90)
)
else:
raise | python | def dawn_utc(self, date, latitude, longitude, depression=0, observer_elevation=0):
"""Calculate dawn time in the UTC timezone.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param depression: Override the depression used
:type depression: float
:param observer_elevation: Elevation in metres to calculate dawn for
:type observer_elevation: int
:return: The UTC date and time at which dawn occurs.
:rtype: :class:`~datetime.datetime`
"""
if depression == 0:
depression = self._depression
depression += 90
try:
return self._calc_time(depression, SUN_RISING, date, latitude, longitude, observer_elevation)
except ValueError as exc:
if exc.args[0] == "math domain error":
raise AstralError(
(
"Sun never reaches %d degrees below the horizon, "
"at this location."
)
% (depression - 90)
)
else:
raise | [
"def",
"dawn_utc",
"(",
"self",
",",
"date",
",",
"latitude",
",",
"longitude",
",",
"depression",
"=",
"0",
",",
"observer_elevation",
"=",
"0",
")",
":",
"if",
"depression",
"==",
"0",
":",
"depression",
"=",
"self",
".",
"_depression",
"depression",
"+=",
"90",
"try",
":",
"return",
"self",
".",
"_calc_time",
"(",
"depression",
",",
"SUN_RISING",
",",
"date",
",",
"latitude",
",",
"longitude",
",",
"observer_elevation",
")",
"except",
"ValueError",
"as",
"exc",
":",
"if",
"exc",
".",
"args",
"[",
"0",
"]",
"==",
"\"math domain error\"",
":",
"raise",
"AstralError",
"(",
"(",
"\"Sun never reaches %d degrees below the horizon, \"",
"\"at this location.\"",
")",
"%",
"(",
"depression",
"-",
"90",
")",
")",
"else",
":",
"raise"
] | Calculate dawn time in the UTC timezone.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param depression: Override the depression used
:type depression: float
:param observer_elevation: Elevation in metres to calculate dawn for
:type observer_elevation: int
:return: The UTC date and time at which dawn occurs.
:rtype: :class:`~datetime.datetime` | [
"Calculate",
"dawn",
"time",
"in",
"the",
"UTC",
"timezone",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L1838-L1872 | train |
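Sketch of dawn_utc() with the default depression versus an explicit nautical depression of 12 degrees; both keywords are taken from the documented signature.

from datetime import date
from astral import Astral

a = Astral()
print(a.dawn_utc(date(2019, 3, 1), 51.5074, -0.1278))                 # default depression
print(a.dawn_utc(date(2019, 3, 1), 51.5074, -0.1278, depression=12))  # nautical dawn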
sffjunkie/astral | src/astral.py | Astral.sunrise_utc | def sunrise_utc(self, date, latitude, longitude, observer_elevation=0):
"""Calculate sunrise time in the UTC timezone.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param observer_elevation: Elevation in metres to calculate sunrise for
:type observer_elevation: int
:return: The UTC date and time at which sunrise occurs.
:rtype: :class:`~datetime.datetime`
"""
try:
return self._calc_time(90 + 0.833, SUN_RISING, date, latitude, longitude, observer_elevation)
except ValueError as exc:
if exc.args[0] == "math domain error":
raise AstralError(
("Sun never reaches the horizon on this day, " "at this location.")
)
else:
raise | python | def sunrise_utc(self, date, latitude, longitude, observer_elevation=0):
"""Calculate sunrise time in the UTC timezone.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param observer_elevation: Elevation in metres to calculate sunrise for
:type observer_elevation: int
:return: The UTC date and time at which sunrise occurs.
:rtype: :class:`~datetime.datetime`
"""
try:
return self._calc_time(90 + 0.833, SUN_RISING, date, latitude, longitude, observer_elevation)
except ValueError as exc:
if exc.args[0] == "math domain error":
raise AstralError(
("Sun never reaches the horizon on this day, " "at this location.")
)
else:
raise | [
"def",
"sunrise_utc",
"(",
"self",
",",
"date",
",",
"latitude",
",",
"longitude",
",",
"observer_elevation",
"=",
"0",
")",
":",
"try",
":",
"return",
"self",
".",
"_calc_time",
"(",
"90",
"+",
"0.833",
",",
"SUN_RISING",
",",
"date",
",",
"latitude",
",",
"longitude",
",",
"observer_elevation",
")",
"except",
"ValueError",
"as",
"exc",
":",
"if",
"exc",
".",
"args",
"[",
"0",
"]",
"==",
"\"math domain error\"",
":",
"raise",
"AstralError",
"(",
"(",
"\"Sun never reaches the horizon on this day, \"",
"\"at this location.\"",
")",
")",
"else",
":",
"raise"
] | Calculate sunrise time in the UTC timezone.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param observer_elevation: Elevation in metres to calculate sunrise for
:type observer_elevation: int
:return: The UTC date and time at which sunrise occurs.
:rtype: :class:`~datetime.datetime` | [
"Calculate",
"sunrise",
"time",
"in",
"the",
"UTC",
"timezone",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L1874-L1898 | train |
sffjunkie/astral | src/astral.py | Astral.solar_noon_utc | def solar_noon_utc(self, date, longitude):
"""Calculate solar noon time in the UTC timezone.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:return: The UTC date and time at which noon occurs.
:rtype: :class:`~datetime.datetime`
"""
jc = self._jday_to_jcentury(self._julianday(date))
eqtime = self._eq_of_time(jc)
timeUTC = (720.0 - (4 * longitude) - eqtime) / 60.0
hour = int(timeUTC)
minute = int((timeUTC - hour) * 60)
second = int((((timeUTC - hour) * 60) - minute) * 60)
if second > 59:
second -= 60
minute += 1
elif second < 0:
second += 60
minute -= 1
if minute > 59:
minute -= 60
hour += 1
elif minute < 0:
minute += 60
hour -= 1
if hour > 23:
hour -= 24
date += datetime.timedelta(days=1)
elif hour < 0:
hour += 24
date -= datetime.timedelta(days=1)
noon = datetime.datetime(date.year, date.month, date.day, hour, minute, second)
noon = pytz.UTC.localize(noon) # pylint: disable=E1120
return noon | python | def solar_noon_utc(self, date, longitude):
"""Calculate solar noon time in the UTC timezone.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:return: The UTC date and time at which noon occurs.
:rtype: :class:`~datetime.datetime`
"""
jc = self._jday_to_jcentury(self._julianday(date))
eqtime = self._eq_of_time(jc)
timeUTC = (720.0 - (4 * longitude) - eqtime) / 60.0
hour = int(timeUTC)
minute = int((timeUTC - hour) * 60)
second = int((((timeUTC - hour) * 60) - minute) * 60)
if second > 59:
second -= 60
minute += 1
elif second < 0:
second += 60
minute -= 1
if minute > 59:
minute -= 60
hour += 1
elif minute < 0:
minute += 60
hour -= 1
if hour > 23:
hour -= 24
date += datetime.timedelta(days=1)
elif hour < 0:
hour += 24
date -= datetime.timedelta(days=1)
noon = datetime.datetime(date.year, date.month, date.day, hour, minute, second)
noon = pytz.UTC.localize(noon) # pylint: disable=E1120
return noon | [
"def",
"solar_noon_utc",
"(",
"self",
",",
"date",
",",
"longitude",
")",
":",
"jc",
"=",
"self",
".",
"_jday_to_jcentury",
"(",
"self",
".",
"_julianday",
"(",
"date",
")",
")",
"eqtime",
"=",
"self",
".",
"_eq_of_time",
"(",
"jc",
")",
"timeUTC",
"=",
"(",
"720.0",
"-",
"(",
"4",
"*",
"longitude",
")",
"-",
"eqtime",
")",
"/",
"60.0",
"hour",
"=",
"int",
"(",
"timeUTC",
")",
"minute",
"=",
"int",
"(",
"(",
"timeUTC",
"-",
"hour",
")",
"*",
"60",
")",
"second",
"=",
"int",
"(",
"(",
"(",
"(",
"timeUTC",
"-",
"hour",
")",
"*",
"60",
")",
"-",
"minute",
")",
"*",
"60",
")",
"if",
"second",
">",
"59",
":",
"second",
"-=",
"60",
"minute",
"+=",
"1",
"elif",
"second",
"<",
"0",
":",
"second",
"+=",
"60",
"minute",
"-=",
"1",
"if",
"minute",
">",
"59",
":",
"minute",
"-=",
"60",
"hour",
"+=",
"1",
"elif",
"minute",
"<",
"0",
":",
"minute",
"+=",
"60",
"hour",
"-=",
"1",
"if",
"hour",
">",
"23",
":",
"hour",
"-=",
"24",
"date",
"+=",
"datetime",
".",
"timedelta",
"(",
"days",
"=",
"1",
")",
"elif",
"hour",
"<",
"0",
":",
"hour",
"+=",
"24",
"date",
"-=",
"datetime",
".",
"timedelta",
"(",
"days",
"=",
"1",
")",
"noon",
"=",
"datetime",
".",
"datetime",
"(",
"date",
".",
"year",
",",
"date",
".",
"month",
",",
"date",
".",
"day",
",",
"hour",
",",
"minute",
",",
"second",
")",
"noon",
"=",
"pytz",
".",
"UTC",
".",
"localize",
"(",
"noon",
")",
"# pylint: disable=E1120",
"return",
"noon"
] | Calculate solar noon time in the UTC timezone.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:return: The UTC date and time at which noon occurs.
:rtype: :class:`~datetime.datetime` | [
"Calculate",
"solar",
"noon",
"time",
"in",
"the",
"UTC",
"timezone",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L1900-L1944 | train |
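Sketch of solar_noon_utc(), which needs only the date and the longitude per the signature above.

from datetime import date
from astral import Astral

a = Astral()
noon = a.solar_noon_utc(date(2019, 2, 14), -0.1278)
print(noon.isoformat())   # timezone-aware UTC datetime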
sffjunkie/astral | src/astral.py | Astral.sunset_utc | def sunset_utc(self, date, latitude, longitude, observer_elevation=0):
"""Calculate sunset time in the UTC timezone.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param observer_elevation: Elevation in metres to calculate sunset for
:type observer_elevation: int
:return: The UTC date and time at which sunset occurs.
:rtype: :class:`~datetime.datetime`
"""
try:
return self._calc_time(90 + 0.833, SUN_SETTING, date, latitude, longitude, observer_elevation)
except ValueError as exc:
if exc.args[0] == "math domain error":
raise AstralError(
("Sun never reaches the horizon on this day, " "at this location.")
)
else:
raise | python | def sunset_utc(self, date, latitude, longitude, observer_elevation=0):
"""Calculate sunset time in the UTC timezone.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param observer_elevation: Elevation in metres to calculate sunset for
:type observer_elevation: int
:return: The UTC date and time at which sunset occurs.
:rtype: :class:`~datetime.datetime`
"""
try:
return self._calc_time(90 + 0.833, SUN_SETTING, date, latitude, longitude, observer_elevation)
except ValueError as exc:
if exc.args[0] == "math domain error":
raise AstralError(
("Sun never reaches the horizon on this day, " "at this location.")
)
else:
raise | [
"def",
"sunset_utc",
"(",
"self",
",",
"date",
",",
"latitude",
",",
"longitude",
",",
"observer_elevation",
"=",
"0",
")",
":",
"try",
":",
"return",
"self",
".",
"_calc_time",
"(",
"90",
"+",
"0.833",
",",
"SUN_SETTING",
",",
"date",
",",
"latitude",
",",
"longitude",
",",
"observer_elevation",
")",
"except",
"ValueError",
"as",
"exc",
":",
"if",
"exc",
".",
"args",
"[",
"0",
"]",
"==",
"\"math domain error\"",
":",
"raise",
"AstralError",
"(",
"(",
"\"Sun never reaches the horizon on this day, \"",
"\"at this location.\"",
")",
")",
"else",
":",
"raise"
] | Calculate sunset time in the UTC timezone.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param observer_elevation: Elevation in metres to calculate sunset for
:type observer_elevation: int
:return: The UTC date and time at which sunset occurs.
:rtype: :class:`~datetime.datetime` | [
"Calculate",
"sunset",
"time",
"in",
"the",
"UTC",
"timezone",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L1946-L1970 | train |
sffjunkie/astral | src/astral.py | Astral.dusk_utc | def dusk_utc(self, date, latitude, longitude, depression=0, observer_elevation=0):
"""Calculate dusk time in the UTC timezone.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param depression: Override the depression used
:type depression: float
:param observer_elevation: Elevation in metres to calculate dusk for
:type observer_elevation: int
:return: The UTC date and time at which dusk occurs.
:rtype: :class:`~datetime.datetime`
"""
if depression == 0:
depression = self._depression
depression += 90
try:
return self._calc_time(depression, SUN_SETTING, date, latitude, longitude, observer_elevation)
except ValueError as exc:
if exc.args[0] == "math domain error":
raise AstralError(
(
"Sun never reaches %d degrees below the horizon, "
"at this location."
)
% (depression - 90)
)
else:
raise | python | def dusk_utc(self, date, latitude, longitude, depression=0, observer_elevation=0):
"""Calculate dusk time in the UTC timezone.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param depression: Override the depression used
:type depression: float
:param observer_elevation: Elevation in metres to calculate dusk for
:type observer_elevation: int
:return: The UTC date and time at which dusk occurs.
:rtype: :class:`~datetime.datetime`
"""
if depression == 0:
depression = self._depression
depression += 90
try:
return self._calc_time(depression, SUN_SETTING, date, latitude, longitude, observer_elevation)
except ValueError as exc:
if exc.args[0] == "math domain error":
raise AstralError(
(
"Sun never reaches %d degrees below the horizon, "
"at this location."
)
% (depression - 90)
)
else:
raise | [
"def",
"dusk_utc",
"(",
"self",
",",
"date",
",",
"latitude",
",",
"longitude",
",",
"depression",
"=",
"0",
",",
"observer_elevation",
"=",
"0",
")",
":",
"if",
"depression",
"==",
"0",
":",
"depression",
"=",
"self",
".",
"_depression",
"depression",
"+=",
"90",
"try",
":",
"return",
"self",
".",
"_calc_time",
"(",
"depression",
",",
"SUN_SETTING",
",",
"date",
",",
"latitude",
",",
"longitude",
",",
"observer_elevation",
")",
"except",
"ValueError",
"as",
"exc",
":",
"if",
"exc",
".",
"args",
"[",
"0",
"]",
"==",
"\"math domain error\"",
":",
"raise",
"AstralError",
"(",
"(",
"\"Sun never reaches %d degrees below the horizon, \"",
"\"at this location.\"",
")",
"%",
"(",
"depression",
"-",
"90",
")",
")",
"else",
":",
"raise"
] | Calculate dusk time in the UTC timezone.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param depression: Override the depression used
:type depression: float
:param observer_elevation: Elevation in metres to calculate dusk for
:type observer_elevation: int
:return: The UTC date and time at which dusk occurs.
:rtype: :class:`~datetime.datetime` | [
"Calculate",
"dusk",
"time",
"in",
"the",
"UTC",
"timezone",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L1972-L2006 | train |
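The depression argument selects the twilight definition; the method adds 90 degrees internally to form the zenith handed to _calc_time. A sketch using the conventional civil/nautical/astronomical values of 6, 12 and 18 degrees; the no-argument Astral() construction is an assumption:

import datetime
from astral import Astral

a = Astral()  # assumed default construction
for name, depression in (('civil', 6), ('nautical', 12), ('astronomical', 18)):
    # Madrid-ish coordinates, where all three twilights occur even in midsummer
    dusk = a.dusk_utc(datetime.date(2019, 6, 21), 40.4, -3.7, depression)
    print(name, dusk.time())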
sffjunkie/astral | src/astral.py | Astral.solar_midnight_utc | def solar_midnight_utc(self, date, longitude):
"""Calculate solar midnight time in the UTC timezone.
Note that this calculates the solar midnight that is closest
to 00:00:00 of the specified date i.e. it may return a time that is on
the previous day.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:return: The UTC date and time at which midnight occurs.
:rtype: :class:`~datetime.datetime`
"""
julianday = self._julianday(date)
newt = self._jday_to_jcentury(julianday + 0.5 + -longitude / 360.0)
eqtime = self._eq_of_time(newt)
timeUTC = (-longitude * 4.0) - eqtime
timeUTC = timeUTC / 60.0
hour = int(timeUTC)
minute = int((timeUTC - hour) * 60)
second = int((((timeUTC - hour) * 60) - minute) * 60)
if second > 59:
second -= 60
minute += 1
elif second < 0:
second += 60
minute -= 1
if minute > 59:
minute -= 60
hour += 1
elif minute < 0:
minute += 60
hour -= 1
if hour < 0:
hour += 24
date -= datetime.timedelta(days=1)
midnight = datetime.datetime(
date.year, date.month, date.day, hour, minute, second
)
midnight = pytz.UTC.localize(midnight) # pylint: disable=E1120
return midnight | python | def solar_midnight_utc(self, date, longitude):
"""Calculate solar midnight time in the UTC timezone.
Note that this calculates the solar midnight that is closest
to 00:00:00 of the specified date i.e. it may return a time that is on
the previous day.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:return: The UTC date and time at which midnight occurs.
:rtype: :class:`~datetime.datetime`
"""
julianday = self._julianday(date)
newt = self._jday_to_jcentury(julianday + 0.5 + -longitude / 360.0)
eqtime = self._eq_of_time(newt)
timeUTC = (-longitude * 4.0) - eqtime
timeUTC = timeUTC / 60.0
hour = int(timeUTC)
minute = int((timeUTC - hour) * 60)
second = int((((timeUTC - hour) * 60) - minute) * 60)
if second > 59:
second -= 60
minute += 1
elif second < 0:
second += 60
minute -= 1
if minute > 59:
minute -= 60
hour += 1
elif minute < 0:
minute += 60
hour -= 1
if hour < 0:
hour += 24
date -= datetime.timedelta(days=1)
midnight = datetime.datetime(
date.year, date.month, date.day, hour, minute, second
)
midnight = pytz.UTC.localize(midnight) # pylint: disable=E1120
return midnight | [
"def",
"solar_midnight_utc",
"(",
"self",
",",
"date",
",",
"longitude",
")",
":",
"julianday",
"=",
"self",
".",
"_julianday",
"(",
"date",
")",
"newt",
"=",
"self",
".",
"_jday_to_jcentury",
"(",
"julianday",
"+",
"0.5",
"+",
"-",
"longitude",
"/",
"360.0",
")",
"eqtime",
"=",
"self",
".",
"_eq_of_time",
"(",
"newt",
")",
"timeUTC",
"=",
"(",
"-",
"longitude",
"*",
"4.0",
")",
"-",
"eqtime",
"timeUTC",
"=",
"timeUTC",
"/",
"60.0",
"hour",
"=",
"int",
"(",
"timeUTC",
")",
"minute",
"=",
"int",
"(",
"(",
"timeUTC",
"-",
"hour",
")",
"*",
"60",
")",
"second",
"=",
"int",
"(",
"(",
"(",
"(",
"timeUTC",
"-",
"hour",
")",
"*",
"60",
")",
"-",
"minute",
")",
"*",
"60",
")",
"if",
"second",
">",
"59",
":",
"second",
"-=",
"60",
"minute",
"+=",
"1",
"elif",
"second",
"<",
"0",
":",
"second",
"+=",
"60",
"minute",
"-=",
"1",
"if",
"minute",
">",
"59",
":",
"minute",
"-=",
"60",
"hour",
"+=",
"1",
"elif",
"minute",
"<",
"0",
":",
"minute",
"+=",
"60",
"hour",
"-=",
"1",
"if",
"hour",
"<",
"0",
":",
"hour",
"+=",
"24",
"date",
"-=",
"datetime",
".",
"timedelta",
"(",
"days",
"=",
"1",
")",
"midnight",
"=",
"datetime",
".",
"datetime",
"(",
"date",
".",
"year",
",",
"date",
".",
"month",
",",
"date",
".",
"day",
",",
"hour",
",",
"minute",
",",
"second",
")",
"midnight",
"=",
"pytz",
".",
"UTC",
".",
"localize",
"(",
"midnight",
")",
"# pylint: disable=E1120",
"return",
"midnight"
] | Calculate solar midnight time in the UTC timezone.
Note that this calculates the solar midnight that is closest
to 00:00:00 of the specified date i.e. it may return a time that is on
the previous day.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:return: The UTC date and time at which midnight occurs.
:rtype: :class:`~datetime.datetime` | [
"Calculate",
"solar",
"midnight",
"time",
"in",
"the",
"UTC",
"timezone",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L2008-L2059 | train |
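The tail of the method converts a possibly negative fractional UTC time into clock fields, carrying seconds into minutes, minutes into hours, and borrowing a day when the hour underflows. A simplified standalone sketch of the borrow path (the overflow carries are omitted), with a worked value:

import datetime

def utc_hours_to_datetime(date, hours):
    # Split fractional hours into h/m/s, then borrow a day if the hour
    # underflows past midnight -- the same carry logic as above.
    hour = int(hours)
    minute = int((hours - hour) * 60)
    second = int((((hours - hour) * 60) - minute) * 60)
    if second < 0:
        second += 60
        minute -= 1
    if minute < 0:
        minute += 60
        hour -= 1
    if hour < 0:
        hour += 24
        date -= datetime.timedelta(days=1)
    return datetime.datetime(date.year, date.month, date.day, hour, minute, second)

# 0.25 hours before 2019-01-01 00:00 UTC -> 2018-12-31 23:45:00
print(utc_hours_to_datetime(datetime.date(2019, 1, 1), -0.25))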
sffjunkie/astral | src/astral.py | Astral.daylight_utc | def daylight_utc(self, date, latitude, longitude, observer_elevation=0):
"""Calculate daylight start and end times in the UTC timezone.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param observer_elevation: Elevation in metres to calculate daylight for
:type observer_elevation: int
:return: A tuple of the UTC date and time at which daylight starts and ends.
:rtype: (:class:`~datetime.datetime`, :class:`~datetime.datetime`)
"""
start = self.sunrise_utc(date, latitude, longitude, observer_elevation)
end = self.sunset_utc(date, latitude, longitude, observer_elevation)
return start, end | python | def daylight_utc(self, date, latitude, longitude, observer_elevation=0):
"""Calculate daylight start and end times in the UTC timezone.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param observer_elevation: Elevation in metres to calculate daylight for
:type observer_elevation: int
:return: A tuple of the UTC date and time at which daylight starts and ends.
:rtype: (:class:`~datetime.datetime`, :class:`~datetime.datetime`)
"""
start = self.sunrise_utc(date, latitude, longitude, observer_elevation)
end = self.sunset_utc(date, latitude, longitude, observer_elevation)
return start, end | [
"def",
"daylight_utc",
"(",
"self",
",",
"date",
",",
"latitude",
",",
"longitude",
",",
"observer_elevation",
"=",
"0",
")",
":",
"start",
"=",
"self",
".",
"sunrise_utc",
"(",
"date",
",",
"latitude",
",",
"longitude",
",",
"observer_elevation",
")",
"end",
"=",
"self",
".",
"sunset_utc",
"(",
"date",
",",
"latitude",
",",
"longitude",
",",
"observer_elevation",
")",
"return",
"start",
",",
"end"
] | Calculate daylight start and end times in the UTC timezone.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param observer_elevation: Elevation in metres to calculate daylight for
:type observer_elevation: int
:return: A tuple of the UTC date and time at which daylight starts and ends.
:rtype: (:class:`~datetime.datetime`, :class:`~datetime.datetime`) | [
"Calculate",
"daylight",
"start",
"and",
"end",
"times",
"in",
"the",
"UTC",
"timezone",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L2061-L2080 | train |
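Because the method returns a (sunrise, sunset) pair of datetimes, the length of the day falls out as a timedelta. A small sketch, again assuming a no-argument Astral() construction:

import datetime
from astral import Astral

a = Astral()  # assumed default construction
start, end = a.daylight_utc(datetime.date(2019, 6, 21), 51.5, -0.12)
print('day length:', end - start)  # datetime.timedelta between sunrise and sunset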
sffjunkie/astral | src/astral.py | Astral.night_utc | def night_utc(self, date, latitude, longitude, observer_elevation=0):
"""Calculate night start and end times in the UTC timezone.
Night is calculated to be between astronomical dusk on the
date specified and astronomical dawn of the next day.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param observer_elevation: Elevation in metres to calculate night for
:type observer_elevation: int
:return: A tuple of the UTC date and time at which night starts and ends.
:rtype: (:class:`~datetime.datetime`, :class:`~datetime.datetime`)
"""
start = self.dusk_utc(date, latitude, longitude, 18, observer_elevation)
tomorrow = date + datetime.timedelta(days=1)
end = self.dawn_utc(tomorrow, latitude, longitude, 18, observer_elevation)
return start, end | python | def night_utc(self, date, latitude, longitude, observer_elevation=0):
"""Calculate night start and end times in the UTC timezone.
Night is calculated to be between astronomical dusk on the
date specified and astronomical dawn of the next day.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param observer_elevation: Elevation in metres to calculate night for
:type observer_elevation: int
:return: A tuple of the UTC date and time at which night starts and ends.
:rtype: (:class:`~datetime.datetime`, :class:`~datetime.datetime`)
"""
start = self.dusk_utc(date, latitude, longitude, 18, observer_elevation)
tomorrow = date + datetime.timedelta(days=1)
end = self.dawn_utc(tomorrow, latitude, longitude, 18, observer_elevation)
return start, end | [
"def",
"night_utc",
"(",
"self",
",",
"date",
",",
"latitude",
",",
"longitude",
",",
"observer_elevation",
"=",
"0",
")",
":",
"start",
"=",
"self",
".",
"dusk_utc",
"(",
"date",
",",
"latitude",
",",
"longitude",
",",
"18",
",",
"observer_elevation",
")",
"tomorrow",
"=",
"date",
"+",
"datetime",
".",
"timedelta",
"(",
"days",
"=",
"1",
")",
"end",
"=",
"self",
".",
"dawn_utc",
"(",
"tomorrow",
",",
"latitude",
",",
"longitude",
",",
"18",
",",
"observer_elevation",
")",
"return",
"start",
",",
"end"
] | Calculate night start and end times in the UTC timezone.
Night is calculated to be between astronomical dusk on the
date specified and astronomical dawn of the next day.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param observer_elevation: Elevation in metres to calculate night for
:type observer_elevation: int
:return: A tuple of the UTC date and time at which night starts and ends.
:rtype: (:class:`~datetime.datetime`, :class:`~datetime.datetime`) | [
"Calculate",
"night",
"start",
"and",
"end",
"times",
"in",
"the",
"UTC",
"timezone",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L2082-L2105 | train |
sffjunkie/astral | src/astral.py | Astral.blue_hour_utc | def blue_hour_utc(self, direction, date, latitude, longitude, observer_elevation=0):
"""Returns the start and end times of the Blue Hour in the UTC timezone
when the sun is traversing in the specified direction.
This method uses the definition from PhotoPills i.e. the
blue hour is when the sun is between 6 and 4 degrees below the horizon.
:param direction: Determines whether the time is for the sun rising or setting.
Use ``astral.SUN_RISING`` or ``astral.SUN_SETTING``.
:type direction: int
:param date: The date for which to calculate the times.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param observer_elevation: Elevation in metres to calculate the blue hour for
:type observer_elevation: int
:return: A tuple of the UTC date and time at which the Blue Hour starts and ends.
:rtype: (:class:`~datetime.datetime`, :class:`~datetime.datetime`)
"""
if date is None:
date = datetime.date.today()
start = self.time_at_elevation_utc(-6, direction, date, latitude, longitude, observer_elevation)
end = self.time_at_elevation_utc(-4, direction, date, latitude, longitude, observer_elevation)
if direction == SUN_RISING:
return start, end
else:
return end, start | python | def blue_hour_utc(self, direction, date, latitude, longitude, observer_elevation=0):
"""Returns the start and end times of the Blue Hour in the UTC timezone
when the sun is traversing in the specified direction.
This method uses the definition from PhotoPills i.e. the
blue hour is when the sun is between 6 and 4 degrees below the horizon.
:param direction: Determines whether the time is for the sun rising or setting.
Use ``astral.SUN_RISING`` or ``astral.SUN_SETTING``.
:type direction: int
:param date: The date for which to calculate the times.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param observer_elevation: Elevation in metres to calculate the blue hour for
:type observer_elevation: int
:return: A tuple of the UTC date and time at which the Blue Hour starts and ends.
:rtype: (:class:`~datetime.datetime`, :class:`~datetime.datetime`)
"""
if date is None:
date = datetime.date.today()
start = self.time_at_elevation_utc(-6, direction, date, latitude, longitude, observer_elevation)
end = self.time_at_elevation_utc(-4, direction, date, latitude, longitude, observer_elevation)
if direction == SUN_RISING:
return start, end
else:
return end, start | [
"def",
"blue_hour_utc",
"(",
"self",
",",
"direction",
",",
"date",
",",
"latitude",
",",
"longitude",
",",
"observer_elevation",
"=",
"0",
")",
":",
"if",
"date",
"is",
"None",
":",
"date",
"=",
"datetime",
".",
"date",
".",
"today",
"(",
")",
"start",
"=",
"self",
".",
"time_at_elevation_utc",
"(",
"-",
"6",
",",
"direction",
",",
"date",
",",
"latitude",
",",
"longitude",
",",
"observer_elevation",
")",
"end",
"=",
"self",
".",
"time_at_elevation_utc",
"(",
"-",
"4",
",",
"direction",
",",
"date",
",",
"latitude",
",",
"longitude",
",",
"observer_elevation",
")",
"if",
"direction",
"==",
"SUN_RISING",
":",
"return",
"start",
",",
"end",
"else",
":",
"return",
"end",
",",
"start"
] | Returns the start and end times of the Blue Hour in the UTC timezone
when the sun is traversing in the specified direction.
This method uses the definition from PhotoPills i.e. the
blue hour is when the sun is between 6 and 4 degrees below the horizon.
:param direction: Determines whether the time is for the sun rising or setting.
Use ``astral.SUN_RISING`` or ``astral.SUN_SETTING``.
:type direction: int
:param date: The date for which to calculate the times.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param observer_elevation: Elevation in metres to calculate the blue hour for
:type observer_elevation: int
:return: A tuple of the UTC date and time at which the Blue Hour starts and ends.
:rtype: (:class:`~datetime.datetime`, :class:`~datetime.datetime`) | [
"Returns",
"the",
"start",
"and",
"end",
"times",
"of",
"the",
"Blue",
"Hour",
"in",
"the",
"UTC",
"timezone",
"when",
"the",
"sun",
"is",
"traversing",
"in",
"the",
"specified",
"direction",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L2179-L2211 | train |
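Whatever the direction, the tuple comes back in chronological order: for the setting sun the -4 degree crossing happens before the -6 degree crossing, so the method swaps the pair before returning. A sketch, with the no-argument Astral() construction assumed:

import datetime
from astral import Astral, SUN_RISING, SUN_SETTING

a = Astral()  # assumed default construction
day = datetime.date(2019, 6, 21)
morning_start, morning_end = a.blue_hour_utc(SUN_RISING, day, 40.4, -3.7)
evening_start, evening_end = a.blue_hour_utc(SUN_SETTING, day, 40.4, -3.7)
# Both pairs are ordered start-before-end regardless of direction.
assert morning_start < morning_end and evening_start < evening_end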
sffjunkie/astral | src/astral.py | Astral.time_at_elevation_utc | def time_at_elevation_utc(self, elevation, direction, date, latitude, longitude, observer_elevation=0):
"""Calculate the time in the UTC timezone when the sun is at
the specified elevation on the specified date.
Note: This method uses positive elevations for those above the horizon.
:param elevation: Elevation in degrees above the horizon to calculate for.
:type elevation: float
:param direction: Determines whether the calculated time is for the sun rising or setting.
Use ``astral.SUN_RISING`` or ``astral.SUN_SETTING``. Default is rising.
:type direction: int
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param observer_elevation: Elevation in metres to calculate time at elevation for
:type observer_elevation: int
:return: The UTC date and time at which the sun is at the required
elevation.
:rtype: :class:`~datetime.datetime`
"""
if elevation > 90.0:
elevation = 180.0 - elevation
direction = SUN_SETTING
depression = 90 - elevation
try:
return self._calc_time(depression, direction, date, latitude, longitude, observer_elevation)
except ValueError as exc:
if exc.args[0] == "math domain error":
raise AstralError(
("Sun never reaches an elevation of %d degrees" "at this location.")
% elevation
)
else:
raise | python | def time_at_elevation_utc(self, elevation, direction, date, latitude, longitude, observer_elevation=0):
"""Calculate the time in the UTC timezone when the sun is at
the specified elevation on the specified date.
Note: This method uses positive elevations for those above the horizon.
:param elevation: Elevation in degrees above the horizon to calculate for.
:type elevation: float
:param direction: Determines whether the calculated time is for the sun rising or setting.
Use ``astral.SUN_RISING`` or ``astral.SUN_SETTING``. Default is rising.
:type direction: int
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param observer_elevation: Elevation in metres to calculate time at elevation for
:type observer_elevation: int
:return: The UTC date and time at which the sun is at the required
elevation.
:rtype: :class:`~datetime.datetime`
"""
if elevation > 90.0:
elevation = 180.0 - elevation
direction = SUN_SETTING
depression = 90 - elevation
try:
return self._calc_time(depression, direction, date, latitude, longitude, observer_elevation)
except ValueError as exc:
if exc.args[0] == "math domain error":
raise AstralError(
("Sun never reaches an elevation of %d degrees" "at this location.")
% elevation
)
else:
raise | [
"def",
"time_at_elevation_utc",
"(",
"self",
",",
"elevation",
",",
"direction",
",",
"date",
",",
"latitude",
",",
"longitude",
",",
"observer_elevation",
"=",
"0",
")",
":",
"if",
"elevation",
">",
"90.0",
":",
"elevation",
"=",
"180.0",
"-",
"elevation",
"direction",
"=",
"SUN_SETTING",
"depression",
"=",
"90",
"-",
"elevation",
"try",
":",
"return",
"self",
".",
"_calc_time",
"(",
"depression",
",",
"direction",
",",
"date",
",",
"latitude",
",",
"longitude",
",",
"observer_elevation",
")",
"except",
"ValueError",
"as",
"exc",
":",
"if",
"exc",
".",
"args",
"[",
"0",
"]",
"==",
"\"math domain error\"",
":",
"raise",
"AstralError",
"(",
"(",
"\"Sun never reaches an elevation of %d degrees\"",
"\"at this location.\"",
")",
"%",
"elevation",
")",
"else",
":",
"raise"
] | Calculate the time in the UTC timezone when the sun is at
the specified elevation on the specified date.
Note: This method uses positive elevations for those above the horizon.
:param elevation: Elevation in degrees above the horizon to calculate for.
:type elevation: float
:param direction: Determines whether the calculated time is for the sun rising or setting.
Use ``astral.SUN_RISING`` or ``astral.SUN_SETTING``. Default is rising.
:type direction: int
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param observer_elevation: Elevation in metres to calculate time at elevation for
:type observer_elevation: int
:return: The UTC date and time at which the sun is at the required
elevation.
:rtype: :class:`~datetime.datetime` | [
"Calculate",
"the",
"time",
"in",
"the",
"UTC",
"timezone",
"when",
"the",
"sun",
"is",
"at",
"the",
"specified",
"elevation",
"on",
"the",
"specified",
"date",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L2213-L2252 | train |
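The method folds elevations past the zenith back onto the setting branch and hands _calc_time the complementary angle (90 - elevation) as its depression. A standalone sketch of just that mapping:

def elevation_to_calc_args(elevation, direction):
    # Elevations beyond 90 degrees are reflected and treated as the setting
    # branch; the value passed on as "depression" is simply the zenith angle.
    if elevation > 90.0:
        elevation = 180.0 - elevation
        direction = 'setting'
    return 90 - elevation, direction

print(elevation_to_calc_args(120, 'rising'))  # (30.0, 'setting')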
sffjunkie/astral | src/astral.py | Astral.solar_azimuth | def solar_azimuth(self, dateandtime, latitude, longitude):
"""Calculate the azimuth angle of the sun.
:param dateandtime: The date and time for which to calculate
the angle.
:type dateandtime: :class:`~datetime.datetime`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:return: The azimuth angle in degrees clockwise from North.
:rtype: float
If `dateandtime` is a naive Python datetime then it is assumed to be
in the UTC timezone.
"""
if latitude > 89.8:
latitude = 89.8
if latitude < -89.8:
latitude = -89.8
if dateandtime.tzinfo is None:
zone = 0
utc_datetime = dateandtime
else:
zone = -dateandtime.utcoffset().total_seconds() / 3600.0
utc_datetime = dateandtime.astimezone(pytz.utc)
timenow = (
utc_datetime.hour
+ (utc_datetime.minute / 60.0)
+ (utc_datetime.second / 3600.0)
)
JD = self._julianday(dateandtime)
t = self._jday_to_jcentury(JD + timenow / 24.0)
theta = self._sun_declination(t)
eqtime = self._eq_of_time(t)
solarDec = theta # in degrees
solarTimeFix = eqtime - (4.0 * -longitude) + (60 * zone)
trueSolarTime = (
dateandtime.hour * 60.0
+ dateandtime.minute
+ dateandtime.second / 60.0
+ solarTimeFix
)
# in minutes
while trueSolarTime > 1440:
trueSolarTime = trueSolarTime - 1440
hourangle = trueSolarTime / 4.0 - 180.0
# Thanks to Louis Schwarzmayr for the next line:
if hourangle < -180:
hourangle = hourangle + 360.0
harad = radians(hourangle)
csz = sin(radians(latitude)) * sin(radians(solarDec)) + cos(
radians(latitude)
) * cos(radians(solarDec)) * cos(harad)
if csz > 1.0:
csz = 1.0
elif csz < -1.0:
csz = -1.0
zenith = degrees(acos(csz))
azDenom = cos(radians(latitude)) * sin(radians(zenith))
if abs(azDenom) > 0.001:
azRad = (
(sin(radians(latitude)) * cos(radians(zenith))) - sin(radians(solarDec))
) / azDenom
if abs(azRad) > 1.0:
if azRad < 0:
azRad = -1.0
else:
azRad = 1.0
azimuth = 180.0 - degrees(acos(azRad))
if hourangle > 0.0:
azimuth = -azimuth
else:
if latitude > 0.0:
azimuth = 180.0
else:
azimuth = 0.0
if azimuth < 0.0:
azimuth = azimuth + 360.0
return azimuth | python | def solar_azimuth(self, dateandtime, latitude, longitude):
"""Calculate the azimuth angle of the sun.
:param dateandtime: The date and time for which to calculate
the angle.
:type dateandtime: :class:`~datetime.datetime`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:return: The azimuth angle in degrees clockwise from North.
:rtype: float
If `dateandtime` is a naive Python datetime then it is assumed to be
in the UTC timezone.
"""
if latitude > 89.8:
latitude = 89.8
if latitude < -89.8:
latitude = -89.8
if dateandtime.tzinfo is None:
zone = 0
utc_datetime = dateandtime
else:
zone = -dateandtime.utcoffset().total_seconds() / 3600.0
utc_datetime = dateandtime.astimezone(pytz.utc)
timenow = (
utc_datetime.hour
+ (utc_datetime.minute / 60.0)
+ (utc_datetime.second / 3600.0)
)
JD = self._julianday(dateandtime)
t = self._jday_to_jcentury(JD + timenow / 24.0)
theta = self._sun_declination(t)
eqtime = self._eq_of_time(t)
solarDec = theta # in degrees
solarTimeFix = eqtime - (4.0 * -longitude) + (60 * zone)
trueSolarTime = (
dateandtime.hour * 60.0
+ dateandtime.minute
+ dateandtime.second / 60.0
+ solarTimeFix
)
# in minutes
while trueSolarTime > 1440:
trueSolarTime = trueSolarTime - 1440
hourangle = trueSolarTime / 4.0 - 180.0
# Thanks to Louis Schwarzmayr for the next line:
if hourangle < -180:
hourangle = hourangle + 360.0
harad = radians(hourangle)
csz = sin(radians(latitude)) * sin(radians(solarDec)) + cos(
radians(latitude)
) * cos(radians(solarDec)) * cos(harad)
if csz > 1.0:
csz = 1.0
elif csz < -1.0:
csz = -1.0
zenith = degrees(acos(csz))
azDenom = cos(radians(latitude)) * sin(radians(zenith))
if abs(azDenom) > 0.001:
azRad = (
(sin(radians(latitude)) * cos(radians(zenith))) - sin(radians(solarDec))
) / azDenom
if abs(azRad) > 1.0:
if azRad < 0:
azRad = -1.0
else:
azRad = 1.0
azimuth = 180.0 - degrees(acos(azRad))
if hourangle > 0.0:
azimuth = -azimuth
else:
if latitude > 0.0:
azimuth = 180.0
else:
azimuth = 0.0
if azimuth < 0.0:
azimuth = azimuth + 360.0
return azimuth | [
"def",
"solar_azimuth",
"(",
"self",
",",
"dateandtime",
",",
"latitude",
",",
"longitude",
")",
":",
"if",
"latitude",
">",
"89.8",
":",
"latitude",
"=",
"89.8",
"if",
"latitude",
"<",
"-",
"89.8",
":",
"latitude",
"=",
"-",
"89.8",
"if",
"dateandtime",
".",
"tzinfo",
"is",
"None",
":",
"zone",
"=",
"0",
"utc_datetime",
"=",
"dateandtime",
"else",
":",
"zone",
"=",
"-",
"dateandtime",
".",
"utcoffset",
"(",
")",
".",
"total_seconds",
"(",
")",
"/",
"3600.0",
"utc_datetime",
"=",
"dateandtime",
".",
"astimezone",
"(",
"pytz",
".",
"utc",
")",
"timenow",
"=",
"(",
"utc_datetime",
".",
"hour",
"+",
"(",
"utc_datetime",
".",
"minute",
"/",
"60.0",
")",
"+",
"(",
"utc_datetime",
".",
"second",
"/",
"3600.0",
")",
")",
"JD",
"=",
"self",
".",
"_julianday",
"(",
"dateandtime",
")",
"t",
"=",
"self",
".",
"_jday_to_jcentury",
"(",
"JD",
"+",
"timenow",
"/",
"24.0",
")",
"theta",
"=",
"self",
".",
"_sun_declination",
"(",
"t",
")",
"eqtime",
"=",
"self",
".",
"_eq_of_time",
"(",
"t",
")",
"solarDec",
"=",
"theta",
"# in degrees",
"solarTimeFix",
"=",
"eqtime",
"-",
"(",
"4.0",
"*",
"-",
"longitude",
")",
"+",
"(",
"60",
"*",
"zone",
")",
"trueSolarTime",
"=",
"(",
"dateandtime",
".",
"hour",
"*",
"60.0",
"+",
"dateandtime",
".",
"minute",
"+",
"dateandtime",
".",
"second",
"/",
"60.0",
"+",
"solarTimeFix",
")",
"# in minutes",
"while",
"trueSolarTime",
">",
"1440",
":",
"trueSolarTime",
"=",
"trueSolarTime",
"-",
"1440",
"hourangle",
"=",
"trueSolarTime",
"/",
"4.0",
"-",
"180.0",
"# Thanks to Louis Schwarzmayr for the next line:",
"if",
"hourangle",
"<",
"-",
"180",
":",
"hourangle",
"=",
"hourangle",
"+",
"360.0",
"harad",
"=",
"radians",
"(",
"hourangle",
")",
"csz",
"=",
"sin",
"(",
"radians",
"(",
"latitude",
")",
")",
"*",
"sin",
"(",
"radians",
"(",
"solarDec",
")",
")",
"+",
"cos",
"(",
"radians",
"(",
"latitude",
")",
")",
"*",
"cos",
"(",
"radians",
"(",
"solarDec",
")",
")",
"*",
"cos",
"(",
"harad",
")",
"if",
"csz",
">",
"1.0",
":",
"csz",
"=",
"1.0",
"elif",
"csz",
"<",
"-",
"1.0",
":",
"csz",
"=",
"-",
"1.0",
"zenith",
"=",
"degrees",
"(",
"acos",
"(",
"csz",
")",
")",
"azDenom",
"=",
"cos",
"(",
"radians",
"(",
"latitude",
")",
")",
"*",
"sin",
"(",
"radians",
"(",
"zenith",
")",
")",
"if",
"abs",
"(",
"azDenom",
")",
">",
"0.001",
":",
"azRad",
"=",
"(",
"(",
"sin",
"(",
"radians",
"(",
"latitude",
")",
")",
"*",
"cos",
"(",
"radians",
"(",
"zenith",
")",
")",
")",
"-",
"sin",
"(",
"radians",
"(",
"solarDec",
")",
")",
")",
"/",
"azDenom",
"if",
"abs",
"(",
"azRad",
")",
">",
"1.0",
":",
"if",
"azRad",
"<",
"0",
":",
"azRad",
"=",
"-",
"1.0",
"else",
":",
"azRad",
"=",
"1.0",
"azimuth",
"=",
"180.0",
"-",
"degrees",
"(",
"acos",
"(",
"azRad",
")",
")",
"if",
"hourangle",
">",
"0.0",
":",
"azimuth",
"=",
"-",
"azimuth",
"else",
":",
"if",
"latitude",
">",
"0.0",
":",
"azimuth",
"=",
"180.0",
"else",
":",
"azimuth",
"=",
"0.0",
"if",
"azimuth",
"<",
"0.0",
":",
"azimuth",
"=",
"azimuth",
"+",
"360.0",
"return",
"azimuth"
] | Calculate the azimuth angle of the sun.
:param dateandtime: The date and time for which to calculate
the angle.
:type dateandtime: :class:`~datetime.datetime`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:return: The azimuth angle in degrees clockwise from North.
:rtype: float
If `dateandtime` is a naive Python datetime then it is assumed to be
in the UTC timezone. | [
"Calculate",
"the",
"azimuth",
"angle",
"of",
"the",
"sun",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L2254-L2353 | train |
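At the heart of the method is the spherical-trig identity cos(zenith) = sin(lat) * sin(dec) + cos(lat) * cos(dec) * cos(hour angle), clamped to [-1, 1] before the arccosine. A standalone sketch of that step with a sanity check:

from math import acos, cos, degrees, radians, sin

def zenith_from_hour_angle(latitude, declination, hour_angle):
    # All angles in degrees; mirrors the csz computation and clamping above.
    csz = (sin(radians(latitude)) * sin(radians(declination))
           + cos(radians(latitude)) * cos(radians(declination)) * cos(radians(hour_angle)))
    csz = max(-1.0, min(1.0, csz))
    return degrees(acos(csz))

# Equator, equinox (declination 0), sun on the meridian (hour angle 0):
# the sun is directly overhead, so the zenith angle is 0 degrees.
print(zenith_from_hour_angle(0.0, 0.0, 0.0))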
sffjunkie/astral | src/astral.py | Astral.solar_zenith | def solar_zenith(self, dateandtime, latitude, longitude):
"""Calculates the solar zenith angle.
:param dateandtime: The date and time for which to calculate
the angle.
:type dateandtime: :class:`~datetime.datetime`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:return: The zenith angle in degrees from vertical.
:rtype: float
If `dateandtime` is a naive Python datetime then it is assumed to be
in the UTC timezone.
"""
return 90.0 - self.solar_elevation(dateandtime, latitude, longitude) | python | def solar_zenith(self, dateandtime, latitude, longitude):
"""Calculates the solar zenith angle.
:param dateandtime: The date and time for which to calculate
the angle.
:type dateandtime: :class:`~datetime.datetime`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:return: The zenith angle in degrees from vertical.
:rtype: float
If `dateandtime` is a naive Python datetime then it is assumed to be
in the UTC timezone.
"""
return 90.0 - self.solar_elevation(dateandtime, latitude, longitude) | [
"def",
"solar_zenith",
"(",
"self",
",",
"dateandtime",
",",
"latitude",
",",
"longitude",
")",
":",
"return",
"90.0",
"-",
"self",
".",
"solar_elevation",
"(",
"dateandtime",
",",
"latitude",
",",
"longitude",
")"
] | Calculates the solar zenith angle.
:param dateandtime: The date and time for which to calculate
the angle.
:type dateandtime: :class:`~datetime.datetime`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:return: The zenith angle in degrees from vertical.
:rtype: float
If `dateandtime` is a naive Python datetime then it is assumed to be
in the UTC timezone. | [
"Calculates",
"the",
"solar",
"zenith",
"angle",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L2482-L2500 | train |
sffjunkie/astral | src/astral.py | Astral.moon_phase | def moon_phase(self, date, rtype=int):
"""Calculates the phase of the moon on the specified date.
:param date: The date to calculate the phase for.
:type date: :class:`datetime.date`
:param rtype: The type to return either int (default) or float.
:return:
A number designating the phase.
| 0 = New moon
| 7 = First quarter
| 14 = Full moon
| 21 = Last quarter
"""
if rtype != float and rtype != int:
rtype = int
moon = self._moon_phase_asfloat(date)
if moon >= 28.0:
moon -= 28.0
moon = rtype(moon)
return moon | python | def moon_phase(self, date, rtype=int):
"""Calculates the phase of the moon on the specified date.
:param date: The date to calculate the phase for.
:type date: :class:`datetime.date`
:param rtype: The type to return either int (default) or float.
:return:
A number designating the phase.
| 0 = New moon
| 7 = First quarter
| 14 = Full moon
| 21 = Last quarter
"""
if rtype != float and rtype != int:
rtype = int
moon = self._moon_phase_asfloat(date)
if moon >= 28.0:
moon -= 28.0
moon = rtype(moon)
return moon | [
"def",
"moon_phase",
"(",
"self",
",",
"date",
",",
"rtype",
"=",
"int",
")",
":",
"if",
"rtype",
"!=",
"float",
"and",
"rtype",
"!=",
"int",
":",
"rtype",
"=",
"int",
"moon",
"=",
"self",
".",
"_moon_phase_asfloat",
"(",
"date",
")",
"if",
"moon",
">=",
"28.0",
":",
"moon",
"-=",
"28.0",
"moon",
"=",
"rtype",
"(",
"moon",
")",
"return",
"moon"
] | Calculates the phase of the moon on the specified date.
:param date: The date to calculate the phase for.
:type date: :class:`datetime.date`
:param rtype: The type to return either int (default) or float.
:return:
A number designating the phase.
| 0 = New moon
| 7 = First quarter
| 14 = Full moon
| 21 = Last quarter | [
"Calculates",
"the",
"phase",
"of",
"the",
"moon",
"on",
"the",
"specified",
"date",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L2502-L2526 | train |
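The documented contract only pins the four quarter values (0, 7, 14, 21); the intermediate labels in the sketch below are the conventional in-between names, added purely for illustration:

def moon_phase_name(phase):
    # phase is the 0-27.99 number returned by moon_phase(); bucket it into names.
    phase = int(phase) % 28
    if phase == 0:
        return 'new moon'
    if phase < 7:
        return 'waxing crescent'
    if phase == 7:
        return 'first quarter'
    if phase < 14:
        return 'waxing gibbous'
    if phase == 14:
        return 'full moon'
    if phase < 21:
        return 'waning gibbous'
    if phase == 21:
        return 'last quarter'
    return 'waning crescent'

print(moon_phase_name(14))  # full moon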
sffjunkie/astral | src/astral.py | Astral.rahukaalam_utc | def rahukaalam_utc(self, date, latitude, longitude, observer_elevation=0):
"""Calculate ruhakaalam times in the UTC timezone.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param observer_elevation: Elevation in metres to calculate Rahukaalam for
:type observer_elevation: int
:return: Tuple containing the start and end times for Rahukaalam.
:rtype: tuple
"""
if date is None:
date = datetime.date.today()
sunrise = self.sunrise_utc(date, latitude, longitude, observer_elevation)
sunset = self.sunset_utc(date, latitude, longitude, observer_elevation)
octant_duration = datetime.timedelta(seconds=(sunset - sunrise).seconds / 8)
# Mo,Sa,Fr,We,Th,Tu,Su
octant_index = [1, 6, 4, 5, 3, 2, 7]
weekday = date.weekday()
octant = octant_index[weekday]
start = sunrise + (octant_duration * octant)
end = start + octant_duration
return start, end | python | def rahukaalam_utc(self, date, latitude, longitude, observer_elevation=0):
"""Calculate ruhakaalam times in the UTC timezone.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param observer_elevation: Elevation in metres to calculate Rahukaalam for
:type observer_elevation: int
:return: Tuple containing the start and end times for Rahukaalam.
:rtype: tuple
"""
if date is None:
date = datetime.date.today()
sunrise = self.sunrise_utc(date, latitude, longitude, observer_elevation)
sunset = self.sunset_utc(date, latitude, longitude, observer_elevation)
octant_duration = datetime.timedelta(seconds=(sunset - sunrise).seconds / 8)
# Mo,Sa,Fr,We,Th,Tu,Su
octant_index = [1, 6, 4, 5, 3, 2, 7]
weekday = date.weekday()
octant = octant_index[weekday]
start = sunrise + (octant_duration * octant)
end = start + octant_duration
return start, end | [
"def",
"rahukaalam_utc",
"(",
"self",
",",
"date",
",",
"latitude",
",",
"longitude",
",",
"observer_elevation",
"=",
"0",
")",
":",
"if",
"date",
"is",
"None",
":",
"date",
"=",
"datetime",
".",
"date",
".",
"today",
"(",
")",
"sunrise",
"=",
"self",
".",
"sunrise_utc",
"(",
"date",
",",
"latitude",
",",
"longitude",
",",
"observer_elevation",
")",
"sunset",
"=",
"self",
".",
"sunset_utc",
"(",
"date",
",",
"latitude",
",",
"longitude",
",",
"observer_elevation",
")",
"octant_duration",
"=",
"datetime",
".",
"timedelta",
"(",
"seconds",
"=",
"(",
"sunset",
"-",
"sunrise",
")",
".",
"seconds",
"/",
"8",
")",
"# Mo,Sa,Fr,We,Th,Tu,Su",
"octant_index",
"=",
"[",
"1",
",",
"6",
",",
"4",
",",
"5",
",",
"3",
",",
"2",
",",
"7",
"]",
"weekday",
"=",
"date",
".",
"weekday",
"(",
")",
"octant",
"=",
"octant_index",
"[",
"weekday",
"]",
"start",
"=",
"sunrise",
"+",
"(",
"octant_duration",
"*",
"octant",
")",
"end",
"=",
"start",
"+",
"octant_duration",
"return",
"start",
",",
"end"
] | Calculate Rahukaalam times in the UTC timezone.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be positive
:type longitude: float
:param observer_elevation: Elevation in metres to calculate Rahukaalam for
:type observer_elevation: int
:return: Tuple containing the start and end times for Rahukaalam.
:rtype: tuple | [
"Calculate",
"ruhakaalam",
"times",
"in",
"the",
"UTC",
"timezone",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L2528-L2561 | train |
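Rahukaalam occupies one eighth of the sunrise-to-sunset span, and which eighth depends only on the weekday via the lookup table above (date.weekday() is Monday=0 .. Sunday=6). A standalone sketch of the octant selection:

import datetime

def rahukaalam_octant(date):
    # Index of the daylight octant whose start marks the beginning of Rahukaalam.
    octant_index = [1, 6, 4, 5, 3, 2, 7]
    return octant_index[date.weekday()]

# Monday -> octant 1: Rahukaalam runs from 1/8 to 2/8 of the daylight span.
print(rahukaalam_octant(datetime.date(2019, 4, 1)))  # 1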
sffjunkie/astral | src/astral.py | Astral._depression_adjustment | def _depression_adjustment(self, elevation):
"""Calculate the extra degrees of depression due to the increase in elevation.
:param elevation: Elevation above the earth in metres
:type elevation: float
"""
if elevation <= 0:
return 0
r = 6356900 # radius of the earth
a1 = r
h1 = r + elevation
theta1 = acos(a1 / h1)
a2 = r * sin(theta1)
b2 = r - (r * cos(theta1))
h2 = sqrt(pow(a2, 2) + pow(b2, 2))
alpha = acos(a2 / h2)
return degrees(alpha) | python | def _depression_adjustment(self, elevation):
"""Calculate the extra degrees of depression due to the increase in elevation.
:param elevation: Elevation above the earth in metres
:type elevation: float
"""
if elevation <= 0:
return 0
r = 6356900 # radius of the earth
a1 = r
h1 = r + elevation
theta1 = acos(a1 / h1)
a2 = r * sin(theta1)
b2 = r - (r * cos(theta1))
h2 = sqrt(pow(a2, 2) + pow(b2, 2))
alpha = acos(a2 / h2)
return degrees(alpha) | [
"def",
"_depression_adjustment",
"(",
"self",
",",
"elevation",
")",
":",
"if",
"elevation",
"<=",
"0",
":",
"return",
"0",
"r",
"=",
"6356900",
"# radius of the earth",
"a1",
"=",
"r",
"h1",
"=",
"r",
"+",
"elevation",
"theta1",
"=",
"acos",
"(",
"a1",
"/",
"h1",
")",
"a2",
"=",
"r",
"*",
"sin",
"(",
"theta1",
")",
"b2",
"=",
"r",
"-",
"(",
"r",
"*",
"cos",
"(",
"theta1",
")",
")",
"h2",
"=",
"sqrt",
"(",
"pow",
"(",
"a2",
",",
"2",
")",
"+",
"pow",
"(",
"b2",
",",
"2",
")",
")",
"alpha",
"=",
"acos",
"(",
"a2",
"/",
"h2",
")",
"return",
"degrees",
"(",
"alpha",
")"
] | Calculate the extra degrees of depression due to the increase in elevation.
:param elevation: Elevation above the earth in metres
:type elevation: float | [
"Calculate",
"the",
"extra",
"degrees",
"of",
"depression",
"due",
"to",
"the",
"increase",
"in",
"elevation",
"."
] | b0aa63fce692357cd33c2bf36c69ed5b6582440c | https://github.com/sffjunkie/astral/blob/b0aa63fce692357cd33c2bf36c69ed5b6582440c/src/astral.py#L2807-L2827 | train |
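The geometry above places the observer at radius r + elevation looking along a tangent to the sphere; alpha is the resulting dip of the apparent horizon. A standalone sketch of the same construction, runnable on its own:

from math import acos, cos, degrees, sin, sqrt

def dip_of_horizon(elevation, r=6356900):
    # Extra depression (degrees) of the horizon for an observer `elevation`
    # metres above a sphere of radius r -- the same formula as the method above.
    if elevation <= 0:
        return 0.0
    theta1 = acos(r / (r + elevation))
    a2 = r * sin(theta1)
    b2 = r - r * cos(theta1)
    return degrees(acos(a2 / sqrt(a2 ** 2 + b2 ** 2)))

# About 0.16 degrees of extra depression for a 100 m observer under this formula.
print(dip_of_horizon(100))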
joowani/kq | example/worker-cli.py | callback | def callback(status, message, job, result, exception, stacktrace):
"""Example callback function.
:param status: Job status. Possible values are "invalid" (job could not be
deserialized or was malformed), "failure" (job raised an exception),
"timeout" (job timed out), or "success" (job finished successfully and
returned a result).
:type status: str
:param message: Kafka message.
:type message: kq.Message
:param job: Job object, or None if **status** was "invalid".
:type job: kq.Job
:param result: Job result, or None if an exception was raised.
:type result: object | None
:param exception: Exception raised by job, or None if there was none.
:type exception: Exception | None
:param stacktrace: Exception traceback, or None if there was none.
:type stacktrace: str | None
"""
assert status in ['invalid', 'success', 'timeout', 'failure']
assert isinstance(message, Message)
if status == 'invalid':
assert job is None
assert result is None
assert exception is None
assert stacktrace is None
if status == 'success':
assert isinstance(job, Job)
assert exception is None
assert stacktrace is None
elif status == 'timeout':
assert isinstance(job, Job)
assert result is None
assert exception is None
assert stacktrace is None
elif status == 'failure':
assert isinstance(job, Job)
assert result is None
assert exception is not None
assert stacktrace is not None | python | def callback(status, message, job, result, exception, stacktrace):
"""Example callback function.
:param status: Job status. Possible values are "invalid" (job could not be
deserialized or was malformed), "failure" (job raised an exception),
"timeout" (job timed out), or "success" (job finished successfully and
returned a result).
:type status: str
:param message: Kafka message.
:type message: kq.Message
:param job: Job object, or None if **status** was "invalid".
:type job: kq.Job
:param result: Job result, or None if an exception was raised.
:type result: object | None
:param exception: Exception raised by job, or None if there was none.
:type exception: Exception | None
:param stacktrace: Exception traceback, or None if there was none.
:type stacktrace: str | None
"""
assert status in ['invalid', 'success', 'timeout', 'failure']
assert isinstance(message, Message)
if status == 'invalid':
assert job is None
assert result is None
assert exception is None
assert stacktrace is None
if status == 'success':
assert isinstance(job, Job)
assert exception is None
assert stacktrace is None
elif status == 'timeout':
assert isinstance(job, Job)
assert result is None
assert exception is None
assert stacktrace is None
elif status == 'failure':
assert isinstance(job, Job)
assert result is None
assert exception is not None
assert stacktrace is not None | [
"def",
"callback",
"(",
"status",
",",
"message",
",",
"job",
",",
"result",
",",
"exception",
",",
"stacktrace",
")",
":",
"assert",
"status",
"in",
"[",
"'invalid'",
",",
"'success'",
",",
"'timeout'",
",",
"'failure'",
"]",
"assert",
"isinstance",
"(",
"message",
",",
"Message",
")",
"if",
"status",
"==",
"'invalid'",
":",
"assert",
"job",
"is",
"None",
"assert",
"result",
"is",
"None",
"assert",
"exception",
"is",
"None",
"assert",
"stacktrace",
"is",
"None",
"if",
"status",
"==",
"'success'",
":",
"assert",
"isinstance",
"(",
"job",
",",
"Job",
")",
"assert",
"exception",
"is",
"None",
"assert",
"stacktrace",
"is",
"None",
"elif",
"status",
"==",
"'timeout'",
":",
"assert",
"isinstance",
"(",
"job",
",",
"Job",
")",
"assert",
"result",
"is",
"None",
"assert",
"exception",
"is",
"None",
"assert",
"stacktrace",
"is",
"None",
"elif",
"status",
"==",
"'failure'",
":",
"assert",
"isinstance",
"(",
"job",
",",
"Job",
")",
"assert",
"result",
"is",
"None",
"assert",
"exception",
"is",
"not",
"None",
"assert",
"stacktrace",
"is",
"not",
"None"
] | Example callback function.
:param status: Job status. Possible values are "invalid" (job could not be
deserialized or was malformed), "failure" (job raised an exception),
"timeout" (job timed out), or "success" (job finished successfully and
returned a result).
:type status: str
:param message: Kafka message.
:type message: kq.Message
:param job: Job object, or None if **status** was "invalid".
:type job: kq.Job
:param result: Job result, or None if an exception was raised.
:type result: object | None
:param exception: Exception raised by job, or None if there was none.
:type exception: Exception | None
:param stacktrace: Exception traceback, or None if there was none.
:type stacktrace: str | None | [
"Example",
"callback",
"function",
"."
] | f5ff3f1828cc1d9de668f82b2d18a98026ce4281 | https://github.com/joowani/kq/blob/f5ff3f1828cc1d9de668f82b2d18a98026ce4281/example/worker-cli.py#L19-L62 | train |
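Any function with the same six-parameter signature can serve as a callback; it only has to honour the status contract spelled out above. A minimal sketch that logs failures only, using just the attributes these entries show on Job and Message (job.id, message.topic/partition/offset):

import logging

def log_failures_only(status, message, job, result, exception, stacktrace):
    # Per the contract, job and exception are non-None when status == 'failure'.
    if status == 'failure':
        logging.error('Job %s failed on %s[%d]@%d: %s',
                      job.id, message.topic, message.partition, message.offset,
                      exception)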
joowani/kq | kq/worker.py | Worker._execute_callback | def _execute_callback(self, status, message, job, res, err, stacktrace):
"""Execute the callback.
:param status: Job status. Possible values are "invalid" (job could not
be deserialized or was malformed), "failure" (job raised an error),
"timeout" (job timed out), or "success" (job finished successfully
and returned a result).
:type status: str
:param message: Kafka message.
:type message: :doc:`kq.Message <message>`
:param job: Job object, or None if **status** was "invalid".
:type job: kq.Job
:param res: Job result, or None if an exception was raised.
:type res: object | None
:param err: Exception raised by job, or None if there was none.
:type err: Exception | None
:param stacktrace: Exception traceback, or None if there was none.
:type stacktrace: str | None
"""
if self._callback is not None:
try:
self._logger.info('Executing callback ...')
self._callback(status, message, job, res, err, stacktrace)
except Exception as e:
self._logger.exception(
'Callback raised an exception: {}'.format(e)) | python | def _execute_callback(self, status, message, job, res, err, stacktrace):
"""Execute the callback.
:param status: Job status. Possible values are "invalid" (job could not
be deserialized or was malformed), "failure" (job raised an error),
"timeout" (job timed out), or "success" (job finished successfully
and returned a result).
:type status: str
:param message: Kafka message.
:type message: :doc:`kq.Message <message>`
:param job: Job object, or None if **status** was "invalid".
:type job: kq.Job
:param res: Job result, or None if an exception was raised.
:type res: object | None
:param err: Exception raised by job, or None if there was none.
:type err: Exception | None
:param stacktrace: Exception traceback, or None if there was none.
:type stacktrace: str | None
"""
if self._callback is not None:
try:
self._logger.info('Executing callback ...')
self._callback(status, message, job, res, err, stacktrace)
except Exception as e:
self._logger.exception(
'Callback raised an exception: {}'.format(e)) | [
"def",
"_execute_callback",
"(",
"self",
",",
"status",
",",
"message",
",",
"job",
",",
"res",
",",
"err",
",",
"stacktrace",
")",
":",
"if",
"self",
".",
"_callback",
"is",
"not",
"None",
":",
"try",
":",
"self",
".",
"_logger",
".",
"info",
"(",
"'Executing callback ...'",
")",
"self",
".",
"_callback",
"(",
"status",
",",
"message",
",",
"job",
",",
"res",
",",
"err",
",",
"stacktrace",
")",
"except",
"Exception",
"as",
"e",
":",
"self",
".",
"_logger",
".",
"exception",
"(",
"'Callback raised an exception: {}'",
".",
"format",
"(",
"e",
")",
")"
] | Execute the callback.
:param status: Job status. Possible values are "invalid" (job could not
be deserialized or was malformed), "failure" (job raised an error),
"timeout" (job timed out), or "success" (job finished successfully
and returned a result).
:type status: str
:param message: Kafka message.
:type message: :doc:`kq.Message <message>`
:param job: Job object, or None if **status** was "invalid".
:type job: kq.Job
:param res: Job result, or None if an exception was raised.
:type res: object | None
:param err: Exception raised by job, or None if there was none.
:type err: Exception | None
:param stacktrace: Exception traceback, or None if there was none.
:type stacktrace: str | None | [
"Execute",
"the",
"callback",
"."
] | f5ff3f1828cc1d9de668f82b2d18a98026ce4281 | https://github.com/joowani/kq/blob/f5ff3f1828cc1d9de668f82b2d18a98026ce4281/kq/worker.py#L106-L131 | train |
joowani/kq | kq/worker.py | Worker._process_message | def _process_message(self, msg):
"""De-serialize the message and execute the job.
:param msg: Kafka message.
:type msg: :doc:`kq.Message <message>`
"""
self._logger.info(
'Processing Message(topic={}, partition={}, offset={}) ...'
.format(msg.topic, msg.partition, msg.offset))
try:
job = self._deserializer(msg.value)
job_repr = get_call_repr(job.func, *job.args, **job.kwargs)
except Exception as err:
self._logger.exception('Job was invalid: {}'.format(err))
self._execute_callback('invalid', msg, None, None, None, None)
else:
self._logger.info('Executing job {}: {}'.format(job.id, job_repr))
if job.timeout:
timer = threading.Timer(job.timeout, _thread.interrupt_main)
timer.start()
else:
timer = None
try:
res = job.func(*job.args, **job.kwargs)
except KeyboardInterrupt:
self._logger.error(
'Job {} timed out or was interrupted'.format(job.id))
self._execute_callback('timeout', msg, job, None, None, None)
except Exception as err:
self._logger.exception(
'Job {} raised an exception:'.format(job.id))
tb = traceback.format_exc()
self._execute_callback('failure', msg, job, None, err, tb)
else:
self._logger.info('Job {} returned: {}'.format(job.id, res))
self._execute_callback('success', msg, job, res, None, None)
finally:
if timer is not None:
timer.cancel() | python | def _process_message(self, msg):
"""De-serialize the message and execute the job.
:param msg: Kafka message.
:type msg: :doc:`kq.Message <message>`
"""
self._logger.info(
'Processing Message(topic={}, partition={}, offset={}) ...'
.format(msg.topic, msg.partition, msg.offset))
try:
job = self._deserializer(msg.value)
job_repr = get_call_repr(job.func, *job.args, **job.kwargs)
except Exception as err:
self._logger.exception('Job was invalid: {}'.format(err))
self._execute_callback('invalid', msg, None, None, None, None)
else:
self._logger.info('Executing job {}: {}'.format(job.id, job_repr))
if job.timeout:
timer = threading.Timer(job.timeout, _thread.interrupt_main)
timer.start()
else:
timer = None
try:
res = job.func(*job.args, **job.kwargs)
except KeyboardInterrupt:
self._logger.error(
'Job {} timed out or was interrupted'.format(job.id))
self._execute_callback('timeout', msg, job, None, None, None)
except Exception as err:
self._logger.exception(
'Job {} raised an exception:'.format(job.id))
tb = traceback.format_exc()
self._execute_callback('failure', msg, job, None, err, tb)
else:
self._logger.info('Job {} returned: {}'.format(job.id, res))
self._execute_callback('success', msg, job, res, None, None)
finally:
if timer is not None:
timer.cancel() | [
"def",
"_process_message",
"(",
"self",
",",
"msg",
")",
":",
"self",
".",
"_logger",
".",
"info",
"(",
"'Processing Message(topic={}, partition={}, offset={}) ...'",
".",
"format",
"(",
"msg",
".",
"topic",
",",
"msg",
".",
"partition",
",",
"msg",
".",
"offset",
")",
")",
"try",
":",
"job",
"=",
"self",
".",
"_deserializer",
"(",
"msg",
".",
"value",
")",
"job_repr",
"=",
"get_call_repr",
"(",
"job",
".",
"func",
",",
"*",
"job",
".",
"args",
",",
"*",
"*",
"job",
".",
"kwargs",
")",
"except",
"Exception",
"as",
"err",
":",
"self",
".",
"_logger",
".",
"exception",
"(",
"'Job was invalid: {}'",
".",
"format",
"(",
"err",
")",
")",
"self",
".",
"_execute_callback",
"(",
"'invalid'",
",",
"msg",
",",
"None",
",",
"None",
",",
"None",
",",
"None",
")",
"else",
":",
"self",
".",
"_logger",
".",
"info",
"(",
"'Executing job {}: {}'",
".",
"format",
"(",
"job",
".",
"id",
",",
"job_repr",
")",
")",
"if",
"job",
".",
"timeout",
":",
"timer",
"=",
"threading",
".",
"Timer",
"(",
"job",
".",
"timeout",
",",
"_thread",
".",
"interrupt_main",
")",
"timer",
".",
"start",
"(",
")",
"else",
":",
"timer",
"=",
"None",
"try",
":",
"res",
"=",
"job",
".",
"func",
"(",
"*",
"job",
".",
"args",
",",
"*",
"*",
"job",
".",
"kwargs",
")",
"except",
"KeyboardInterrupt",
":",
"self",
".",
"_logger",
".",
"error",
"(",
"'Job {} timed out or was interrupted'",
".",
"format",
"(",
"job",
".",
"id",
")",
")",
"self",
".",
"_execute_callback",
"(",
"'timeout'",
",",
"msg",
",",
"job",
",",
"None",
",",
"None",
",",
"None",
")",
"except",
"Exception",
"as",
"err",
":",
"self",
".",
"_logger",
".",
"exception",
"(",
"'Job {} raised an exception:'",
".",
"format",
"(",
"job",
".",
"id",
")",
")",
"tb",
"=",
"traceback",
".",
"format_exc",
"(",
")",
"self",
".",
"_execute_callback",
"(",
"'failure'",
",",
"msg",
",",
"job",
",",
"None",
",",
"err",
",",
"tb",
")",
"else",
":",
"self",
".",
"_logger",
".",
"info",
"(",
"'Job {} returned: {}'",
".",
"format",
"(",
"job",
".",
"id",
",",
"res",
")",
")",
"self",
".",
"_execute_callback",
"(",
"'success'",
",",
"msg",
",",
"job",
",",
"res",
",",
"None",
",",
"None",
")",
"finally",
":",
"if",
"timer",
"is",
"not",
"None",
":",
"timer",
".",
"cancel",
"(",
")"
] | De-serialize the message and execute the job.
:param msg: Kafka message.
:type msg: :doc:`kq.Message <message>` | [
"De",
"-",
"serialize",
"the",
"message",
"and",
"execute",
"the",
"job",
"."
] | f5ff3f1828cc1d9de668f82b2d18a98026ce4281 | https://github.com/joowani/kq/blob/f5ff3f1828cc1d9de668f82b2d18a98026ce4281/kq/worker.py#L133-L173 | train |
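The timeout mechanism relies on threading.Timer firing _thread.interrupt_main(), which raises KeyboardInterrupt in the main thread so the running job can be abandoned. A standalone sketch of that pattern (it only works when called from the main thread):

import _thread
import threading
import time

def run_with_timeout(func, timeout, *args, **kwargs):
    # Same pattern as above: a background timer interrupts the main thread,
    # and the resulting KeyboardInterrupt is treated as a timeout.
    timer = threading.Timer(timeout, _thread.interrupt_main)
    timer.start()
    try:
        return func(*args, **kwargs)
    except KeyboardInterrupt:
        return None
    finally:
        timer.cancel()

print(run_with_timeout(time.sleep, 1, 5))  # None: sleep(5) is cut short after ~1s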
joowani/kq | kq/worker.py | Worker.start | def start(self, max_messages=math.inf, commit_offsets=True):
"""Start processing Kafka messages and executing jobs.
:param max_messages: Maximum number of Kafka messages to process before
stopping. If not set, worker runs until interrupted.
:type max_messages: int
:param commit_offsets: If set to True, consumer offsets are committed
every time a message is processed (default: True).
:type commit_offsets: bool
:return: Total number of messages processed.
:rtype: int
"""
self._logger.info('Starting {} ...'.format(self))
self._consumer.unsubscribe()
self._consumer.subscribe([self.topic])
messages_processed = 0
while messages_processed < max_messages:
record = next(self._consumer)
message = Message(
topic=record.topic,
partition=record.partition,
offset=record.offset,
key=record.key,
value=record.value
)
self._process_message(message)
if commit_offsets:
self._consumer.commit()
messages_processed += 1
return messages_processed | python | def start(self, max_messages=math.inf, commit_offsets=True):
"""Start processing Kafka messages and executing jobs.
:param max_messages: Maximum number of Kafka messages to process before
stopping. If not set, worker runs until interrupted.
:type max_messages: int
:param commit_offsets: If set to True, consumer offsets are committed
every time a message is processed (default: True).
:type commit_offsets: bool
:return: Total number of messages processed.
:rtype: int
"""
self._logger.info('Starting {} ...'.format(self))
self._consumer.unsubscribe()
self._consumer.subscribe([self.topic])
messages_processed = 0
while messages_processed < max_messages:
record = next(self._consumer)
message = Message(
topic=record.topic,
partition=record.partition,
offset=record.offset,
key=record.key,
value=record.value
)
self._process_message(message)
if commit_offsets:
self._consumer.commit()
messages_processed += 1
return messages_processed | [
"def",
"start",
"(",
"self",
",",
"max_messages",
"=",
"math",
".",
"inf",
",",
"commit_offsets",
"=",
"True",
")",
":",
"self",
".",
"_logger",
".",
"info",
"(",
"'Starting {} ...'",
".",
"format",
"(",
"self",
")",
")",
"self",
".",
"_consumer",
".",
"unsubscribe",
"(",
")",
"self",
".",
"_consumer",
".",
"subscribe",
"(",
"[",
"self",
".",
"topic",
"]",
")",
"messages_processed",
"=",
"0",
"while",
"messages_processed",
"<",
"max_messages",
":",
"record",
"=",
"next",
"(",
"self",
".",
"_consumer",
")",
"message",
"=",
"Message",
"(",
"topic",
"=",
"record",
".",
"topic",
",",
"partition",
"=",
"record",
".",
"partition",
",",
"offset",
"=",
"record",
".",
"offset",
",",
"key",
"=",
"record",
".",
"key",
",",
"value",
"=",
"record",
".",
"value",
")",
"self",
".",
"_process_message",
"(",
"message",
")",
"if",
"commit_offsets",
":",
"self",
".",
"_consumer",
".",
"commit",
"(",
")",
"messages_processed",
"+=",
"1",
"return",
"messages_processed"
] | Start processing Kafka messages and executing jobs.
:param max_messages: Maximum number of Kafka messages to process before
stopping. If not set, worker runs until interrupted.
:type max_messages: int
:param commit_offsets: If set to True, consumer offsets are committed
every time a message is processed (default: True).
:type commit_offsets: bool
:return: Total number of messages processed.
:rtype: int | [
"Start",
"processing",
"Kafka",
"messages",
"and",
"executing",
"jobs",
"."
] | f5ff3f1828cc1d9de668f82b2d18a98026ce4281 | https://github.com/joowani/kq/blob/f5ff3f1828cc1d9de668f82b2d18a98026ce4281/kq/worker.py#L229-L264 | train |
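A minimal usage sketch for the start method documented in the record above. It assumes a kq Worker constructed with a topic name and a kafka-python KafkaConsumer; the broker address, group id, and consumer settings are illustrative assumptions, not values taken from this record.

from kafka import KafkaConsumer
from kq import Worker

# Assumed consumer settings; automatic offset commits are disabled here
# because start() commits offsets itself when commit_offsets=True.
consumer = KafkaConsumer(
    bootstrap_servers='127.0.0.1:9092',
    group_id='group',
    enable_auto_commit=False,
)
worker = Worker(topic='topic', consumer=consumer)

# Process at most 100 messages, committing offsets after each one.
processed = worker.start(max_messages=100, commit_offsets=True)
print(processed)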
joowani/kq | kq/utils.py | get_call_repr | def get_call_repr(func, *args, **kwargs):
"""Return the string representation of the function call.
:param func: A callable (e.g. function, method).
:type func: callable
:param args: Positional arguments for the callable.
:param kwargs: Keyword arguments for the callable.
:return: String representation of the function call.
:rtype: str
"""
# Functions, builtins and methods
if ismethod(func) or isfunction(func) or isbuiltin(func):
func_repr = '{}.{}'.format(func.__module__, func.__qualname__)
# A callable class instance
elif not isclass(func) and hasattr(func, '__call__'):
func_repr = '{}.{}'.format(func.__module__, func.__class__.__name__)
else:
func_repr = repr(func)
args_reprs = [repr(arg) for arg in args]
kwargs_reprs = [k + '=' + repr(v) for k, v in sorted(kwargs.items())]
return '{}({})'.format(func_repr, ', '.join(args_reprs + kwargs_reprs)) | python | def get_call_repr(func, *args, **kwargs):
"""Return the string representation of the function call.
:param func: A callable (e.g. function, method).
:type func: callable
:param args: Positional arguments for the callable.
:param kwargs: Keyword arguments for the callable.
:return: String representation of the function call.
:rtype: str
"""
# Functions, builtins and methods
if ismethod(func) or isfunction(func) or isbuiltin(func):
func_repr = '{}.{}'.format(func.__module__, func.__qualname__)
# A callable class instance
elif not isclass(func) and hasattr(func, '__call__'):
func_repr = '{}.{}'.format(func.__module__, func.__class__.__name__)
else:
func_repr = repr(func)
args_reprs = [repr(arg) for arg in args]
kwargs_reprs = [k + '=' + repr(v) for k, v in sorted(kwargs.items())]
return '{}({})'.format(func_repr, ', '.join(args_reprs + kwargs_reprs)) | [
"def",
"get_call_repr",
"(",
"func",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# Functions, builtins and methods",
"if",
"ismethod",
"(",
"func",
")",
"or",
"isfunction",
"(",
"func",
")",
"or",
"isbuiltin",
"(",
"func",
")",
":",
"func_repr",
"=",
"'{}.{}'",
".",
"format",
"(",
"func",
".",
"__module__",
",",
"func",
".",
"__qualname__",
")",
"# A callable class instance",
"elif",
"not",
"isclass",
"(",
"func",
")",
"and",
"hasattr",
"(",
"func",
",",
"'__call__'",
")",
":",
"func_repr",
"=",
"'{}.{}'",
".",
"format",
"(",
"func",
".",
"__module__",
",",
"func",
".",
"__class__",
".",
"__name__",
")",
"else",
":",
"func_repr",
"=",
"repr",
"(",
"func",
")",
"args_reprs",
"=",
"[",
"repr",
"(",
"arg",
")",
"for",
"arg",
"in",
"args",
"]",
"kwargs_reprs",
"=",
"[",
"k",
"+",
"'='",
"+",
"repr",
"(",
"v",
")",
"for",
"k",
",",
"v",
"in",
"sorted",
"(",
"kwargs",
".",
"items",
"(",
")",
")",
"]",
"return",
"'{}({})'",
".",
"format",
"(",
"func_repr",
",",
"', '",
".",
"join",
"(",
"args_reprs",
"+",
"kwargs_reprs",
")",
")"
] | Return the string representation of the function call.
:param func: A callable (e.g. function, method).
:type func: callable
:param args: Positional arguments for the callable.
:param kwargs: Keyword arguments for the callable.
:return: String representation of the function call.
:rtype: str | [
"Return",
"the",
"string",
"representation",
"of",
"the",
"function",
"call",
"."
] | f5ff3f1828cc1d9de668f82b2d18a98026ce4281 | https://github.com/joowani/kq/blob/f5ff3f1828cc1d9de668f82b2d18a98026ce4281/kq/utils.py#L5-L26 | train |
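A small worked example of the get_call_repr helper above. The add function is hypothetical and only illustrates the output format: positional arguments keep their order, keyword arguments are sorted by name, and the prefix is the callable's module and qualified name.

from kq.utils import get_call_repr

def add(a, b=0, c=0):
    return a + b + c

# When run as a top-level script this prints: __main__.add(1, b=2, c=3)
print(get_call_repr(add, 1, c=3, b=2))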
joowani/kq | kq/queue.py | Queue.using | def using(self, timeout=None, key=None, partition=None):
"""Set enqueue specifications such as timeout, key and partition.
:param timeout: Job timeout threshold in seconds. If not set, default
timeout (specified during queue initialization) is used instead.
:type timeout: int | float
:param key: Kafka message key. Jobs with the same keys are sent to the
same topic partition and executed sequentially. Applies only if the
**partition** parameter is not set, and the producer’s partitioner
configuration is left as default. For more details on producers,
refer to kafka-python's documentation_.
:type key: bytes
:param partition: Topic partition the message is sent to. If not set,
the producer's partitioner selects the partition. For more details
on producers, refer to kafka-python's documentation_.
:type partition: int
:return: Enqueue specification object which has an ``enqueue`` method
with the same signature as :func:`kq.queue.Queue.enqueue`.
**Example:**
.. testcode::
import requests
from kafka import KafkaProducer
from kq import Job, Queue
# Set up a Kafka producer.
producer = KafkaProducer(bootstrap_servers='127.0.0.1:9092')
# Set up a queue.
queue = Queue(topic='topic', producer=producer)
url = 'https://www.google.com/'
# Enqueue a function call in partition 0 with message key 'foo'.
queue.using(partition=0, key=b'foo').enqueue(requests.get, url)
# Enqueue a function call with a timeout of 10 seconds.
queue.using(timeout=10).enqueue(requests.get, url)
# Job values are preferred over values set with "using" method.
job = Job(func=requests.get, args=[url], timeout=5)
queue.using(timeout=10).enqueue(job) # timeout is still 5
.. _documentation: http://kafka-python.rtfd.io/en/master/#kafkaproducer
"""
return EnqueueSpec(
topic=self._topic,
producer=self._producer,
serializer=self._serializer,
logger=self._logger,
timeout=timeout or self._timeout,
key=key,
partition=partition
) | python | def using(self, timeout=None, key=None, partition=None):
"""Set enqueue specifications such as timeout, key and partition.
:param timeout: Job timeout threshold in seconds. If not set, default
timeout (specified during queue initialization) is used instead.
:type timeout: int | float
:param key: Kafka message key. Jobs with the same keys are sent to the
same topic partition and executed sequentially. Applies only if the
**partition** parameter is not set, and the producer’s partitioner
configuration is left as default. For more details on producers,
refer to kafka-python's documentation_.
:type key: bytes
:param partition: Topic partition the message is sent to. If not set,
the producer's partitioner selects the partition. For more details
on producers, refer to kafka-python's documentation_.
:type partition: int
:return: Enqueue specification object which has an ``enqueue`` method
with the same signature as :func:`kq.queue.Queue.enqueue`.
**Example:**
.. testcode::
import requests
from kafka import KafkaProducer
from kq import Job, Queue
# Set up a Kafka producer.
producer = KafkaProducer(bootstrap_servers='127.0.0.1:9092')
# Set up a queue.
queue = Queue(topic='topic', producer=producer)
url = 'https://www.google.com/'
# Enqueue a function call in partition 0 with message key 'foo'.
queue.using(partition=0, key=b'foo').enqueue(requests.get, url)
# Enqueue a function call with a timeout of 10 seconds.
queue.using(timeout=10).enqueue(requests.get, url)
# Job values are preferred over values set with "using" method.
job = Job(func=requests.get, args=[url], timeout=5)
queue.using(timeout=10).enqueue(job) # timeout is still 5
.. _documentation: http://kafka-python.rtfd.io/en/master/#kafkaproducer
"""
return EnqueueSpec(
topic=self._topic,
producer=self._producer,
serializer=self._serializer,
logger=self._logger,
timeout=timeout or self._timeout,
key=key,
partition=partition
) | [
"def",
"using",
"(",
"self",
",",
"timeout",
"=",
"None",
",",
"key",
"=",
"None",
",",
"partition",
"=",
"None",
")",
":",
"return",
"EnqueueSpec",
"(",
"topic",
"=",
"self",
".",
"_topic",
",",
"producer",
"=",
"self",
".",
"_producer",
",",
"serializer",
"=",
"self",
".",
"_serializer",
",",
"logger",
"=",
"self",
".",
"_logger",
",",
"timeout",
"=",
"timeout",
"or",
"self",
".",
"_timeout",
",",
"key",
"=",
"key",
",",
"partition",
"=",
"partition",
")"
] | Set enqueue specifications such as timeout, key and partition.
:param timeout: Job timeout threshold in seconds. If not set, default
timeout (specified during queue initialization) is used instead.
:type timeout: int | float
:param key: Kafka message key. Jobs with the same keys are sent to the
same topic partition and executed sequentially. Applies only if the
**partition** parameter is not set, and the producer’s partitioner
configuration is left as default. For more details on producers,
refer to kafka-python's documentation_.
:type key: bytes
:param partition: Topic partition the message is sent to. If not set,
the producer's partitioner selects the partition. For more details
on producers, refer to kafka-python's documentation_.
:type partition: int
:return: Enqueue specification object which has an ``enqueue`` method
with the same signature as :func:`kq.queue.Queue.enqueue`.
**Example:**
.. testcode::
import requests
from kafka import KafkaProducer
from kq import Job, Queue
# Set up a Kafka producer.
producer = KafkaProducer(bootstrap_servers='127.0.0.1:9092')
# Set up a queue.
queue = Queue(topic='topic', producer=producer)
url = 'https://www.google.com/'
# Enqueue a function call in partition 0 with message key 'foo'.
queue.using(partition=0, key=b'foo').enqueue(requests.get, url)
# Enqueue a function call with a timeout of 10 seconds.
queue.using(timeout=10).enqueue(requests.get, url)
# Job values are preferred over values set with "using" method.
job = Job(func=requests.get, args=[url], timeout=5)
queue.using(timeout=10).enqueue(job) # timeout is still 5
.. _documentation: http://kafka-python.rtfd.io/en/master/#kafkaproducer | [
"Set",
"enqueue",
"specifications",
"such",
"as",
"timeout",
"key",
"and",
"partition",
"."
] | f5ff3f1828cc1d9de668f82b2d18a98026ce4281 | https://github.com/joowani/kq/blob/f5ff3f1828cc1d9de668f82b2d18a98026ce4281/kq/queue.py#L207-L263 | train |
pklaus/brother_ql | brother_ql/backends/helpers.py | send | def send(instructions, printer_identifier=None, backend_identifier=None, blocking=True):
"""
Send instruction bytes to a printer.
:param bytes instructions: The instructions to be sent to the printer.
:param str printer_identifier: Identifier for the printer.
:param str backend_identifier: Can enforce the use of a specific backend.
:param bool blocking: Indicates whether the function call should block while waiting for the completion of the printing.
"""
status = {
'instructions_sent': True, # The instructions were sent to the printer.
'outcome': 'unknown', # String description of the outcome of the sending operation like: 'unknown', 'sent', 'printed', 'error'
'printer_state': None, # If the selected backend supports reading back the printer state, this key will contain it.
'did_print': False, # If True, a print was produced. It defaults to False if the outcome is uncertain (due to a backend without read-back capability).
'ready_for_next_job': False, # If True, the printer is ready to receive the next instructions. It defaults to False if the state is unknown.
}
selected_backend = None
if backend_identifier:
selected_backend = backend_identifier
else:
try:
selected_backend = guess_backend(printer_identifier)
except:
logger.info("No backend stated. Selecting the default linux_kernel backend.")
selected_backend = 'linux_kernel'
be = backend_factory(selected_backend)
list_available_devices = be['list_available_devices']
BrotherQLBackend = be['backend_class']
printer = BrotherQLBackend(printer_identifier)
start = time.time()
logger.info('Sending instructions to the printer. Total: %d bytes.', len(instructions))
printer.write(instructions)
status['outcome'] = 'sent'
if not blocking:
return status
if selected_backend == 'network':
""" No need to wait for completion. The network backend doesn't support readback. """
return status
while time.time() - start < 10:
data = printer.read()
if not data:
time.sleep(0.005)
continue
try:
result = interpret_response(data)
except ValueError:
logger.error("TIME %.3f - Couln't understand response: %s", time.time()-start, data)
continue
status['printer_state'] = result
logger.debug('TIME %.3f - result: %s', time.time()-start, result)
if result['errors']:
logger.error('Errors occurred: %s', result['errors'])
status['outcome'] = 'error'
break
if result['status_type'] == 'Printing completed':
status['did_print'] = True
status['outcome'] = 'printed'
if result['status_type'] == 'Phase change' and result['phase_type'] == 'Waiting to receive':
status['ready_for_next_job'] = True
if status['did_print'] and status['ready_for_next_job']:
break
if not status['did_print']:
logger.warning("'printing completed' status not received.")
if not status['ready_for_next_job']:
logger.warning("'waiting to receive' status not received.")
if (not status['did_print']) or (not status['ready_for_next_job']):
logger.warning('Printing potentially not successful?')
if status['did_print'] and status['ready_for_next_job']:
logger.info("Printing was successful. Waiting for the next job.")
return status | python | def send(instructions, printer_identifier=None, backend_identifier=None, blocking=True):
"""
Send instruction bytes to a printer.
:param bytes instructions: The instructions to be sent to the printer.
:param str printer_identifier: Identifier for the printer.
:param str backend_identifier: Can enforce the use of a specific backend.
:param bool blocking: Indicates whether the function call should block while waiting for the completion of the printing.
"""
status = {
'instructions_sent': True, # The instructions were sent to the printer.
'outcome': 'unknown', # String description of the outcome of the sending operation like: 'unknown', 'sent', 'printed', 'error'
'printer_state': None, # If the selected backend supports reading back the printer state, this key will contain it.
'did_print': False, # If True, a print was produced. It defaults to False if the outcome is uncertain (due to a backend without read-back capability).
'ready_for_next_job': False, # If True, the printer is ready to receive the next instructions. It defaults to False if the state is unknown.
}
selected_backend = None
if backend_identifier:
selected_backend = backend_identifier
else:
try:
selected_backend = guess_backend(printer_identifier)
except:
logger.info("No backend stated. Selecting the default linux_kernel backend.")
selected_backend = 'linux_kernel'
be = backend_factory(selected_backend)
list_available_devices = be['list_available_devices']
BrotherQLBackend = be['backend_class']
printer = BrotherQLBackend(printer_identifier)
start = time.time()
logger.info('Sending instructions to the printer. Total: %d bytes.', len(instructions))
printer.write(instructions)
status['outcome'] = 'sent'
if not blocking:
return status
if selected_backend == 'network':
""" No need to wait for completion. The network backend doesn't support readback. """
return status
while time.time() - start < 10:
data = printer.read()
if not data:
time.sleep(0.005)
continue
try:
result = interpret_response(data)
except ValueError:
logger.error("TIME %.3f - Couln't understand response: %s", time.time()-start, data)
continue
status['printer_state'] = result
logger.debug('TIME %.3f - result: %s', time.time()-start, result)
if result['errors']:
logger.error('Errors occurred: %s', result['errors'])
status['outcome'] = 'error'
break
if result['status_type'] == 'Printing completed':
status['did_print'] = True
status['outcome'] = 'printed'
if result['status_type'] == 'Phase change' and result['phase_type'] == 'Waiting to receive':
status['ready_for_next_job'] = True
if status['did_print'] and status['ready_for_next_job']:
break
if not status['did_print']:
logger.warning("'printing completed' status not received.")
if not status['ready_for_next_job']:
logger.warning("'waiting to receive' status not received.")
if (not status['did_print']) or (not status['ready_for_next_job']):
logger.warning('Printing potentially not successful?')
if status['did_print'] and status['ready_for_next_job']:
logger.info("Printing was successful. Waiting for the next job.")
return status | [
"def",
"send",
"(",
"instructions",
",",
"printer_identifier",
"=",
"None",
",",
"backend_identifier",
"=",
"None",
",",
"blocking",
"=",
"True",
")",
":",
"status",
"=",
"{",
"'instructions_sent'",
":",
"True",
",",
"# The instructions were sent to the printer.",
"'outcome'",
":",
"'unknown'",
",",
"# String description of the outcome of the sending operation like: 'unknown', 'sent', 'printed', 'error'",
"'printer_state'",
":",
"None",
",",
"# If the selected backend supports reading back the printer state, this key will contain it.",
"'did_print'",
":",
"False",
",",
"# If True, a print was produced. It defaults to False if the outcome is uncertain (due to a backend without read-back capability).",
"'ready_for_next_job'",
":",
"False",
",",
"# If True, the printer is ready to receive the next instructions. It defaults to False if the state is unknown.",
"}",
"selected_backend",
"=",
"None",
"if",
"backend_identifier",
":",
"selected_backend",
"=",
"backend_identifier",
"else",
":",
"try",
":",
"selected_backend",
"=",
"guess_backend",
"(",
"printer_identifier",
")",
"except",
":",
"logger",
".",
"info",
"(",
"\"No backend stated. Selecting the default linux_kernel backend.\"",
")",
"selected_backend",
"=",
"'linux_kernel'",
"be",
"=",
"backend_factory",
"(",
"selected_backend",
")",
"list_available_devices",
"=",
"be",
"[",
"'list_available_devices'",
"]",
"BrotherQLBackend",
"=",
"be",
"[",
"'backend_class'",
"]",
"printer",
"=",
"BrotherQLBackend",
"(",
"printer_identifier",
")",
"start",
"=",
"time",
".",
"time",
"(",
")",
"logger",
".",
"info",
"(",
"'Sending instructions to the printer. Total: %d bytes.'",
",",
"len",
"(",
"instructions",
")",
")",
"printer",
".",
"write",
"(",
"instructions",
")",
"status",
"[",
"'outcome'",
"]",
"=",
"'sent'",
"if",
"not",
"blocking",
":",
"return",
"status",
"if",
"selected_backend",
"==",
"'network'",
":",
"\"\"\" No need to wait for completion. The network backend doesn't support readback. \"\"\"",
"return",
"status",
"while",
"time",
".",
"time",
"(",
")",
"-",
"start",
"<",
"10",
":",
"data",
"=",
"printer",
".",
"read",
"(",
")",
"if",
"not",
"data",
":",
"time",
".",
"sleep",
"(",
"0.005",
")",
"continue",
"try",
":",
"result",
"=",
"interpret_response",
"(",
"data",
")",
"except",
"ValueError",
":",
"logger",
".",
"error",
"(",
"\"TIME %.3f - Couln't understand response: %s\"",
",",
"time",
".",
"time",
"(",
")",
"-",
"start",
",",
"data",
")",
"continue",
"status",
"[",
"'printer_state'",
"]",
"=",
"result",
"logger",
".",
"debug",
"(",
"'TIME %.3f - result: %s'",
",",
"time",
".",
"time",
"(",
")",
"-",
"start",
",",
"result",
")",
"if",
"result",
"[",
"'errors'",
"]",
":",
"logger",
".",
"error",
"(",
"'Errors occured: %s'",
",",
"result",
"[",
"'errors'",
"]",
")",
"status",
"[",
"'outcome'",
"]",
"=",
"'error'",
"break",
"if",
"result",
"[",
"'status_type'",
"]",
"==",
"'Printing completed'",
":",
"status",
"[",
"'did_print'",
"]",
"=",
"True",
"status",
"[",
"'outcome'",
"]",
"=",
"'printed'",
"if",
"result",
"[",
"'status_type'",
"]",
"==",
"'Phase change'",
"and",
"result",
"[",
"'phase_type'",
"]",
"==",
"'Waiting to receive'",
":",
"status",
"[",
"'ready_for_next_job'",
"]",
"=",
"True",
"if",
"status",
"[",
"'did_print'",
"]",
"and",
"status",
"[",
"'ready_for_next_job'",
"]",
":",
"break",
"if",
"not",
"status",
"[",
"'did_print'",
"]",
":",
"logger",
".",
"warning",
"(",
"\"'printing completed' status not received.\"",
")",
"if",
"not",
"status",
"[",
"'ready_for_next_job'",
"]",
":",
"logger",
".",
"warning",
"(",
"\"'waiting to receive' status not received.\"",
")",
"if",
"(",
"not",
"status",
"[",
"'did_print'",
"]",
")",
"or",
"(",
"not",
"status",
"[",
"'ready_for_next_job'",
"]",
")",
":",
"logger",
".",
"warning",
"(",
"'Printing potentially not successful?'",
")",
"if",
"status",
"[",
"'did_print'",
"]",
"and",
"status",
"[",
"'ready_for_next_job'",
"]",
":",
"logger",
".",
"info",
"(",
"\"Printing was successful. Waiting for the next job.\"",
")",
"return",
"status"
] | Send instruction bytes to a printer.
:param bytes instructions: The instructions to be sent to the printer.
:param str printer_identifier: Identifier for the printer.
:param str backend_identifier: Can enforce the use of a specific backend.
:param bool blocking: Indicates whether the function call should block while waiting for the completion of the printing. | [
"Send",
"instruction",
"bytes",
"to",
"a",
"printer",
"."
] | b551b1fc944873f3a2ead7032d144dfd81011e79 | https://github.com/pklaus/brother_ql/blob/b551b1fc944873f3a2ead7032d144dfd81011e79/brother_ql/backends/helpers.py#L26-L103 | train |
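A hedged end-to-end sketch pairing send with brother_ql's convert helper, the same pairing the print_cmd record further down uses. The printer model, label size, image path, and USB identifier below are assumptions for illustration only.

from brother_ql.raster import BrotherQLRaster
from brother_ql.conversion import convert
from brother_ql.backends.helpers import send

qlr = BrotherQLRaster('QL-700')  # assumed printer model
instructions = convert(qlr=qlr, images=['label.png'], label='62')  # assumed convert() arguments

status = send(
    instructions=instructions,
    printer_identifier='usb://0x04f9:0x2015',  # assumed identifier in the pyusb format
    backend_identifier='pyusb',
    blocking=True,
)
print(status['outcome'])  # one of 'unknown', 'sent', 'printed', 'error'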
pklaus/brother_ql | brother_ql/reader.py | merge_specific_instructions | def merge_specific_instructions(chunks, join_preamble=True, join_raster=True):
"""
Process a list of instructions by merging subsequent instructions with
identical opcodes into "large instructions".
"""
new_instructions = []
last_opcode = None
instruction_buffer = b''
for instruction in chunks:
opcode = match_opcode(instruction)
if join_preamble and OPCODES[opcode][0] == 'preamble' and last_opcode == 'preamble':
instruction_buffer += instruction
elif join_raster and 'raster' in OPCODES[opcode][0] and 'raster' in last_opcode:
instruction_buffer += instruction
else:
if instruction_buffer:
new_instructions.append(instruction_buffer)
instruction_buffer = instruction
last_opcode = OPCODES[opcode][0]
if instruction_buffer:
new_instructions.append(instruction_buffer)
return new_instructions | python | def merge_specific_instructions(chunks, join_preamble=True, join_raster=True):
"""
Process a list of instructions by merging subsequent instructions with
identical opcodes into "large instructions".
"""
new_instructions = []
last_opcode = None
instruction_buffer = b''
for instruction in chunks:
opcode = match_opcode(instruction)
if join_preamble and OPCODES[opcode][0] == 'preamble' and last_opcode == 'preamble':
instruction_buffer += instruction
elif join_raster and 'raster' in OPCODES[opcode][0] and 'raster' in last_opcode:
instruction_buffer += instruction
else:
if instruction_buffer:
new_instructions.append(instruction_buffer)
instruction_buffer = instruction
last_opcode = OPCODES[opcode][0]
if instruction_buffer:
new_instructions.append(instruction_buffer)
return new_instructions | [
"def",
"merge_specific_instructions",
"(",
"chunks",
",",
"join_preamble",
"=",
"True",
",",
"join_raster",
"=",
"True",
")",
":",
"new_instructions",
"=",
"[",
"]",
"last_opcode",
"=",
"None",
"instruction_buffer",
"=",
"b''",
"for",
"instruction",
"in",
"chunks",
":",
"opcode",
"=",
"match_opcode",
"(",
"instruction",
")",
"if",
"join_preamble",
"and",
"OPCODES",
"[",
"opcode",
"]",
"[",
"0",
"]",
"==",
"'preamble'",
"and",
"last_opcode",
"==",
"'preamble'",
":",
"instruction_buffer",
"+=",
"instruction",
"elif",
"join_raster",
"and",
"'raster'",
"in",
"OPCODES",
"[",
"opcode",
"]",
"[",
"0",
"]",
"and",
"'raster'",
"in",
"last_opcode",
":",
"instruction_buffer",
"+=",
"instruction",
"else",
":",
"if",
"instruction_buffer",
":",
"new_instructions",
".",
"append",
"(",
"instruction_buffer",
")",
"instruction_buffer",
"=",
"instruction",
"last_opcode",
"=",
"OPCODES",
"[",
"opcode",
"]",
"[",
"0",
"]",
"if",
"instruction_buffer",
":",
"new_instructions",
".",
"append",
"(",
"instruction_buffer",
")",
"return",
"new_instructions"
] | Process a list of instructions by merging subsequent instructions with
identical opcodes into "large instructions". | [
"Process",
"a",
"list",
"of",
"instructions",
"by",
"merging",
"subsequent",
"instuctions",
"with",
"identical",
"opcodes",
"into",
"large",
"instructions",
"."
] | b551b1fc944873f3a2ead7032d144dfd81011e79 | https://github.com/pklaus/brother_ql/blob/b551b1fc944873f3a2ead7032d144dfd81011e79/brother_ql/reader.py#L209-L230 | train |
pklaus/brother_ql | brother_ql/cli.py | cli | def cli(ctx, *args, **kwargs):
""" Command line interface for the brother_ql Python package. """
backend = kwargs.get('backend', None)
model = kwargs.get('model', None)
printer = kwargs.get('printer', None)
debug = kwargs.get('debug')
# Store the general CLI options in the context meta dictionary.
# The name corresponds to the second half of the respective envvar:
ctx.meta['MODEL'] = model
ctx.meta['BACKEND'] = backend
ctx.meta['PRINTER'] = printer
logging.basicConfig(level='DEBUG' if debug else 'INFO') | python | def cli(ctx, *args, **kwargs):
""" Command line interface for the brother_ql Python package. """
backend = kwargs.get('backend', None)
model = kwargs.get('model', None)
printer = kwargs.get('printer', None)
debug = kwargs.get('debug')
# Store the general CLI options in the context meta dictionary.
# The name corresponds to the second half of the respective envvar:
ctx.meta['MODEL'] = model
ctx.meta['BACKEND'] = backend
ctx.meta['PRINTER'] = printer
logging.basicConfig(level='DEBUG' if debug else 'INFO') | [
"def",
"cli",
"(",
"ctx",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"backend",
"=",
"kwargs",
".",
"get",
"(",
"'backend'",
",",
"None",
")",
"model",
"=",
"kwargs",
".",
"get",
"(",
"'model'",
",",
"None",
")",
"printer",
"=",
"kwargs",
".",
"get",
"(",
"'printer'",
",",
"None",
")",
"debug",
"=",
"kwargs",
".",
"get",
"(",
"'debug'",
")",
"# Store the general CLI options in the context meta dictionary.",
"# The name corresponds to the second half of the respective envvar:",
"ctx",
".",
"meta",
"[",
"'MODEL'",
"]",
"=",
"model",
"ctx",
".",
"meta",
"[",
"'BACKEND'",
"]",
"=",
"backend",
"ctx",
".",
"meta",
"[",
"'PRINTER'",
"]",
"=",
"printer",
"logging",
".",
"basicConfig",
"(",
"level",
"=",
"'DEBUG'",
"if",
"debug",
"else",
"'INFO'",
")"
] | Command line interface for the brother_ql Python package. | [
"Command",
"line",
"interface",
"for",
"the",
"brother_ql",
"Python",
"package",
"."
] | b551b1fc944873f3a2ead7032d144dfd81011e79 | https://github.com/pklaus/brother_ql/blob/b551b1fc944873f3a2ead7032d144dfd81011e79/brother_ql/cli.py#L26-L40 | train |
pklaus/brother_ql | brother_ql/cli.py | env | def env(ctx, *args, **kwargs):
"""
print debug info about running environment
"""
import sys, platform, os, shutil
from pkg_resources import get_distribution, working_set
print("\n##################\n")
print("Information about the running environment of brother_ql.")
print("(Please provide this information when reporting any issue.)\n")
# computer
print("About the computer:")
for attr in ('platform', 'processor', 'release', 'system', 'machine', 'architecture'):
print(' * '+attr.title()+':', getattr(platform, attr)())
# Python
print("About the installed Python version:")
py_version = str(sys.version).replace('\n', ' ')
print(" *", py_version)
# brother_ql
print("About the brother_ql package:")
pkg = get_distribution('brother_ql')
print(" * package location:", pkg.location)
print(" * package version: ", pkg.version)
try:
cli_loc = shutil.which('brother_ql')
except:
cli_loc = 'unknown'
print(" * brother_ql CLI path:", cli_loc)
# brother_ql's requirements
print("About the requirements of brother_ql:")
fmt = " {req:14s} | {spec:10s} | {ins_vers:17s}"
print(fmt.format(req='requirement', spec='requested', ins_vers='installed version'))
print(fmt.format(req='-' * 14, spec='-'*10, ins_vers='-'*17))
requirements = list(pkg.requires())
requirements.sort(key=lambda x: x.project_name)
for req in requirements:
proj = req.project_name
req_pkg = get_distribution(proj)
spec = ' '.join(req.specs[0]) if req.specs else 'any'
print(fmt.format(req=proj, spec=spec, ins_vers=req_pkg.version))
print("\n##################\n") | python | def env(ctx, *args, **kwargs):
"""
print debug info about running environment
"""
import sys, platform, os, shutil
from pkg_resources import get_distribution, working_set
print("\n##################\n")
print("Information about the running environment of brother_ql.")
print("(Please provide this information when reporting any issue.)\n")
# computer
print("About the computer:")
for attr in ('platform', 'processor', 'release', 'system', 'machine', 'architecture'):
print(' * '+attr.title()+':', getattr(platform, attr)())
# Python
print("About the installed Python version:")
py_version = str(sys.version).replace('\n', ' ')
print(" *", py_version)
# brother_ql
print("About the brother_ql package:")
pkg = get_distribution('brother_ql')
print(" * package location:", pkg.location)
print(" * package version: ", pkg.version)
try:
cli_loc = shutil.which('brother_ql')
except:
cli_loc = 'unknown'
print(" * brother_ql CLI path:", cli_loc)
# brother_ql's requirements
print("About the requirements of brother_ql:")
fmt = " {req:14s} | {spec:10s} | {ins_vers:17s}"
print(fmt.format(req='requirement', spec='requested', ins_vers='installed version'))
print(fmt.format(req='-' * 14, spec='-'*10, ins_vers='-'*17))
requirements = list(pkg.requires())
requirements.sort(key=lambda x: x.project_name)
for req in requirements:
proj = req.project_name
req_pkg = get_distribution(proj)
spec = ' '.join(req.specs[0]) if req.specs else 'any'
print(fmt.format(req=proj, spec=spec, ins_vers=req_pkg.version))
print("\n##################\n") | [
"def",
"env",
"(",
"ctx",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"import",
"sys",
",",
"platform",
",",
"os",
",",
"shutil",
"from",
"pkg_resources",
"import",
"get_distribution",
",",
"working_set",
"print",
"(",
"\"\\n##################\\n\"",
")",
"print",
"(",
"\"Information about the running environment of brother_ql.\"",
")",
"print",
"(",
"\"(Please provide this information when reporting any issue.)\\n\"",
")",
"# computer",
"print",
"(",
"\"About the computer:\"",
")",
"for",
"attr",
"in",
"(",
"'platform'",
",",
"'processor'",
",",
"'release'",
",",
"'system'",
",",
"'machine'",
",",
"'architecture'",
")",
":",
"print",
"(",
"' * '",
"+",
"attr",
".",
"title",
"(",
")",
"+",
"':'",
",",
"getattr",
"(",
"platform",
",",
"attr",
")",
"(",
")",
")",
"# Python",
"print",
"(",
"\"About the installed Python version:\"",
")",
"py_version",
"=",
"str",
"(",
"sys",
".",
"version",
")",
".",
"replace",
"(",
"'\\n'",
",",
"' '",
")",
"print",
"(",
"\" *\"",
",",
"py_version",
")",
"# brother_ql",
"print",
"(",
"\"About the brother_ql package:\"",
")",
"pkg",
"=",
"get_distribution",
"(",
"'brother_ql'",
")",
"print",
"(",
"\" * package location:\"",
",",
"pkg",
".",
"location",
")",
"print",
"(",
"\" * package version: \"",
",",
"pkg",
".",
"version",
")",
"try",
":",
"cli_loc",
"=",
"shutil",
".",
"which",
"(",
"'brother_ql'",
")",
"except",
":",
"cli_loc",
"=",
"'unknown'",
"print",
"(",
"\" * brother_ql CLI path:\"",
",",
"cli_loc",
")",
"# brother_ql's requirements",
"print",
"(",
"\"About the requirements of brother_ql:\"",
")",
"fmt",
"=",
"\" {req:14s} | {spec:10s} | {ins_vers:17s}\"",
"print",
"(",
"fmt",
".",
"format",
"(",
"req",
"=",
"'requirement'",
",",
"spec",
"=",
"'requested'",
",",
"ins_vers",
"=",
"'installed version'",
")",
")",
"print",
"(",
"fmt",
".",
"format",
"(",
"req",
"=",
"'-'",
"*",
"14",
",",
"spec",
"=",
"'-'",
"*",
"10",
",",
"ins_vers",
"=",
"'-'",
"*",
"17",
")",
")",
"requirements",
"=",
"list",
"(",
"pkg",
".",
"requires",
"(",
")",
")",
"requirements",
".",
"sort",
"(",
"key",
"=",
"lambda",
"x",
":",
"x",
".",
"project_name",
")",
"for",
"req",
"in",
"requirements",
":",
"proj",
"=",
"req",
".",
"project_name",
"req_pkg",
"=",
"get_distribution",
"(",
"proj",
")",
"spec",
"=",
"' '",
".",
"join",
"(",
"req",
".",
"specs",
"[",
"0",
"]",
")",
"if",
"req",
".",
"specs",
"else",
"'any'",
"print",
"(",
"fmt",
".",
"format",
"(",
"req",
"=",
"proj",
",",
"spec",
"=",
"spec",
",",
"ins_vers",
"=",
"req_pkg",
".",
"version",
")",
")",
"print",
"(",
"\"\\n##################\\n\"",
")"
] | print debug info about running environment | [
"print",
"debug",
"info",
"about",
"running",
"environment"
] | b551b1fc944873f3a2ead7032d144dfd81011e79 | https://github.com/pklaus/brother_ql/blob/b551b1fc944873f3a2ead7032d144dfd81011e79/brother_ql/cli.py#L81-L120 | train |
pklaus/brother_ql | brother_ql/cli.py | print_cmd | def print_cmd(ctx, *args, **kwargs):
""" Print a label of the provided IMAGE. """
backend = ctx.meta.get('BACKEND', 'pyusb')
model = ctx.meta.get('MODEL')
printer = ctx.meta.get('PRINTER')
from brother_ql.conversion import convert
from brother_ql.backends.helpers import send
from brother_ql.raster import BrotherQLRaster
qlr = BrotherQLRaster(model)
qlr.exception_on_warning = True
kwargs['cut'] = not kwargs['no_cut']
del kwargs['no_cut']
instructions = convert(qlr=qlr, **kwargs)
send(instructions=instructions, printer_identifier=printer, backend_identifier=backend, blocking=True) | python | def print_cmd(ctx, *args, **kwargs):
""" Print a label of the provided IMAGE. """
backend = ctx.meta.get('BACKEND', 'pyusb')
model = ctx.meta.get('MODEL')
printer = ctx.meta.get('PRINTER')
from brother_ql.conversion import convert
from brother_ql.backends.helpers import send
from brother_ql.raster import BrotherQLRaster
qlr = BrotherQLRaster(model)
qlr.exception_on_warning = True
kwargs['cut'] = not kwargs['no_cut']
del kwargs['no_cut']
instructions = convert(qlr=qlr, **kwargs)
send(instructions=instructions, printer_identifier=printer, backend_identifier=backend, blocking=True) | [
"def",
"print_cmd",
"(",
"ctx",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"backend",
"=",
"ctx",
".",
"meta",
".",
"get",
"(",
"'BACKEND'",
",",
"'pyusb'",
")",
"model",
"=",
"ctx",
".",
"meta",
".",
"get",
"(",
"'MODEL'",
")",
"printer",
"=",
"ctx",
".",
"meta",
".",
"get",
"(",
"'PRINTER'",
")",
"from",
"brother_ql",
".",
"conversion",
"import",
"convert",
"from",
"brother_ql",
".",
"backends",
".",
"helpers",
"import",
"send",
"from",
"brother_ql",
".",
"raster",
"import",
"BrotherQLRaster",
"qlr",
"=",
"BrotherQLRaster",
"(",
"model",
")",
"qlr",
".",
"exception_on_warning",
"=",
"True",
"kwargs",
"[",
"'cut'",
"]",
"=",
"not",
"kwargs",
"[",
"'no_cut'",
"]",
"del",
"kwargs",
"[",
"'no_cut'",
"]",
"instructions",
"=",
"convert",
"(",
"qlr",
"=",
"qlr",
",",
"*",
"*",
"kwargs",
")",
"send",
"(",
"instructions",
"=",
"instructions",
",",
"printer_identifier",
"=",
"printer",
",",
"backend_identifier",
"=",
"backend",
",",
"blocking",
"=",
"True",
")"
] | Print a label of the provided IMAGE. | [
"Print",
"a",
"label",
"of",
"the",
"provided",
"IMAGE",
"."
] | b551b1fc944873f3a2ead7032d144dfd81011e79 | https://github.com/pklaus/brother_ql/blob/b551b1fc944873f3a2ead7032d144dfd81011e79/brother_ql/cli.py#L134-L147 | train |
pklaus/brother_ql | brother_ql/backends/pyusb.py | list_available_devices | def list_available_devices():
"""
List all available devices for the respective backend
returns: devices: a list of dictionaries with the keys 'identifier' and 'instance': \
[ {'identifier': 'usb://0x04f9:0x2015/C5Z315686', 'instance': pyusb.core.Device()}, ]
The 'identifier' is of the format idVendor:idProduct_iSerialNumber.
"""
class find_class(object):
def __init__(self, class_):
self._class = class_
def __call__(self, device):
# first, let's check the device
if device.bDeviceClass == self._class:
return True
# ok, traverse all devices to find an interface that matches our class
for cfg in device:
# find_descriptor: what's it?
intf = usb.util.find_descriptor(cfg, bInterfaceClass=self._class)
if intf is not None:
return True
return False
# only Brother printers
printers = usb.core.find(find_all=1, custom_match=find_class(7), idVendor=0x04f9)
def identifier(dev):
try:
serial = usb.util.get_string(dev, 256, dev.iSerialNumber)
return 'usb://0x{:04x}:0x{:04x}_{}'.format(dev.idVendor, dev.idProduct, serial)
except:
return 'usb://0x{:04x}:0x{:04x}'.format(dev.idVendor, dev.idProduct)
return [{'identifier': identifier(printer), 'instance': printer} for printer in printers] | python | def list_available_devices():
"""
List all available devices for the respective backend
returns: devices: a list of dictionaries with the keys 'identifier' and 'instance': \
[ {'identifier': 'usb://0x04f9:0x2015/C5Z315686', 'instance': pyusb.core.Device()}, ]
The 'identifier' is of the format idVendor:idProduct_iSerialNumber.
"""
class find_class(object):
def __init__(self, class_):
self._class = class_
def __call__(self, device):
# first, let's check the device
if device.bDeviceClass == self._class:
return True
# ok, traverse all devices to find an interface that matches our class
for cfg in device:
# find_descriptor: what's it?
intf = usb.util.find_descriptor(cfg, bInterfaceClass=self._class)
if intf is not None:
return True
return False
# only Brother printers
printers = usb.core.find(find_all=1, custom_match=find_class(7), idVendor=0x04f9)
def identifier(dev):
try:
serial = usb.util.get_string(dev, 256, dev.iSerialNumber)
return 'usb://0x{:04x}:0x{:04x}_{}'.format(dev.idVendor, dev.idProduct, serial)
except:
return 'usb://0x{:04x}:0x{:04x}'.format(dev.idVendor, dev.idProduct)
return [{'identifier': identifier(printer), 'instance': printer} for printer in printers] | [
"def",
"list_available_devices",
"(",
")",
":",
"class",
"find_class",
"(",
"object",
")",
":",
"def",
"__init__",
"(",
"self",
",",
"class_",
")",
":",
"self",
".",
"_class",
"=",
"class_",
"def",
"__call__",
"(",
"self",
",",
"device",
")",
":",
"# first, let's check the device",
"if",
"device",
".",
"bDeviceClass",
"==",
"self",
".",
"_class",
":",
"return",
"True",
"# ok, transverse all devices to find an interface that matches our class",
"for",
"cfg",
"in",
"device",
":",
"# find_descriptor: what's it?",
"intf",
"=",
"usb",
".",
"util",
".",
"find_descriptor",
"(",
"cfg",
",",
"bInterfaceClass",
"=",
"self",
".",
"_class",
")",
"if",
"intf",
"is",
"not",
"None",
":",
"return",
"True",
"return",
"False",
"# only Brother printers",
"printers",
"=",
"usb",
".",
"core",
".",
"find",
"(",
"find_all",
"=",
"1",
",",
"custom_match",
"=",
"find_class",
"(",
"7",
")",
",",
"idVendor",
"=",
"0x04f9",
")",
"def",
"identifier",
"(",
"dev",
")",
":",
"try",
":",
"serial",
"=",
"usb",
".",
"util",
".",
"get_string",
"(",
"dev",
",",
"256",
",",
"dev",
".",
"iSerialNumber",
")",
"return",
"'usb://0x{:04x}:0x{:04x}_{}'",
".",
"format",
"(",
"dev",
".",
"idVendor",
",",
"dev",
".",
"idProduct",
",",
"serial",
")",
"except",
":",
"return",
"'usb://0x{:04x}:0x{:04x}'",
".",
"format",
"(",
"dev",
".",
"idVendor",
",",
"dev",
".",
"idProduct",
")",
"return",
"[",
"{",
"'identifier'",
":",
"identifier",
"(",
"printer",
")",
",",
"'instance'",
":",
"printer",
"}",
"for",
"printer",
"in",
"printers",
"]"
] | List all available devices for the respective backend
returns: devices: a list of dictionaries with the keys 'identifier' and 'instance': \
[ {'identifier': 'usb://0x04f9:0x2015/C5Z315686', 'instance': pyusb.core.Device()}, ]
The 'identifier' is of the format idVendor:idProduct_iSerialNumber. | [
"List",
"all",
"available",
"devices",
"for",
"the",
"respective",
"backend"
] | b551b1fc944873f3a2ead7032d144dfd81011e79 | https://github.com/pklaus/brother_ql/blob/b551b1fc944873f3a2ead7032d144dfd81011e79/brother_ql/backends/pyusb.py#L21-L55 | train |
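A short usage sketch for the pyusb backend helper above, assuming pyusb is installed and a Brother label printer is attached.

from brother_ql.backends.pyusb import list_available_devices

for device in list_available_devices():
    # identifiers look like usb://0x04f9:0x2015_C5Z315686
    print(device['identifier'])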
pklaus/brother_ql | brother_ql/devicedependent.py | _populate_label_legacy_structures | def _populate_label_legacy_structures():
"""
We contain this code inside a function so that the imports
we do in here are not visible at the module level.
"""
global DIE_CUT_LABEL, ENDLESS_LABEL, ROUND_DIE_CUT_LABEL
global label_sizes, label_type_specs
from brother_ql.labels import FormFactor
DIE_CUT_LABEL = FormFactor.DIE_CUT
ENDLESS_LABEL = FormFactor.ENDLESS
ROUND_DIE_CUT_LABEL = FormFactor.ROUND_DIE_CUT
from brother_ql.labels import LabelsManager
lm = LabelsManager()
label_sizes = list(lm.iter_identifiers())
for label in lm.iter_elements():
l = {}
l['name'] = label.name
l['kind'] = label.form_factor
l['color'] = label.color
l['tape_size'] = label.tape_size
l['dots_total'] = label.dots_total
l['dots_printable'] = label.dots_printable
l['right_margin_dots'] = label.offset_r
l['feed_margin'] = label.feed_margin
l['restrict_printers'] = label.restricted_to_models
label_type_specs[label.identifier] = l | python | def _populate_label_legacy_structures():
"""
We contain this code inside a function so that the imports
we do in here are not visible at the module level.
"""
global DIE_CUT_LABEL, ENDLESS_LABEL, ROUND_DIE_CUT_LABEL
global label_sizes, label_type_specs
from brother_ql.labels import FormFactor
DIE_CUT_LABEL = FormFactor.DIE_CUT
ENDLESS_LABEL = FormFactor.ENDLESS
ROUND_DIE_CUT_LABEL = FormFactor.ROUND_DIE_CUT
from brother_ql.labels import LabelsManager
lm = LabelsManager()
label_sizes = list(lm.iter_identifiers())
for label in lm.iter_elements():
l = {}
l['name'] = label.name
l['kind'] = label.form_factor
l['color'] = label.color
l['tape_size'] = label.tape_size
l['dots_total'] = label.dots_total
l['dots_printable'] = label.dots_printable
l['right_margin_dots'] = label.offset_r
l['feed_margin'] = label.feed_margin
l['restrict_printers'] = label.restricted_to_models
label_type_specs[label.identifier] = l | [
"def",
"_populate_label_legacy_structures",
"(",
")",
":",
"global",
"DIE_CUT_LABEL",
",",
"ENDLESS_LABEL",
",",
"ROUND_DIE_CUT_LABEL",
"global",
"label_sizes",
",",
"label_type_specs",
"from",
"brother_ql",
".",
"labels",
"import",
"FormFactor",
"DIE_CUT_LABEL",
"=",
"FormFactor",
".",
"DIE_CUT",
"ENDLESS_LABEL",
"=",
"FormFactor",
".",
"ENDLESS",
"ROUND_DIE_CUT_LABEL",
"=",
"FormFactor",
".",
"ROUND_DIE_CUT",
"from",
"brother_ql",
".",
"labels",
"import",
"LabelsManager",
"lm",
"=",
"LabelsManager",
"(",
")",
"label_sizes",
"=",
"list",
"(",
"lm",
".",
"iter_identifiers",
"(",
")",
")",
"for",
"label",
"in",
"lm",
".",
"iter_elements",
"(",
")",
":",
"l",
"=",
"{",
"}",
"l",
"[",
"'name'",
"]",
"=",
"label",
".",
"name",
"l",
"[",
"'kind'",
"]",
"=",
"label",
".",
"form_factor",
"l",
"[",
"'color'",
"]",
"=",
"label",
".",
"color",
"l",
"[",
"'tape_size'",
"]",
"=",
"label",
".",
"tape_size",
"l",
"[",
"'dots_total'",
"]",
"=",
"label",
".",
"dots_total",
"l",
"[",
"'dots_printable'",
"]",
"=",
"label",
".",
"dots_printable",
"l",
"[",
"'right_margin_dots'",
"]",
"=",
"label",
".",
"offset_r",
"l",
"[",
"'feed_margin'",
"]",
"=",
"label",
".",
"feed_margin",
"l",
"[",
"'restrict_printers'",
"]",
"=",
"label",
".",
"restricted_to_models",
"label_type_specs",
"[",
"label",
".",
"identifier",
"]",
"=",
"l"
] | We contain this code inside a function so that the imports
we do in here are not visible at the module level. | [
"We",
"contain",
"this",
"code",
"inside",
"a",
"function",
"so",
"that",
"the",
"imports",
"we",
"do",
"in",
"here",
"are",
"not",
"visible",
"at",
"the",
"module",
"level",
"."
] | b551b1fc944873f3a2ead7032d144dfd81011e79 | https://github.com/pklaus/brother_ql/blob/b551b1fc944873f3a2ead7032d144dfd81011e79/brother_ql/devicedependent.py#L59-L86 | train |
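A minimal sketch of reading the legacy structures this function fills in. It assumes they are populated when brother_ql.devicedependent is imported and that '62' (62 mm endless tape) is one of the available label identifiers.

from brother_ql.devicedependent import label_sizes, label_type_specs

print(label_sizes)              # list of label identifiers, e.g. including '62'
spec = label_type_specs['62']   # assumed identifier
print(spec['kind'], spec['dots_printable'], spec['feed_margin'])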
pklaus/brother_ql | brother_ql/backends/__init__.py | guess_backend | def guess_backend(identifier):
""" guess the backend from a given identifier string for the device """
if identifier.startswith('usb://') or identifier.startswith('0x'):
return 'pyusb'
elif identifier.startswith('file://') or identifier.startswith('/dev/usb/') or identifier.startswith('lp'):
return 'linux_kernel'
elif identifier.startswith('tcp://'):
return 'network'
else:
raise ValueError('Cannot guess backend for given identifier: %s' % identifier) | python | def guess_backend(identifier):
""" guess the backend from a given identifier string for the device """
if identifier.startswith('usb://') or identifier.startswith('0x'):
return 'pyusb'
elif identifier.startswith('file://') or identifier.startswith('/dev/usb/') or identifier.startswith('lp'):
return 'linux_kernel'
elif identifier.startswith('tcp://'):
return 'network'
else:
raise ValueError('Cannot guess backend for given identifier: %s' % identifier) | [
"def",
"guess_backend",
"(",
"identifier",
")",
":",
"if",
"identifier",
".",
"startswith",
"(",
"'usb://'",
")",
"or",
"identifier",
".",
"startswith",
"(",
"'0x'",
")",
":",
"return",
"'pyusb'",
"elif",
"identifier",
".",
"startswith",
"(",
"'file://'",
")",
"or",
"identifier",
".",
"startswith",
"(",
"'/dev/usb/'",
")",
"or",
"identifier",
".",
"startswith",
"(",
"'lp'",
")",
":",
"return",
"'linux_kernel'",
"elif",
"identifier",
".",
"startswith",
"(",
"'tcp://'",
")",
":",
"return",
"'network'",
"else",
":",
"raise",
"ValueError",
"(",
"'Cannot guess backend for given identifier: %s'",
"%",
"identifier",
")"
] | guess the backend from a given identifier string for the device | [
"guess",
"the",
"backend",
"from",
"a",
"given",
"identifier",
"string",
"for",
"the",
"device"
] | b551b1fc944873f3a2ead7032d144dfd81011e79 | https://github.com/pklaus/brother_ql/blob/b551b1fc944873f3a2ead7032d144dfd81011e79/brother_ql/backends/__init__.py#L11-L20 | train |
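A few illustrative calls for guess_backend, following directly from its prefix checks above.

from brother_ql.backends import guess_backend

guess_backend('usb://0x04f9:0x2015')  # -> 'pyusb'
guess_backend('/dev/usb/lp0')         # -> 'linux_kernel'
guess_backend('tcp://192.168.0.23')   # -> 'network'
guess_backend('bogus-printer')        # raises ValueError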
gfairchild/yelpapi | yelpapi/yelpapi.py | YelpAPI.featured_event_query | def featured_event_query(self, **kwargs):
"""
Query the Yelp Featured Event API.
documentation: https://www.yelp.com/developers/documentation/v3/featured_event
required parameters:
* one of either:
* location - text specifying a location to search for
* latitude and longitude
"""
if not kwargs.get('location') and (not kwargs.get('latitude') or not kwargs.get('longitude')):
raise ValueError('A valid location (parameter "location") or latitude/longitude combination '
'(parameters "latitude" and "longitude") must be provided.')
return self._query(FEATURED_EVENT_API_URL, **kwargs) | python | def featured_event_query(self, **kwargs):
"""
Query the Yelp Featured Event API.
documentation: https://www.yelp.com/developers/documentation/v3/featured_event
required parameters:
* one of either:
* location - text specifying a location to search for
* latitude and longitude
"""
if not kwargs.get('location') and (not kwargs.get('latitude') or not kwargs.get('longitude')):
raise ValueError('A valid location (parameter "location") or latitude/longitude combination '
'(parameters "latitude" and "longitude") must be provided.')
return self._query(FEATURED_EVENT_API_URL, **kwargs) | [
"def",
"featured_event_query",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"kwargs",
".",
"get",
"(",
"'location'",
")",
"and",
"(",
"not",
"kwargs",
".",
"get",
"(",
"'latitude'",
")",
"or",
"not",
"kwargs",
".",
"get",
"(",
"'longitude'",
")",
")",
":",
"raise",
"ValueError",
"(",
"'A valid location (parameter \"location\") or latitude/longitude combination '",
"'(parameters \"latitude\" and \"longitude\") must be provided.'",
")",
"return",
"self",
".",
"_query",
"(",
"FEATURED_EVENT_API_URL",
",",
"*",
"*",
"kwargs",
")"
] | Query the Yelp Featured Event API.
documentation: https://www.yelp.com/developers/documentation/v3/featured_event
required parameters:
* one of either:
* location - text specifying a location to search for
* latitude and longitude | [
"Query",
"the",
"Yelp",
"Featured",
"Event",
"API",
"."
] | 51e35fbe44ac131630ce5e2f1b6f53711846e2a7 | https://github.com/gfairchild/yelpapi/blob/51e35fbe44ac131630ce5e2f1b6f53711846e2a7/yelpapi/yelpapi.py#L174-L189 | train |
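A minimal usage sketch for the featured event query above, assuming a YelpAPI client constructed with a Yelp Fusion API key (the constructor is outside this record) and an illustrative location string.

from yelpapi import YelpAPI

yelp_api = YelpAPI('YOUR_FUSION_API_KEY')  # assumed constructor argument
response = yelp_api.featured_event_query(location='Austin, TX')
print(response)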
gfairchild/yelpapi | yelpapi/yelpapi.py | YelpAPI._get_clean_parameters | def _get_clean_parameters(kwargs):
"""
Clean the parameters by filtering out any parameters that have a None value.
"""
return dict((k, v) for k, v in kwargs.items() if v is not None) | python | def _get_clean_parameters(kwargs):
"""
Clean the parameters by filtering out any parameters that have a None value.
"""
return dict((k, v) for k, v in kwargs.items() if v is not None) | [
"def",
"_get_clean_parameters",
"(",
"kwargs",
")",
":",
"return",
"dict",
"(",
"(",
"k",
",",
"v",
")",
"for",
"k",
",",
"v",
"in",
"kwargs",
".",
"items",
"(",
")",
"if",
"v",
"is",
"not",
"None",
")"
] | Clean the parameters by filtering out any parameters that have a None value. | [
"Clean",
"the",
"parameters",
"by",
"filtering",
"out",
"any",
"parameters",
"that",
"have",
"a",
"None",
"value",
"."
] | 51e35fbe44ac131630ce5e2f1b6f53711846e2a7 | https://github.com/gfairchild/yelpapi/blob/51e35fbe44ac131630ce5e2f1b6f53711846e2a7/yelpapi/yelpapi.py#L258-L262 | train |
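A tiny worked example of the parameter-cleaning helper above: it simply drops every key whose value is None, so unset optional parameters never reach the API request.

from yelpapi import YelpAPI

params = {'location': 'Austin, TX', 'limit': None, 'sort_by': None}
print(YelpAPI._get_clean_parameters(params))  # {'location': 'Austin, TX'}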
gfairchild/yelpapi | yelpapi/yelpapi.py | YelpAPI._query | def _query(self, url, **kwargs):
"""
All query methods have the same logic, so don't repeat it! Query the URL, parse the response as JSON,
and check for errors. If all goes well, return the parsed JSON.
"""
parameters = YelpAPI._get_clean_parameters(kwargs)
response = self._yelp_session.get(
url,
headers=self._headers,
params=parameters,
timeout=self._timeout_s,
)
response_json = response.json() # shouldn't happen, but this will raise a ValueError if the response isn't JSON
# Yelp can return one of many different API errors, so check for one of them.
# The Yelp Fusion API does not yet have a complete list of errors, but this is on the TODO list; see
# https://github.com/Yelp/yelp-fusion/issues/95 for more info.
if 'error' in response_json:
raise YelpAPI.YelpAPIError('{}: {}'.format(response_json['error']['code'],
response_json['error']['description']))
# we got a good response, so return
return response_json | python | def _query(self, url, **kwargs):
"""
All query methods have the same logic, so don't repeat it! Query the URL, parse the response as JSON,
and check for errors. If all goes well, return the parsed JSON.
"""
parameters = YelpAPI._get_clean_parameters(kwargs)
response = self._yelp_session.get(
url,
headers=self._headers,
params=parameters,
timeout=self._timeout_s,
)
response_json = response.json() # shouldn't happen, but this will raise a ValueError if the response isn't JSON
# Yelp can return one of many different API errors, so check for one of them.
# The Yelp Fusion API does not yet have a complete list of errors, but this is on the TODO list; see
# https://github.com/Yelp/yelp-fusion/issues/95 for more info.
if 'error' in response_json:
raise YelpAPI.YelpAPIError('{}: {}'.format(response_json['error']['code'],
response_json['error']['description']))
# we got a good response, so return
return response_json | [
"def",
"_query",
"(",
"self",
",",
"url",
",",
"*",
"*",
"kwargs",
")",
":",
"parameters",
"=",
"YelpAPI",
".",
"_get_clean_parameters",
"(",
"kwargs",
")",
"response",
"=",
"self",
".",
"_yelp_session",
".",
"get",
"(",
"url",
",",
"headers",
"=",
"self",
".",
"_headers",
",",
"params",
"=",
"parameters",
",",
"timeout",
"=",
"self",
".",
"_timeout_s",
",",
")",
"response_json",
"=",
"response",
".",
"json",
"(",
")",
"# shouldn't happen, but this will raise a ValueError if the response isn't JSON",
"# Yelp can return one of many different API errors, so check for one of them.",
"# The Yelp Fusion API does not yet have a complete list of errors, but this is on the TODO list; see",
"# https://github.com/Yelp/yelp-fusion/issues/95 for more info.",
"if",
"'error'",
"in",
"response_json",
":",
"raise",
"YelpAPI",
".",
"YelpAPIError",
"(",
"'{}: {}'",
".",
"format",
"(",
"response_json",
"[",
"'error'",
"]",
"[",
"'code'",
"]",
",",
"response_json",
"[",
"'error'",
"]",
"[",
"'description'",
"]",
")",
")",
"# we got a good response, so return",
"return",
"response_json"
] | All query methods have the same logic, so don't repeat it! Query the URL, parse the response as JSON,
and check for errors. If all goes well, return the parsed JSON. | [
"All",
"query",
"methods",
"have",
"the",
"same",
"logic",
"so",
"don",
"t",
"repeat",
"it!",
"Query",
"the",
"URL",
"parse",
"the",
"response",
"as",
"JSON",
"and",
"check",
"for",
"errors",
".",
"If",
"all",
"goes",
"well",
"return",
"the",
"parsed",
"JSON",
"."
] | 51e35fbe44ac131630ce5e2f1b6f53711846e2a7 | https://github.com/gfairchild/yelpapi/blob/51e35fbe44ac131630ce5e2f1b6f53711846e2a7/yelpapi/yelpapi.py#L264-L286 | train |
samgiles/slumber | slumber/utils.py | url_join | def url_join(base, *args):
"""
Helper function to join an arbitrary number of url segments together.
"""
scheme, netloc, path, query, fragment = urlsplit(base)
path = path if len(path) else "/"
path = posixpath.join(path, *[('%s' % x) for x in args])
return urlunsplit([scheme, netloc, path, query, fragment]) | python | def url_join(base, *args):
"""
Helper function to join an arbitrary number of url segments together.
"""
scheme, netloc, path, query, fragment = urlsplit(base)
path = path if len(path) else "/"
path = posixpath.join(path, *[('%s' % x) for x in args])
return urlunsplit([scheme, netloc, path, query, fragment]) | [
"def",
"url_join",
"(",
"base",
",",
"*",
"args",
")",
":",
"scheme",
",",
"netloc",
",",
"path",
",",
"query",
",",
"fragment",
"=",
"urlsplit",
"(",
"base",
")",
"path",
"=",
"path",
"if",
"len",
"(",
"path",
")",
"else",
"\"/\"",
"path",
"=",
"posixpath",
".",
"join",
"(",
"path",
",",
"*",
"[",
"(",
"'%s'",
"%",
"x",
")",
"for",
"x",
"in",
"args",
"]",
")",
"return",
"urlunsplit",
"(",
"[",
"scheme",
",",
"netloc",
",",
"path",
",",
"query",
",",
"fragment",
"]",
")"
] | Helper function to join an arbitrary number of url segments together. | [
"Helper",
"function",
"to",
"join",
"an",
"arbitrary",
"number",
"of",
"url",
"segments",
"together",
"."
] | af0f9ef7bd8df8bde6b47088630786c737869bce | https://github.com/samgiles/slumber/blob/af0f9ef7bd8df8bde6b47088630786c737869bce/slumber/utils.py#L9-L16 | train |
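Two worked calls for url_join above, tracing its urlsplit / posixpath.join / urlunsplit steps.

from slumber.utils import url_join

url_join('http://example.com/api/v1', 'resource', 42)
# -> 'http://example.com/api/v1/resource/42'

url_join('http://example.com', 'customers', 7, 'orders')
# -> 'http://example.com/customers/7/orders'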
20c/vaping | vaping/plugins/vodka.py | probe_to_graphsrv | def probe_to_graphsrv(probe):
"""
takes a probe instance and generates
a graphsrv data group for it using the
probe's config
"""
config = probe.config
# manual group set up via `group` config key
if "group" in config:
source, group = config["group"].split(".")
group_field = config.get("group_field", "host")
group_value = config[group_field]
graphsrv.group.add(source, group, {group_value:{group_field:group_value}}, **config)
return
# automatic group setup for fping
# FIXME: this should be somehow more dynamic
for k, v in list(config.items()):
if isinstance(v, dict) and "hosts" in v:
r = {}
for host in v.get("hosts"):
if isinstance(host, dict):
r[host["host"]] = host
else:
r[host] = {"host":host}
graphsrv.group.add(probe.name, k, r, **v) | python | def probe_to_graphsrv(probe):
"""
takes a probe instance and generates
a graphsrv data group for it using the
probe's config
"""
config = probe.config
# manual group set up via `group` config key
if "group" in config:
source, group = config["group"].split(".")
group_field = config.get("group_field", "host")
group_value = config[group_field]
graphsrv.group.add(source, group, {group_value:{group_field:group_value}}, **config)
return
# automatic group setup for fping
# FIXME: this should be somehow more dynamic
for k, v in list(config.items()):
if isinstance(v, dict) and "hosts" in v:
r = {}
for host in v.get("hosts"):
if isinstance(host, dict):
r[host["host"]] = host
else:
r[host] = {"host":host}
graphsrv.group.add(probe.name, k, r, **v) | [
"def",
"probe_to_graphsrv",
"(",
"probe",
")",
":",
"config",
"=",
"probe",
".",
"config",
"# manual group set up via `group` config key",
"if",
"\"group\"",
"in",
"config",
":",
"source",
",",
"group",
"=",
"config",
"[",
"\"group\"",
"]",
".",
"split",
"(",
"\".\"",
")",
"group_field",
"=",
"config",
".",
"get",
"(",
"\"group_field\"",
",",
"\"host\"",
")",
"group_value",
"=",
"config",
"[",
"group_field",
"]",
"graphsrv",
".",
"group",
".",
"add",
"(",
"source",
",",
"group",
",",
"{",
"group_value",
":",
"{",
"group_field",
":",
"group_value",
"}",
"}",
",",
"*",
"*",
"config",
")",
"return",
"# automatic group setup for fping",
"# FIXME: this should be somehow more dynamic",
"for",
"k",
",",
"v",
"in",
"list",
"(",
"config",
".",
"items",
"(",
")",
")",
":",
"if",
"isinstance",
"(",
"v",
",",
"dict",
")",
"and",
"\"hosts\"",
"in",
"v",
":",
"r",
"=",
"{",
"}",
"for",
"host",
"in",
"v",
".",
"get",
"(",
"\"hosts\"",
")",
":",
"if",
"isinstance",
"(",
"host",
",",
"dict",
")",
":",
"r",
"[",
"host",
"[",
"\"host\"",
"]",
"]",
"=",
"host",
"else",
":",
"r",
"[",
"host",
"]",
"=",
"{",
"\"host\"",
":",
"host",
"}",
"graphsrv",
".",
"group",
".",
"add",
"(",
"probe",
".",
"name",
",",
"k",
",",
"r",
",",
"*",
"*",
"v",
")"
] | takes a probe instance and generates
a graphsrv data group for it using the
probe's config | [
"takes",
"a",
"probe",
"instance",
"and",
"generates",
"a",
"graphsrv",
"data",
"group",
"for",
"it",
"using",
"the",
"probe",
"s",
"config"
] | c51f00586c99edb3d51e4abdbdfe3174755533ee | https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/plugins/vodka.py#L20-L49 | train |
20c/vaping | vaping/plugins/__init__.py | PluginBase.new_message | def new_message(self):
"""
creates a new message, setting `type`, `source`, `ts`, `data`
- `data` is initialized to an empty array
"""
msg = {}
msg['data'] = []
msg['type'] = self.plugin_type
msg['source'] = self.name
msg['ts'] = (datetime.datetime.utcnow() - datetime.datetime(1970, 1, 1)).total_seconds()
return msg | python | def new_message(self):
"""
creates a new message, setting `type`, `source`, `ts`, `data`
- `data` is initialized to an empty array
"""
msg = {}
msg['data'] = []
msg['type'] = self.plugin_type
msg['source'] = self.name
msg['ts'] = (datetime.datetime.utcnow() - datetime.datetime(1970, 1, 1)).total_seconds()
return msg | [
"def",
"new_message",
"(",
"self",
")",
":",
"msg",
"=",
"{",
"}",
"msg",
"[",
"'data'",
"]",
"=",
"[",
"]",
"msg",
"[",
"'type'",
"]",
"=",
"self",
".",
"plugin_type",
"msg",
"[",
"'source'",
"]",
"=",
"self",
".",
"name",
"msg",
"[",
"'ts'",
"]",
"=",
"(",
"datetime",
".",
"datetime",
".",
"utcnow",
"(",
")",
"-",
"datetime",
".",
"datetime",
"(",
"1970",
",",
"1",
",",
"1",
")",
")",
".",
"total_seconds",
"(",
")",
"return",
"msg"
] | creates a new message, setting `type`, `source`, `ts`, `data`
- `data` is initialized to an empty array | [
"creates",
"a",
"new",
"message",
"setting",
"type",
"source",
"ts",
"data",
"-",
"data",
"is",
"initialized",
"to",
"an",
"empty",
"array"
] | c51f00586c99edb3d51e4abdbdfe3174755533ee | https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/plugins/__init__.py#L51-L61 | train |
20c/vaping | vaping/plugins/__init__.py | PluginBase.popen | def popen(self, args, **kwargs):
"""
creates a subprocess with passed args
"""
self.log.debug("popen %s", ' '.join(args))
return vaping.io.subprocess.Popen(args, **kwargs) | python | def popen(self, args, **kwargs):
"""
creates a subprocess with passed args
"""
self.log.debug("popen %s", ' '.join(args))
return vaping.io.subprocess.Popen(args, **kwargs) | [
"def",
"popen",
"(",
"self",
",",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"popen %s\"",
",",
"' '",
".",
"join",
"(",
"args",
")",
")",
"return",
"vaping",
".",
"io",
".",
"subprocess",
".",
"Popen",
"(",
"args",
",",
"*",
"*",
"kwargs",
")"
] | creates a subprocess with passed args | [
"creates",
"a",
"subprocess",
"with",
"passed",
"args"
] | c51f00586c99edb3d51e4abdbdfe3174755533ee | https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/plugins/__init__.py#L63-L68 | train |
20c/vaping | vaping/plugins/__init__.py | ProbeBase.queue_emission | def queue_emission(self, msg):
"""
queue an emission of a message for all output plugins
"""
if not msg:
return
for _emitter in self._emit:
if not hasattr(_emitter, 'emit'):
continue
def emit(emitter=_emitter):
self.log.debug("emit to {}".format(emitter.name))
emitter.emit(msg)
self.log.debug("queue emission to {} ({})".format(
_emitter.name, self._emit_queue.qsize()))
self._emit_queue.put(emit) | python | def queue_emission(self, msg):
"""
queue an emission of a message for all output plugins
"""
if not msg:
return
for _emitter in self._emit:
if not hasattr(_emitter, 'emit'):
continue
def emit(emitter=_emitter):
self.log.debug("emit to {}".format(emitter.name))
emitter.emit(msg)
self.log.debug("queue emission to {} ({})".format(
_emitter.name, self._emit_queue.qsize()))
self._emit_queue.put(emit) | [
"def",
"queue_emission",
"(",
"self",
",",
"msg",
")",
":",
"if",
"not",
"msg",
":",
"return",
"for",
"_emitter",
"in",
"self",
".",
"_emit",
":",
"if",
"not",
"hasattr",
"(",
"_emitter",
",",
"'emit'",
")",
":",
"continue",
"def",
"emit",
"(",
"emitter",
"=",
"_emitter",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"emit to {}\"",
".",
"format",
"(",
"emitter",
".",
"name",
")",
")",
"emitter",
".",
"emit",
"(",
"msg",
")",
"self",
".",
"log",
".",
"debug",
"(",
"\"queue emission to {} ({})\"",
".",
"format",
"(",
"_emitter",
".",
"name",
",",
"self",
".",
"_emit_queue",
".",
"qsize",
"(",
")",
")",
")",
"self",
".",
"_emit_queue",
".",
"put",
"(",
"emit",
")"
] | queue an emission of a message for all output plugins | [
"queue",
"an",
"emission",
"of",
"a",
"message",
"for",
"all",
"output",
"plugins"
] | c51f00586c99edb3d51e4abdbdfe3174755533ee | https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/plugins/__init__.py#L131-L145 | train |
20c/vaping | vaping/plugins/__init__.py | ProbeBase.send_emission | def send_emission(self):
"""
emit and remove the first emission in the queue
"""
if self._emit_queue.empty():
return
emit = self._emit_queue.get()
emit() | python | def send_emission(self):
"""
emit and remove the first emission in the queue
"""
if self._emit_queue.empty():
return
emit = self._emit_queue.get()
emit() | [
"def",
"send_emission",
"(",
"self",
")",
":",
"if",
"self",
".",
"_emit_queue",
".",
"empty",
"(",
")",
":",
"return",
"emit",
"=",
"self",
".",
"_emit_queue",
".",
"get",
"(",
")",
"emit",
"(",
")"
] | emit and remove the first emission in the queue | [
"emit",
"and",
"remove",
"the",
"first",
"emission",
"in",
"the",
"queue"
] | c51f00586c99edb3d51e4abdbdfe3174755533ee | https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/plugins/__init__.py#L147-L154 | train |
20c/vaping | vaping/plugins/__init__.py | FileProbe.validate_file_handler | def validate_file_handler(self):
"""
Here we validate that our filehandler is pointing
to an existing file.
If it doesnt, because file has been deleted, we close
the filehander and try to reopen
"""
if self.fh.closed:
try:
self.fh = open(self.path, "r")
self.fh.seek(0, 2)
except OSError as err:
logging.error("Could not reopen file: {}".format(err))
return False
open_stat = os.fstat(self.fh.fileno())
try:
file_stat = os.stat(self.path)
except OSError as err:
logging.error("Could not stat file: {}".format(err))
return False
if open_stat != file_stat:
self.log
self.fh.close()
return False
return True | python | def validate_file_handler(self):
"""
Here we validate that our filehandler is pointing
to an existing file.
If it doesnt, because file has been deleted, we close
the filehander and try to reopen
"""
if self.fh.closed:
try:
self.fh = open(self.path, "r")
self.fh.seek(0, 2)
except OSError as err:
logging.error("Could not reopen file: {}".format(err))
return False
open_stat = os.fstat(self.fh.fileno())
try:
file_stat = os.stat(self.path)
except OSError as err:
logging.error("Could not stat file: {}".format(err))
return False
if open_stat != file_stat:
self.log
self.fh.close()
return False
return True | [
"def",
"validate_file_handler",
"(",
"self",
")",
":",
"if",
"self",
".",
"fh",
".",
"closed",
":",
"try",
":",
"self",
".",
"fh",
"=",
"open",
"(",
"self",
".",
"path",
",",
"\"r\"",
")",
"self",
".",
"fh",
".",
"seek",
"(",
"0",
",",
"2",
")",
"except",
"OSError",
"as",
"err",
":",
"logging",
".",
"error",
"(",
"\"Could not reopen file: {}\"",
".",
"format",
"(",
"err",
")",
")",
"return",
"False",
"open_stat",
"=",
"os",
".",
"fstat",
"(",
"self",
".",
"fh",
".",
"fileno",
"(",
")",
")",
"try",
":",
"file_stat",
"=",
"os",
".",
"stat",
"(",
"self",
".",
"path",
")",
"except",
"OSError",
"as",
"err",
":",
"logging",
".",
"error",
"(",
"\"Could not stat file: {}\"",
".",
"format",
"(",
"err",
")",
")",
"return",
"False",
"if",
"open_stat",
"!=",
"file_stat",
":",
"self",
".",
"log",
"self",
".",
"fh",
".",
"close",
"(",
")",
"return",
"False",
"return",
"True"
] | Here we validate that our filehandler is pointing
to an existing file.
If it doesnt, because file has been deleted, we close
the filehander and try to reopen | [
"Here",
"we",
"validate",
"that",
"our",
"filehandler",
"is",
"pointing",
"to",
"an",
"existing",
"file",
"."
] | c51f00586c99edb3d51e4abdbdfe3174755533ee | https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/plugins/__init__.py#L245-L273 | train |
20c/vaping | vaping/plugins/__init__.py | FileProbe.probe | def probe(self):
"""
Probe the file for new lines
"""
# make sure the filehandler is still valid
# (e.g. file stat hasnt changed, file exists etc.)
if not self.validate_file_handler():
return []
messages = []
# read any new lines and push them onto the stack
for line in self.fh.readlines(self.max_lines):
data = {"path":self.path}
msg = self.new_message()
# process the line - this is where parsing happens
parsed = self.process_line(line, data)
if not parsed:
continue
data.update(parsed)
# process the probe - this is where data assignment
# happens
data = self.process_probe(data)
msg["data"] = [data]
messages.append(msg)
# process all new messages before returning them
# for emission
messages = self.process_messages(messages)
return messages | python | def probe(self):
"""
Probe the file for new lines
"""
# make sure the filehandler is still valid
# (e.g. file stat hasnt changed, file exists etc.)
if not self.validate_file_handler():
return []
messages = []
# read any new lines and push them onto the stack
for line in self.fh.readlines(self.max_lines):
data = {"path":self.path}
msg = self.new_message()
# process the line - this is where parsing happens
parsed = self.process_line(line, data)
if not parsed:
continue
data.update(parsed)
# process the probe - this is where data assignment
# happens
data = self.process_probe(data)
msg["data"] = [data]
messages.append(msg)
# process all new messages before returning them
# for emission
messages = self.process_messages(messages)
return messages | [
"def",
"probe",
"(",
"self",
")",
":",
"# make sure the filehandler is still valid",
"# (e.g. file stat hasnt changed, file exists etc.)",
"if",
"not",
"self",
".",
"validate_file_handler",
"(",
")",
":",
"return",
"[",
"]",
"messages",
"=",
"[",
"]",
"# read any new lines and push them onto the stack",
"for",
"line",
"in",
"self",
".",
"fh",
".",
"readlines",
"(",
"self",
".",
"max_lines",
")",
":",
"data",
"=",
"{",
"\"path\"",
":",
"self",
".",
"path",
"}",
"msg",
"=",
"self",
".",
"new_message",
"(",
")",
"# process the line - this is where parsing happens",
"parsed",
"=",
"self",
".",
"process_line",
"(",
"line",
",",
"data",
")",
"if",
"not",
"parsed",
":",
"continue",
"data",
".",
"update",
"(",
"parsed",
")",
"# process the probe - this is where data assignment",
"# happens",
"data",
"=",
"self",
".",
"process_probe",
"(",
"data",
")",
"msg",
"[",
"\"data\"",
"]",
"=",
"[",
"data",
"]",
"messages",
".",
"append",
"(",
"msg",
")",
"# process all new messages before returning them",
"# for emission",
"messages",
"=",
"self",
".",
"process_messages",
"(",
"messages",
")",
"return",
"messages"
] | Probe the file for new lines | [
"Probe",
"the",
"file",
"for",
"new",
"lines"
] | c51f00586c99edb3d51e4abdbdfe3174755533ee | https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/plugins/__init__.py#L276-L310 | train |
20c/vaping | vaping/plugins/__init__.py | TimeSeriesDB.filename_formatters | def filename_formatters(self, data, row):
"""
Returns a dict containing the various filename formatter values
Values are gotten from the vaping data message as well as the
currently processed row in the message
- `data`: vaping message
- `row`: vaping message data row
"""
r = {
"source" : data.get("source"),
"field" : self.field,
"type" : data.get("type")
}
r.update(**row)
return r | python | def filename_formatters(self, data, row):
"""
Returns a dict containing the various filename formatter values
Values are gotten from the vaping data message as well as the
currently processed row in the message
- `data`: vaping message
- `row`: vaping message data row
"""
r = {
"source" : data.get("source"),
"field" : self.field,
"type" : data.get("type")
}
r.update(**row)
return r | [
"def",
"filename_formatters",
"(",
"self",
",",
"data",
",",
"row",
")",
":",
"r",
"=",
"{",
"\"source\"",
":",
"data",
".",
"get",
"(",
"\"source\"",
")",
",",
"\"field\"",
":",
"self",
".",
"field",
",",
"\"type\"",
":",
"data",
".",
"get",
"(",
"\"type\"",
")",
"}",
"r",
".",
"update",
"(",
"*",
"*",
"row",
")",
"return",
"r"
] | Returns a dict containing the various filename formatter values
Values are gotten from the vaping data message as well as the
currently processed row in the message
- `data`: vaping message
- `row`: vaping message data row | [
"Returns",
"a",
"dict",
"containing",
"the",
"various",
"filename",
"formatter",
"values"
] | c51f00586c99edb3d51e4abdbdfe3174755533ee | https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/plugins/__init__.py#L394-L411 | train |
20c/vaping | vaping/plugins/__init__.py | TimeSeriesDB.format_filename | def format_filename(self, data, row):
"""
Returns a formatted filename using the template stored
in self.filename
- `data`: vaping message
- `row`: vaping message data row
"""
return self.filename.format(**self.filename_formatters(data, row)) | python | def format_filename(self, data, row):
"""
Returns a formatted filename using the template stored
in self.filename
- `data`: vaping message
- `row`: vaping message data row
"""
return self.filename.format(**self.filename_formatters(data, row)) | [
"def",
"format_filename",
"(",
"self",
",",
"data",
",",
"row",
")",
":",
"return",
"self",
".",
"filename",
".",
"format",
"(",
"*",
"*",
"self",
".",
"filename_formatters",
"(",
"data",
",",
"row",
")",
")"
] | Returns a formatted filename using the template stored
in self.filename
- `data`: vaping message
- `row`: vaping message data row | [
"Returns",
"a",
"formatted",
"filename",
"using",
"the",
"template",
"stored",
"in",
"self",
".",
"filename"
] | c51f00586c99edb3d51e4abdbdfe3174755533ee | https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/plugins/__init__.py#L413-L421 | train |
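The two TimeSeriesDB records above show how output filenames are templated from the vaping message and the current data row. A small stand-alone sketch of that templating, using an invented template string and invented values (not taken from any real vaping config):

```python
# Stand-alone illustration of the filename templating shown above; the
# template string and values are examples only.
template = "{source}-{host}-{field}.rrd"
row = {"host": "10.0.0.1", "min": 0.58, "max": 0.71}

formatters = {"source": "latency_probe", "field": "avg", "type": "fping"}
formatters.update(row)  # row fields are merged in, as filename_formatters does

print(template.format(**formatters))  # latency_probe-10.0.0.1-avg.rrd
```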
20c/vaping | vaping/plugins/__init__.py | TimeSeriesDB.emit | def emit(self, message):
"""
emit to database
"""
# handle vaping data that arrives in a list
if isinstance(message.get("data"), list):
for row in message.get("data"):
# format filename from data
filename = self.format_filename(message, row)
# create database file if it does not exist yet
if not os.path.exists(filename):
self.create(filename)
# update database
self.log.debug("storing time:%d, %s:%.5f in %s" % (
message.get("ts"), self.field, row.get(self.field), filename))
self.update(filename, message.get("ts"), row.get(self.field)) | python | def emit(self, message):
"""
emit to database
"""
# handle vaping data that arrives in a list
if isinstance(message.get("data"), list):
for row in message.get("data"):
# format filename from data
filename = self.format_filename(message, row)
# create database file if it does not exist yet
if not os.path.exists(filename):
self.create(filename)
# update database
self.log.debug("storing time:%d, %s:%.5f in %s" % (
message.get("ts"), self.field, row.get(self.field), filename))
self.update(filename, message.get("ts"), row.get(self.field)) | [
"def",
"emit",
"(",
"self",
",",
"message",
")",
":",
"# handle vaping data that arrives in a list",
"if",
"isinstance",
"(",
"message",
".",
"get",
"(",
"\"data\"",
")",
",",
"list",
")",
":",
"for",
"row",
"in",
"message",
".",
"get",
"(",
"\"data\"",
")",
":",
"# format filename from data",
"filename",
"=",
"self",
".",
"format_filename",
"(",
"message",
",",
"row",
")",
"# create database file if it does not exist yet",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"filename",
")",
":",
"self",
".",
"create",
"(",
"filename",
")",
"# update database",
"self",
".",
"log",
".",
"debug",
"(",
"\"storing time:%d, %s:%.5f in %s\"",
"%",
"(",
"message",
".",
"get",
"(",
"\"ts\"",
")",
",",
"self",
".",
"field",
",",
"row",
".",
"get",
"(",
"self",
".",
"field",
")",
",",
"filename",
")",
")",
"self",
".",
"update",
"(",
"filename",
",",
"message",
".",
"get",
"(",
"\"ts\"",
")",
",",
"row",
".",
"get",
"(",
"self",
".",
"field",
")",
")"
] | emit to database | [
"emit",
"to",
"database"
] | c51f00586c99edb3d51e4abdbdfe3174755533ee | https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/plugins/__init__.py#L423-L442 | train |
20c/vaping | vaping/config.py | parse_interval | def parse_interval(val):
"""
converts a string to float of seconds
.5 = 500ms
90 = 1m30s
"""
re_intv = re.compile(r"([\d\.]+)([a-zA-Z]+)")
val = val.strip()
total = 0.0
for match in re_intv.findall(val):
unit = match[1]
count = float(match[0])
if unit == 's':
total += count
elif unit == 'm':
total += count * 60
elif unit == 'ms':
total += count / 1000
elif unit == "h":
total += count * 3600
elif unit == 'd':
total += count * 86400
else:
raise ValueError("unknown unit from interval string '%s'" % val)
return total | python | def parse_interval(val):
"""
converts a string to float of seconds
.5 = 500ms
90 = 1m30s
"""
re_intv = re.compile(r"([\d\.]+)([a-zA-Z]+)")
val = val.strip()
total = 0.0
for match in re_intv.findall(val):
unit = match[1]
count = float(match[0])
if unit == 's':
total += count
elif unit == 'm':
total += count * 60
elif unit == 'ms':
total += count / 1000
elif unit == "h":
total += count * 3600
elif unit == 'd':
total += count * 86400
else:
raise ValueError("unknown unit from interval string '%s'" % val)
return total | [
"def",
"parse_interval",
"(",
"val",
")",
":",
"re_intv",
"=",
"re",
".",
"compile",
"(",
"r\"([\\d\\.]+)([a-zA-Z]+)\"",
")",
"val",
"=",
"val",
".",
"strip",
"(",
")",
"total",
"=",
"0.0",
"for",
"match",
"in",
"re_intv",
".",
"findall",
"(",
"val",
")",
":",
"unit",
"=",
"match",
"[",
"1",
"]",
"count",
"=",
"float",
"(",
"match",
"[",
"0",
"]",
")",
"if",
"unit",
"==",
"'s'",
":",
"total",
"+=",
"count",
"elif",
"unit",
"==",
"'m'",
":",
"total",
"+=",
"count",
"*",
"60",
"elif",
"unit",
"==",
"'ms'",
":",
"total",
"+=",
"count",
"/",
"1000",
"elif",
"unit",
"==",
"\"h\"",
":",
"total",
"+=",
"count",
"*",
"3600",
"elif",
"unit",
"==",
"'d'",
":",
"total",
"+=",
"count",
"*",
"86400",
"else",
":",
"raise",
"ValueError",
"(",
"\"unknown unit from interval string '%s'\"",
"%",
"val",
")",
"return",
"total"
] | converts a string to float of seconds
.5 = 500ms
90 = 1m30s | [
"converts",
"a",
"string",
"to",
"float",
"of",
"seconds",
".",
"5",
"=",
"500ms",
"90",
"=",
"1m30s"
] | c51f00586c99edb3d51e4abdbdfe3174755533ee | https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/config.py#L8-L33 | train |
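A quick check of `parse_interval` against the units handled in the record above (import path assumed from vaping/config.py). Note that, as far as the regexp in the record goes, a bare number with no unit matches nothing and falls through to the 0.0 default.

```python
# Expected conversions per the unit handling in parse_interval above.
from vaping.config import parse_interval  # import path assumed

assert parse_interval("500ms") == 0.5
assert parse_interval("1m30s") == 90.0
assert parse_interval("2h") == 7200.0
assert parse_interval("1d") == 86400.0
```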
20c/vaping | vaping/plugins/fping.py | FPingBase.hosts_args | def hosts_args(self):
"""
hosts list can contain strings specifying a host directly
or dicts containing a "host" key to specify the host
this way we can allow passing further config details (color, name etc.)
with each host as well as simply dropping in addresses for quick
setup depending on the user's needs
"""
host_args = []
for row in self.hosts:
if isinstance(row, dict):
host_args.append(row["host"])
else:
host_args.append(row)
# using a set changes the order
dedupe = list()
for each in host_args:
if each not in dedupe:
dedupe.append(each)
return dedupe | python | def hosts_args(self):
"""
hosts list can contain strings specifying a host directly
or dicts containing a "host" key to specify the host
this way we can allow passing further config details (color, name etc.)
with each host as well as simply dropping in addresses for quick
setup depending on the user's needs
"""
host_args = []
for row in self.hosts:
if isinstance(row, dict):
host_args.append(row["host"])
else:
host_args.append(row)
# using a set changes the order
dedupe = list()
for each in host_args:
if each not in dedupe:
dedupe.append(each)
return dedupe | [
"def",
"hosts_args",
"(",
"self",
")",
":",
"host_args",
"=",
"[",
"]",
"for",
"row",
"in",
"self",
".",
"hosts",
":",
"if",
"isinstance",
"(",
"row",
",",
"dict",
")",
":",
"host_args",
".",
"append",
"(",
"row",
"[",
"\"host\"",
"]",
")",
"else",
":",
"host_args",
".",
"append",
"(",
"row",
")",
"# using a set changes the order",
"dedupe",
"=",
"list",
"(",
")",
"for",
"each",
"in",
"host_args",
":",
"if",
"each",
"not",
"in",
"dedupe",
":",
"dedupe",
".",
"append",
"(",
"each",
")",
"return",
"dedupe"
] | hosts list can contain strings specifying a host directly
or dicts containing a "host" key to specify the host
this way we can allow passing further config details (color, name etc.)
with each host as well as simply dropping in addresses for quick
setup depending on the user's needs | [
"hosts",
"list",
"can",
"contain",
"strings",
"specifying",
"a",
"host",
"directly",
"or",
"dicts",
"containing",
"a",
"host",
"key",
"to",
"specify",
"the",
"host"
] | c51f00586c99edb3d51e4abdbdfe3174755533ee | https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/plugins/fping.py#L39-L61 | train |
20c/vaping | vaping/plugins/fping.py | FPingBase.parse_verbose | def parse_verbose(self, line):
"""
parse output from verbose format
"""
try:
logging.debug(line)
(host, pings) = line.split(' : ')
cnt = 0
lost = 0
times = []
pings = pings.strip().split(' ')
cnt = len(pings)
for latency in pings:
if latency == '-':
continue
times.append(float(latency))
lost = cnt - len(times)
if lost:
loss = lost / float(cnt)
else:
loss = 0.0
rv = {
'host': host.strip(),
'cnt': cnt,
'loss': loss,
'data': times,
}
if times:
rv['min'] = min(times)
rv['max'] = max(times)
rv['avg'] = sum(times) / len(times)
rv['last'] = times[-1]
return rv
except Exception as e:
logging.error("failed to get data: {}".format(e)) | python | def parse_verbose(self, line):
"""
parse output from verbose format
"""
try:
logging.debug(line)
(host, pings) = line.split(' : ')
cnt = 0
lost = 0
times = []
pings = pings.strip().split(' ')
cnt = len(pings)
for latency in pings:
if latency == '-':
continue
times.append(float(latency))
lost = cnt - len(times)
if lost:
loss = lost / float(cnt)
else:
loss = 0.0
rv = {
'host': host.strip(),
'cnt': cnt,
'loss': loss,
'data': times,
}
if times:
rv['min'] = min(times)
rv['max'] = max(times)
rv['avg'] = sum(times) / len(times)
rv['last'] = times[-1]
return rv
except Exception as e:
logging.error("failed to get data: {}".format(e)) | [
"def",
"parse_verbose",
"(",
"self",
",",
"line",
")",
":",
"try",
":",
"logging",
".",
"debug",
"(",
"line",
")",
"(",
"host",
",",
"pings",
")",
"=",
"line",
".",
"split",
"(",
"' : '",
")",
"cnt",
"=",
"0",
"lost",
"=",
"0",
"times",
"=",
"[",
"]",
"pings",
"=",
"pings",
".",
"strip",
"(",
")",
".",
"split",
"(",
"' '",
")",
"cnt",
"=",
"len",
"(",
"pings",
")",
"for",
"latency",
"in",
"pings",
":",
"if",
"latency",
"==",
"'-'",
":",
"continue",
"times",
".",
"append",
"(",
"float",
"(",
"latency",
")",
")",
"lost",
"=",
"cnt",
"-",
"len",
"(",
"times",
")",
"if",
"lost",
":",
"loss",
"=",
"lost",
"/",
"float",
"(",
"cnt",
")",
"else",
":",
"loss",
"=",
"0.0",
"rv",
"=",
"{",
"'host'",
":",
"host",
".",
"strip",
"(",
")",
",",
"'cnt'",
":",
"cnt",
",",
"'loss'",
":",
"loss",
",",
"'data'",
":",
"times",
",",
"}",
"if",
"times",
":",
"rv",
"[",
"'min'",
"]",
"=",
"min",
"(",
"times",
")",
"rv",
"[",
"'max'",
"]",
"=",
"max",
"(",
"times",
")",
"rv",
"[",
"'avg'",
"]",
"=",
"sum",
"(",
"times",
")",
"/",
"len",
"(",
"times",
")",
"rv",
"[",
"'last'",
"]",
"=",
"times",
"[",
"-",
"1",
"]",
"return",
"rv",
"except",
"Exception",
"as",
"e",
":",
"logging",
".",
"error",
"(",
"\"failed to get data: {}\"",
".",
"format",
"(",
"e",
")",
")"
] | parse output from verbose format | [
"parse",
"output",
"from",
"verbose",
"format"
] | c51f00586c99edb3d51e4abdbdfe3174755533ee | https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/plugins/fping.py#L63-L100 | train |
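To make the parsing in `parse_verbose` concrete, here is a hypothetical fping verbose output line annotated with the dict the method above would build from it. The probe instance itself is not constructed here, since it needs a full vaping config.

```python
# Hypothetical fping verbose line; '-' marks a lost ping.
line = "10.0.0.1 : 0.62 0.58 - 0.71"

# Following the logic in the record, parse_verbose(line) would return roughly:
# {'host': '10.0.0.1', 'cnt': 4, 'loss': 0.25,
#  'data': [0.62, 0.58, 0.71],
#  'min': 0.58, 'max': 0.71, 'avg': 0.6366..., 'last': 0.71}
```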
20c/vaping | vaping/cli.py | start | def start(ctx, **kwargs):
"""
start a vaping process
"""
update_context(ctx, kwargs)
daemon = mk_daemon(ctx)
if ctx.debug or kwargs['no_fork']:
daemon.run()
else:
daemon.start() | python | def start(ctx, **kwargs):
"""
start a vaping process
"""
update_context(ctx, kwargs)
daemon = mk_daemon(ctx)
if ctx.debug or kwargs['no_fork']:
daemon.run()
else:
daemon.start() | [
"def",
"start",
"(",
"ctx",
",",
"*",
"*",
"kwargs",
")",
":",
"update_context",
"(",
"ctx",
",",
"kwargs",
")",
"daemon",
"=",
"mk_daemon",
"(",
"ctx",
")",
"if",
"ctx",
".",
"debug",
"or",
"kwargs",
"[",
"'no_fork'",
"]",
":",
"daemon",
".",
"run",
"(",
")",
"else",
":",
"daemon",
".",
"start",
"(",
")"
] | start a vaping process | [
"start",
"a",
"vaping",
"process"
] | c51f00586c99edb3d51e4abdbdfe3174755533ee | https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/cli.py#L52-L63 | train |
20c/vaping | vaping/cli.py | stop | def stop(ctx, **kwargs):
"""
stop a vaping process
"""
update_context(ctx, kwargs)
daemon = mk_daemon(ctx)
daemon.stop() | python | def stop(ctx, **kwargs):
"""
stop a vaping process
"""
update_context(ctx, kwargs)
daemon = mk_daemon(ctx)
daemon.stop() | [
"def",
"stop",
"(",
"ctx",
",",
"*",
"*",
"kwargs",
")",
":",
"update_context",
"(",
"ctx",
",",
"kwargs",
")",
"daemon",
"=",
"mk_daemon",
"(",
"ctx",
")",
"daemon",
".",
"stop",
"(",
")"
] | stop a vaping process | [
"stop",
"a",
"vaping",
"process"
] | c51f00586c99edb3d51e4abdbdfe3174755533ee | https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/cli.py#L70-L77 | train |
20c/vaping | vaping/cli.py | restart | def restart(ctx, **kwargs):
"""
restart a vaping process
"""
update_context(ctx, kwargs)
daemon = mk_daemon(ctx)
daemon.stop()
daemon.start() | python | def restart(ctx, **kwargs):
"""
restart a vaping process
"""
update_context(ctx, kwargs)
daemon = mk_daemon(ctx)
daemon.stop()
daemon.start() | [
"def",
"restart",
"(",
"ctx",
",",
"*",
"*",
"kwargs",
")",
":",
"update_context",
"(",
"ctx",
",",
"kwargs",
")",
"daemon",
"=",
"mk_daemon",
"(",
"ctx",
")",
"daemon",
".",
"stop",
"(",
")",
"daemon",
".",
"start",
"(",
")"
] | restart a vaping process | [
"restart",
"a",
"vaping",
"process"
] | c51f00586c99edb3d51e4abdbdfe3174755533ee | https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/cli.py#L84-L92 | train |
aio-libs/aiohttp-debugtoolbar | aiohttp_debugtoolbar/panels/base.py | DebugPanel.render_content | def render_content(self, request):
"""Return a string containing the HTML to be rendered for the panel.
By default this will render the template defined by the
:attr:`.template` attribute with a rendering context defined by
:attr:`.data` combined with the ``dict`` returned from
:meth:`.render_vars`.
The ``request`` here is the active request in the toolbar. Not the
original request that this panel represents.
"""
context = self.data.copy()
context.update(self.render_vars(request))
return render(self.template, request.app, context, request=request) | python | def render_content(self, request):
"""Return a string containing the HTML to be rendered for the panel.
By default this will render the template defined by the
:attr:`.template` attribute with a rendering context defined by
:attr:`.data` combined with the ``dict`` returned from
:meth:`.render_vars`.
The ``request`` here is the active request in the toolbar. Not the
original request that this panel represents.
"""
context = self.data.copy()
context.update(self.render_vars(request))
return render(self.template, request.app, context, request=request) | [
"def",
"render_content",
"(",
"self",
",",
"request",
")",
":",
"context",
"=",
"self",
".",
"data",
".",
"copy",
"(",
")",
"context",
".",
"update",
"(",
"self",
".",
"render_vars",
"(",
"request",
")",
")",
"return",
"render",
"(",
"self",
".",
"template",
",",
"request",
".",
"app",
",",
"context",
",",
"request",
"=",
"request",
")"
] | Return a string containing the HTML to be rendered for the panel.
By default this will render the template defined by the
:attr:`.template` attribute with a rendering context defined by
:attr:`.data` combined with the ``dict`` returned from
:meth:`.render_vars`.
The ``request`` here is the active request in the toolbar. Not the
original request that this panel represents. | [
"Return",
"a",
"string",
"containing",
"the",
"HTML",
"to",
"be",
"rendered",
"for",
"the",
"panel",
"."
] | a1c3fb2b487bcaaf23eb71ee4c9c3cfc9cb94322 | https://github.com/aio-libs/aiohttp-debugtoolbar/blob/a1c3fb2b487bcaaf23eb71ee4c9c3cfc9cb94322/aiohttp_debugtoolbar/panels/base.py#L82-L95 | train |
aio-libs/aiohttp-debugtoolbar | aiohttp_debugtoolbar/toolbar.py | DebugToolbar.inject | def inject(self, request, response):
"""
Inject the debug toolbar iframe into an HTML response.
"""
# called in host app
if not isinstance(response, Response):
return
settings = request.app[APP_KEY]['settings']
response_html = response.body
route = request.app.router['debugtoolbar.request']
toolbar_url = route.url_for(request_id=request['id'])
button_style = settings['button_style']
css_path = request.app.router[STATIC_ROUTE_NAME].url_for(
filename='css/toolbar_button.css')
toolbar_css = toolbar_css_template % {'css_path': css_path}
toolbar_html = toolbar_html_template % {
'button_style': button_style,
'css_path': css_path,
'toolbar_url': toolbar_url}
toolbar_html = toolbar_html.encode(response.charset or 'utf-8')
toolbar_css = toolbar_css.encode(response.charset or 'utf-8')
response_html = replace_insensitive(
response_html, b'</head>', toolbar_css + b'</head>')
response.body = replace_insensitive(
response_html, b'</body>',
toolbar_html + b'</body>') | python | def inject(self, request, response):
"""
Inject the debug toolbar iframe into an HTML response.
"""
# called in host app
if not isinstance(response, Response):
return
settings = request.app[APP_KEY]['settings']
response_html = response.body
route = request.app.router['debugtoolbar.request']
toolbar_url = route.url_for(request_id=request['id'])
button_style = settings['button_style']
css_path = request.app.router[STATIC_ROUTE_NAME].url_for(
filename='css/toolbar_button.css')
toolbar_css = toolbar_css_template % {'css_path': css_path}
toolbar_html = toolbar_html_template % {
'button_style': button_style,
'css_path': css_path,
'toolbar_url': toolbar_url}
toolbar_html = toolbar_html.encode(response.charset or 'utf-8')
toolbar_css = toolbar_css.encode(response.charset or 'utf-8')
response_html = replace_insensitive(
response_html, b'</head>', toolbar_css + b'</head>')
response.body = replace_insensitive(
response_html, b'</body>',
toolbar_html + b'</body>') | [
"def",
"inject",
"(",
"self",
",",
"request",
",",
"response",
")",
":",
"# called in host app",
"if",
"not",
"isinstance",
"(",
"response",
",",
"Response",
")",
":",
"return",
"settings",
"=",
"request",
".",
"app",
"[",
"APP_KEY",
"]",
"[",
"'settings'",
"]",
"response_html",
"=",
"response",
".",
"body",
"route",
"=",
"request",
".",
"app",
".",
"router",
"[",
"'debugtoolbar.request'",
"]",
"toolbar_url",
"=",
"route",
".",
"url_for",
"(",
"request_id",
"=",
"request",
"[",
"'id'",
"]",
")",
"button_style",
"=",
"settings",
"[",
"'button_style'",
"]",
"css_path",
"=",
"request",
".",
"app",
".",
"router",
"[",
"STATIC_ROUTE_NAME",
"]",
".",
"url_for",
"(",
"filename",
"=",
"'css/toolbar_button.css'",
")",
"toolbar_css",
"=",
"toolbar_css_template",
"%",
"{",
"'css_path'",
":",
"css_path",
"}",
"toolbar_html",
"=",
"toolbar_html_template",
"%",
"{",
"'button_style'",
":",
"button_style",
",",
"'css_path'",
":",
"css_path",
",",
"'toolbar_url'",
":",
"toolbar_url",
"}",
"toolbar_html",
"=",
"toolbar_html",
".",
"encode",
"(",
"response",
".",
"charset",
"or",
"'utf-8'",
")",
"toolbar_css",
"=",
"toolbar_css",
".",
"encode",
"(",
"response",
".",
"charset",
"or",
"'utf-8'",
")",
"response_html",
"=",
"replace_insensitive",
"(",
"response_html",
",",
"b'</head>'",
",",
"toolbar_css",
"+",
"b'</head>'",
")",
"response",
".",
"body",
"=",
"replace_insensitive",
"(",
"response_html",
",",
"b'</body>'",
",",
"toolbar_html",
"+",
"b'</body>'",
")"
] | Inject the debug toolbar iframe into an HTML response. | [
"Inject",
"the",
"debug",
"toolbar",
"iframe",
"into",
"an",
"HTML",
"response",
"."
] | a1c3fb2b487bcaaf23eb71ee4c9c3cfc9cb94322 | https://github.com/aio-libs/aiohttp-debugtoolbar/blob/a1c3fb2b487bcaaf23eb71ee4c9c3cfc9cb94322/aiohttp_debugtoolbar/toolbar.py#L52-L81 | train |
aio-libs/aiohttp-debugtoolbar | aiohttp_debugtoolbar/utils.py | common_segment_count | def common_segment_count(path, value):
"""Return the number of path segments common to both"""
i = 0
if len(path) <= len(value):
for x1, x2 in zip(path, value):
if x1 == x2:
i += 1
else:
return 0
return i | python | def common_segment_count(path, value):
"""Return the number of path segments common to both"""
i = 0
if len(path) <= len(value):
for x1, x2 in zip(path, value):
if x1 == x2:
i += 1
else:
return 0
return i | [
"def",
"common_segment_count",
"(",
"path",
",",
"value",
")",
":",
"i",
"=",
"0",
"if",
"len",
"(",
"path",
")",
"<=",
"len",
"(",
"value",
")",
":",
"for",
"x1",
",",
"x2",
"in",
"zip",
"(",
"path",
",",
"value",
")",
":",
"if",
"x1",
"==",
"x2",
":",
"i",
"+=",
"1",
"else",
":",
"return",
"0",
"return",
"i"
] | Return the number of path segments common to both | [
"Return",
"the",
"number",
"of",
"path",
"segments",
"common",
"to",
"both"
] | a1c3fb2b487bcaaf23eb71ee4c9c3cfc9cb94322 | https://github.com/aio-libs/aiohttp-debugtoolbar/blob/a1c3fb2b487bcaaf23eb71ee4c9c3cfc9cb94322/aiohttp_debugtoolbar/utils.py#L83-L92 | train |
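`common_segment_count` operates on already-split path segments; two quick checks based on the logic above (import path assumed from aiohttp_debugtoolbar/utils.py):

```python
from aiohttp_debugtoolbar.utils import common_segment_count  # path assumed

assert common_segment_count(["a", "b"], ["a", "b", "c"]) == 2
assert common_segment_count(["a", "x"], ["a", "b", "c"]) == 0
# If `path` is longer than `value` the count is also 0.
assert common_segment_count(["a", "b", "c"], ["a", "b"]) == 0
```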
jsocol/pystatsd | statsd/client/base.py | StatsClientBase.timing | def timing(self, stat, delta, rate=1):
"""
Send new timing information.
`delta` can be either a number of milliseconds or a timedelta.
"""
if isinstance(delta, timedelta):
# Convert timedelta to number of milliseconds.
delta = delta.total_seconds() * 1000.
self._send_stat(stat, '%0.6f|ms' % delta, rate) | python | def timing(self, stat, delta, rate=1):
"""
Send new timing information.
`delta` can be either a number of milliseconds or a timedelta.
"""
if isinstance(delta, timedelta):
# Convert timedelta to number of milliseconds.
delta = delta.total_seconds() * 1000.
self._send_stat(stat, '%0.6f|ms' % delta, rate) | [
"def",
"timing",
"(",
"self",
",",
"stat",
",",
"delta",
",",
"rate",
"=",
"1",
")",
":",
"if",
"isinstance",
"(",
"delta",
",",
"timedelta",
")",
":",
"# Convert timedelta to number of milliseconds.",
"delta",
"=",
"delta",
".",
"total_seconds",
"(",
")",
"*",
"1000.",
"self",
".",
"_send_stat",
"(",
"stat",
",",
"'%0.6f|ms'",
"%",
"delta",
",",
"rate",
")"
] | Send new timing information.
`delta` can be either a number of milliseconds or a timedelta. | [
"Send",
"new",
"timing",
"information",
"."
] | 006a86394c44ff71e6e8e52529daa3c0fdcc93fb | https://github.com/jsocol/pystatsd/blob/006a86394c44ff71e6e8e52529daa3c0fdcc93fb/statsd/client/base.py#L22-L31 | train |
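A short sketch of sending timings with pystatsd's client, matching the record above: `delta` may be either milliseconds or a `timedelta`. `StatsClient` is the package's public entry point; the host/port shown are the usual defaults, not taken from the dataset.

```python
from datetime import timedelta
from statsd import StatsClient

statsd = StatsClient(host="localhost", port=8125)
statsd.timing("db.query", 320)                           # milliseconds
statsd.timing("db.query", timedelta(milliseconds=320))   # timedelta also accepted
```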
jsocol/pystatsd | statsd/client/base.py | StatsClientBase.decr | def decr(self, stat, count=1, rate=1):
"""Decrement a stat by `count`."""
self.incr(stat, -count, rate) | python | def decr(self, stat, count=1, rate=1):
"""Decrement a stat by `count`."""
self.incr(stat, -count, rate) | [
"def",
"decr",
"(",
"self",
",",
"stat",
",",
"count",
"=",
"1",
",",
"rate",
"=",
"1",
")",
":",
"self",
".",
"incr",
"(",
"stat",
",",
"-",
"count",
",",
"rate",
")"
] | Decrement a stat by `count`. | [
"Decrement",
"a",
"stat",
"by",
"count",
"."
] | 006a86394c44ff71e6e8e52529daa3c0fdcc93fb | https://github.com/jsocol/pystatsd/blob/006a86394c44ff71e6e8e52529daa3c0fdcc93fb/statsd/client/base.py#L37-L39 | train |
jsocol/pystatsd | statsd/client/base.py | StatsClientBase.gauge | def gauge(self, stat, value, rate=1, delta=False):
"""Set a gauge value."""
if value < 0 and not delta:
if rate < 1:
if random.random() > rate:
return
with self.pipeline() as pipe:
pipe._send_stat(stat, '0|g', 1)
pipe._send_stat(stat, '%s|g' % value, 1)
else:
prefix = '+' if delta and value >= 0 else ''
self._send_stat(stat, '%s%s|g' % (prefix, value), rate) | python | def gauge(self, stat, value, rate=1, delta=False):
"""Set a gauge value."""
if value < 0 and not delta:
if rate < 1:
if random.random() > rate:
return
with self.pipeline() as pipe:
pipe._send_stat(stat, '0|g', 1)
pipe._send_stat(stat, '%s|g' % value, 1)
else:
prefix = '+' if delta and value >= 0 else ''
self._send_stat(stat, '%s%s|g' % (prefix, value), rate) | [
"def",
"gauge",
"(",
"self",
",",
"stat",
",",
"value",
",",
"rate",
"=",
"1",
",",
"delta",
"=",
"False",
")",
":",
"if",
"value",
"<",
"0",
"and",
"not",
"delta",
":",
"if",
"rate",
"<",
"1",
":",
"if",
"random",
".",
"random",
"(",
")",
">",
"rate",
":",
"return",
"with",
"self",
".",
"pipeline",
"(",
")",
"as",
"pipe",
":",
"pipe",
".",
"_send_stat",
"(",
"stat",
",",
"'0|g'",
",",
"1",
")",
"pipe",
".",
"_send_stat",
"(",
"stat",
",",
"'%s|g'",
"%",
"value",
",",
"1",
")",
"else",
":",
"prefix",
"=",
"'+'",
"if",
"delta",
"and",
"value",
">=",
"0",
"else",
"''",
"self",
".",
"_send_stat",
"(",
"stat",
",",
"'%s%s|g'",
"%",
"(",
"prefix",
",",
"value",
")",
",",
"rate",
")"
] | Set a gauge value. | [
"Set",
"a",
"gauge",
"value",
"."
] | 006a86394c44ff71e6e8e52529daa3c0fdcc93fb | https://github.com/jsocol/pystatsd/blob/006a86394c44ff71e6e8e52529daa3c0fdcc93fb/statsd/client/base.py#L41-L52 | train |
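Gauge semantics sketched from the code above: an absolute negative value is first reset to 0 via a pipeline, while `delta=True` sends a signed adjustment. Stat names and values below are examples only.

```python
from statsd import StatsClient

statsd = StatsClient(host="localhost", port=8125)
statsd.gauge("queue.depth", 42)               # "42|g"  (absolute value)
statsd.gauge("queue.depth", 5, delta=True)    # "+5|g"  (relative increase)
statsd.gauge("queue.depth", -3, delta=True)   # "-3|g"  (relative decrease)
```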
jsocol/pystatsd | statsd/client/base.py | StatsClientBase.set | def set(self, stat, value, rate=1):
"""Set a set value."""
self._send_stat(stat, '%s|s' % value, rate) | python | def set(self, stat, value, rate=1):
"""Set a set value."""
self._send_stat(stat, '%s|s' % value, rate) | [
"def",
"set",
"(",
"self",
",",
"stat",
",",
"value",
",",
"rate",
"=",
"1",
")",
":",
"self",
".",
"_send_stat",
"(",
"stat",
",",
"'%s|s'",
"%",
"value",
",",
"rate",
")"
] | Set a set value. | [
"Set",
"a",
"set",
"value",
"."
] | 006a86394c44ff71e6e8e52529daa3c0fdcc93fb | https://github.com/jsocol/pystatsd/blob/006a86394c44ff71e6e8e52529daa3c0fdcc93fb/statsd/client/base.py#L54-L56 | train |
jsocol/pystatsd | statsd/client/timer.py | safe_wraps | def safe_wraps(wrapper, *args, **kwargs):
"""Safely wraps partial functions."""
while isinstance(wrapper, functools.partial):
wrapper = wrapper.func
return functools.wraps(wrapper, *args, **kwargs) | python | def safe_wraps(wrapper, *args, **kwargs):
"""Safely wraps partial functions."""
while isinstance(wrapper, functools.partial):
wrapper = wrapper.func
return functools.wraps(wrapper, *args, **kwargs) | [
"def",
"safe_wraps",
"(",
"wrapper",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"while",
"isinstance",
"(",
"wrapper",
",",
"functools",
".",
"partial",
")",
":",
"wrapper",
"=",
"wrapper",
".",
"func",
"return",
"functools",
".",
"wraps",
"(",
"wrapper",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | Safely wraps partial functions. | [
"Safely",
"wraps",
"partial",
"functions",
"."
] | 006a86394c44ff71e6e8e52529daa3c0fdcc93fb | https://github.com/jsocol/pystatsd/blob/006a86394c44ff71e6e8e52529daa3c0fdcc93fb/statsd/client/timer.py#L14-L18 | train |
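`safe_wraps` unwraps `functools.partial` objects before delegating to `functools.wraps`, so a decorated function inherits metadata from the underlying callable. A sketch (import path assumed from statsd/client/timer.py; the wrapped function is invented):

```python
import functools
from statsd.client.timer import safe_wraps  # import path assumed

def base(x, y):
    """Add two numbers."""
    return x + y

partial_fn = functools.partial(base, 1)

@safe_wraps(partial_fn)
def wrapper(*args, **kwargs):
    return partial_fn(*args, **kwargs)

print(wrapper.__name__)  # "base" - metadata copied from the unwrapped function
print(wrapper(2))        # 3
```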
jorisroovers/gitlint | gitlint/user_rules.py | find_rule_classes | def find_rule_classes(extra_path):
"""
Searches a given directory or python module for rule classes. This is done by
adding the directory path to the python path, importing the modules and then finding
any Rule class in those modules.
:param extra_path: absolute directory or file path to search for rule classes
:return: The list of rule classes that are found in the given directory or module
"""
files = []
modules = []
if os.path.isfile(extra_path):
files = [os.path.basename(extra_path)]
directory = os.path.dirname(extra_path)
elif os.path.isdir(extra_path):
files = os.listdir(extra_path)
directory = extra_path
else:
raise UserRuleError(u"Invalid extra-path: {0}".format(extra_path))
# Filter out files that are not python modules
for filename in files:
if fnmatch.fnmatch(filename, '*.py'):
modules.append(os.path.splitext(filename)[0])
# No need to continue if there are no modules specified
if not modules:
return []
# Append the extra rules path to python path so that we can import them
sys.path.append(directory)
# Find all the rule classes in the found python files
rule_classes = []
for module in modules:
# Import the module
try:
importlib.import_module(module)
except Exception as e:
raise UserRuleError(u"Error while importing extra-path module '{0}': {1}".format(module, ustr(e)))
# Find all rule classes in the module. We do this my inspecting all members of the module and checking
# 1) is it a class, if not, skip
# 2) is the parent path the current module. If not, we are dealing with an imported class, skip
# 3) is it a subclass of rule
rule_classes.extend([clazz for _, clazz in inspect.getmembers(sys.modules[module])
if
inspect.isclass(clazz) and # check isclass to ensure clazz.__module__ exists
clazz.__module__ == module and # ignore imported classes
(issubclass(clazz, rules.LineRule) or issubclass(clazz, rules.CommitRule))])
# validate that the rule classes are valid user-defined rules
for rule_class in rule_classes:
assert_valid_rule_class(rule_class)
return rule_classes | python | def find_rule_classes(extra_path):
"""
Searches a given directory or python module for rule classes. This is done by
adding the directory path to the python path, importing the modules and then finding
any Rule class in those modules.
:param extra_path: absolute directory or file path to search for rule classes
:return: The list of rule classes that are found in the given directory or module
"""
files = []
modules = []
if os.path.isfile(extra_path):
files = [os.path.basename(extra_path)]
directory = os.path.dirname(extra_path)
elif os.path.isdir(extra_path):
files = os.listdir(extra_path)
directory = extra_path
else:
raise UserRuleError(u"Invalid extra-path: {0}".format(extra_path))
# Filter out files that are not python modules
for filename in files:
if fnmatch.fnmatch(filename, '*.py'):
modules.append(os.path.splitext(filename)[0])
# No need to continue if there are no modules specified
if not modules:
return []
# Append the extra rules path to python path so that we can import them
sys.path.append(directory)
# Find all the rule classes in the found python files
rule_classes = []
for module in modules:
# Import the module
try:
importlib.import_module(module)
except Exception as e:
raise UserRuleError(u"Error while importing extra-path module '{0}': {1}".format(module, ustr(e)))
# Find all rule classes in the module. We do this my inspecting all members of the module and checking
# 1) is it a class, if not, skip
# 2) is the parent path the current module. If not, we are dealing with an imported class, skip
# 3) is it a subclass of rule
rule_classes.extend([clazz for _, clazz in inspect.getmembers(sys.modules[module])
if
inspect.isclass(clazz) and # check isclass to ensure clazz.__module__ exists
clazz.__module__ == module and # ignore imported classes
(issubclass(clazz, rules.LineRule) or issubclass(clazz, rules.CommitRule))])
# validate that the rule classes are valid user-defined rules
for rule_class in rule_classes:
assert_valid_rule_class(rule_class)
return rule_classes | [
"def",
"find_rule_classes",
"(",
"extra_path",
")",
":",
"files",
"=",
"[",
"]",
"modules",
"=",
"[",
"]",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"extra_path",
")",
":",
"files",
"=",
"[",
"os",
".",
"path",
".",
"basename",
"(",
"extra_path",
")",
"]",
"directory",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"extra_path",
")",
"elif",
"os",
".",
"path",
".",
"isdir",
"(",
"extra_path",
")",
":",
"files",
"=",
"os",
".",
"listdir",
"(",
"extra_path",
")",
"directory",
"=",
"extra_path",
"else",
":",
"raise",
"UserRuleError",
"(",
"u\"Invalid extra-path: {0}\"",
".",
"format",
"(",
"extra_path",
")",
")",
"# Filter out files that are not python modules",
"for",
"filename",
"in",
"files",
":",
"if",
"fnmatch",
".",
"fnmatch",
"(",
"filename",
",",
"'*.py'",
")",
":",
"modules",
".",
"append",
"(",
"os",
".",
"path",
".",
"splitext",
"(",
"filename",
")",
"[",
"0",
"]",
")",
"# No need to continue if there are no modules specified",
"if",
"not",
"modules",
":",
"return",
"[",
"]",
"# Append the extra rules path to python path so that we can import them",
"sys",
".",
"path",
".",
"append",
"(",
"directory",
")",
"# Find all the rule classes in the found python files",
"rule_classes",
"=",
"[",
"]",
"for",
"module",
"in",
"modules",
":",
"# Import the module",
"try",
":",
"importlib",
".",
"import_module",
"(",
"module",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"UserRuleError",
"(",
"u\"Error while importing extra-path module '{0}': {1}\"",
".",
"format",
"(",
"module",
",",
"ustr",
"(",
"e",
")",
")",
")",
"# Find all rule classes in the module. We do this my inspecting all members of the module and checking",
"# 1) is it a class, if not, skip",
"# 2) is the parent path the current module. If not, we are dealing with an imported class, skip",
"# 3) is it a subclass of rule",
"rule_classes",
".",
"extend",
"(",
"[",
"clazz",
"for",
"_",
",",
"clazz",
"in",
"inspect",
".",
"getmembers",
"(",
"sys",
".",
"modules",
"[",
"module",
"]",
")",
"if",
"inspect",
".",
"isclass",
"(",
"clazz",
")",
"and",
"# check isclass to ensure clazz.__module__ exists",
"clazz",
".",
"__module__",
"==",
"module",
"and",
"# ignore imported classes",
"(",
"issubclass",
"(",
"clazz",
",",
"rules",
".",
"LineRule",
")",
"or",
"issubclass",
"(",
"clazz",
",",
"rules",
".",
"CommitRule",
")",
")",
"]",
")",
"# validate that the rule classes are valid user-defined rules",
"for",
"rule_class",
"in",
"rule_classes",
":",
"assert_valid_rule_class",
"(",
"rule_class",
")",
"return",
"rule_classes"
] | Searches a given directory or python module for rule classes. This is done by
adding the directory path to the python path, importing the modules and then finding
any Rule class in those modules.
:param extra_path: absolute directory or file path to search for rule classes
:return: The list of rule classes that are found in the given directory or module | [
"Searches",
"a",
"given",
"directory",
"or",
"python",
"module",
"for",
"rule",
"classes",
".",
"This",
"is",
"done",
"by",
"adding",
"the",
"directory",
"path",
"to",
"the",
"python",
"path",
"importing",
"the",
"modules",
"and",
"then",
"finding",
"any",
"Rule",
"class",
"in",
"those",
"modules",
"."
] | 6248bd6cbc20c1be3bb6d196a5ec0425af99733b | https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/user_rules.py#L16-L73 | train |
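`find_rule_classes` above discovers `LineRule`/`CommitRule` subclasses in the modules found under `extra-path`. Below is a sketch of the kind of user-defined rule module it would pick up; class and attribute names follow gitlint's documented CommitRule interface, while the check itself is invented.

```python
# my_rules.py - example module of the kind find_rule_classes discovers when
# its directory is passed as extra-path.  The rule body is an example only.
from gitlint.rules import CommitRule, RuleViolation

class SignedOffBy(CommitRule):
    """Commit body must contain a Signed-Off-By line."""
    name = "body-requires-signed-off-by"
    id = "UC1"

    def validate(self, commit):
        for line in commit.message.body:
            if line.startswith("Signed-Off-By"):
                return []
        return [RuleViolation(self.id, "Body does not contain a 'Signed-Off-By' line", line_nr=1)]
```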
jorisroovers/gitlint | qa/base.py | ustr | def ustr(obj):
""" Python 2 and 3 utility method that converts an obj to unicode in python 2 and to a str object in python 3"""
if sys.version_info[0] == 2:
# If we are getting a string, then do an explicit decode
# else, just call the unicode method of the object
if type(obj) in [str, basestring]: # pragma: no cover # noqa
return unicode(obj, DEFAULT_ENCODING) # pragma: no cover # noqa
else:
return unicode(obj) # pragma: no cover # noqa
else:
if type(obj) in [bytes]:
return obj.decode(DEFAULT_ENCODING)
else:
return str(obj) | python | def ustr(obj):
""" Python 2 and 3 utility method that converts an obj to unicode in python 2 and to a str object in python 3"""
if sys.version_info[0] == 2:
# If we are getting a string, then do an explicit decode
# else, just call the unicode method of the object
if type(obj) in [str, basestring]: # pragma: no cover # noqa
return unicode(obj, DEFAULT_ENCODING) # pragma: no cover # noqa
else:
return unicode(obj) # pragma: no cover # noqa
else:
if type(obj) in [bytes]:
return obj.decode(DEFAULT_ENCODING)
else:
return str(obj) | [
"def",
"ustr",
"(",
"obj",
")",
":",
"if",
"sys",
".",
"version_info",
"[",
"0",
"]",
"==",
"2",
":",
"# If we are getting a string, then do an explicit decode",
"# else, just call the unicode method of the object",
"if",
"type",
"(",
"obj",
")",
"in",
"[",
"str",
",",
"basestring",
"]",
":",
"# pragma: no cover # noqa",
"return",
"unicode",
"(",
"obj",
",",
"DEFAULT_ENCODING",
")",
"# pragma: no cover # noqa",
"else",
":",
"return",
"unicode",
"(",
"obj",
")",
"# pragma: no cover # noqa",
"else",
":",
"if",
"type",
"(",
"obj",
")",
"in",
"[",
"bytes",
"]",
":",
"return",
"obj",
".",
"decode",
"(",
"DEFAULT_ENCODING",
")",
"else",
":",
"return",
"str",
"(",
"obj",
")"
] | Python 2 and 3 utility method that converts an obj to unicode in python 2 and to a str object in python 3 | [
"Python",
"2",
"and",
"3",
"utility",
"method",
"that",
"converts",
"an",
"obj",
"to",
"unicode",
"in",
"python",
"2",
"and",
"to",
"a",
"str",
"object",
"in",
"python",
"3"
] | 6248bd6cbc20c1be3bb6d196a5ec0425af99733b | https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/qa/base.py#L21-L34 | train |
jorisroovers/gitlint | gitlint/config.py | LintConfig.get_rule_option | def get_rule_option(self, rule_name_or_id, option_name):
""" Returns the value of a given option for a given rule. LintConfigErrors will be raised if the
rule or option don't exist. """
option = self._get_option(rule_name_or_id, option_name)
return option.value | python | def get_rule_option(self, rule_name_or_id, option_name):
""" Returns the value of a given option for a given rule. LintConfigErrors will be raised if the
rule or option don't exist. """
option = self._get_option(rule_name_or_id, option_name)
return option.value | [
"def",
"get_rule_option",
"(",
"self",
",",
"rule_name_or_id",
",",
"option_name",
")",
":",
"option",
"=",
"self",
".",
"_get_option",
"(",
"rule_name_or_id",
",",
"option_name",
")",
"return",
"option",
".",
"value"
] | Returns the value of a given option for a given rule. LintConfigErrors will be raised if the
rule or option don't exist. | [
"Returns",
"the",
"value",
"of",
"a",
"given",
"option",
"for",
"a",
"given",
"rule",
".",
"LintConfigErrors",
"will",
"be",
"raised",
"if",
"the",
"rule",
"or",
"option",
"don",
"t",
"exist",
"."
] | 6248bd6cbc20c1be3bb6d196a5ec0425af99733b | https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/config.py#L207-L211 | train |
jorisroovers/gitlint | gitlint/config.py | LintConfig.set_rule_option | def set_rule_option(self, rule_name_or_id, option_name, option_value):
""" Attempts to set a given value for a given option for a given rule.
LintConfigErrors will be raised if the rule or option don't exist or if the value is invalid. """
option = self._get_option(rule_name_or_id, option_name)
try:
option.set(option_value)
except options.RuleOptionError as e:
msg = u"'{0}' is not a valid value for option '{1}.{2}'. {3}."
raise LintConfigError(msg.format(option_value, rule_name_or_id, option_name, ustr(e))) | python | def set_rule_option(self, rule_name_or_id, option_name, option_value):
""" Attempts to set a given value for a given option for a given rule.
LintConfigErrors will be raised if the rule or option don't exist or if the value is invalid. """
option = self._get_option(rule_name_or_id, option_name)
try:
option.set(option_value)
except options.RuleOptionError as e:
msg = u"'{0}' is not a valid value for option '{1}.{2}'. {3}."
raise LintConfigError(msg.format(option_value, rule_name_or_id, option_name, ustr(e))) | [
"def",
"set_rule_option",
"(",
"self",
",",
"rule_name_or_id",
",",
"option_name",
",",
"option_value",
")",
":",
"option",
"=",
"self",
".",
"_get_option",
"(",
"rule_name_or_id",
",",
"option_name",
")",
"try",
":",
"option",
".",
"set",
"(",
"option_value",
")",
"except",
"options",
".",
"RuleOptionError",
"as",
"e",
":",
"msg",
"=",
"u\"'{0}' is not a valid value for option '{1}.{2}'. {3}.\"",
"raise",
"LintConfigError",
"(",
"msg",
".",
"format",
"(",
"option_value",
",",
"rule_name_or_id",
",",
"option_name",
",",
"ustr",
"(",
"e",
")",
")",
")"
] | Attempts to set a given value for a given option for a given rule.
LintConfigErrors will be raised if the rule or option don't exist or if the value is invalid. | [
"Attempts",
"to",
"set",
"a",
"given",
"value",
"for",
"a",
"given",
"option",
"for",
"a",
"given",
"rule",
".",
"LintConfigErrors",
"will",
"be",
"raised",
"if",
"the",
"rule",
"or",
"option",
"don",
"t",
"exist",
"or",
"if",
"the",
"value",
"is",
"invalid",
"."
] | 6248bd6cbc20c1be3bb6d196a5ec0425af99733b | https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/config.py#L213-L221 | train |
jorisroovers/gitlint | gitlint/config.py | LintConfigBuilder.set_from_config_file | def set_from_config_file(self, filename):
""" Loads lint config from a ini-style config file """
if not os.path.exists(filename):
raise LintConfigError(u"Invalid file path: {0}".format(filename))
self._config_path = os.path.abspath(filename)
try:
parser = ConfigParser()
parser.read(filename)
for section_name in parser.sections():
for option_name, option_value in parser.items(section_name):
self.set_option(section_name, option_name, ustr(option_value))
except ConfigParserError as e:
raise LintConfigError(ustr(e)) | python | def set_from_config_file(self, filename):
""" Loads lint config from a ini-style config file """
if not os.path.exists(filename):
raise LintConfigError(u"Invalid file path: {0}".format(filename))
self._config_path = os.path.abspath(filename)
try:
parser = ConfigParser()
parser.read(filename)
for section_name in parser.sections():
for option_name, option_value in parser.items(section_name):
self.set_option(section_name, option_name, ustr(option_value))
except ConfigParserError as e:
raise LintConfigError(ustr(e)) | [
"def",
"set_from_config_file",
"(",
"self",
",",
"filename",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"filename",
")",
":",
"raise",
"LintConfigError",
"(",
"u\"Invalid file path: {0}\"",
".",
"format",
"(",
"filename",
")",
")",
"self",
".",
"_config_path",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"filename",
")",
"try",
":",
"parser",
"=",
"ConfigParser",
"(",
")",
"parser",
".",
"read",
"(",
"filename",
")",
"for",
"section_name",
"in",
"parser",
".",
"sections",
"(",
")",
":",
"for",
"option_name",
",",
"option_value",
"in",
"parser",
".",
"items",
"(",
"section_name",
")",
":",
"self",
".",
"set_option",
"(",
"section_name",
",",
"option_name",
",",
"ustr",
"(",
"option_value",
")",
")",
"except",
"ConfigParserError",
"as",
"e",
":",
"raise",
"LintConfigError",
"(",
"ustr",
"(",
"e",
")",
")"
] | Loads lint config from a ini-style config file | [
"Loads",
"lint",
"config",
"from",
"a",
"ini",
"-",
"style",
"config",
"file"
] | 6248bd6cbc20c1be3bb6d196a5ec0425af99733b | https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/config.py#L310-L324 | train |
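A sketch of driving `LintConfigBuilder.set_from_config_file` with an ini-style file, as described in the record above. Section and option names follow gitlint's built-in rules; the file content and the builder usage here are an example, not taken from the dataset.

```python
from gitlint.config import LintConfigBuilder  # import path assumed

# Write a small ini-style gitlint config (example content).
with open("gitlint.ini", "w") as f:
    f.write("[general]\nverbosity = 2\n\n[title-max-length]\nline-length = 50\n")

builder = LintConfigBuilder()
builder.set_from_config_file("gitlint.ini")
config = builder.build()
print(config.get_rule_option("title-max-length", "line-length"))  # 50
```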
jorisroovers/gitlint | gitlint/config.py | LintConfigBuilder.build | def build(self, config=None):
""" Build a real LintConfig object by normalizing and validating the options that were previously set on this
factory. """
# If we are passed a config object, then rebuild that object instead of building a new lintconfig object from
# scratch
if not config:
config = LintConfig()
config._config_path = self._config_path
# Set general options first as this might change the behavior or validity of the other options
general_section = self._config_blueprint.get('general')
if general_section:
for option_name, option_value in general_section.items():
config.set_general_option(option_name, option_value)
for section_name, section_dict in self._config_blueprint.items():
for option_name, option_value in section_dict.items():
# Skip over the general section, as we've already done that above
if section_name != "general":
config.set_rule_option(section_name, option_name, option_value)
return config | python | def build(self, config=None):
""" Build a real LintConfig object by normalizing and validating the options that were previously set on this
factory. """
# If we are passed a config object, then rebuild that object instead of building a new lintconfig object from
# scratch
if not config:
config = LintConfig()
config._config_path = self._config_path
# Set general options first as this might change the behavior or validity of the other options
general_section = self._config_blueprint.get('general')
if general_section:
for option_name, option_value in general_section.items():
config.set_general_option(option_name, option_value)
for section_name, section_dict in self._config_blueprint.items():
for option_name, option_value in section_dict.items():
# Skip over the general section, as we've already done that above
if section_name != "general":
config.set_rule_option(section_name, option_name, option_value)
return config | [
"def",
"build",
"(",
"self",
",",
"config",
"=",
"None",
")",
":",
"# If we are passed a config object, then rebuild that object instead of building a new lintconfig object from",
"# scratch",
"if",
"not",
"config",
":",
"config",
"=",
"LintConfig",
"(",
")",
"config",
".",
"_config_path",
"=",
"self",
".",
"_config_path",
"# Set general options first as this might change the behavior or validity of the other options",
"general_section",
"=",
"self",
".",
"_config_blueprint",
".",
"get",
"(",
"'general'",
")",
"if",
"general_section",
":",
"for",
"option_name",
",",
"option_value",
"in",
"general_section",
".",
"items",
"(",
")",
":",
"config",
".",
"set_general_option",
"(",
"option_name",
",",
"option_value",
")",
"for",
"section_name",
",",
"section_dict",
"in",
"self",
".",
"_config_blueprint",
".",
"items",
"(",
")",
":",
"for",
"option_name",
",",
"option_value",
"in",
"section_dict",
".",
"items",
"(",
")",
":",
"# Skip over the general section, as we've already done that above",
"if",
"section_name",
"!=",
"\"general\"",
":",
"config",
".",
"set_rule_option",
"(",
"section_name",
",",
"option_name",
",",
"option_value",
")",
"return",
"config"
] | Build a real LintConfig object by normalizing and validating the options that were previously set on this
factory. | [
"Build",
"a",
"real",
"LintConfig",
"object",
"by",
"normalizing",
"and",
"validating",
"the",
"options",
"that",
"were",
"previously",
"set",
"on",
"this",
"factory",
"."
] | 6248bd6cbc20c1be3bb6d196a5ec0425af99733b | https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/config.py#L326-L349 | train |