def add(self, thread: Thread) -> 'ThreadCollection':
    """
    Add a thread to the collection
    :param thread: thread to add
    :returns: this thread collection instance
    """
    self.threads.append(thread)
    return self

def terminate(self, *args, **kwargs) -> 'ThreadCollection':
    """
    Call terminate() on all threads that have this method
    :returns: this thread collection instance
    """
    for thread in self.threads:
        try:
            thread.terminate(*args, **kwargs)
        except AttributeError:
            pass
    return self

def close(self) -> None:
    """
    Close this stream. Further write()s and flush()es will raise a ValueError.
    No-op if invoked multiple times
    """
    self.is_closed = True

def read_lines(path: str, delete_empty_lines: bool = True,
               encoding: str = 'utf-8') -> tp.List[str]:
    """
    Read lines from a particular file, removing end-of-line characters and optionally
    empty lines. Additionally, whitespace (including end-of-line characters) will be
    stripped from both ends of each line.
    :param path: path of file to read
    :param delete_empty_lines: set to False if empty lines are not to be removed
    :param encoding: encoding to read the file with
    :return: each line as a separate entry
    """
    with codecs.open(path, 'r', encoding) as f_in:
        lines = [line.strip() for line in f_in.readlines()]
    if delete_empty_lines:
        lines = [line for line in lines if line]
    return lines

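A quick usage sketch (the file name and contents here are hypothetical; assumes read_lines as defined above):

# given a file notes.txt containing "  alpha \n\n beta\n"
lines = read_lines('notes.txt')
assert lines == ['alpha', 'beta']
# keep the empty lines as well:
all_lines = read_lines('notes.txt', delete_empty_lines=False)
assert all_lines == ['alpha', '', 'beta']
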
def make_noncolliding_name(path: str,
                           exists_checker: Predicate[str] = os.path.exists) -> str:
    """
    Try to make a noncolliding name in such a way that .1, .2, .3, and so on will be appended
    to the file name right before the extension (yielding test.1.txt) or at the end of the
    file name if the extension isn't present
    :param path: path of the file that must not exist
    :param exists_checker: a callable to check whether the file exists
    :return: name mutated in such a way that exists_checker returned False on it
    """
    path, filename = os.path.split(path)
    if '.' in filename:
        *filename, extension = filename.split('.')
        filename = '.'.join(filename)
        extension = '.' + extension
    else:
        extension = ''
    addition = ''
    addition_counter = 0
    while exists_checker(os.path.join(path, filename + addition + extension)):
        addition_counter += 1
        addition = '.' + str(addition_counter)
    return os.path.join(path, filename + addition + extension)

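A minimal sketch of the renaming behaviour, using a stand-in exists_checker instead of the real file system:

taken = {'test.txt', 'test.1.txt'}
name = make_noncolliding_name('test.txt', exists_checker=lambda p: p in taken)
assert name == 'test.2.txt'
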
def write_to_file(path: str, data: tp.Union[bytes, str],
                  encoding: tp.Optional[str] = None) -> None:
    """
    Write provided content as a file, applying given encoding (or writing raw bytes,
    if none is given)
    :param path: Path to put the file under
    :param data: Data to write. Must be bytes if no encoding is given, str otherwise
    :param encoding: Encoding. Default is None, which means no encoding (bytes will be written)
    """
    if encoding is None:
        file = open(path, 'wb')
    else:
        file = codecs.open(path, 'wb', encoding)
    try:
        file.write(data)
    finally:
        file.close()

def read_in_file(path: str, encoding: tp.Optional[str] = None,
                 default: tp.Optional[tp.Union[bytes, str]] = _NOTSET) -> tp.Union[bytes, str]:
    """
    Opens a file for reading, reads it in, converts to given encoding (or returns as bytes
    if not given), and closes it.
    :param path: path of file to read
    :param encoding: optional encoding. If None (the default), contents will be returned
        as bytes
    :param default: value to return when the file does not exist. If not given, a
        FileNotFoundError will be raised instead
    :return: file content, either decoded as a str, or as bytes if no encoding was given
    :raises FileNotFoundError: file did not exist and default was not set
    """
    if os.path.isdir(path):
        if default is not _NOTSET:
            return default
        raise FileNotFoundError('%s found and is a directory' % (path,))
    try:
        if encoding is None:
            file = open(path, 'rb')
        else:
            file = codecs.open(path, 'r', encoding)
    except FileNotFoundError:
        if default is not _NOTSET:
            return default
        raise
    try:
        return file.read()
    finally:
        file.close()

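These two helpers pair naturally; a short sketch (hypothetical file names):

write_to_file('greeting.txt', 'hello', encoding='utf-8')
assert read_in_file('greeting.txt', encoding='utf-8') == 'hello'
# a missing file falls back to the default instead of raising:
assert read_in_file('no-such-file.txt', default=b'') == b''
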
def read_re_sub_and_write(path: str, pattern: tp.Union[tp.Pattern, str],
                          repl: tp.Union[str, tp.Callable[[tp.Any], str]]) -> None:
    """
    Read a text file, treat with re.sub and write the contents.
    Note that this is not thread or multiprocess safe.
    :param path: path of file to treat
    :param pattern: regexp compiled pattern or a string, a pattern to match the file contents
    :param repl: string or a callable(re.Match)->str to replace the contents
    """
    with open(path, 'r') as f_in:
        data = f_in.read()
    if isinstance(pattern, str):
        data = re.sub(pattern, repl, data)
    else:
        data = pattern.sub(repl, data)
    with open(path, 'w') as f_out:
        f_out.write(data)

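For instance, bumping a version string in place (the file name and patterns are hypothetical):

read_re_sub_and_write('setup.py', r"version='[^']*'", "version='2.0.0'")
# a callable works too, receiving the re.Match object:
read_re_sub_and_write('counter.txt', r'\d+', lambda m: str(int(m.group(0)) + 1))
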
def try_unlink(path: str) -> bool:
    """
    A syntactic sugar for:
    >>> try:
    >>>     os.unlink(path)
    >>>     return True
    >>> except FileNotFoundError:
    >>>     return False
    Note that if path is a directory, rmtree from shutil will be called on it, and
    any OSErrors will report the deletion as False
    :param path: path of file to delete
    :return: whether the deletion happened
    """
    try:
        if os.path.isdir(path):
            shutil.rmtree(path)
        else:
            os.unlink(path)
    except OSError:
        return False
    return True

def find_files(path: str, wildcard: str = r'(.*)',
               prefix_with: tp.Optional[str] = None,
               scan_subdirectories: bool = True,
               apply_wildcard_to_entire_path: bool = False,
               prefix_with_path: bool = True) -> tp.Iterator[str]:
    """
    Look at given path's files and all subdirectories and return an iterator of
    file names (paths included) that conform to given wildcard.
    Note that the wildcard is applied only to the file name if apply_wildcard_to_entire_path
    is False, else the wildcard is applied to the entire path (including the application of
    prefix_with!).
    Files will be additionally prefixed with path, but only if prefix_with_path is True
    .. warning:: Note that this will try to match only the start of the path. For a complete
        match remember to put a $ at the end of the string!
    :param path: path to look into.
    :param wildcard: a regular expression to match
    :param prefix_with: an optional path component to prefix before the filename with
        os.path.join
    :param scan_subdirectories: whether to scan subdirectories
    :param apply_wildcard_to_entire_path: whether to take the entire relative path into account
        when checking wildcard
    :param prefix_with_path: whether to add path to the resulting path
    :return: paths with the files. They will be relative paths, relative to path
    """
    if prefix_with_path:
        prefix_with = _cond_join(prefix_with, path)
    for filename in os.listdir(path):
        if scan_subdirectories and os.path.isdir(os.path.join(path, filename)):
            new_prefix = _cond_join(prefix_with, filename)
            yield from find_files(os.path.join(path, filename), wildcard,
                                  prefix_with=new_prefix,
                                  apply_wildcard_to_entire_path=apply_wildcard_to_entire_path,
                                  prefix_with_path=False)
        else:
            if apply_wildcard_to_entire_path:
                fn_path = _cond_join(prefix_with, filename)
            else:
                fn_path = filename
            if re.match(wildcard, fn_path):
                yield _cond_join(prefix_with, filename)

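A usage sketch (the directory layout is hypothetical; note the $ anchor, as the docstring warns that only the start of the path is matched):

# find all .txt files anywhere under ./docs
for p in find_files('docs', r'.*\.txt$', apply_wildcard_to_entire_path=True):
    print(p)   # e.g. docs/readme.txt, docs/sub/notes.txt
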
def default_return(v: tp.Any):
    """
    Makes the decorated function return v instead of None, if it would return None.
    If it would return something else, that else is returned.
    E.g.:
    >>> @default_return(5)
    >>> def get_value(v):
    >>>     return v
    >>> assert get_value(None) == 5
    >>> assert get_value(2) == 2
    :param v: value to return if calling the function would return None
    """
    def outer(fun):
        @wraps(fun)
        def inner(*args, **kwargs):
            v_a = fun(*args, **kwargs)
            if v_a is None:
                return v
            else:
                return v_a
        return inner
    return outer

def cache_memoize(cache_duration: float, time_getter: tp.Callable[[], float] = time.monotonic):
    """
    A thread-safe memoizer that memoizes the return value for at most cache_duration seconds.
    :param cache_duration: cache validity, in seconds
    :param time_getter: a callable without arguments that yields us a time marker
    Usage example:
    >>> @cache_memoize(10)
    >>> def expensive_but_idempotent_operation(a):
    >>>     ...
    >>> a = expensive_but_idempotent_operation(2)
    >>> b = expensive_but_idempotent_operation(2)  # is much faster than computing the value anew
    >>> time.sleep(10)
    >>> c = expensive_but_idempotent_operation(2)  # function body is computed anew
    """
    from satella.coding.concurrent import MonitorDict, Monitor

    def outer(fun):
        fun.memoize_timestamps = MonitorDict()
        fun.memoize_values = {}

        @wraps(fun)
        def inner(*args, **kwargs):
            now = time_getter()
            with Monitor.acquire(fun.memoize_timestamps):
                if args in fun.memoize_timestamps:
                    ts = fun.memoize_timestamps[args]
                    if now - ts > cache_duration:
                        with Monitor.release(fun.memoize_timestamps):
                            v = fun(*args, **kwargs)
                        fun.memoize_timestamps[args] = now
                        fun.memoize_values[args] = v
                else:
                    with Monitor.release(fun.memoize_timestamps):
                        v = fun(*args, **kwargs)
                    fun.memoize_timestamps[args] = now
                    fun.memoize_values[args] = v
                return fun.memoize_values[args]
        return inner
    return outer

def memoize(fun):
    """
    A thread safe memoizer based on the function's ONLY positional arguments.
    Note that this will make your function execute in at most one thread at a time;
    the remaining ones will have to wait.
    Usage example:
    >>> @memoize
    >>> def expensive_but_idempotent_operation(a):
    >>>     ...
    >>> a = expensive_but_idempotent_operation(2)
    >>> b = expensive_but_idempotent_operation(2)  # is much faster than computing the value anew
    """
    from satella.coding.concurrent.monitor import MonitorDict, Monitor
    fun.memoizer = MonitorDict()

    @wraps(fun)
    def inner(*args, **kwargs):
        with Monitor.acquire(fun.memoizer):
            if args in fun.memoizer:
                return fun.memoizer[args]
            else:
                with Monitor.release(fun.memoizer):
                    v = fun(*args, **kwargs)
                fun.memoizer[args] = v
                return v
    return inner

def has_keys(keys: tp.List[str]):
    """
    Build a precondition checker asserting that a dictionary has the given keys. Will raise
    PreconditionError if it doesn't.
    This outputs a callable that accepts a dict and returns True if it has all the
    necessary keys.
    This is meant to be used in conjunction with @precondition
    .. deprecated:: 2.14.22
    :param keys: list of keys to expect
    """
    warnings.warn('This is deprecated and will be removed in Satella 3.0. '
                  'Use satella.coding.predicates.has_keys instead', DeprecationWarning)

    def inner(dictionary: dict) -> bool:
        for key in keys:
            if key not in dictionary:
                raise PreconditionError('Key %s not found in dict' % (key,))
        return True
    return inner

def short_none(clb: tp.Union[Expression, tp.Callable[[T], U]]) -> tp.Callable[
        [tp.Optional[T]], tp.Optional[U]]:
    """
    Accept a callable. Return a callable that executes it only if passed a non-None arg,
    and returns its result. If passed a None, return a None.
    The callable can also be a string; in that case it will be appended to "lambda x: "
    and eval'd
    :param clb: callable/1->1
    :return: a modified callable
    """
    if isinstance(clb, str):
        q = dict(globals())
        exec('_callable = lambda x: ' + clb, q)
        clb = q['_callable']

    @wraps(clb)
    def inner(arg: tp.Optional[T]) -> tp.Optional[U]:
        if arg is None:
            return None
        else:
            return clb(arg)
    return inner

def call_method_on_exception(exc_classes, method_name, *args, **kwargs):
    """
    A decorator for instance methods to call a provided method with given arguments
    if the decorated call fails.
    Example use:
    >>> class Test:
    >>>     def close(self):
    >>>         ...
    >>>     @call_method_on_exception(ValueError, 'close')
    >>>     def read(self, bytes):
    >>>         raise ValueError()
    Exception class determination is done by isinstance, so you can go wild with metaclassing.
    The exception will be swallowed. The return value will be taken from the called method.
    Note that the called method must be an instance method.
    :param exc_classes: an exception class, or a tuple of exception classes, to react to
    :param method_name: name of the method. It must be gettable by getattr
    :param args: arguments to pass to given method
    :param kwargs: keyword arguments to pass to given method
    """
    def outer(fun):
        @wraps(fun)
        def inner(self, *f_args, **f_kwargs):
            try:
                return fun(self, *f_args, **f_kwargs)
            except Exception as e:
                if isinstance(e, exc_classes):
                    return getattr(self, method_name)(*args, **kwargs)
                else:
                    raise
        return inner
    return outer

def swap_and_clear_dirty(self) -> tp.Dict[K, V]:
    """
    Returns this data, clears self and sets dirty to False.
    After this is called, this dict will be considered empty.
    :return: a plain, normal Python dictionary is returned
    """
    a = self.data
    self.data = {}
    self.dirty = False
    return a

def copy_and_clear_dirty(self) -> tp.Dict[K, V]:
    """
    Returns a copy of this data and sets dirty to False
    :return: a plain, normal Python dictionary is returned
    """
    a = self.data.copy()
    self.dirty = False
    return a

def done(self) -> None:
    """
    Called when the user is done using given TwoWayDictionary.
    Internally this will break the reference cycle, and enable Python GC to collect the objects.
    """
    self.reverse.reverse = None
    self.reverse = None

def reverse(self) -> tp.MutableMapping[V, K]:
    """
    Return a reverse mapping. Reverse mapping is updated as soon as an operation is done.
    """
    return self._reverse

def add(self, item: T) -> None:
    """
    Add a single element to the ranking and recalculate it
    """
    index = self.ranking.add(item)
    for position, elem in enumerate(self.ranking[index:], start=index):
        self.element_to_position[id(elem)] = position

def remove(self, item: T) -> None:
    """
    Remove a single element from the ranking and recalculate it
    """
    index = self.ranking.index(item)
    self.ranking.remove(item)
    for position, elem in enumerate(self.ranking[index:], start=index):
        self.element_to_position[id(elem)] = position

def run_as_future(fun):
    """
    A decorator that accepts a function that should be executed in a separate thread,
    returning a Future instead of its result, which enables watching the function for
    completion.
    The created thread will be non-daemonic
    Example usage:
    >>> @run_as_future
    >>> def parse_a_file(x: str):
    >>>     ...
    >>> fut = parse_a_file('test.txt')
    >>> result = fut.result()
    """
    @wraps(fun)
    def inner(*args, **kwargs):
        fut = Future()
        fut.set_running_or_notify_cancel()

        def separate_target():
            try:
                fut.set_result(fun(*args, **kwargs))
            except Exception as e:
                fut.set_exception(e)
        Thread(target=separate_target).start()
        return fut
    return inner

def parallel_execute(callable_: tp.Callable[[T], Future],
                     args: tp.Iterable[T],
                     kwargs: tp.Iterable[dict] = infinite_iterator(return_factory=dict)):
    """
    Execute a number of calls to callable in parallel.
    Callable must be a function that accepts arguments and returns a plain Python future.
    Return will be an iterator that will yield the value of every future,
    or an instance of the exception, if any of the calls raised.
    :param callable_: a callable that returns futures
    :param args: an iterable of arguments to provide to the callable
    :param kwargs: an iterable of keyword arguments to provide to the callable
    :return: an iterator yielding every value (or exception instance, if one was raised)
        of the future
    """
    futures = [callable_(*arg, **kwarg) for arg, kwarg in zip(args, kwargs)]
    for future in futures:
        try:
            yield future.result()
        except Exception as e:
            yield e

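A short sketch of how this composes with run_as_future from above (fetch is a hypothetical blocking function):

@run_as_future
def fetch(url):
    ...   # any blocking work goes here

# all calls are issued first, then results (or exception instances) are collected in order
for result in parallel_execute(fetch, [('http://a',), ('http://b',)]):
    print(result)
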
def import_class(path: str) -> type:
    """
    Import a class identified with given module path and class name
    :param path: path, eg. subprocess.Popen
    :return: imported class
    """
    *path, classname = path.split('.')
    import_path = '.'.join(path)
    try:
        return getattr(importlib.import_module(import_path), classname)
    except AttributeError as e:
        raise ImportError('%s not found in %s' % (classname, import_path)) from e

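For example, importing a stdlib class by its dotted path:

Popen = import_class('subprocess.Popen')
assert Popen is __import__('subprocess').Popen
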
def import_from(path: tp.List[str], package_prefix: str, all_: tp.List[str],
                locals_: tp.Dict[str, tp.Any], recursive: bool = True,
                fail_on_attributerror: bool = True, create_all: bool = True,
                skip_single_underscores: bool = True,
                skip_not_having_all: bool = False) -> None:
    """
    Import everything from a given module, appending the imported names to all_.
    This will examine __all__ of a given module (if it has any; else it will just import
    everything from it, which is probably a bad practice and will heavily pollute the
    namespace).
    As a side effect, this will equip all of your packages with __all__.
    :param path: module's __path__
    :param package_prefix: package prefix to import from. Use __name__
    :param all_: module's __all__ to append to
    :param recursive: whether to import packages as well
    :param fail_on_attributerror: whether to fail if a module reports something in its __all__
        that is physically not there (ie. getattr() raised AttributeError)
    :param locals_: module's locals, obtain them by calling locals() in the importing
        module's context
    :param create_all: whether to create artificial __all__'s for modules that don't have them
    :param skip_single_underscores: whether to refrain from importing things that are preceded
        with a single underscore. Pertains to modules, as well as items
    :param skip_not_having_all: skip modules not having an __all__ entry
    :raise AttributeError: module's __all__ contained an entry that was not in the module
    """
    for importer, modname, is_pkg in pkgutil.walk_packages(path, onerror=lambda x: None):
        if recursive and is_pkg:
            if modname.startswith('_') and skip_single_underscores:
                continue
            module = importlib.import_module(package_prefix + '.' + modname)
            try:
                mod_all = module.__all__
            except AttributeError:
                if skip_not_having_all:
                    continue
                mod_all = []
                if create_all:
                    module.__all__ = mod_all
            import_from([os.path.join(path[0], modname)], package_prefix + '.' + modname,
                        mod_all, module.__dict__, recursive=recursive,
                        fail_on_attributerror=fail_on_attributerror, create_all=create_all,
                        skip_not_having_all=skip_not_having_all,
                        skip_single_underscores=skip_single_underscores)
            locals_[modname] = module
            if modname not in all_:
                all_.append(modname)
        elif not is_pkg:
            module = importlib.import_module(package_prefix + '.' + modname)
            try:
                package_ref = module.__all__
            except AttributeError:
                warnings.warn('Module %s does not contain __all__, enumerating it instead' %
                              (package_prefix + '.' + modname,), RuntimeWarning)
                package_ref = dir(module)
            for item in package_ref:
                if item.startswith('_') and skip_single_underscores:
                    continue
                try:
                    locals_[item] = getattr(module, item)
                except AttributeError:
                    if fail_on_attributerror:
                        raise
                else:
                    if item not in all_:
                        all_.append(item)

def register_metric(cls):
    """
    Decorator to register your custom metrics
    """
    METRIC_NAMES_TO_CLASSES[cls.CLASS_NAME] = cls
    return cls

def remove_cancelled(self) -> None:
    """
    Remove its entries that are CancellableCallbacks and that were cancelled
    """
    if not self.has_cancelled_callbacks:
        return
    with DictDeleter(self.callables) as dd:
        for callable_ in dd:
            if isinstance(callable_, CancellableCallback) and not callable_:
                dd.delete()

def wrap_future(future: ResponseFuture) -> Future:
    """
    Convert a Cassandra future to a normal Python future.
    The returned future will be marked as running.
    The future itself is returned when it's already a Python future.
    :param future: cassandra future to wrap
    :return: a standard Python future
    """
    if isinstance(future, Future):
        return future
    fut = Future()
    fut.set_running_or_notify_cancel()
    future.add_callback(lambda result: fut.set_result(result))
    future.add_errback(lambda exception: fut.set_exception(exception))
    return fut

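A sketch of the intended call site (assumes a cassandra-driver Session named session; execute_async returns a ResponseFuture):

response_future = session.execute_async('SELECT * FROM users')
fut = wrap_future(response_future)
rows = fut.result()   # blocks until Cassandra responds
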
def assert_queue(self, queue_name: str) -> None:
    """
    Assure that we have a queue with a particular name in the dictionary
    """
    if queue_name not in self.subqueues:
        with self.subqueue_lock:
            if queue_name not in self.subqueues:  # double check for locking
                self.subqueues[queue_name] = queue.Queue()

def provide(self) -> dict:
    """
    Return your configuration, as a dict
    :raise ConfigurationError: on invalid configuration
    """
    return {}

def hang_until_sig(extra_signals: tp.Optional[tp.Sequence[int]] = None,
                   sleep_interval: float = 2) -> None:
    """
    Will hang until this process receives SIGTERM or SIGINT.
    If you pass extra signal IDs (signal.SIG*) with extra_signals,
    then this call will also release on those signals.
    Periodic sleeping with polling was chosen as the approach of choice, as pause() seemed
    to work a bit shakily multi-platform.
    :param extra_signals: a list of extra signals to listen to
    :param sleep_interval: amount of time to sleep between checking for the termination
        condition
    """
    global end
    extra_signals = extra_signals or ()
    old_term = signal.getsignal(signal.SIGTERM)
    old_int = signal.getsignal(signal.SIGINT)
    olds = []
    # Set the signal handlers
    signal.signal(signal.SIGTERM, __sighandler)
    signal.signal(signal.SIGINT, __sighandler)
    for s in extra_signals:
        olds.append(signal.getsignal(s))
        signal.signal(s, __sighandler)
    while not end:
        sleep(sleep_interval, True)
    # Restore the previous signal handlers
    signal.signal(signal.SIGTERM, old_term)
    signal.signal(signal.SIGINT, old_int)
    for s, old_handler in zip(extra_signals, olds):
        signal.signal(s, old_handler)
    end = False

def jsonify(data: tp.Any) -> tp.Optional[tp.Union[str, int, float, list, dict]]:
    """
    Convert any data to a value that's serializable via JSON.
    Objects that are JSONAble will have their to_json() method called.
    Note that enums will be converted to their value.
    As a last resort, str() will be called on the object, and if that fails
    it will have repr() called on it
    :param data: data to convert to a jsonable
    :return: JSON-able data
    """
    if data is None:
        v = None
    elif isinstance(data, (int, float, str)):
        v = data
    elif isinstance(data, enum.Enum):
        v = data.value
    elif isinstance(data, JSONAble):
        v = jsonify(data.to_json())
    elif isinstance(data, tp.Mapping):
        new_mapping = {}
        for key in data:
            new_mapping[jsonify(key)] = jsonify(data[key])
        v = new_mapping
    elif isinstance(data, (tp.Iterable, tp.Iterator)):
        v = [jsonify(elem) for elem in data]
    else:
        try:
            v = str(data)
        except TypeError:
            v = repr(data)
    return v

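A quick illustration of the conversions (the enum here is hypothetical):

import enum

class Color(enum.Enum):
    RED = 'red'

# enums collapse to their value, tuples to lists, mappings are converted recursively
assert jsonify({'color': Color.RED, 'sizes': (1, 2)}) == {'color': 'red', 'sizes': [1, 2]}
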
def linear_interpolate(series: tp.Sequence[tp.Tuple[K, U]], t: K,
                       clip: bool = False) -> U:
    """
    Given a series of (t_value, y_value) points sorted ascending by t, representing a
    function y=f(t), compute a linear approximation of f at t from the two closest points.
    t must be larger than or equal to t_min and smaller than or equal to t_max
    :param series: series of (t, y) sorted by t ascending
    :param t: t to compute the value for
    :param clip: if set to True, then for values t: t<t_min f(t_min) will be returned,
        and for values t: t>t_max f(t_max) will be returned
    :return: the interpolated value
    :raise ValueError: t was smaller than t_min or greater than t_max
    """
    if t < series[0][0]:
        if clip:
            return series[0][1]
        else:
            raise ValueError('t smaller than t_min')
    elif t > series[-1][0]:
        if clip:
            return series[-1][1]
        else:
            raise ValueError('t greater than t_max')
    if t == series[0][0]:
        v = series[0][1]
    else:
        i = bisect.bisect_left([y[0] for y in series], t) - 1
        if i == len(series) - 1:
            v = series[-1][1]
        else:
            t1, v1 = series[i]
            t2, v2 = series[i + 1]
            assert t1 <= t <= t2, 'Series not sorted!'
            v = (v2 - v1) / (t2 - t1) * (t - t1) + v1
    return v

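For instance:

series = [(0.0, 0.0), (1.0, 10.0), (2.0, 30.0)]
assert linear_interpolate(series, 0.5) == 5.0    # halfway between f(0)=0 and f(1)=10
assert linear_interpolate(series, 1.5) == 20.0   # halfway between f(1)=10 and f(2)=30
assert linear_interpolate(series, -1.0, clip=True) == 0.0
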
def random_word(length: int, choice: tp.Sequence[T] = string.ascii_lowercase,
                join_fun: tp.Callable[[tp.List[T]], T] = lambda args: ''.join(args)) -> \
        tp.Sequence[T]:
    """
    Build and return a random word of provided length.
    The word will be built by calling join_fun with a list of length elements picked
    at random from choice.
    Best used with strings. Provide a word length, and a string to choose from as choice
    (defaults to string.ascii_lowercase). Will return by default a string (which
    coincidentally happens to be a sequence of strings, albeit one-character ones).
    :param length: length of the word
    :param choice: a range of characters to use. By default it is string.ascii_lowercase
    :param join_fun: an argument to be called with a list of randomly picked values.
        Defaults to ''.join(args), so your T must be a string. If you're passing a
        different type, remember to alter this function because the default one expects
        strings!
    :return: a random word
    """
    return join_fun([random.choice(choice) for _ in range(length)])

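Two quick sketches, one with the string default and one with a non-string element type:

word = random_word(8)                                   # e.g. 'qhzxfmra'
bits = random_word(4, choice=[0, 1], join_fun=tuple)    # e.g. (1, 0, 0, 1)
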
def random_binary(length: int) -> bytes:
    """
    Return a random bytes string of given length.
    An attempt will be made to utilize /dev/random, if it exists
    :param length: length of string to generate
    """
    if os.path.exists('/dev/random'):
        with open('/dev/random', 'rb') as f_in:
            return f_in.read(length)
    else:
        return bytes([random.randint(0, 255) for _ in range(length)])

def shuffle_together(*args: tp.Sequence) -> tp.List[tp.List]:
    """
    args, being sequences of equal length, will be permuted in such a way
    that their indices will still correspond to each other.
    So given:
    >>> a = [1, 2, 3]
    >>> b = ['a', 'b', 'c']
    >>> c = shuffle_together(a, b)
    Might equal
    >>> c == [[3, 1, 2], ['c', 'a', 'b']]
    """
    try:
        indices = list(range(len(args[0])))
    except IndexError:
        return []  # empty array
    random.shuffle(indices)
    return [[arg[i] for i in indices] for arg in args]

def postfix_with(self, postfix: str) -> 'MetricDataCollection':
    """Postfix every child with given postfix and return self"""
    for child in self.values:
        child.postfix_with(postfix)
    return self

def start(self) -> None:
    """
    Order this timer task to be executed in interval seconds
    """
    execute_at = time.monotonic() + self.interval
    tbt = TimerBackgroundThread()
    with Monitor.acquire(tbt):
        tbt.timer_objects.put(execute_at, self)

def hashables_to_int(words: tp.List[K]) -> tp.Dict[K, int]:
    """
    Assign each hashable an integer, starting from 0, and return the resulting mapping
    :param words: a list of hashables
    :return: a dictionary keyed by hashable whose values are the assigned integers
    """
    dictionary = {}
    for i, word in enumerate(set(words)):
        dictionary[word] = i
    return dictionary

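For example (note that iteration order over a set is arbitrary, so the exact assignment may vary between runs):

mapping = hashables_to_int(['a', 'b', 'a', 'c'])
assert set(mapping) == {'a', 'b', 'c'}
assert sorted(mapping.values()) == [0, 1, 2]
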
def dump_memory_on(output: tp.TextIO = sys.stderr):
    """
    Dump statistics about current Python memory usage to target stream.
    Each Python object will be printed, along with a breakdown of most types and their
    total usage.
    Make sure you have enough memory to generate a breakdown. You can preallocate something
    at the start for example.
    .. warning:: This will return a size of 0 on PyPy
    :param output: output, default is stderr
    """
    top_scores = {}
    instances = {}
    for obj in gc.get_objects():
        typ = type(obj)
        try:
            size = sys.getsizeof(obj)
        except TypeError:
            size = 0
        if typ in top_scores:
            top_scores[typ] += size
            instances[typ] += 1
        else:
            top_scores[typ] = size
            instances[typ] = 1
        output.write('object %s type %s size %s bytes\n' % (repr(obj), typ, size))
    output.write('----------------------------------\n')
    output.write('Memory usage scores: \n')
    output.write('----------------------------------\n')
    items = list(top_scores.items())
    items.sort(key=lambda y: -y[1])
    for typ, tot_size in items:
        output.write('%s: %s bytes %s instances\n' % (typ, tot_size, instances[typ]))

def install_dump_memory_on(signal_number, output: tp.TextIO = sys.stderr):
    """
    Instruct Python to dump memory usage statistics onto output
    upon receiving given signal
    :param signal_number: number of the signal
    :param output: output
    """
    signal.signal(signal_number,
                  lambda sig_no, stack_frame: dump_memory_on(output))

def merge_dicts(v1: tp.Any, v2: tp.Any) -> tp.Any:
    """
    Try to merge two dicts/lists together. If a key collision is found, the value from v2
    will be taken.
    If the objects aren't dicts or lists, v2 will be returned.
    Lists will be concatenated, and dicts updated. v1 will be updated in-place!
    """
    if isinstance(v1, dict) and isinstance(v2, dict):
        for k in v2.keys():
            try:
                v1[k] = merge_dicts(v1[k], v2[k])
            except KeyError:
                v1[k] = v2[k]
        return v1
    if isinstance(v1, list) and isinstance(v2, list):
        v1.extend(v2)
        return v1
    return v2

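For instance, merging nested configuration fragments:

base = {'db': {'host': 'localhost', 'port': 5432}, 'tags': ['a']}
override = {'db': {'port': 6432}, 'tags': ['b']}
merged = merge_dicts(base, override)
assert merged == {'db': {'host': 'localhost', 'port': 6432}, 'tags': ['a', 'b']}
assert merged is base   # v1 was modified in place
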
def sync(self, timeout: tp.Optional[float] = None) -> None:
    """
    Wait until current tasks are complete.
    :param timeout: timeout to wait. None means wait indefinitely.
    :raises WouldWaitMore: if the timeout has expired
    """
    while self.get_queue_length() > 0:
        time.sleep(0.1)
    sync_threadpool(self.executor, max_wait=timeout)

def chain(*args) -> tp.Iterator:
    """
    Construct an iterator out of provided elements.
    If an element is an iterator or an iterable, it will be yielded-from. If it's not,
    it will just be yielded.
    A cast to iter() is used to determine iteratorness
    """
    for elem in args:
        try:
            yield from iter(elem)
        except TypeError:
            yield elem

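Unlike itertools.chain, scalars are allowed alongside iterables:

assert list(chain([1, 2], 3, 'ab')) == [1, 2, 3, 'a', 'b']
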
def exhaust(iterator: tp.Iterator) -> None:
    """
    Iterate till the end of the iterator, discarding values as they go
    :param iterator: iterator to exhaust
    """
    try:
        while True:
            next(iterator)
    except StopIteration:
        pass

def insert(self, x: float) -> None:
    """
    Add a value to the rolling average, discarding the oldest entry if the
    buffer size is exceeded
    :param x: sample to insert
    """
    if len(self.queue) >= self.n:
        prev_entry = self.queue.popleft()
        self.tot_sum -= prev_entry
    self.queue.append(x)
    self.tot_sum += x

def sleep(self) -> None:
    """
    Called when sleep is expected.
    """
    self.sleep_fun(self.counter)

Python | def wait_until_available(self, timeout: tp.Optional[float] = None) -> None:
"""
Waits until the service is available
:param timeout: maximum amount of seconds to wait. If waited more than that,
WouldWaitMore will be raised
:raises WouldWaitMore: waited for timeout and service still was not healthy
"""
with measure(timeout=timeout) as m:
while not m.timeouted:
tn = self.time_until_next_check()
if tn is None:
return
self.condition.wait(timeout=tn, dont_raise=True)
            raise WouldWaitMore('timeouted while waiting for service to become healthy')

def time_until_next_check(self) -> tp.Optional[float]:
"""Return the time until next health check, or None if the service is healthy"""
if self.unavailable_until is None:
return None
else:
t = time.monotonic()
if t > self.unavailable_until:
self.unavailable_until = None
return None
else:
                return self.unavailable_until - t

def launch(self, exceptions_on_failed: ExceptionList = Exception,
immediate: bool = False):
"""
A decorator to simplify writing doing-something loops. Basically, this:
>>> eb = ExponentialBackoff(start=2.5, limit=30)
>>> @eb.launch(TypeError)
>>> def do_action(*args, **kwargs):
>>> x_do_action(*args, **kwargs)
>>> do_action(5, test=True)
is equivalent to this:
>>> eb = ExponentialBackoff(start=2.5, limit=30)
>>> while True:
>>> try:
>>> x_do_action(5, test=True)
>>> except TypeError:
>>> eb.failed()
>>> eb.sleep()
The first example with :code:`immediate=True` could skip the last call to do_action,
as it will be executed automatically with zero parameters if immediate=True is set.
        :param exceptions_on_failed: a list of exceptions or a single exception
            whose raising will signal that fun has failed
:param immediate: immediately execute the function, but return the wrapped function
as a result of this decorator. The function will be called with zero arguments.
        :return: a function that, when called, will pass the exact same parameters
"""
def outer(fun):
@wraps(fun)
def inner(*args, **kwargs):
while True:
try:
r = fun(*args, **kwargs)
self.success()
return r
except exceptions_on_failed:
self.failed()
self.sleep()
if immediate:
inner()
return inner
        return outer

def load_source_from_list(obj: list) -> 'sources.MergingSource':
"""
Builds a MergingSource from dict-ed objects
"""
    return sources.MergingSource(*map(load_source_from_dict, obj))

def call_in_future(executor: Executor, function: tp.Callable,
*args, **kwargs) -> tp.Callable[[], 'Future']:
"""
Return a callable, whose calling will schedule function to be executed on a target Executor.
The returned function will accept any number of arguments and keyword arguments, but will simply
ignore them.
:param executor: executor to run at
:param function: callable to schedule
:param args: arguments to provide to the callable
:param kwargs: keyword arguments to provide to the callable
:return: a callable, calling which will schedule function to run at executor. Calling this callable
will return the Future for that function
"""
def inner(*my_args, **my_kwargs):
return wrap_if(executor.submit(function, *args, **kwargs))
    return inner

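# Usage sketch (illustrative; assumes the object returned by wrap_if exposes
# the regular Future interface): schedule pow(2, 10) lazily. Arguments passed
# to the returned callable are ignored by design.
from concurrent.futures import ThreadPoolExecutor

executor = ThreadPoolExecutor(max_workers=1)
schedule = call_in_future(executor, pow, 2, 10)
future = schedule('these', 'are', ignored=True)
assert future.result() == 1024
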
def transform_result(expr: str):
"""
A decorator transforming the result value of a function by a Python expression.
    The result is fed in as the local variable "x", while the function's arguments
    are available under their own names, e.g.:
>>> @transform_result('x*a')
>>> def square(a):
>>> return a
:param expr: Python string expression
"""
def outer(fun):
@wraps(fun)
def inner(*args, **kwargs):
a = fun(*args, **kwargs)
            local = get_arguments(fun, *args, **kwargs)
local['x'] = a
return eval(expr, globals(), local)
return inner
    return outer

def transform_arguments(**expressions: str):
"""
    A decorator transforming the arguments of a function prior to its execution.
The arguments are always bound as if they were available in the function.
The expressions always operate on "old" arguments
>>> @transform_arguments(a='a*a')
>>> def square(a):
>>> return a
:param expressions: Python strings that are meant to be evaluated
"""
def outer(fun):
@wraps(fun)
def inner(*args, **kwargs):
            old_args = get_arguments(fun, *args, **kwargs)
new_args = {}
for arg, arg_value in expressions.items():
new_args[arg] = eval(arg_value, globals(), old_args)
for new_arg in old_args:
if new_arg not in new_args:
new_args[new_arg] = old_args[new_arg]
return call_with_arguments(fun, new_args)
return inner
    return outer

def replace_argument_if(arg_name: str,
structure: tp.Union[dict, list, tuple, PredicateClass],
instance_of: tp.Optional[tp.Union[tp.Type, tp.Tuple[tp.Type, ...]]] = None,
predicate: tp.Optional[Predicate] = None,
):
"""
Examine arguments of the callable that will be decorated with this.
If argument arg_name is found to be an instance of instance_of, it will be replaced
    by the value built from structure.
:param arg_name: argument to replace
:param instance_of: type
:param predicate: alternative condition of replacement. If this is given,
predicate is called on the value of the argument and replacement is done
if it returns True
:param structure: a callable that takes original argument and returns new, or a
structure made of these
"""
def outer(fun):
@wraps(fun)
def inner(*args, **kwargs):
args_dict = get_arguments(fun, *args, **kwargs)
altered = False
if arg_name in args_dict:
v = args_dict[arg_name]
                if predicate is not None:
                    if predicate(v):
                        args_dict[arg_name] = build_structure(structure, v)
                        altered = True
elif isinstance(v, instance_of):
args_dict[arg_name] = build_structure(structure, v)
altered = True
if altered:
return call_with_arguments(fun, args_dict)
else:
return fun(*args, **kwargs)
return inner
    return outer

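# Usage sketch (illustrative; assumes build_structure invokes a bare callable
# with the original argument value): defensively copy any dict passed as 'data'.
@replace_argument_if('data', dict, instance_of=dict)
def consume(data):
    data.pop('a', None)
    return data

original = {'a': 1}
consume(original)
assert original == {'a': 1}  # the callee worked on a copy
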
def auto_adapt_to_methods(decorator):
"""
Allows you to use the same decorator on methods and functions,
hiding the self argument from the decorator.
Usage:
>>> @auto_adapt_to_methods
>>> def times_two(fun):
>>> def outer(a):
>>> return fun(a*2)
>>> return outer
>>> class Test:
>>> @times_two
>>> def twice(self, a):
>>> return a*2
>>> @times_two
>>> def twice(a):
>>> return a*2
>>> assert Test().twice(2) == 4
>>> assert twice(2) == 4
"""
def adapt(func):
return _MethodDecoratorAdaptor(decorator, func)
    return adapt

def attach_arguments(*args, **kwargs):
"""
Return a decorator that passes extra arguments to the function.
Example:
>>> @attach_arguments(2, label='value')
>>> def print_args(*args, **kwargs):
>>> print(args, kwargs)
>>> print_args(3, 4, key='value')
will print
>>> (3, 4, 2) {'key': 'value', 'label': 'value'}
Arguments given in attach_arguments will take precedence in case of key collisions.
"""
def outer(fun):
@wraps(fun)
def inner(*my_args, **my_kwargs):
my_kwargs.update(kwargs)
return fun(*my_args, *args, **my_kwargs)
return inner
    return outer

def copy_arguments(deep_copy: bool = False) -> tp.Callable:
"""
    Make every argument passed to this function be copied.
This way you can pass dictionaries to the function that would normally have modified them.
Use like this:
>>> @copy_arguments()
    >>> def alter_dict(dct: dict):
>>> return dct.pop('a')
Now you can use it like this:
>>> b = {'a': 5}
>>> assert alter_dict(b) == 5
>>> assert b == {'a': 5}
:param deep_copy: whether to use deepcopy instead of a plain copy
"""
f_copy = copy.deepcopy if deep_copy else copy.copy
def outer(fun):
@wraps(fun)
def inner(*args, **kwargs):
args = tuple(f_copy(arg) for arg in args)
kwargs = {name: f_copy(value) for name, value in kwargs.items()}
return fun(*args, **kwargs)
return inner
    return outer

def for_argument(*t_ops: ForArgumentArg, **t_kwops: ForArgumentArg):
"""
Calls a callable for each of the arguments. Pass None if you do not wish to process given
argument.
returns is a special keyword, a callable to process the result through
Use like:
>>> @for_argument(int, str, typed=bool, returns=int)
>>> def check(val1, val2, typed='True'):
>>> if typed:
>>> return val1 + int(val2)
for_argument can also accept strings as expressions:
>>> @for_argument('x*2')
>>> def accept_two(x):
>>> assert x == 2
>>> accept_two(1)
for_argument will also recognize default values:
>>> @for_argument(k=int)
    >>> def for_arg(k='5'):
>>> print(repr(k))
>>> for_arg()
will print `5` instead of `'5'`.
Note that for_argument is quite slow when it comes to having default values
in the function signature. Best to avoid it if you need speed.
If it detects that the function that you passed does not use default values,
it will use the faster implementation.
"""
new_t_ops = []
for op in t_ops:
if op == 'self':
new_t_ops.append(_NOP)
elif op is None:
new_t_ops.append(_NOP)
elif isinstance(op, str):
new_t_ops.append(source_to_function(op))
else:
new_t_ops.append(op)
t_ops = new_t_ops
returns = t_kwops.pop('returns', _NOP)
for key, value in t_kwops.items():
if value is None:
t_kwops[key] = _NOP
elif isinstance(value, str):
t_kwops[key] = source_to_function(value)
def outer(fun):
comparison = False
# Check whether this function has any default arguments
for param in inspect.signature(fun).parameters.values():
try:
if Parameter.empty != param.default:
comparison = True
break
except (AttributeError, TypeError):
comparison = True
break
if comparison:
@wraps(fun)
def inner(*args, **kwargs):
dict_operations = _get_arguments(fun, True, *t_ops, **t_kwops)
dict_values = get_arguments(fun, *args, **kwargs)
arguments = {}
for arg_name in dict_values:
v = dict_values[arg_name]
if arg_name in dict_operations:
f = dict_operations[arg_name]
if callable(f) and f is not None:
v = f(v)
arguments[arg_name] = v
return returns(call_with_arguments(fun, arguments))
else:
@wraps(fun)
def inner(*args, **kwargs):
# add extra 'None' argument if unbound method
assert len(args) >= len(t_ops)
a = fun(*((_NOP if op2 is None else op2)(arg) for arg, op2 in
itertools.zip_longest(args, t_ops, fillvalue=None)),
**{k: t_kwops.get(k, _NOP)(v) for k, v in kwargs.items()})
return returns(a)
return inner
    return outer

def execute_if_attribute_none(attribute: str):
"""
Decorator for instancemethods.
This will execute the function only if provided attribute is None.
Otherwise it will return a None
:param attribute: name of the attribute to check
"""
def outer(fun):
@wraps(fun)
def inner(self, *args, **kwargs):
if getattr(self, attribute) is None:
return fun(self, *args, **kwargs)
return inner
    return outer

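# Usage sketch (illustrative): load() runs only while self.cache is None.
class Loader:
    def __init__(self):
        self.cache = None

    @execute_if_attribute_none('cache')
    def load(self):
        self.cache = 'loaded'

loader = Loader()
loader.load()                   # executes, since cache was None
assert loader.cache == 'loaded'
assert loader.load() is None    # now a no-op
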
def execute_if_attribute_not_none(attribute: str):
"""
Decorator for instancemethods.
This will execute the function only if provided attribute is not None.
Otherwise it will return a None
:param attribute: name of the attribute to check
"""
def outer(fun):
@wraps(fun)
def inner(self, *args, **kwargs):
if getattr(self, attribute) is not None:
return fun(self, *args, **kwargs)
return inner
    return outer

def cached_property(prop_name: str, assume_not_loaded = None):
"""
A decorator to use to create cached properties.
    Your job is only to write the value returner. If the value is
currently assume_not_loaded (None by default) your property
method will be called. Otherwise it will be served from
cached attribute, whose value you provide as parameter.
Use as follows:
>>> class Example:
>>> def __init__(self):
>>> self._a = None
>>> @property
>>> @cached_property('_a')
>>> def a(self) -> str:
>>> return 'abc'
>>> a = Example()
>>> assert a.a == 'abc'
>>> assert a._a == 'abc'
:param prop_name: Name of property to store the value in
:param assume_not_loaded: Value if currently the attribute is
equal to this, it is assumed to not have been loaded
"""
def outer(fun):
@wraps(fun)
def inner(self, *args, **kwargs):
attr_v = getattr(self, prop_name)
if attr_v == assume_not_loaded:
attr_v = fun(self, *args, **kwargs)
setattr(self, prop_name, attr_v)
return attr_v
else:
return attr_v
return inner
    return outer

def daemonize(exit_via: tp.Callable = sys.exit,
redirect_std_to_devnull: bool = True,
uid: tp.Optional[int] = None,
gid: tp.Optional[int] = None):
"""
Make this process into a daemon.
This entails:
- umask 0
- forks twice
- be the child of init
- becomes session leader
- changes root directory to /
- closes stdin, stdout, stderr
- (option) redirects stdin, stdout, stderr to /dev/null
Refer - "Advanced Programming in the UNIX Environment" 13.3
:param exit_via: callable used to terminate process
:param redirect_std_to_devnull: whether to redirect stdin, stdout and
stderr to /dev/null
:param uid: User to set (via seteuid). Default - this won't be done. You
can pass either user name as string or UID.
:param gid: Same as UID, but for groups. These will be resolved too.
:raises KeyError: uid/gid was passed as string, but getpwnam() failed
:raises OSError: platform is Windows
"""
if sys.platform.startswith('win'):
raise OSError('Cannot call daemonize on Windows!')
_double_fork(exit_via=exit_via)
_close_descriptors()
if redirect_std_to_devnull:
_redirect_descriptors_to_null()
_parse_ug(uid, pwd, 'pw_uid', os.seteuid)
    _parse_ug(gid, grp, 'gr_gid', os.setegid)

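# Usage sketch (illustrative; POSIX only). run_forever() stands in for a
# hypothetical long-running entry point; uid/gid may be names or numeric IDs.
if __name__ == '__main__':
    daemonize(uid='nobody', gid='nogroup')
    run_forever()  # hypothetical
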
def sleep(y: tp.Union[str, float], abort_on_interrupt: bool = False) -> bool:
"""
Sleep for given interval.
    This won't be interrupted by KeyboardInterrupt, and will always sleep for the given time interval.
    This will return at once if y is negative.
:param y: the interval to wait in seconds, can be also a time string
:param abort_on_interrupt: whether to abort at once when KeyboardInterrupt is seen
:returns: whether the function has completed its sleep naturally. False is seen on
aborts thanks to KeyboardInterrupt only if abort_on_interrupt is True
"""
y = parse_time_string(y)
if y < 0:
        return True
with measure() as measurement:
while measurement() < y:
try:
time.sleep(y - measurement())
except KeyboardInterrupt:
if abort_on_interrupt:
return False
    return True

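# Usage sketch (illustrative; assumes parse_time_string accepts the 's'
# suffix): Ctrl+C does not cut the sleep short unless explicitly requested.
assert sleep(0.1) is True
finished = sleep('2s', abort_on_interrupt=True)  # False if interrupted
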
def choose(filter_fun: Predicate[T], iterable: Iteratable,
check_multiple: bool = False) -> T:
"""
Return a single value that exists in given iterable.
Essentially the same as:
>>> next(iter(filter(filter_fun, iterable)))
but raises a different exception if nothing matches (and if there are multiple matches
and check_multiple is True).
If check_multiple is True this guarantees to exhaust the generator (if passed).
:param filter_fun: function that returns bool on the single value
:param iterable: iterable to examine
:param check_multiple: if True, this will check if there are multiple entries matching
filter_fun, and will raise ValueError if so. If True, this will exhaust the iterator.
If left at default, False, this may not exhaust the iterator.
:return: single element in the iterable that matches given input
:raises ValueError: on multiple elements matching (if check_multiple), or none at all
"""
elem_candidate = None
found = False
for elem in iterable:
if filter_fun(elem):
if not check_multiple:
return elem
if found:
raise ValueError(
'Multiple values (%s, %s) seen' % (repr(elem_candidate), repr(elem)))
elem_candidate = elem
found = True
if not found:
raise ValueError('No elements matching given filter seen')
    return elem_candidate

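# Usage sketch (illustrative): pick the single even number.
assert choose(lambda x: x % 2 == 0, [1, 3, 4, 5]) == 4
# With check_multiple=True a second match raises ValueError instead:
try:
    choose(lambda x: x % 2 == 0, [2, 4], check_multiple=True)
except ValueError:
    pass
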
def remove(self, other: T) -> None:
"""
Remove an element from the list
:param other: element to remove
:raises ValueError: element not in list
"""
index = self.items.index(other) # throws ValueError
del self.items[index]
        del self.keys[index]

def add(self, other: T) -> int:
"""
Add an element. Returns the index at which it was inserted.
:param other: element to insert
:return: index that the entry is available now at
"""
key_value = self.key(other)
for index in range(len(self.keys)):
if key_value <= self.keys[index]:
break
else:
index = len(self.keys)
self.items.insert(index, other)
self.keys.insert(index, key_value)
        return index

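# Usage sketch (illustrative; assumes add()/remove() above belong to a
# key-sorted list class, called SortedList here with a hypothetical
# constructor signature): entries are kept ordered by key(item).
sl = SortedList(key=len)
assert sl.add('bb') == 0
assert sl.add('a') == 0      # a shorter string sorts in front
assert sl.add('cccc') == 2
sl.remove('bb')
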
def SingletonWithRegardsTo(num_args: int):
"""
    Make a memoized singleton depending on the arguments.
A dictionary is made (first N arguments => class instance) and such is returned.
Please take care to ensure that a tuple made out of first num_args can be used as a dictionary
key (ie. is both hashable and __eq__-able).
Usage:
>>> @SingletonWithRegardsTo(num_args=1)
>>> class MyClass:
>>> def __init__(self, device_id: str):
>>> ...
>>> a = MyClass('dev1')
>>> b = MyClass('dev2')
>>> c = MyClass('dev1')
>>> assert a is c
>>> assert b is not c
"""
def inner(cls):
cls.__new_old__ = cls.__new__
@wraps(cls.__new__)
def singleton_new(cls, *args, **kw):
it = cls.__dict__.get('__it__')
if it is None:
it = cls.__it__ = {}
key = args[:num_args]
if key in it:
return it[key]
inst = it[key] = cls.__new_old__(cls)
inst.__init_old__(*args, **kw)
return inst
cls.__new__ = singleton_new
cls.__init_old__ = cls.__init__
cls.__init__ = wraps(cls.__init__)(
lambda self, *args, **kwargs: object.__init__(self))
return cls
    return inner

def delete_singleton_for(x, *args) -> None:
"""
Delete singleton for given arguments in a class decorated with SingletonWithRegardsTo
:param x: class decorated with SingletonWithRegardsTo
:param args: arguments used in the constructor
"""
    del x.__it__[args]

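# Usage sketch (illustrative, reusing MyClass from the example above): evict
# the cached instance so that the next construction builds a fresh object.
a = MyClass('dev1')
delete_singleton_for(MyClass, 'dev1')
b = MyClass('dev1')
assert a is not b
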
def descriptor_from_dict(dct: dict) -> Descriptor:
"""
Giving a Python dictionary-defined schema of the configuration, return a Descriptor-based one
    :param dct: something like
    {
        "a": "int",
        "b": "str",
        "c": {
            "type": "int",
            "optional": True,
            "default": 5
        },
        "d": {
            "a": "int",
            "b": "str"
        }
    }
although you can pass "int", "float" and "str" without enclosing quotes, that will work too
:return: a Descriptor-based schema
"""
fields = []
for key, value in dct.items():
descriptor = _get_descriptor_for(key, value)
fields.append(descriptor)
    return Dict(fields)

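# Usage sketch (illustrative): the docstring's schema, written out.
schema = descriptor_from_dict({
    'host': 'str',
    'port': {'type': 'int', 'optional': True, 'default': 8080},
    'auth': {'user': 'str', 'password': 'str'},
})
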
def frame_from_traceback(tb: types.TracebackType) -> types.FrameType:
"""
Extract the bottom stack frame from a traceback
:param tb: traceback to extract the frame
:return: bottom stack frame
"""
while tb.tb_next:
tb = tb.tb_next
    return tb.tb_frame

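# Usage sketch (illustrative): grab the frame in which the exception was raised.
import sys

try:
    1 / 0
except ZeroDivisionError:
    frame = frame_from_traceback(sys.exc_info()[2])
    print(frame.f_code.co_name, frame.f_lineno)
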
@classmethod
def from_pickle(cls, pick: tp.Union[io.BytesIO, bytes]) -> 'Traceback':
"""
Load a traceback from a pickle
:param pick: either bytes or a BytesIO to load it from
:return: previously serialized Traceback
:raises ValueError: unserialized object is not a Traceback!
"""
if isinstance(pick, io.BytesIO):
a = pickle.load(pick)
else:
a = pickle.loads(pick)
if not isinstance(a, Traceback):
raise ValueError('%s is not a traceback!' % (type(a),))
        return a

def pretty_format(self) -> str:
"""
Return a multi-line, pretty-printed representation of all exception
data.
:return: text
"""
bio = io.StringIO()
self.pretty_print(bio)
        return bio.getvalue()

def shutdown(self, wait=True):
"""Clean-up the resources associated with the Executor.
It is safe to call this method several times. Otherwise, no other
methods can be called after this one.
Args:
wait: If True then shutdown will not return until all running
futures have finished executing and the resources used by the
executor have been reclaimed.
"""
        return self.executor.shutdown(wait=wait)

def call_in_separate_thread(*t_args, no_thread_attribute: bool = False,
delay: float = 0, **t_kwargs):
"""
Decorator to mark given routine as callable in a separate thread.
The decorated routine will return a Future that is waitable to get the result
(or the exception) of the function.
    The returned Future will have an extra attribute, "thread", which is the
    thread that was spawned for it. The returned thread will in turn
    have an attribute "future" that links back to this future.
.. warning:: calling this will cause reference loops, so don't use it
if you've disabled Python GC, or in that case enable
the no_thread_attribute argument
The arguments given here will be passed to thread's constructor, so use like:
    :param no_thread_attribute: if set to True, the future won't carry a link to
        its thread. The thread will still have the "future" attribute.
:param delay: seconds to wait before launching function
>>> @call_in_separate_thread(daemon=True)
>>> def handle_messages():
>>> while True:
>>> ...
"""
def outer(fun):
@wraps(fun)
def inner(*args, **kwargs) -> Future:
class MyThread(threading.Thread):
def __init__(self):
self.future = Future()
if not no_thread_attribute:
self.future.thread = self
super().__init__(*t_args, **t_kwargs)
def run(self):
if not self.future.set_running_or_notify_cancel():
return
if delay:
time.sleep(delay)
try:
res = fun(*args, **kwargs)
self.future.set_result(res)
except Exception as e:
self.future.set_exception(e)
t = MyThread()
t.start()
return t.future
return inner
    return outer

def wait(self, timeout: tp.Optional[tp.Union[str, float]] = None,
dont_raise: bool = False) -> None:
"""
Wait for condition to become true.
:param timeout: timeout to wait. None is default and means infinity. Can be also a
time string.
:param dont_raise: if True, then WouldWaitMore won't be raised
:raises ResourceLocked: unable to acquire the underlying lock within specified timeout.
:raises WouldWaitMore: wait's timeout has expired
"""
from satella.time.parse import parse_time_string
if timeout is not None:
timeout = parse_time_string(timeout)
if timeout < 0:
timeout = 0
from satella.time.measure import measure
with measure(timeout=timeout) as measurement:
if timeout is None:
self.acquire()
else:
if not self.acquire(timeout=measurement.time_remaining):
raise ResourceLocked('internal lock locked')
try:
if timeout is None:
super().wait()
else:
if not super().wait(timeout=measurement.time_remaining):
if not dont_raise:
raise WouldWaitMore('wait was not notified')
finally:
                self.release()

def notify_all(self) -> None:
"""
Notify all threads waiting on this Condition
"""
with self._lock:
            super().notify_all()

def notify(self, n: int = 1) -> None:
"""
Notify n threads waiting on this Condition
:param n: amount of threads to notify
"""
with self._lock:
            super().notify(n=n)

def start(self) -> 'SingleStartThread':
"""
        No-op when called a second or subsequent time. The first time, it starts the thread.
:return: self
"""
        if self.__started:
            return self
self.__started = True
super().start()
        return self

def join(self, timeout=None) -> None:
"""
Wait for the pseudo-thread. Sets running to False if thread was terminated.
:param timeout: maximum number of seconds to wait for termination
:raises WouldWaitMore: thread did not terminate within that many seconds
:raises RuntimeError: tried to join() before start()!
"""
if not self.terminated and not self.started:
raise RuntimeError('Cannot join on a thread that has not started!')
if not self.terminated:
started_elapsing = time.monotonic()
            while not self.terminated and (timeout is None or time.monotonic() - started_elapsing < timeout):
time.sleep(1)
if not self.terminated:
                raise WouldWaitMore('thread failed to terminate')

def loop(self) -> None:
"""
        Run one iteration of the loop. Meant to be overridden. You do not need to override it
        if you decide to override run(), though.
This should block for as long as a single check will take, as termination checks take place
between calls.
Note that if it throws one of the exceptions given in `terminate_on` this thread will
terminate cleanly, whereas if it throws something else, the thread will be terminated with
        a traceback.
        """

def start(self) -> 'TerminableThread':
"""
Start the execution of this thread
:return: this thread
"""
super().start()
        return self

def run(self) -> None:
"""
Calls self.loop() indefinitely, until terminating condition is met
"""
try:
try:
self.prepare()
except Exception as e:
if self._terminate_on is not None:
if isinstance(e, self._terminate_on):
self.terminate()
return
raise
while not self._terminating:
try:
self.loop()
except Exception as e:
if self._terminate_on is not None:
if isinstance(e, self._terminate_on):
self.terminate()
return
raise
except SystemExit:
pass
finally:
            self.cleanup()

def safe_wait_condition(self, condition: Condition, timeout: tp.Union[str, float],
wake_up_each: tp.Union[str, float] = 2,
dont_raise: bool = False) -> None:
"""
Wait for a condition, checking periodically if the thread is being terminated.
To be invoked only by the thread that's represented by the object!
:param condition: condition to wait on
:param timeout: maximum time to wait in seconds. Can be also a time string
:param wake_up_each: amount of seconds to wake up each to check for termination.
Can be also a time string.
:param dont_raise: if set to True, :class:`~satella.exceptions.WouldWaitMore` will not be
raised
:raises WouldWaitMore: timeout has passed and Condition has not happened
:raises SystemExit: thread is terminating
"""
from satella.time.parse import parse_time_string
timeout = parse_time_string(timeout)
wake_up_each = parse_time_string(wake_up_each)
t = 0
while t < timeout:
if self._terminating:
raise SystemExit()
ttw = min(timeout - t, wake_up_each)
t += ttw
try:
condition.wait(ttw)
return
except WouldWaitMore:
pass
if not dont_raise:
            raise WouldWaitMore()

def safe_sleep(self, interval: float, wake_up_each: float = 2) -> None:
"""
        Sleep for interval, waking up each wake_up_each seconds to check whether the thread
        is terminating; finish earlier if it is.
This will do *the right thing* when passed a negative interval.
To be invoked only by the thread that's represented by the object!
:param interval: Time to sleep in total
:param wake_up_each: Amount of seconds to wake up each
:raises SystemExit: thread is terminating
"""
t = 0
while t < interval and not self._terminating:
remaining_to_sleep = min(interval - t, wake_up_each)
time.sleep(remaining_to_sleep)
t += remaining_to_sleep
if self._terminating:
            raise SystemExit()

def terminate(self, force: bool = False) -> 'TerminableThread':
"""
Signal this thread to terminate.
Forcing, if requested, will be done by injecting a SystemExit exception into target
thread, so the thread must acquire GIL. For example, following would not be interruptable:
>>> time.sleep(1000000)
Note that calling force=True on PyPy won't work, and NotImplementedError will be raised
instead.
:param force: Whether to force a quit
:return: self
:raises RuntimeError: when something goes wrong with the underlying Python machinery
:raises NotImplementedError: force=True was used on PyPy
"""
self._terminating = True
if force:
if platform.python_implementation() == 'PyPy':
raise NotImplementedError('force=True was made on PyPy')
ret = ctypes.pythonapi.PyThreadState_SetAsyncExc(ctypes.c_long(self._ident),
ctypes.py_object(SystemExit))
if ret == 0:
ctypes.pythonapi.PyThreadState_SetAsyncExc(ctypes.c_long(self._ident), 0)
raise RuntimeError('Zero threads killed!')
elif ret > 1:
ctypes.pythonapi.PyThreadState_SetAsyncExc(ctypes.c_long(self._ident), 0)
raise RuntimeError('Multiple threads killed!')
        return self

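# Usage sketch (illustrative): a typical lifecycle of a TerminableThread
# subclass; loop() is called repeatedly until terminate() is invoked.
class Worker(TerminableThread):
    def loop(self) -> None:
        self.safe_sleep(5)  # wakes up early when terminating

worker = Worker().start()
# ... later, from another thread:
worker.terminate().join()
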
def on_overrun(self, time_taken: float) -> None:
"""
Called when executing .loop() takes more than x seconds.
Called each cycle.
You are meant to override this, as by default this does nothing.
        :param time_taken: how long did calling .loop() take
        """

def whereis(name: str) -> tp.Iterator[str]:
"""
Looking in PATH return a sequence of executable files having provided name.
Additionally, on Windows, it will use PATHEXT.
.. note:: on Windows name is supposed to be without extension!
:param name: name of the executable to search for
:return: an iterator of absolute paths to given executable
"""
if sys.platform.startswith('win'):
paths_to_look_in = os.environ.get('PATH', '').split(';')
name = name.upper()
available_extensions = os.environ.get('PATHEXT', '.com;.bat;.exe').upper().split(';')
else:
paths_to_look_in = os.environ.get('PATH', '').split(':')
available_extensions = '',
for directory in paths_to_look_in:
with silence_excs(FileNotFoundError):
for file in os.listdir(directory):
path = os.path.join(directory, file)
                if not sys.platform.startswith('win'):  # a POSIX-specific check
                    if 'x' not in stat.filemode(os.stat(path).st_mode):
                        continue  # not executable
                else:
                    file = file.upper()  # paths are not case-sensitive on Windows
for extension in available_extensions:
if file == '%s%s' % (name, extension):
                        yield path

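# Usage sketch (illustrative): take the first matching executable, if any.
python_path = next(whereis('python'), None)
if python_path is not None:
    print('Found an interpreter at', python_path)
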
def terminate(self, force: bool = False) -> 'PrometheusHTTPExporterThread':
"""
Order this thread to terminate and return self.
You will need to .join() on this thread to ensure that it has quit.
:param force: whether to terminate this thread by injecting an exception into it
"""
self.httpd.shutdown()
        return super().terminate(force=force)

def metric_data_collection_to_prometheus(mdc: MetricDataCollection) -> str:
"""
Render the data in the form understandable by Prometheus.
Values marked as internal will be skipped.
:param mdc: Metric data collection to render
:return: a string output to present to Prometheus
"""
if not mdc.values:
return '\n'
obj = RendererObject()
for value in mdc.values:
if value.internal:
continue
obj.render(value)
return obj.getvalue() | def metric_data_collection_to_prometheus(mdc: MetricDataCollection) -> str:
"""
Render the data in the form understandable by Prometheus.
Values marked as internal will be skipped.
:param mdc: Metric data collection to render
:param tree: MetricDataCollection returned by the root metric (or any metric for that instance).
:return: a string output to present to Prometheus
"""
if not mdc.values:
return '\n'
obj = RendererObject()
for value in mdc.values:
if value.internal:
continue
obj.render(value)
return obj.getvalue() |
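
A sketch of calling it (how the MetricDataCollection is obtained is assumed here, not specified by this document):

# hypothetical: `mdc` is a MetricDataCollection gathered from your metrics root
payload = metric_data_collection_to_prometheus(mdc)
# serve `payload` as a plain-text HTTP body; an empty collection renders as '\n'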
def put(self, key: K, value: V) -> None:
    """
    Put given value to storage at given key.

    This may block for a while.

    :param key: key to store
    :param value: value to store
    """
"""
Put given value to storage at given key.
This may block for a while.
:param key: key to store
:param value: value to store
""" |
def iterate(self, starting_key: tp.Optional[K]) -> tp.Iterator[KVTuple]:
    """
    Return an iterator iterating from the provided starting key to the end
    of the values, as read from the database.

    This may block for a while.

    This iterator will be closed once it is no longer necessary.

    :param starting_key: starting key (included), or None to iterate from the start
    :return: an iterator from the provided key (included) to the end of the range
    """
"""
Return an iterator iterating from provided starting key to the end
of the values, as read from the database.
This may block for a while.
This iterator will be closed upon no longer being necessary.
:param starting_key: starting key, included, or None for iterate from the start
:return: an iterator from provided key (included) to the end of the range
""" |
def on_change_start_entry(self, start_entry: tp.Optional[K]) -> None:
    """
    Called by SyncableDroppable when the start entry (the earliest entry encountered both
    in the DB and in memory) changes.

    :param start_entry: new value of the start entry, or None if there are no entries at all
    """
"""
Called by SyncableDroppable when starting entry (earliest entry encountered both in the DB
and is memory) is changed.
:param start_entry: new value of start entry or None if there are no entries at all
""" |
def on_change_stop_entry(self, stop_entry: tp.Optional[K]) -> None:
    """
    Called by SyncableDroppable when the stop entry (the latest entry encountered both
    in the DB and in memory) changes.

    :param stop_entry: new value of the stop entry, or None if there are no entries at all
    """
"""
Called by SyncableDroppable when stopping entry (earliest entry encountered both in the DB
and is memory) is changed.
:param stop_entry: new value of stop entry or None if there are no entries at all
""" |
def on_change_synced_up_to(self, synced_up_to: tp.Optional[K]) -> None:
    """
    Called by SyncableDroppable when synced-up-to (the latest timestamp that has been
    synced) changes.

    :param synced_up_to: new value of synced up to
    """
"""
Called by SyncableDroppable when synced up to (earliest timestamp synced) is changed.
:param synced_up_to: new value of synced up to
""" |
def delete(self, key: K) -> None:
    """
    Called by SyncableDroppable when the target key needs to be removed.

    :param key: key to remove
    """
"""
Called by SyncableDroppable when there's a need to remove target key
:param key: key to remove
""" |
def sync_to_db(self) -> None:
    """
    Make sure that everything that's in memory is also stored in the DB.
    """
    for key, value in self.data_in_memory:
        self.db_storage.put(key, value)
    self.data_in_memory = []
"""
Make sure that everything's that in memory in also stored in the DB.
"""
for key, value in self.data_in_memory:
self.db_storage.put(key, value)
self.data_in_memory = [] |
def _cleanup_the_db(self) -> bool:
    """
    Remove entries from the DB that are older than span_to_keep_in_db.

    :return: whether all entries in the DB have been trashed
    """
    if self.start_entry is None:
        return False
    cutoff_span = self.stop_entry - self.span_to_keep_in_db
    iterator = self.db_storage.iterate(self.start_entry)
    try:
        for key, value in iterator:
            if key < cutoff_span:
                self.db_storage.delete(key)
            else:
                self.start_entry = key
                break
        else:
            # the loop ran to completion without a break: we have wiped the entire DB
            if self.data_in_memory:
                self.start_entry = self.data_in_memory[0][0]
                self.db_storage.on_change_start_entry(self.start_entry)
            else:
                # we no longer have ANY data
                self.start_entry = self.stop_entry = None
            return True
    finally:
        try_close(iterator)
    return False
"""
Remove entries from the DB that are older than span_to_keep_in_db
:return: if all entries in the DB have been trashed
"""
if self.start_entry is None:
return False
cutoff_span = self.stop_entry - self.span_to_keep_in_db
iterator = self.db_storage.iterate(self.start_entry)
try:
for key, value in iterator:
if key < cutoff_span:
self.db_storage.delete(key)
else:
self.start_entry = key
break
else:
# This means that we have wiped entire DB
if self.data_in_memory:
self.start_entry = self.data_in_memory[0][0]
self.db_storage.on_change_start_entry(self.start_entry)
else:
# We no longer have ANY data
self.start_entry = self.stop_entry = None
return True
finally:
try_close(iterator)
return False |
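
The for/else above carries real logic: the else suite runs only when the loop was never broken out of, i.e. every key fell below the cutoff. A standalone sketch of the same pattern:

def drop_older_than(keys, cutoff) -> bool:
    """Return True when every key was below the cutoff (all dropped)."""
    for key in keys:
        if key >= cutoff:
            break            # a survivor exists; the else suite is skipped
    else:
        return True          # loop exhausted without break: everything dropped
    return False

assert drop_older_than([1, 2, 3], 10) is True
assert drop_older_than([1, 2, 30], 10) is False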
def cleanup_keep_in_memory(self) -> None:
    """
    Eject values from memory that should reside only in the DB onto the DB.
    """
    first_key = self.first_key_in_memory
    if first_key is None:
        return
    cutoff_point = self.stop_entry - self.span_to_keep_in_memory
    for index, row in enumerate(self.data_in_memory):
        ts, value = row
        if ts > cutoff_point:
            # everything before this index is too old to keep in memory
            for ts, value in self.data_in_memory[:index]:
                self.db_storage.put(ts, value)
            del self.data_in_memory[:index]
            break
    else:
        # no entry is recent enough: flush everything to the DB
        self.sync_to_db()
        self.data_in_memory = []
        return
"""
Eject values from memory that should reside in the DB onto the DB
"""
first_key = self.first_key_in_memory
if first_key is None:
return
cutoff_point = self.stop_entry - self.span_to_keep_in_memory
for index, row in enumerate(self.data_in_memory):
ts, value = row
if ts > cutoff_point:
for ts, value in self.data_in_memory[:index]:
self.db_storage.put(ts, value)
del self.data_in_memory[:index]
break
else:
self.sync_to_db()
self.data_in_memory = []
return |
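
Putting the two cleanup routines together: SyncableDroppable keeps a short recent window in memory and a longer window in the DB. A toy walk-through with invented spans (the numbers are illustrations only):

# hypothetical retention spans: 60 s in memory, 3600 s in the DB
span_in_memory, span_in_db = 60, 3600
stop_entry = 10_000                            # timestamp of the newest entry

memory_cutoff = stop_entry - span_in_memory    # 9940: older rows get ejected to the DB
db_cutoff = stop_entry - span_in_db            # 6400: older rows get deleted outright

for ts in (5000, 7000, 9950):
    if ts < db_cutoff:
        print(ts, '-> deleted from the DB')
    elif ts < memory_cutoff:
        print(ts, '-> resides only in the DB')
    else:
        print(ts, '-> kept in memory')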