index: int64 (values 0 to 731k)
package: string (lengths 2 to 98)
name: string (lengths 1 to 76)
docstring: string (lengths 0 to 281k)
code: string (lengths 4 to 1.07M)
signature: string (lengths 2 to 42.8k)
16,147
synchronicity.synchronizer
__init__
null
def __init__( self, multiwrap_warning=False, async_leakage_warning=True, ): self._multiwrap_warning = multiwrap_warning self._async_leakage_warning = async_leakage_warning self._loop = None self._loop_creation_lock = threading.Lock() self._thread = None self._stopping = None if platform.system() == "Windows": # default event loop policy on windows spits out errors when # closing the event loop, so use WindowsSelectorEventLoopPolicy instead # https://stackoverflow.com/questions/45600579/asyncio-event-loop-is-closed-when-getting-loop asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) # Special attribute we use to go from wrapped <-> original self._wrapped_attr = "_sync_wrapped_%d" % id(self) self._original_attr = "_sync_original_%d" % id(self) # Special attribute to mark something as non-wrappable self._nowrap_attr = "_sync_nonwrap_%d" % id(self) self._input_translation_attr = "_sync_input_translation_%d" % id(self) self._output_translation_attr = "_sync_output_translation_%d" % id(self) # Prep a synchronized context manager in case one is returned and needs translation self._ctx_mgr_cls = contextlib._AsyncGeneratorContextManager self.create_async(self._ctx_mgr_cls) self.create_blocking(self._ctx_mgr_cls) atexit.register(self._close_loop)
(self, multiwrap_warning=False, async_leakage_warning=True)
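For context, a minimal usage sketch of the public entry points this constructor backs, assuming the standard `synchronicity` layout with `Synchronizer` exported at the package top level; the `fetch` coroutine is a hypothetical example, not part of the library.

```python
import asyncio

from synchronicity import Synchronizer

synchronizer = Synchronizer()


async def fetch(x):
    await asyncio.sleep(0.01)
    return x * 2


# create_blocking wraps the coroutine function so that calling the wrapper
# runs the coroutine on the synchronizer's background event loop and blocks
# on the result.
blocking_fetch = synchronizer.create_blocking(fetch)
assert blocking_fetch(21) == 42
```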
16,148
synchronicity.synchronizer
__setstate__
null
def __setstate__(self, d): for attr in self._PICKLE_ATTRS: setattr(self, attr, d[attr])
(self, d)
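A standalone sketch of the `__getstate__`/`__setstate__` pattern this method participates in; the `Point` class and its `_PICKLE_ATTRS` whitelist are hypothetical stand-ins for the synchronizer's own pickle support.

```python
import pickle


class Point:
    _PICKLE_ATTRS = ("x", "y")

    def __init__(self, x, y):
        self.x, self.y = x, y

    def __getstate__(self):
        # Serialize only the whitelisted attributes.
        return {attr: getattr(self, attr) for attr in self._PICKLE_ATTRS}

    def __setstate__(self, d):
        # Restore exactly those attributes on unpickling.
        for attr in self._PICKLE_ATTRS:
            setattr(self, attr, d[attr])


p = pickle.loads(pickle.dumps(Point(1, 2)))
assert (p.x, p.y) == (1, 2)
```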
16,149
synchronicity.synchronizer
_close_loop
null
def _close_loop(self): if self._thread is not None: if not self._loop.is_closed(): # This also serves the purpose of waking up an idle loop self._loop.call_soon_threadsafe(self._stopping.set) self._thread.join() self._thread = None
(self)
16,150
synchronicity.synchronizer
_get_loop
null
def _get_loop(self, start=False): if self._loop is None and start: return self._start_loop() return self._loop
(self, start=False)
16,151
synchronicity.synchronizer
_get_running_loop
null
def _get_running_loop(self): # TODO: delete this method try: return asyncio.get_running_loop() except RuntimeError: return
(self)
16,152
synchronicity.synchronizer
_is_inside_loop
null
def _is_inside_loop(self): loop = self._get_loop() if loop is None: return False if threading.current_thread() != self._thread: # gevent does something bad that causes asyncio.get_running_loop() to return self._loop return False current_loop = self._get_running_loop() return loop == current_loop
(self)
16,153
synchronicity.synchronizer
_recurse_map
null
def _recurse_map(self, mapper, obj): if type(obj) == list: # noqa: E721 return list(self._recurse_map(mapper, item) for item in obj) elif type(obj) == tuple: # noqa: E721 return tuple(self._recurse_map(mapper, item) for item in obj) elif type(obj) == dict: # noqa: E721 return dict((key, self._recurse_map(mapper, item)) for key, item in obj.items()) else: return mapper(obj)
(self, mapper, obj)
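A self-contained restatement of the recursion above, showing how container types are preserved while only the leaves are mapped; `recurse_map` here is a hypothetical free function, not part of the library API.

```python
def recurse_map(mapper, obj):
    # Recursively apply `mapper` to the leaves of nested lists, tuples and
    # dicts, rebuilding each container with the same type along the way.
    if type(obj) is list:
        return [recurse_map(mapper, item) for item in obj]
    if type(obj) is tuple:
        return tuple(recurse_map(mapper, item) for item in obj)
    if type(obj) is dict:
        return {key: recurse_map(mapper, item) for key, item in obj.items()}
    return mapper(obj)


assert recurse_map(str, {"a": [1, (2, 3)]}) == {"a": ["1", ("2", "3")]}
```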
16,154
synchronicity.synchronizer
_run_function_sync_future
null
def _run_function_sync_future(self, coro, interface, original_func): coro = wrap_coro_exception(coro) coro = self._wrap_check_async_leakage(coro) loop = self._get_loop(start=True) # For futures, we unwrap the result at this point, not in f_wrapped coro = unwrap_coro_exception(coro) coro = self._translate_coro_out(coro, interface, original_func) return asyncio.run_coroutine_threadsafe(coro, loop)
(self, coro, interface, original_func)
16,155
synchronicity.synchronizer
_run_function_sync
null
def _run_function_sync(self, coro, interface, original_func): if self._is_inside_loop(): raise Exception("Deadlock detected: calling a sync function from the synchronizer loop") coro = wrap_coro_exception(coro) coro = self._wrap_check_async_leakage(coro) loop = self._get_loop(start=True) fut = asyncio.run_coroutine_threadsafe(coro, loop) value = fut.result() if getattr(original_func, self._output_translation_attr, True): return self._translate_out(value, interface) return value
(self, coro, interface, original_func)
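The core mechanism here is `asyncio.run_coroutine_threadsafe` against an event loop running in another thread; a minimal sketch of that pattern on its own, independent of the synchronizer internals (the `work` coroutine is hypothetical).

```python
import asyncio
import threading

# Run a dedicated event loop in a background daemon thread.
loop = asyncio.new_event_loop()
threading.Thread(target=loop.run_forever, daemon=True).start()


async def work():
    await asyncio.sleep(0.01)
    return "done"


# Submit the coroutine to the background loop and block on its result,
# which is what _run_function_sync does for the blocking interface.
future = asyncio.run_coroutine_threadsafe(work(), loop)
assert future.result() == "done"

loop.call_soon_threadsafe(loop.stop)
```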
16,157
synchronicity.synchronizer
_run_generator_sync
null
def _run_generator_sync(self, gen, interface, original_func): value, is_exc = None, False while True: try: if is_exc: value = self._run_function_sync(gen.athrow(value), interface, original_func) else: value = self._run_function_sync(gen.asend(value), interface, original_func) except UserCodeException as uc_exc: raise uc_exc.exc from None except StopAsyncIteration: break try: value = yield value is_exc = False except BaseException as exc: value = exc is_exc = True
(self, gen, interface, original_func)
16,159
synchronicity.synchronizer
_start_loop
null
def _start_loop(self): with self._loop_creation_lock: if self._loop and self._loop.is_running(): return self._loop is_ready = threading.Event() def thread_inner(): async def loop_inner(): self._loop = asyncio.get_running_loop() self._stopping = asyncio.Event() is_ready.set() await self._stopping.wait() # wait until told to stop try: asyncio.run(loop_inner()) except RuntimeError as exc: # Python 3.12 raises a RuntimeError when new threads are created at shutdown. # Swallowing it here is innocuous, but ideally we will revisit this after # refactoring the shutdown handlers that modal uses to avoid triggering it. if "can't create new thread at interpreter shutdown" not in str(exc): raise exc self._thread = threading.Thread(target=thread_inner, daemon=True) self._thread.start() is_ready.wait() # TODO: this might block for a very short time return self._loop
(self)
16,160
synchronicity.synchronizer
_translate_coro_out
null
def _translate_coro_out(self, coro, interface, original_func): async def unwrap_coro(): res = await coro if getattr(original_func, self._output_translation_attr, True): return self._translate_out(res, interface) return res return unwrap_coro()
(self, coro, interface, original_func)
16,161
synchronicity.synchronizer
_translate_in
null
def _translate_in(self, obj): return self._recurse_map(self._translate_scalar_in, obj)
(self, obj)
16,162
synchronicity.synchronizer
_translate_out
null
def _translate_out(self, obj, interface): return self._recurse_map(lambda scalar: self._translate_scalar_out(scalar, interface), obj)
(self, obj, interface)
16,163
synchronicity.synchronizer
_translate_scalar_in
null
def _translate_scalar_in(self, obj): # If it's an external object, translate it to the internal type if hasattr(obj, "__dict__"): if inspect.isclass(obj): # TODO: functions? return obj.__dict__.get(self._original_attr, obj) else: return obj.__dict__.get(self._original_attr, obj) else: return obj
(self, obj)
16,164
synchronicity.synchronizer
_translate_scalar_out
null
def _translate_scalar_out(self, obj, interface): if interface == Interface._ASYNC_WITH_BLOCKING_TYPES: interface = Interface.BLOCKING # If it's an internal object, translate it to the external interface if inspect.isclass(obj): # TODO: functions? cls_dct = obj.__dict__ if self._wrapped_attr in cls_dct: return cls_dct[self._wrapped_attr][interface] else: return obj elif isinstance(obj, typing.TypeVar): if hasattr(obj, self._wrapped_attr): return getattr(obj, self._wrapped_attr)[interface] else: return obj else: cls_dct = obj.__class__.__dict__ if self._wrapped_attr in cls_dct: # This is an *instance* of a synchronized class, translate its type return self._wrap(obj, interface) else: return obj
(self, obj, interface)
16,165
synchronicity.synchronizer
_update_wrapper
Very similar to functools.update_wrapper
def _update_wrapper(self, f_wrapped, f, name=None, interface=None, target_module=None): """Very similar to functools.update_wrapper""" functools.update_wrapper(f_wrapped, f) if name is not None: f_wrapped.__name__ = name f_wrapped.__qualname__ = name if target_module is not None: f_wrapped.__module__ = target_module setattr(f_wrapped, SYNCHRONIZER_ATTR, self) setattr(f_wrapped, TARGET_INTERFACE_ATTR, interface)
(self, f_wrapped, f, name=None, interface=None, target_module=None)
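A quick illustration of what `functools.update_wrapper` copies over, which is the baseline this helper builds on before overriding the name, module, and synchronicity-specific marker attributes; `original` and `wrapper` are hypothetical functions.

```python
import functools


def original(a, b):
    """Add two numbers."""
    return a + b


def wrapper(*args, **kwargs):
    return original(*args, **kwargs)


# update_wrapper copies __module__, __name__, __qualname__, __doc__ and
# __annotations__ from `original` onto `wrapper`, updates its __dict__,
# and sets __wrapped__ so introspection points back at the original.
functools.update_wrapper(wrapper, original)
assert wrapper.__name__ == "original"
assert wrapper.__doc__ == "Add two numbers."
```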
16,166
synchronicity.synchronizer
_wrap
null
def _wrap( self, obj, interface, name=None, require_already_wrapped=False, target_module=None, ): # This method works for classes, functions, and instances # It wraps the object, and caches the wrapped object # Get the list of existing interfaces if hasattr(obj, "__dict__"): if self._wrapped_attr not in obj.__dict__: if isinstance(obj.__dict__, dict): # This works for instances obj.__dict__.setdefault(self._wrapped_attr, {}) else: # This works for classes & functions setattr(obj, self._wrapped_attr, {}) interfaces = obj.__dict__[self._wrapped_attr] else: # e.g., TypeVar in Python>=3.12 if not hasattr(obj, self._wrapped_attr): setattr(obj, self._wrapped_attr, {}) interfaces = getattr(obj, self._wrapped_attr) # If this is already wrapped, return the existing interface if interface in interfaces: if self._multiwrap_warning: warnings.warn(f"Object {obj} is already wrapped, but getting wrapped again") return interfaces[interface] if require_already_wrapped: # This happens if a class has a custom name but its base class doesn't raise RuntimeError(f"{obj} needs to be serialized explicitly with a custom name") # Wrap object (different cases based on the type) if inspect.isclass(obj): new_obj = self._wrap_class( obj, interface, name, target_module=target_module, ) elif inspect.isfunction(obj): new_obj = self._wrap_callable(obj, interface, name, target_module=target_module) elif isinstance(obj, typing.TypeVar): new_obj = self._wrap_type_var(obj, interface, name, target_module) elif self._wrapped_attr in obj.__class__.__dict__: new_obj = self._wrap_instance(obj, interface) else: raise Exception("Argument %s is not a class or a callable" % obj) # Store the interface on the obj and return interfaces[interface] = new_obj return new_obj
(self, obj, interface, name=None, require_already_wrapped=False, target_module=None)
16,167
synchronicity.synchronizer
_wrap_callable
null
def _wrap_callable( self, f, interface, name=None, allow_futures=True, unwrap_user_excs=True, target_module=None, include_aio_interface=True, ): if hasattr(f, self._original_attr): if self._multiwrap_warning: warnings.warn(f"Function {f} is already wrapped, but getting wrapped again") return f if name is None: _name = _FUNCTION_PREFIXES[interface] + f.__name__ else: _name = name is_coroutinefunction = inspect.iscoroutinefunction(f) @wraps_by_interface(interface, f) def f_wrapped(*args, **kwargs): return_future = kwargs.pop(_RETURN_FUTURE_KWARG, False) # If this gets called with an argument that represents an external type, # translate it into an internal type if getattr(f, self._input_translation_attr, True): args = self._translate_in(args) kwargs = self._translate_in(kwargs) # Call the function res = f(*args, **kwargs) # Figure out if this is a coroutine or something is_coroutine = inspect.iscoroutine(res) is_asyncgen = inspect.isasyncgen(res) if return_future: if not allow_futures: raise Exception("Can not return future for this function") elif is_coroutine: return self._run_function_sync_future(res, interface, f) elif is_asyncgen: raise Exception("Can not return futures for generators") else: return res elif is_coroutine: if interface in (Interface.ASYNC, Interface._ASYNC_WITH_BLOCKING_TYPES): coro = self._run_function_async(res, interface, f) if not is_coroutinefunction: # If this is a non-async function that returns a coroutine, # then this is the exit point, and we need to unwrap any # wrapped exception here. Otherwise, the exit point is # in async_wrap.py coro = unwrap_coro_exception(coro) return coro elif interface == Interface.BLOCKING: # This is the exit point, so we need to unwrap the exception here try: return self._run_function_sync(res, interface, f) except StopAsyncIteration: # this is a special case for handling __next__ wrappers around # __anext__ that raises StopAsyncIteration raise StopIteration() except UserCodeException as uc_exc: # Used to skip a frame when called from `proxy_method`. 
if unwrap_user_excs and not (Interface.BLOCKING and include_aio_interface): raise uc_exc.exc from None else: raise uc_exc elif is_asyncgen: # Note that the _run_generator_* functions handle their own # unwrapping of exceptions (this happens during yielding) if interface in (Interface.ASYNC, Interface._ASYNC_WITH_BLOCKING_TYPES): return self._run_generator_async(res, interface, f) elif interface == Interface.BLOCKING: return self._run_generator_sync(res, interface, f) else: if inspect.isfunction(res) or isinstance(res, functools.partial): # TODO: HACKY HACK # TODO: this is needed for decorator wrappers that returns functions # Maybe a bit of a hacky special case that deserves its own decorator @wraps_by_interface(interface, res) def f_wrapped(*args, **kwargs): args = self._translate_in(args) kwargs = self._translate_in(kwargs) f_res = res(*args, **kwargs) if getattr(f, self._output_translation_attr, True): return self._translate_out(f_res, interface) else: return f_res return f_wrapped if getattr(f, self._output_translation_attr, True): return self._translate_out(res, interface) else: return res self._update_wrapper(f_wrapped, f, _name, interface, target_module=target_module) setattr(f_wrapped, self._original_attr, f) if interface == Interface.BLOCKING and include_aio_interface and should_have_aio_interface(f): # special async interface # this async interface returns *blocking* instances of wrapped objects, not async ones: async_interface = self._wrap_callable( f, interface=Interface._ASYNC_WITH_BLOCKING_TYPES, name=name, allow_futures=allow_futures, unwrap_user_excs=unwrap_user_excs, target_module=target_module, ) f_wrapped = FunctionWithAio(f_wrapped, async_interface, self) self._update_wrapper(f_wrapped, f, _name, interface, target_module=target_module) setattr(f_wrapped, self._original_attr, f) return f_wrapped
(self, f, interface, name=None, allow_futures=True, unwrap_user_excs=True, target_module=None, include_aio_interface=True)
16,168
synchronicity.synchronizer
_wrap_check_async_leakage
Check if a coroutine returns another coroutine (or an async generator) and warn. The reason this is important to catch is that otherwise even synchronized code might end up "leaking" async code into the caller.
def _wrap_check_async_leakage(self, coro): """Check if a coroutine returns another coroutine (or an async generator) and warn. The reason this is important to catch is that otherwise even synchronized code might end up "leaking" async code into the caller. """ if not self._async_leakage_warning: return coro async def coro_wrapped(): value = await coro # TODO: we should include the name of the original function here if inspect.iscoroutine(value): warnings.warn(f"Potential async leakage: coroutine returned a coroutine {value}.") elif inspect.isasyncgen(value): warnings.warn(f"Potential async leakage: Coroutine returned an async generator {value}.") return value return coro_wrapped()
(self, coro)
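For reference, the situation the leakage check guards against, shown with plain asyncio and a hypothetical `leaky` coroutine: awaiting the outer coroutine still hands the caller another coroutine instead of a value.

```python
import asyncio
import inspect


async def leaky():
    async def inner():
        return 1

    return inner()  # returns a coroutine instead of a plain value


async def main():
    value = await leaky()
    # This is exactly what the leakage warning flags: the "result" would
    # still need to be awaited by the (possibly synchronous) caller.
    assert inspect.iscoroutine(value)
    value.close()  # close it to avoid a "never awaited" RuntimeWarning


asyncio.run(main())
```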
16,169
synchronicity.synchronizer
_wrap_class
null
def _wrap_class(self, cls, interface, name, target_module=None): bases = tuple( self._wrap(base, interface, require_already_wrapped=(name is not None)) if base != object else object for base in cls.__bases__ ) new_dict = {self._original_attr: cls} if cls is not None: new_dict["__init__"] = self._wrap_proxy_constructor(cls, interface) for k, v in cls.__dict__.items(): if k in _BUILTIN_ASYNC_METHODS: k_sync = _BUILTIN_ASYNC_METHODS[k] if interface == Interface.BLOCKING: new_dict[k_sync] = self._wrap_proxy_method( v, interface, allow_futures=False, include_aio_interface=False, ) new_dict[k] = self._wrap_proxy_method( v, Interface._ASYNC_WITH_BLOCKING_TYPES, allow_futures=False, ) elif interface == Interface.ASYNC: new_dict[k] = self._wrap_proxy_method(v, interface, allow_futures=False) elif k in ("__new__", "__init__"): # Skip custom constructor in the wrapped class # Instead, delegate to the base class constructor and wrap it pass elif k in IGNORED_ATTRIBUTES: pass elif isinstance(v, staticmethod): # TODO(erikbern): this feels pretty hacky new_dict[k] = self._wrap_proxy_staticmethod(v, interface) elif isinstance(v, classmethod): new_dict[k] = self._wrap_proxy_classmethod(v, interface) elif isinstance(v, property): new_dict[k] = self._wrap_proxy_property(v, interface) elif isinstance(v, MethodWithAio): # if library defines its own "synchronicity-like" interface we transfer it "as is" to the wrapper new_dict[k] = v elif callable(v): new_dict[k] = self._wrap_proxy_method(v, interface) if name is None: name = _CLASS_PREFIXES[interface] + cls.__name__ new_cls = type.__new__(type, name, bases, new_dict) new_cls.__module__ = cls.__module__ if target_module is None else target_module new_cls.__doc__ = cls.__doc__ if "__annotations__" in cls.__dict__: new_cls.__annotations__ = cls.__annotations__ # transfer annotations setattr(new_cls, TARGET_INTERFACE_ATTR, interface) setattr(new_cls, SYNCHRONIZER_ATTR, self) return new_cls
(self, cls, interface, name, target_module=None)
16,170
synchronicity.synchronizer
_wrap_instance
null
def _wrap_instance(self, obj, interface): # Takes an object and creates a new proxy object for it cls = obj.__class__ cls_dct = cls.__dict__ interfaces = cls_dct[self._wrapped_attr] if interface not in interfaces: raise RuntimeError(f"Class {cls} has not synchronized {interface}.") interface_cls = interfaces[interface] new_obj = interface_cls.__new__(interface_cls) # Store a reference to the original object new_obj.__dict__[self._original_attr] = obj new_obj.__dict__[SYNCHRONIZER_ATTR] = self new_obj.__dict__[TARGET_INTERFACE_ATTR] = interface return new_obj
(self, obj, interface)
16,171
synchronicity.synchronizer
_wrap_proxy_classmethod
null
def _wrap_proxy_classmethod(self, orig_classmethod, interface): orig_func = orig_classmethod.__func__ method = self._wrap_callable(orig_func, interface, include_aio_interface=False) if interface == Interface.BLOCKING and should_have_aio_interface(orig_func): async_method = self._wrap_callable(orig_func, Interface._ASYNC_WITH_BLOCKING_TYPES) return MethodWithAio(method, async_method, self, is_classmethod=True) return classmethod(method)
(self, orig_classmethod, interface)
16,172
synchronicity.synchronizer
_wrap_proxy_constructor
Returns a custom __init__ for the subclass.
def _wrap_proxy_constructor(synchronizer_self, cls, interface): """Returns a custom __init__ for the subclass.""" def my_init(self, *args, **kwargs): # Create base instance args = synchronizer_self._translate_in(args) kwargs = synchronizer_self._translate_in(kwargs) instance = cls(*args, **kwargs) # Register self as the wrapped one interface_instances = {interface: self} instance.__dict__[synchronizer_self._wrapped_attr] = interface_instances # Store a reference to the original object self.__dict__[synchronizer_self._original_attr] = instance synchronizer_self._update_wrapper(my_init, cls.__init__, interface=interface) setattr(my_init, synchronizer_self._original_attr, cls.__init__) return my_init
(synchronizer_self, cls, interface)
16,173
synchronicity.synchronizer
_wrap_proxy_method
null
def _wrap_proxy_method( synchronizer_self, method, interface, allow_futures=True, include_aio_interface=True, ): if getattr(method, synchronizer_self._nowrap_attr, None): # This method is marked as non-wrappable return method wrapped_method = synchronizer_self._wrap_callable( method, interface, allow_futures=allow_futures, unwrap_user_excs=False, ) @wraps_by_interface(interface, wrapped_method) def proxy_method(self, *args, **kwargs): instance = self.__dict__[synchronizer_self._original_attr] try: return wrapped_method(instance, *args, **kwargs) except UserCodeException as uc_exc: raise uc_exc.exc from None if interface == Interface.BLOCKING and include_aio_interface and should_have_aio_interface(method): async_proxy_method = synchronizer_self._wrap_proxy_method( method, Interface._ASYNC_WITH_BLOCKING_TYPES, allow_futures ) return MethodWithAio(proxy_method, async_proxy_method, synchronizer_self) return proxy_method
(synchronizer_self, method, interface, allow_futures=True, include_aio_interface=True)
16,174
synchronicity.synchronizer
_wrap_proxy_property
null
def _wrap_proxy_property(self, prop, interface): kwargs = {} for attr in ["fget", "fset", "fdel"]: if getattr(prop, attr): func = getattr(prop, attr) kwargs[attr] = self._wrap_proxy_method( func, interface, allow_futures=False, include_aio_interface=False ) return property(**kwargs)
(self, prop, interface)
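A small sketch of the underlying idea of reassembling a `property` from its `fget`/`fset`/`fdel` callables, using a hypothetical `Celsius` class rather than wrapped proxy methods.

```python
class Celsius:
    def __init__(self, degrees):
        self._degrees = degrees

    def _get(self):
        return self._degrees

    def _set(self, value):
        self._degrees = value

    # Rebuild a property from individual accessors, mirroring how
    # _wrap_proxy_property reassembles the accessors it has wrapped.
    degrees = property(fget=_get, fset=_set)


c = Celsius(20)
c.degrees = 25
assert c.degrees == 25
```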
16,175
synchronicity.synchronizer
_wrap_proxy_staticmethod
null
def _wrap_proxy_staticmethod(self, method, interface): orig_function = method.__func__ method = self._wrap_callable(orig_function, interface) if isinstance(method, FunctionWithAio): return method # no need to wrap a FunctionWithAio in a staticmethod, as it won't get bound anyways return staticmethod(method)
(self, method, interface)
16,176
synchronicity.synchronizer
_wrap_type_var
null
def _wrap_type_var(self, obj, interface, name, target_module): # TypeVar translation is needed only for type stub generation, in case the # "bound" attribute refers to a translatable type. # Creates a new identical TypeVar, marked with synchronicity's special attributes # This lets type stubs "translate" the `bounds` attribute on emitted type vars # if picked up from module scope and in generics using the base implementation type # TODO(elias): Refactor - since this isn't used for live apps, move type stub generation into genstub new_obj = typing.TypeVar(name, bound=obj.__bound__) # noqa setattr(new_obj, self._original_attr, obj) setattr(new_obj, SYNCHRONIZER_ATTR, self) setattr(new_obj, TARGET_INTERFACE_ATTR, interface) new_obj.__module__ = target_module if not hasattr(obj, self._wrapped_attr): setattr(obj, self._wrapped_attr, {}) getattr(obj, self._wrapped_attr)[interface] = new_obj return new_obj
(self, obj, interface, name, target_module)
16,177
synchronicity.synchronizer
create_async
null
def create_async(self, obj, name: Optional[str] = None, target_module: Optional[str] = None): wrapped = self._wrap(obj, Interface.ASYNC, name, target_module=target_module) return wrapped
(self, obj, name: Optional[str] = None, target_module: Optional[str] = None)
16,178
synchronicity.synchronizer
create_blocking
null
def create_blocking(self, obj, name: Optional[str] = None, target_module: Optional[str] = None): wrapped = self._wrap(obj, Interface.BLOCKING, name, target_module=target_module) return wrapped
(self, obj, name: Optional[str] = None, target_module: Optional[str] = None)
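A sketch of wrapping a whole class with `create_blocking`, under the same assumption that `Synchronizer` is the public export; the `_Counter` class and the custom name are hypothetical.

```python
import asyncio

from synchronicity import Synchronizer

synchronizer = Synchronizer()


class _Counter:
    def __init__(self):
        self.value = 0

    async def increment(self):
        await asyncio.sleep(0.01)
        self.value += 1
        return self.value


# The blocking proxy exposes the async method as a plain blocking call;
# construction goes through the generated proxy constructor.
Counter = synchronizer.create_blocking(_Counter, name="Counter")
counter = Counter()
assert counter.increment() == 1
assert counter.increment() == 2
```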
16,179
synchronicity.synchronizer
create_callback
null
def create_callback(self, f, interface): return Callback(self, f, interface)
(self, f, interface)
16,180
synchronicity.synchronizer
is_synchronized
null
def is_synchronized(self, obj): if inspect.isclass(obj) or inspect.isfunction(obj): return hasattr(obj, self._original_attr) else: return hasattr(obj.__class__, self._original_attr)
(self, obj)
16,181
synchronicity.synchronizer
no_input_translation
null
def no_input_translation(self, obj): setattr(obj, self._input_translation_attr, False) return obj
(self, obj)
16,182
synchronicity.synchronizer
no_io_translation
null
def no_io_translation(self, obj): return self.no_input_translation(self.no_output_translation(obj))
(self, obj)
16,183
synchronicity.synchronizer
no_output_translation
null
def no_output_translation(self, obj): setattr(obj, self._output_translation_attr, False) return obj
(self, obj)
16,184
synchronicity.synchronizer
nowrap
null
def nowrap(self, obj): setattr(obj, self._nowrap_attr, True) return obj
(self, obj)
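A sketch of using `nowrap` as a decorator so a method is copied onto the proxy class untouched; `_Config` is a hypothetical class and the example assumes the usual `Synchronizer` export.

```python
from synchronicity import Synchronizer

synchronizer = Synchronizer()


class _Config:
    @synchronizer.nowrap
    def raw(self):
        # Marked as non-wrappable: the blocking proxy exposes this method
        # as-is, with no translation or event-loop dispatch.
        return {"debug": True}


Config = synchronizer.create_blocking(_Config, name="Config")
assert Config().raw() == {"debug": True}
```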
16,192
coola.summarizers.base
BaseSummarizer
Define the base class to implement a summarizer. ```pycon >>> from coola import Summarizer >>> summarizer = Summarizer() >>> summarizer Summarizer( (<class 'collections.abc.Mapping'>): MappingFormatter(max_items=5, num_spaces=2) (<class 'collections.abc.Sequence'>): SequenceFormatter(max_items=5, num_spaces=2) (<class 'dict'>): MappingFormatter(max_items=5, num_spaces=2) (<class 'list'>): SequenceFormatter(max_items=5, num_spaces=2) (<class 'object'>): DefaultFormatter(max_characters=-1) (<class 'set'>): SetFormatter(max_items=5, num_spaces=2) (<class 'tuple'>): SequenceFormatter(max_items=5, num_spaces=2) (<class 'numpy.ndarray'>): NDArrayFormatter(show_data=False) (<class 'torch.Tensor'>): TensorFormatter(show_data=False) ) >>> print(summarizer.summary(1)) <class 'int'> 1 >>> print(summarizer.summary(["abc", "def"])) <class 'list'> (length=2) (0): abc (1): def >>> print(summarizer.summary([[0, 1, 2], {"key1": "abc", "key2": "def"}])) <class 'list'> (length=2) (0): [0, 1, 2] (1): {'key1': 'abc', 'key2': 'def'} >>> print(summarizer.summary([[0, 1, 2], {"key1": "abc", "key2": "def"}], max_depth=2)) <class 'list'> (length=2) (0): <class 'list'> (length=3) (0): 0 (1): 1 (2): 2 (1): <class 'dict'> (length=2) (key1): abc (key2): def ```
class BaseSummarizer(ABC): r"""Define the base class to implement a summarizer. ```pycon >>> from coola import Summarizer >>> summarizer = Summarizer() >>> summarizer Summarizer( (<class 'collections.abc.Mapping'>): MappingFormatter(max_items=5, num_spaces=2) (<class 'collections.abc.Sequence'>): SequenceFormatter(max_items=5, num_spaces=2) (<class 'dict'>): MappingFormatter(max_items=5, num_spaces=2) (<class 'list'>): SequenceFormatter(max_items=5, num_spaces=2) (<class 'object'>): DefaultFormatter(max_characters=-1) (<class 'set'>): SetFormatter(max_items=5, num_spaces=2) (<class 'tuple'>): SequenceFormatter(max_items=5, num_spaces=2) (<class 'numpy.ndarray'>): NDArrayFormatter(show_data=False) (<class 'torch.Tensor'>): TensorFormatter(show_data=False) ) >>> print(summarizer.summary(1)) <class 'int'> 1 >>> print(summarizer.summary(["abc", "def"])) <class 'list'> (length=2) (0): abc (1): def >>> print(summarizer.summary([[0, 1, 2], {"key1": "abc", "key2": "def"}])) <class 'list'> (length=2) (0): [0, 1, 2] (1): {'key1': 'abc', 'key2': 'def'} >>> print(summarizer.summary([[0, 1, 2], {"key1": "abc", "key2": "def"}], max_depth=2)) <class 'list'> (length=2) (0): <class 'list'> (length=3) (0): 0 (1): 1 (2): 2 (1): <class 'dict'> (length=2) (key1): abc (key2): def ``` """ @abstractmethod def summary( self, value: Any, depth: int = 0, max_depth: int = 1, ) -> str: r"""Summarize the input value in a string. Args: value: Specifies the value to summarize. depth: Specifies the current depth. max_depth: Specifies the maximum depth to summarize if the input is nested. Returns: The summary as a string. Example usage: ```pycon >>> from coola import Summarizer >>> summarizer = Summarizer() >>> print(summarizer.summary(1)) <class 'int'> 1 >>> print(summarizer.summary(["abc", "def"])) <class 'list'> (length=2) (0): abc (1): def >>> print(summarizer.summary([[0, 1, 2], {"key1": "abc", "key2": "def"}])) <class 'list'> (length=2) (0): [0, 1, 2] (1): {'key1': 'abc', 'key2': 'def'} >>> print(summarizer.summary([[0, 1, 2], {"key1": "abc", "key2": "def"}], max_depth=2)) <class 'list'> (length=2) (0): <class 'list'> (length=3) (0): 0 (1): 1 (2): 2 (1): <class 'dict'> (length=2) (key1): abc (key2): def ``` """
()
16,193
coola.summarizers.base
summary
Summarize the input value in a string. Args: value: Specifies the value to summarize. depth: Specifies the current depth. max_depth: Specifies the maximum depth to summarize if the input is nested. Returns: The summary as a string. Example usage: ```pycon >>> from coola import Summarizer >>> summarizer = Summarizer() >>> print(summarizer.summary(1)) <class 'int'> 1 >>> print(summarizer.summary(["abc", "def"])) <class 'list'> (length=2) (0): abc (1): def >>> print(summarizer.summary([[0, 1, 2], {"key1": "abc", "key2": "def"}])) <class 'list'> (length=2) (0): [0, 1, 2] (1): {'key1': 'abc', 'key2': 'def'} >>> print(summarizer.summary([[0, 1, 2], {"key1": "abc", "key2": "def"}], max_depth=2)) <class 'list'> (length=2) (0): <class 'list'> (length=3) (0): 0 (1): 1 (2): 2 (1): <class 'dict'> (length=2) (key1): abc (key2): def ```
@abstractmethod def summary( self, value: Any, depth: int = 0, max_depth: int = 1, ) -> str: r"""Summarize the input value in a string. Args: value: Specifies the value to summarize. depth: Specifies the current depth. max_depth: Specifies the maximum depth to summarize if the input is nested. Returns: The summary as a string. Example usage: ```pycon >>> from coola import Summarizer >>> summarizer = Summarizer() >>> print(summarizer.summary(1)) <class 'int'> 1 >>> print(summarizer.summary(["abc", "def"])) <class 'list'> (length=2) (0): abc (1): def >>> print(summarizer.summary([[0, 1, 2], {"key1": "abc", "key2": "def"}])) <class 'list'> (length=2) (0): [0, 1, 2] (1): {'key1': 'abc', 'key2': 'def'} >>> print(summarizer.summary([[0, 1, 2], {"key1": "abc", "key2": "def"}], max_depth=2)) <class 'list'> (length=2) (0): <class 'list'> (length=3) (0): 0 (1): 1 (2): 2 (1): <class 'dict'> (length=2) (key1): abc (key2): def ``` """
(self, value: Any, depth: int = 0, max_depth: int = 1) -> str
16,194
coola.reduction
Reduction
Implement the class that defines the reduction strategy.
class Reduction: r"""Implement the class that defines the reduction strategy.""" reducer = auto_reducer() @classmethod def available_reducers(cls) -> tuple[str, ...]: """Get the available reducers. Returns: The available reducers. Example usage: ```pycon >>> from coola import Reduction >>> Reduction.available_reducers() (...) ``` """ return ReducerRegistry.available_reducers() @classmethod def check_reducer(cls, reducer: str) -> None: r"""Check if the reducer is available. Args: reducer: Specifies the reducer name. Raises: RuntimeError: if the reducer is not available. Example usage: ```pycon >>> from coola import Reduction >>> Reduction.check_reducer("torch") ``` """ if reducer not in (reducers := cls.available_reducers()): msg = f"Incorrect reducer {reducer}. Reducer should be one of {reducers}" raise RuntimeError(msg) @classmethod def initialize(cls, reducer: str) -> None: r"""Initialize the reduction strategy. Args: reducer: Specifies the name of the reducer to use. Example usage: ```pycon >>> from coola import Reduction >>> Reduction.initialize("torch") ``` """ cls.check_reducer(reducer) cls.reducer = ReducerRegistry.registry[reducer]
()
16,195
coola.summarizers.summarizer
Summarizer
Implement the default summarizer. The registry is a class variable, so it is shared with all the instances of this class. Example usage: ```pycon >>> from coola import Summarizer >>> summarizer = Summarizer() >>> summarizer Summarizer( (<class 'collections.abc.Mapping'>): MappingFormatter(max_items=5, num_spaces=2) (<class 'collections.abc.Sequence'>): SequenceFormatter(max_items=5, num_spaces=2) (<class 'dict'>): MappingFormatter(max_items=5, num_spaces=2) (<class 'list'>): SequenceFormatter(max_items=5, num_spaces=2) (<class 'object'>): DefaultFormatter(max_characters=-1) (<class 'set'>): SetFormatter(max_items=5, num_spaces=2) (<class 'tuple'>): SequenceFormatter(max_items=5, num_spaces=2) (<class 'numpy.ndarray'>): NDArrayFormatter(show_data=False) (<class 'torch.Tensor'>): TensorFormatter(show_data=False) ) >>> print(summarizer.summary(1)) <class 'int'> 1 >>> print(summarizer.summary(["abc", "def"])) <class 'list'> (length=2) (0): abc (1): def >>> print(summarizer.summary([[0, 1, 2], {"key1": "abc", "key2": "def"}])) <class 'list'> (length=2) (0): [0, 1, 2] (1): {'key1': 'abc', 'key2': 'def'} >>> print(summarizer.summary([[0, 1, 2], {"key1": "abc", "key2": "def"}], max_depth=2)) <class 'list'> (length=2) (0): <class 'list'> (length=3) (0): 0 (1): 1 (2): 2 (1): <class 'dict'> (length=2) (key1): abc (key2): def ```
class Summarizer(BaseSummarizer): """Implement the default summarizer. The registry is a class variable, so it is shared with all the instances of this class. Example usage: ```pycon >>> from coola import Summarizer >>> summarizer = Summarizer() >>> summarizer Summarizer( (<class 'collections.abc.Mapping'>): MappingFormatter(max_items=5, num_spaces=2) (<class 'collections.abc.Sequence'>): SequenceFormatter(max_items=5, num_spaces=2) (<class 'dict'>): MappingFormatter(max_items=5, num_spaces=2) (<class 'list'>): SequenceFormatter(max_items=5, num_spaces=2) (<class 'object'>): DefaultFormatter(max_characters=-1) (<class 'set'>): SetFormatter(max_items=5, num_spaces=2) (<class 'tuple'>): SequenceFormatter(max_items=5, num_spaces=2) (<class 'numpy.ndarray'>): NDArrayFormatter(show_data=False) (<class 'torch.Tensor'>): TensorFormatter(show_data=False) ) >>> print(summarizer.summary(1)) <class 'int'> 1 >>> print(summarizer.summary(["abc", "def"])) <class 'list'> (length=2) (0): abc (1): def >>> print(summarizer.summary([[0, 1, 2], {"key1": "abc", "key2": "def"}])) <class 'list'> (length=2) (0): [0, 1, 2] (1): {'key1': 'abc', 'key2': 'def'} >>> print(summarizer.summary([[0, 1, 2], {"key1": "abc", "key2": "def"}], max_depth=2)) <class 'list'> (length=2) (0): <class 'list'> (length=3) (0): 0 (1): 1 (2): 2 (1): <class 'dict'> (length=2) (key1): abc (key2): def ``` """ registry: ClassVar[dict[type[object], BaseFormatter]] = { Mapping: MappingFormatter(), Sequence: SequenceFormatter(), dict: MappingFormatter(), list: SequenceFormatter(), object: DefaultFormatter(), set: SetFormatter(), tuple: SequenceFormatter(), } def __repr__(self) -> str: return f"{self.__class__.__qualname__}(\n {str_indent(str_mapping(self.registry))}\n)" def summary( self, value: Any, depth: int = 0, max_depth: int = 1, ) -> str: return self.find_formatter(type(value)).format( summarizer=self, value=value, depth=depth, max_depth=max_depth, ) @classmethod def add_formatter( cls, data_type: type[object], formatter: BaseFormatter, exist_ok: bool = False ) -> None: r"""Add a formatter for a given data type. Args: data_type: Specifies the data type for this test. formatter: Specifies the formatter to use for the specified type. exist_ok: If ``False``, ``RuntimeError`` is raised if the data type already exists. This parameter should be set to ``True`` to overwrite the formatter for a type. Raises: RuntimeError: if a formatter is already registered for the data type and ``exist_ok=False``. Example usage: ```pycon >>> from coola import Summarizer >>> from coola.formatters import MappingFormatter >>> Summarizer.add_formatter(dict, MappingFormatter(), exist_ok=True) ``` """ if data_type in cls.registry and not exist_ok: msg = ( f"A formatter ({cls.registry[data_type]}) is already registered for the data " f"type {data_type}. Please use `exist_ok=True` if you want to overwrite the " "formatter for this type" ) raise RuntimeError(msg) cls.registry[data_type] = formatter @classmethod def has_formatter(cls, data_type: type[object]) -> bool: r"""Indicate if a formatter is registered for the given data type. Args: data_type: Specifies the data type to check. Returns: ``True`` if a formatter is registered, otherwise ``False``. Example usage: ```pycon >>> from coola import Summarizer >>> Summarizer.has_formatter(list) True >>> Summarizer.has_formatter(str) False ``` """ return data_type in cls.registry @classmethod def find_formatter(cls, data_type: Any) -> BaseFormatter: r"""Find the formatter associated to an object. 
Args: data_type: Specifies the data type to get. Returns: The formatter associated to the data type. Raises: TypeError: if a formatter cannot be found for this data type. Example usage: ```pycon >>> from coola import Summarizer >>> Summarizer.find_formatter(list) SequenceFormatter(max_items=5, num_spaces=2) >>> Summarizer.find_formatter(str) DefaultFormatter(max_characters=-1) ``` """ for object_type in data_type.__mro__: formatter = cls.registry.get(object_type, None) if formatter is not None: return formatter msg = f"Incorrect data type: {data_type}" raise TypeError(msg) @classmethod def load_state_dict(cls, state: dict) -> None: r"""Load the state values from a dict. Args: state: A dictionary with state values. Example usage: ```pycon >>> from coola import Summarizer >>> Summarizer.load_state_dict({object: {"max_characters": 10}}) >>> summarizer = Summarizer() >>> summarizer.registry[object] DefaultFormatter(max_characters=10) >>> Summarizer.load_state_dict({object: {"max_characters": -1}}) >>> summarizer.registry[object] DefaultFormatter(max_characters=-1) ``` """ for data_type, formatter in cls.registry.items(): if (s := state.get(data_type)) is not None: formatter.load_state_dict(s) @classmethod def state_dict(cls) -> dict: r"""Return a dictionary containing state values. Returns: The state values in a dict. Example usage: ```pycon >>> from coola import Summarizer >>> Summarizer.state_dict() {<class 'collections.abc.Mapping'>: {'max_items': 5, 'num_spaces': 2},...} ``` """ return {data_type: formatter.state_dict() for data_type, formatter in cls.registry.items()} @classmethod def set_max_characters(cls, max_characters: int) -> None: r"""Set the maximum of characters for the compatible formatter to the specified value. To be updated, the formatters need to implement the method ``set_max_characters``. Args: max_characters: Specifies the maximum of characters. Example usage: ```pycon >>> from coola import Summarizer >>> Summarizer.set_max_characters(10) >>> summarizer = Summarizer() >>> summarizer.registry[object] DefaultFormatter(max_characters=10) >>> Summarizer.set_max_characters(-1) >>> summarizer.registry[object] DefaultFormatter(max_characters=-1) ``` """ for formatter in cls.registry.values(): if hasattr(formatter, "set_max_characters"): formatter.set_max_characters(max_characters) @classmethod def set_max_items(cls, max_items: int) -> None: r"""Set the maximum number of items for the compatible formatter to the specified value. To be updated, the formatters need to implement the method ``set_max_items``. Args: max_items: Specifies the maximum number of items to show. Example usage: ```pycon >>> from coola import Summarizer >>> Summarizer.set_max_items(10) >>> summarizer = Summarizer() >>> summarizer.registry[dict] MappingFormatter(max_items=10, num_spaces=2) >>> Summarizer.set_max_items(5) >>> summarizer.registry[dict] MappingFormatter(max_items=5, num_spaces=2) ``` """ for formatter in cls.registry.values(): if hasattr(formatter, "set_max_items"): formatter.set_max_items(max_items) @classmethod def set_num_spaces(cls, num_spaces: int) -> None: r"""Set the maximum of items for the compatible formatter to the specified value. To be updated, the formatters need to implement the method ``set_num_spaces``. Args: num_spaces: Specifies the number of spaces for indentation. 
Example usage: ```pycon >>> from coola import Summarizer >>> Summarizer.set_num_spaces(4) >>> summarizer = Summarizer() >>> summarizer.registry[dict] MappingFormatter(max_items=5, num_spaces=4) >>> Summarizer.set_num_spaces(2) >>> summarizer.registry[dict] MappingFormatter(max_items=5, num_spaces=2) ``` """ for formatter in cls.registry.values(): if hasattr(formatter, "set_num_spaces"): formatter.set_num_spaces(num_spaces)
()
16,196
coola.summarizers.summarizer
__repr__
null
def __repr__(self) -> str: return f"{self.__class__.__qualname__}(\n {str_indent(str_mapping(self.registry))}\n)"
(self) -> str
16,197
coola.summarizers.summarizer
summary
null
def summary( self, value: Any, depth: int = 0, max_depth: int = 1, ) -> str: return self.find_formatter(type(value)).format( summarizer=self, value=value, depth=depth, max_depth=max_depth, )
(self, value: Any, depth: int = 0, max_depth: int = 1) -> str
16,201
coola.comparison
objects_are_allclose
Indicate if two objects are equal within a tolerance. Args: actual: Specifies the actual input. expected: Specifies the expected input. rtol: Specifies the relative tolerance parameter. atol: Specifies the absolute tolerance parameter. equal_nan: If ``True``, then two ``NaN``s will be considered as equal. show_difference: If ``True``, it shows a difference between the two objects if they are different. This parameter is useful to find the difference between two objects. tester: Specifies an equality tester. If ``None``, ``EqualityTester`` is used. Returns: ``True`` if the two objects are (element-wise) equal within a tolerance, otherwise ``False`` Example usage: ```pycon >>> import torch >>> from coola import objects_are_allclose >>> objects_are_allclose( ... [torch.ones(2, 3), torch.zeros(2)], ... [torch.ones(2, 3), torch.zeros(2)], ... ) True >>> objects_are_allclose( ... [torch.ones(2, 3), torch.ones(2)], ... [torch.ones(2, 3), torch.zeros(2)], ... ) False >>> objects_are_allclose( ... [torch.ones(2, 3) + 1e-7, torch.ones(2)], ... [torch.ones(2, 3), torch.ones(2) - 1e-7], ... rtol=0, ... atol=1e-8, ... ) False ```
def objects_are_allclose( actual: Any, expected: Any, *, rtol: float = 1e-5, atol: float = 1e-8, equal_nan: bool = False, show_difference: bool = False, tester: BaseEqualityTester | None = None, ) -> bool: r"""Indicate if two objects are equal within a tolerance. Args: actual: Specifies the actual input. expected: Specifies the expected input. rtol: Specifies the relative tolerance parameter. atol: Specifies the absolute tolerance parameter. equal_nan: If ``True``, then two ``NaN``s will be considered as equal. show_difference: If ``True``, it shows a difference between the two objects if they are different. This parameter is useful to find the difference between two objects. tester: Specifies an equality tester. If ``None``, ``EqualityTester`` is used. Returns: ``True`` if the two objects are (element-wise) equal within a tolerance, otherwise ``False`` Example usage: ```pycon >>> import torch >>> from coola import objects_are_allclose >>> objects_are_allclose( ... [torch.ones(2, 3), torch.zeros(2)], ... [torch.ones(2, 3), torch.zeros(2)], ... ) True >>> objects_are_allclose( ... [torch.ones(2, 3), torch.ones(2)], ... [torch.ones(2, 3), torch.zeros(2)], ... ) False >>> objects_are_allclose( ... [torch.ones(2, 3) + 1e-7, torch.ones(2)], ... [torch.ones(2, 3), torch.ones(2) - 1e-7], ... rtol=0, ... atol=1e-8, ... ) False ``` """ tester = tester or _tester config = EqualityConfig( tester=tester, show_difference=show_difference, equal_nan=equal_nan, atol=atol, rtol=rtol ) return tester.equal(actual, expected, config)
(actual: 'Any', expected: 'Any', *, rtol: 'float' = 1e-05, atol: 'float' = 1e-08, equal_nan: 'bool' = False, show_difference: 'bool' = False, tester: 'BaseEqualityTester | None' = None) -> 'bool'
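The docstring examples use torch tensors, but the same call accepts plain nested Python values; a small sketch, assuming the installed coola version compares scalar floats with the given tolerances.

```python
from coola import objects_are_allclose

# Plain nested containers: floats are compared within rtol/atol.
assert objects_are_allclose({"loss": 0.123456}, {"loss": 0.123457}, atol=1e-5)
assert not objects_are_allclose([1.0, 2.0], [1.0, 2.1])
```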
16,202
coola.comparison
objects_are_equal
Indicate if two objects are equal or not. Args: actual: Specifies the actual input. expected: Specifies the expected input. equal_nan: If ``True``, then two ``NaN``s will be considered as equal. show_difference: If ``True``, it shows a difference between the two objects if they are different. This parameter is useful to find the difference between two objects. tester: Specifies an equality tester. If ``None``, ``EqualityTester`` is used. Returns: ``True`` if the two nested data are equal, otherwise ``False``. Example usage: ```pycon >>> import torch >>> from coola import objects_are_equal >>> objects_are_equal( ... [torch.ones(2, 3), torch.zeros(2)], ... [torch.ones(2, 3), torch.zeros(2)], ... ) True >>> objects_are_equal([torch.ones(2, 3), torch.ones(2)], [torch.ones(2, 3), torch.zeros(2)]) False ```
def objects_are_equal( actual: Any, expected: Any, *, equal_nan: bool = False, show_difference: bool = False, tester: BaseEqualityTester | None = None, ) -> bool: r"""Indicate if two objects are equal or not. Args: actual: Specifies the actual input. expected: Specifies the expected input. equal_nan: If ``True``, then two ``NaN``s will be considered as equal. show_difference: If ``True``, it shows a difference between the two objects if they are different. This parameter is useful to find the difference between two objects. tester: Specifies an equality tester. If ``None``, ``EqualityTester`` is used. Returns: ``True`` if the two nested data are equal, otherwise ``False``. Example usage: ```pycon >>> import torch >>> from coola import objects_are_equal >>> objects_are_equal( ... [torch.ones(2, 3), torch.zeros(2)], ... [torch.ones(2, 3), torch.zeros(2)], ... ) True >>> objects_are_equal([torch.ones(2, 3), torch.ones(2)], [torch.ones(2, 3), torch.zeros(2)]) False ``` """ tester = tester or _tester config = EqualityConfig(tester=tester, show_difference=show_difference, equal_nan=equal_nan) return tester.equal(actual, expected, config)
(actual: 'Any', expected: 'Any', *, equal_nan: 'bool' = False, show_difference: 'bool' = False, tester: 'BaseEqualityTester | None' = None) -> 'bool'
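A torch-free sketch with nested built-in containers; `show_difference=True` only logs where the two objects first diverge, the return value is unchanged.

```python
from coola import objects_are_equal

data1 = {"params": [1, 2, 3], "meta": {"name": "run-1"}}
data2 = {"params": [1, 2, 3], "meta": {"name": "run-2"}}

assert objects_are_equal(data1, data1)
# Logs the differing "meta" entry in addition to returning False.
assert not objects_are_equal(data1, data2, show_difference=True)
```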
16,205
coola.summarizers.summarizer
set_summarizer_options
Set the ``Summarizer`` options. Note: It is recommended to use ``summarizer_options`` rather than this function. Args: max_characters: Specifies the maximum number of characters to show. If ``None``, the maximum number of characters is unchanged. max_items: Specifies the maximum number of items to show. If ``None``, the maximum number of items is unchanged. num_spaces: Specifies the number of spaces for indentation. If ``None``, the number of spaces for indentation is unchanged. Example usage: ```pycon >>> from coola import set_summarizer_options, summary >>> print(summary("abcdefghijklmnopqrstuvwxyz")) <class 'str'> abcdefghijklmnopqrstuvwxyz >>> set_summarizer_options(max_characters=10) >>> print(summary("abcdefghijklmnopqrstuvwxyz")) <class 'str'> abcdefghij... >>> set_summarizer_options(max_characters=-1) >>> print(summary("abcdefghijklmnopqrstuvwxyz")) <class 'str'> abcdefghijklmnopqrstuvwxyz ```
def set_summarizer_options( max_characters: int | None = None, max_items: int | None = None, num_spaces: int | None = None ) -> None: r"""Set the ``Summarizer`` options. Note: It is recommended to use ``summarizer_options`` rather than this function. Args: max_characters: Specifies the maximum number of characters to show. If ``None``, the maximum number of characters is unchanged. max_items: Specifies the maximum number of items to show. If ``None``, the maximum number of items is unchanged. num_spaces: Specifies the number of spaces for indentation. If ``None``, the number of spaces for indentation is unchanged. Example usage: ```pycon >>> from coola import set_summarizer_options, summary >>> print(summary("abcdefghijklmnopqrstuvwxyz")) <class 'str'> abcdefghijklmnopqrstuvwxyz >>> set_summarizer_options(max_characters=10) >>> print(summary("abcdefghijklmnopqrstuvwxyz")) <class 'str'> abcdefghij... >>> set_summarizer_options(max_characters=-1) >>> print(summary("abcdefghijklmnopqrstuvwxyz")) <class 'str'> abcdefghijklmnopqrstuvwxyz ``` """ if max_characters is not None: Summarizer.set_max_characters(max_characters) if max_items is not None: Summarizer.set_max_items(max_items) if num_spaces is not None: Summarizer.set_num_spaces(num_spaces)
(max_characters: Optional[int] = None, max_items: Optional[int] = None, num_spaces: Optional[int] = None) -> NoneType
16,207
coola.summarizers.summarizer
summarizer_options
Context manager that temporarily changes the summarizer options. Accepted arguments are the same as ``set_summarizer_options``. The context manager temporarily changes the configuration of ``Summarizer``. This context manager has no effect if ``Summarizer`` is not used. Args: **kwargs: Accepted arguments are the same as ``set_summarizer_options``. Example usage: ```pycon >>> from coola import summarizer_options, summary >>> print(summary("abcdefghijklmnopqrstuvwxyz")) <class 'str'> abcdefghijklmnopqrstuvwxyz >>> with summarizer_options(max_characters=10): ... print(summary("abcdefghijklmnopqrstuvwxyz")) ... <class 'str'> abcdefghij... >>> print(summary("abcdefghijklmnopqrstuvwxyz")) <class 'str'> abcdefghijklmnopqrstuvwxyz ```
@classmethod def set_max_items(cls, max_items: int) -> None: r"""Set the maximum number of items for the compatible formatter to the specified value. To be updated, the formatters need to implement the method ``set_max_items``. Args: max_items: Specifies the maximum number of items to show. Example usage: ```pycon >>> from coola import Summarizer >>> Summarizer.set_max_items(10) >>> summarizer = Summarizer() >>> summarizer.registry[dict] MappingFormatter(max_items=10, num_spaces=2) >>> Summarizer.set_max_items(5) >>> summarizer.registry[dict] MappingFormatter(max_items=5, num_spaces=2) ``` """ for formatter in cls.registry.values(): if hasattr(formatter, "set_max_items"): formatter.set_max_items(max_items)
(**kwargs: Any) -> NoneType
16,209
coola.summarization
summary
Summarize the input value in a string. Args: value: Specifies the value to summarize. max_depth: Specifies the maximum depth to summarize if the input is nested. summarizer: Specifies the summarization strategy. If ``None``, the default ``Summarizer`` is used. Returns: The summary as a string. Example usage: ```pycon >>> from coola import summary >>> print(summary(1)) <class 'int'> 1 >>> print(summary(["abc", "def"])) <class 'list'> (length=2) (0): abc (1): def >>> print(summary([[0, 1, 2], {"key1": "abc", "key2": "def"}])) <class 'list'> (length=2) (0): [0, 1, 2] (1): {'key1': 'abc', 'key2': 'def'} >>> print(summary([[0, 1, 2], {"key1": "abc", "key2": "def"}], max_depth=2)) <class 'list'> (length=2) (0): <class 'list'> (length=3) (0): 0 (1): 1 (2): 2 (1): <class 'dict'> (length=2) (key1): abc (key2): def ```
def summary(value: Any, max_depth: int = 1, summarizer: BaseSummarizer | None = None) -> str: r"""Summarize the input value in a string. Args: value: Specifies the value to summarize. max_depth: Specifies the maximum depth to summarize if the input is nested. summarizer: Specifies the summarization strategy. If ``None``, the default ``Summarizer`` is used. Returns: The summary as a string. Example usage: ```pycon >>> from coola import summary >>> print(summary(1)) <class 'int'> 1 >>> print(summary(["abc", "def"])) <class 'list'> (length=2) (0): abc (1): def >>> print(summary([[0, 1, 2], {"key1": "abc", "key2": "def"}])) <class 'list'> (length=2) (0): [0, 1, 2] (1): {'key1': 'abc', 'key2': 'def'} >>> print(summary([[0, 1, 2], {"key1": "abc", "key2": "def"}], max_depth=2)) <class 'list'> (length=2) (0): <class 'list'> (length=3) (0): 0 (1): 1 (2): 2 (1): <class 'dict'> (length=2) (key1): abc (key2): def ``` """ summarizer = summarizer or Summarizer() return summarizer.summary(value=value, depth=0, max_depth=max_depth)
(value: 'Any', max_depth: 'int' = 1, summarizer: 'BaseSummarizer | None' = None) -> 'str'
16,212
exif._constants
ColorSpace
Color space specifier.
class ColorSpace(IntEnum): """Color space specifier.""" SRGB = 1 "sRGB" UNCALIBRATED = 0xFFFF "Uncalibrated or Other"
(value, names=None, *, module=None, qualname=None, type=None, start=1)
16,213
exif._constants
ExposureMode
Exposure mode set when the image was shot.
class ExposureMode(IntEnum): """Exposure mode set when the image was shot.""" AUTO_EXPOSURE = 0 """Auto Exposure""" MANUAL_EXPOSURE = 1 """Manual Exposure""" AUTO_BRACKET = 2 """Auto Bracket"""
(value, names=None, *, module=None, qualname=None, type=None, start=1)
16,214
exif._constants
ExposureProgram
Class of the program used by the camera to set exposure when the picture is taken.
class ExposureProgram(IntEnum): """Class of the program used by the camera to set exposure when the picture is taken.""" NOT_DEFINED = 0 """Not Defined""" MANUAL = 1 """Manual""" NORMAL_PROGRAM = 2 """Normal Program""" APERTURE_PRIORITY = 3 """Aperture Priority""" SHUTTER_PRIORITY = 4 """Shutter Priority""" CREATIVE_PROGRAM = 5 """Creative Program (Biased Toward Depth of Field)""" ACTION_PROGRAM = 6 """Action Program (Biased Toward Fast Shutter Speed)""" PORTRAIT_MODE = 7 """Portrait Mode (For Closeup Photos with the Background out of Focus)""" LANDSCAPE_MODE = 8 """Landscape Mode (For Landscape Photos with the Background in Focus)"""
(value, names=None, *, module=None, qualname=None, type=None, start=1)
16,215
exif._datatypes
Flash
Status of the camera's flash when the image was taken. (Reported by the ``flash`` tag.)
class Flash(BitFields, nbytes=1): # type: ignore """Status of the camera's flash when the image was taken. (Reported by the ``flash`` tag.)""" flash_fired: bool = bitfield(typ=bool, size=1) flash_return: FlashReturn = bitfield(typ=FlashReturn, size=2) flash_mode: FlashMode = bitfield(typ=FlashMode, size=2) flash_function_not_present: bool = bitfield(typ=bool, size=1) red_eye_reduction_supported: bool = bitfield(typ=bool, size=1) reserved: int = bitfield(typ=int, size=1)
(*, flash_fired: bool, flash_return: exif._datatypes.FlashReturn, flash_mode: exif._datatypes.FlashMode, flash_function_not_present: bool, red_eye_reduction_supported: bool, reserved: int) -> None
16,216
plum.bitfields
__abs__
null
def __abs__(self): return self.__value__
(self)
16,217
plum.bitfields
__add__
null
def __add__(self, other): return int.__add__(self.__value__, other)
(self, other)
16,218
plum.bitfields
__and__
null
def __and__(self, other): return int.__and__(self.__value__, other)
(self, other)
16,219
plum.bitfields
__bool__
null
def __bool__(self): return int.__bool__(self.__value__ & type(self).__compare_mask__)
(self)
16,220
plum.bitfields
__divmod__
null
def __divmod__(self, other): return int.__divmod__(self.__value__, other)
(self, other)
16,221
plum.bitfields
__eq__
null
def __eq__(self, other): value, other = self._normalize_for_compare(self.__value__, other) return int.__eq__(value, other)
(self, other)
16,222
plum.bitfields
__float__
null
def __float__(self): return int.__float__(self.__value__)
(self)
16,223
plum.bitfields
__floordiv__
null
def __floordiv__(self, other): return int.__floordiv__(self.__value__, other)
(self, other)
16,224
plum.bitfields
__ge__
null
def __ge__(self, other): value, other = self._normalize_for_compare(self.__value__, other) return int.__ge__(value, other)
(self, other)
16,225
plum.bitfields
__getitem__
null
def __getitem__(self, index): nbits = self.__nbytes__ * 8 mask = 1 bits = [] value = int(self) while nbits: bits.append(bool(value & mask)) mask <<= 1 nbits -= 1 return bits[index]
(self, index)
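A standalone illustration of the bit-indexing logic above, expanding an integer into booleans least-significant bit first and then indexing; `bits` is a hypothetical helper, not part of plum.

```python
def bits(value, nbytes):
    # Expand `value` into a list of booleans, one per bit, LSB first.
    nbits = nbytes * 8
    return [bool(value & (1 << i)) for i in range(nbits)]


assert bits(0b0000_0101, 1)[0] is True   # bit 0 set
assert bits(0b0000_0101, 1)[1] is False  # bit 1 clear
assert bits(0b0000_0101, 1)[2] is True   # bit 2 set
```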
16,226
plum.bitfields
__gt__
null
def __gt__(self, other): value, other = self._normalize_for_compare(self.__value__, other) return int.__gt__(value, other)
(self, other)
16,227
plum.bitfields
__hash__
null
def __hash__(self): return int.__hash__(self.__value__ & type(self).__compare_mask__)
(self)
16,228
plum.bitfields
__iadd__
null
def __iadd__(self, other): return self.__value__ + other
(self, other)
16,229
plum.bitfields
__iand__
null
def __iand__(self, other): return self.__value__ & other
(self, other)
16,230
plum.bitfields
__ifloordiv__
null
def __ifloordiv__(self, other): return self.__value__ // other
(self, other)
16,231
plum.bitfields
__ilshift__
null
def __ilshift__(self, other): return self.__value__ << other
(self, other)
16,232
plum.bitfields
__imod__
null
def __imod__(self, other): return self.__value__ % other
(self, other)
16,233
plum.bitfields
__imul__
null
def __imul__(self, other): return self.__value__ * other
(self, other)
16,234
plum.bitfields
__index__
null
def __index__(self): return int.__index__(self.__value__)
(self)
16,235
null
__init__
null
from builtins import function
(self, *, flash_fired: bool, flash_return: 'FlashReturn', flash_mode: 'FlashMode', flash_function_not_present: bool, red_eye_reduction_supported: bool, reserved: int) -> None
16,236
plum.bitfields
__int__
null
def __int__(self): return self.__value__
(self)
16,237
plum.bitfields
__invert__
null
def __invert__(self): return ~self.__value__
(self)
16,238
plum.bitfields
__ior__
null
def __ior__(self, other): return self.__value__ | other
(self, other)
16,239
plum.bitfields
__irshift__
null
def __irshift__(self, other): return self.__value__ >> other
(self, other)
16,240
plum.bitfields
__isub__
null
def __isub__(self, other): return self.__value__ - other
(self, other)
16,241
plum.bitfields
__itruediv__
null
def __itruediv__(self, other): return self.__value__ / other
(self, other)
16,242
plum.bitfields
__ixor__
null
def __ixor__(self, other): return self.__value__ ^ other
(self, other)
16,243
plum.bitfields
__le__
null
def __le__(self, other): value, other = self._normalize_for_compare(self.__value__, other) return int.__le__(value, other)
(self, other)
16,244
plum.bitfields
__lshift__
null
def __lshift__(self, other): return int.__lshift__(self.__value__, other)
(self, other)
16,245
plum.bitfields
__lt__
null
def __lt__(self, other): value, other = self._normalize_for_compare(self.__value__, other) return int.__lt__(value, other)
(self, other)
16,246
plum.bitfields
__mod__
null
def __mod__(self, other): return int.__mod__(self.__value__, other)
(self, other)
16,247
plum.bitfields
__mul__
null
def __mul__(self, other): return int.__mul__(self.__value__, other)
(self, other)
16,248
plum.bitfields
__ne__
null
def __ne__(self, other): value, other = self._normalize_for_compare(self.__value__, other) return int.__ne__(value, other)
(self, other)
16,249
plum.bitfields
__neg__
null
def __neg__(self): return -self.__value__
(self)
16,250
plum.bitfields
__or__
null
def __or__(self, other): return int.__or__(self.__value__, other)
(self, other)
16,251
plum.bitfields
__pos__
null
def __pos__(self): return self.__value__
(self)
16,252
plum.bitfields
__pow__
null
def __pow__(self, other, *args): return int.__pow__(self.__value__, other, *args)
(self, other, *args)
16,253
plum.bitfields
__radd__
null
def __radd__(self, other): return int.__radd__(self.__value__, other)
(self, other)
16,254
plum.bitfields
__rand__
null
def __rand__(self, other): return int.__rand__(self.__value__, other)
(self, other)
16,255
plum.bitfields
__rdivmod__
null
def __rdivmod__(self, other): return int.__rdivmod__(self.__value__, other)
(self, other)
16,257
plum.bitfields
__rfloordiv__
null
def __rfloordiv__(self, other): return int.__rfloordiv__(self.__value__, other)
(self, other)
16,258
plum.bitfields
__rlshift__
null
def __rlshift__(self, other): return int.__rlshift__(self.__value__, other)
(self, other)
16,259
plum.bitfields
__rmod__
null
def __rmod__(self, other): return int.__rmod__(self.__value__, other)
(self, other)
16,260
plum.bitfields
__rmul__
null
def __rmul__(self, other): return int.__rmul__(self.__value__, other)
(self, other)
16,261
plum.bitfields
__ror__
null
def __ror__(self, other): return int.__ror__(self.__value__, other)
(self, other)
16,262
plum.bitfields
__round__
null
def __round__(self, *args): return int.__round__(self.__value__, *args)
(self, *args)
16,263
plum.bitfields
__rpow__
null
def __rpow__(self, other, *args): return int.__rpow__(self.__value__, other, *args)
(self, other, *args)
16,264
plum.bitfields
__rrshift__
null
def __rrshift__(self, other): return int.__rrshift__(self.__value__, other)
(self, other)
16,265
plum.bitfields
__rshift__
null
def __rshift__(self, other): return int.__rshift__(self.__value__, other)
(self, other)