Code
stringlengths 103
85.9k
| Summary
sequencelengths 0
94
|
---|---|
Please provide a description of the function:def load(istream, strict=True):
"Deserialize a patch object."
try:
diff = json.load(istream)
if strict:
jsonschema.validate(diff, SCHEMA)
except ValueError:
raise InvalidPatchError('patch is not valid JSON')
except jsonschema.exceptions.ValidationError as e:
raise InvalidPatchError(e.message)
return diff | [] |
Please provide a description of the function:def save(diff, stream=sys.stdout, compact=False):
"Serialize a patch object."
flags = {'sort_keys': True}
if not compact:
flags['indent'] = 2
json.dump(diff, stream, **flags) | [] |
Please provide a description of the function:def create(from_records, to_records, index_columns, ignore_columns=None):
from_indexed = records.index(from_records, index_columns)
to_indexed = records.index(to_records, index_columns)
if ignore_columns is not None:
from_indexed = records.filter_ignored(from_indexed, ignore_columns)
to_indexed = records.filter_ignored(to_indexed, ignore_columns)
return create_indexed(from_indexed, to_indexed, index_columns) | [
"\n Diff two sets of records, using the index columns as the primary key for\n both datasets.\n "
] |
Please provide a description of the function:def _compare_rows(from_recs, to_recs, keys):
"Return the set of keys which have changed."
return set(
k for k in keys
if sorted(from_recs[k].items()) != sorted(to_recs[k].items())
) | [] |
Please provide a description of the function:def record_diff(lhs, rhs):
"Diff an individual row."
delta = {}
for k in set(lhs).union(rhs):
from_ = lhs[k]
to_ = rhs[k]
if from_ != to_:
delta[k] = {'from': from_, 'to': to_}
return delta | [] |
Please provide a description of the function:def filter_significance(diff, significance):
changed = diff['changed']
# remove individual field changes that are significant
reduced = [{'key': delta['key'],
'fields': {k: v
for k, v in delta['fields'].items()
if _is_significant(v, significance)}}
for delta in changed]
# call a key changed only if it still has significant changes
filtered = [delta for delta in reduced if delta['fields']]
diff = diff.copy()
diff['changed'] = filtered
return diff | [
"\n Prune any changes in the patch which are due to numeric changes less than this level of\n significance.\n "
] |
Please provide a description of the function:def _is_significant(change, significance):
try:
a = float(change['from'])
b = float(change['to'])
except ValueError:
return True
return abs(a - b) > 10 ** (-significance) | [
"\n Return True if a change is genuinely significant given our tolerance.\n "
] |
Please provide a description of the function:def diff_files(from_file, to_file, index_columns, sep=',', ignored_columns=None):
with open(from_file) as from_stream:
with open(to_file) as to_stream:
from_records = records.load(from_stream, sep=sep)
to_records = records.load(to_stream, sep=sep)
return patch.create(from_records, to_records, index_columns,
ignore_columns=ignored_columns) | [
"\n Diff two CSV files, returning the patch which transforms one into the\n other.\n "
] |
Please provide a description of the function:def patch_file(patch_stream: TextIO, fromcsv_stream: TextIO, tocsv_stream: TextIO,
strict: bool = True, sep: str = ','):
diff = patch.load(patch_stream)
from_records = records.load(fromcsv_stream, sep=sep)
to_records = patch.apply(diff, from_records, strict=strict)
# what order should the columns be in?
if to_records:
# have data, use a nice ordering
all_columns = to_records[0].keys()
index_columns = diff['_index']
fieldnames = _nice_fieldnames(all_columns, index_columns)
else:
# no data, use the original order
fieldnames = from_records.fieldnames
records.save(to_records, fieldnames, tocsv_stream) | [
"\n Apply the patch to the source CSV file, and save the result to the target\n file.\n "
] |
Please provide a description of the function:def patch_records(diff, from_records, strict=True):
return patch.apply(diff, from_records, strict=strict) | [
"\n Apply the patch to the sequence of records, returning the transformed\n records.\n "
] |
Please provide a description of the function:def _nice_fieldnames(all_columns, index_columns):
"Indexes on the left, other fields in alphabetical order on the right."
non_index_columns = set(all_columns).difference(index_columns)
return index_columns + sorted(non_index_columns) | [] |
Please provide a description of the function:def csvdiff_cmd(index_columns, from_csv, to_csv, style=None, output=None,
sep=',', quiet=False, ignore_columns=None, significance=None):
if ignore_columns is not None:
for i in ignore_columns:
if i in index_columns:
error.abort("You can't ignore an index column")
ostream = (open(output, 'w') if output
else io.StringIO() if quiet
else sys.stdout)
try:
if style == 'summary':
_diff_and_summarize(from_csv, to_csv, index_columns, ostream,
sep=sep, ignored_columns=ignore_columns,
significance=significance)
else:
compact = (style == 'compact')
_diff_files_to_stream(from_csv, to_csv, index_columns, ostream,
compact=compact, sep=sep, ignored_columns=ignore_columns,
significance=significance)
except records.InvalidKeyError as e:
error.abort(e.args[0])
finally:
ostream.close() | [
"\n Compare two csv files to see what rows differ between them. The files\n are each expected to have a header row, and for each row to be uniquely\n identified by one or more indexing columns.\n "
] |
Please provide a description of the function:def _diff_and_summarize(from_csv, to_csv, index_columns, stream=sys.stdout,
sep=',', ignored_columns=None, significance=None):
from_records = list(records.load(from_csv, sep=sep))
to_records = records.load(to_csv, sep=sep)
diff = patch.create(from_records, to_records, index_columns, ignored_columns)
if significance is not None:
diff = patch.filter_significance(diff, significance)
_summarize_diff(diff, len(from_records), stream=stream)
exit_code = (EXIT_SAME
if patch.is_empty(diff)
else EXIT_DIFFERENT)
sys.exit(exit_code) | [
"\n Print a summary of the difference between the two files.\n "
] |
Please provide a description of the function:def csvpatch_cmd(input_csv, input=None, output=None, strict=True):
patch_stream = (sys.stdin
if input is None
else open(input))
tocsv_stream = (sys.stdout
if output is None
else open(output, 'w'))
fromcsv_stream = open(input_csv)
try:
patch_file(patch_stream, fromcsv_stream, tocsv_stream, strict=strict)
except patch.InvalidPatchError as e:
error.abort('reading patch, {0}'.format(e.args[0]))
finally:
patch_stream.close()
fromcsv_stream.close()
tocsv_stream.close() | [
"\n Apply the changes from a csvdiff patch to an existing CSV file.\n "
] |
Please provide a description of the function:def sort(records: Sequence[Record]) -> List[Record]:
"Sort records into a canonical order, suitable for comparison."
return sorted(records, key=_record_key) | [] |
Please provide a description of the function:def _record_key(record: Record) -> List[Tuple[Column, str]]:
"An orderable representation of this record."
return sorted(record.items()) | [] |
Please provide a description of the function:def _outter_split(inpt, delim, openers, closers=None, opener_lookup=None):
if closers is None:
closers = openers
if opener_lookup is None:
opener_lookup = {}
for i in range(len(openers)):
opener_lookup[openers[i]] = i
stack = []
res = []
splt = 0
for i in range(len(inpt)):
if inpt[i] == delim and len(stack) == 0:
res.append(inpt[splt:i].strip())
splt = i+1
elif inpt[i] in opener_lookup:
stack.append(opener_lookup[inpt[i]])
elif len(stack) > 0 and inpt[i] == closers[stack[-1]]:
stack.pop()
res.append(inpt[splt:].strip())
return res | [
"Splits only at delims that are at outter-most level regarding\n openers/closers pairs.\n Unchecked requirements:\n Only supports length-1 delim, openers and closers.\n delim must not be member of openers or closers.\n len(openers) == len(closers) or closers == None\n "
] |
Please provide a description of the function:def getargspecs(func):
if func is None:
raise TypeError('None is not a Python function')
if hasattr(func, 'ch_func'):
return getargspecs(func.ch_func)
elif hasattr(func, 'ov_func'):
return getargspecs(func.ov_func)
if hasattr(inspect, 'getfullargspec'):
return inspect.getfullargspec(func) # Python 3
else:
return inspect.getargspec(func) | [
"Bridges inspect.getargspec and inspect.getfullargspec.\n Automatically selects the proper one depending of current Python version.\n Automatically bypasses wrappers from typechecked- and override-decorators.\n "
] |
Please provide a description of the function:def get_required_kwonly_args(argspecs):
try:
kwonly = argspecs.kwonlyargs
if argspecs.kwonlydefaults is None:
return kwonly
res = []
for name in kwonly:
if not name in argspecs.kwonlydefaults:
res.append(name)
return res
except AttributeError:
return [] | [
"Determines whether given argspecs implies required keywords-only args\n and returns them as a list. Returns empty list if no such args exist.\n "
] |
Please provide a description of the function:def getargnames(argspecs, with_unbox=False):
# todo: We can maybe make use of inspect.formatargspec
args = argspecs.args
vargs = argspecs.varargs
try:
kw = argspecs.keywords
except AttributeError:
kw = argspecs.varkw
try:
kwonly = argspecs.kwonlyargs
except AttributeError:
kwonly = None
res = []
if not args is None:
res.extend(args)
if not vargs is None:
res.append('*'+vargs if with_unbox else vargs)
if not kwonly is None:
res.extend(kwonly)
if not kw is None:
res.append('**'+kw if with_unbox else kw)
return res | [
"Resembles list of arg-names as would be seen in a function signature, including\n var-args, var-keywords and keyword-only args.\n "
] |
Please provide a description of the function:def fromargskw(argskw, argspecs, slf_or_clsm = False):
res_args = argskw
try:
kwds = argspecs.keywords
except AttributeError:
kwds = argspecs.varkw
if not kwds is None:
res_kw = argskw[-1]
res_args = argskw[:-1]
else:
res_kw = None
if not argspecs.varargs is None:
vargs_pos = (len(argspecs.args)-1) \
if slf_or_clsm else len(argspecs.args)
if vargs_pos > 0:
res_lst = list(argskw[:vargs_pos])
res_lst.extend(argskw[vargs_pos])
res_args = tuple(res_lst)
else:
res_args = argskw[0]
try:
if len(argspecs.kwonlyargs) > 0:
res_kw = {} if res_kw is None else dict(res_kw)
ipos = -len(argspecs.kwonlyargs) - (0 if kwds is None else 1)
for name in argspecs.kwonlyargs:
res_kw[name] = argskw[ipos]
ipos += 1
except AttributeError:
pass
if res_kw is None:
res_kw = {}
return res_args, res_kw | [
"Turns a linearized list of args into (args, keywords) form\n according to given argspecs (like inspect module provides).\n "
] |
Please provide a description of the function:def get_staticmethod_qualname(staticmeth):
func = _actualfunc(staticmeth)
module = sys.modules[func.__module__]
nst = _get_class_nesting_list_for_staticmethod(staticmeth, module, [], set())
nst = [cl.__name__ for cl in nst]
return '.'.join(nst)+'.'+func.__name__ | [
"Determines the fully qualified name of a static method.\n Yields a result similar to what __qualname__ would contain, but is applicable\n to static methods and also works in Python 2.7.\n "
] |
Please provide a description of the function:def get_class_qualname(cls):
if hasattr(cls, '__qualname__'):
return cls.__qualname__
module = sys.modules[cls.__module__]
if cls.__module__ == 'typing' and not hasattr(cls, '__name__'):
# Python 3.7
return cls._name
if hasattr(module, cls.__name__) and getattr(module, cls.__name__) is cls:
return cls.__name__
else:
nst = _get_class_nesting_list(cls, module)
nst.append(cls)
nst = [cl.__name__ for cl in nst]
return '.'.join(nst)
return cls.__name__ | [
"Determines the fully qualified name of a class.\n Yields a result similar to what __qualname__ contains, but also works on\n Python 2.7.\n "
] |
Please provide a description of the function:def search_class_module(cls, deep_search=True):
for md_name in sys.modules:
module = sys.modules[md_name]
if hasattr(module, cls.__name__) and getattr(module, cls.__name__) is cls:
return module
if deep_search:
for md_name in sys.modules:
module = sys.modules[md_name]
try:
nst = _get_class_nesting_list(cls, module)
if cls is nst[-1]:
return module
except:
pass
return None | [
"E.g. if cls is a TypeVar, cls.__module__ won't contain the actual module\n that declares cls. This returns the actual module declaring cls.\n Can be used with any class (not only TypeVar), though usually cls.__module__\n is the recommended way.\n If deep_search is True (default) this even finds the correct module if it\n declares cls as an inner class of another class.\n "
] |
Please provide a description of the function:def get_class_that_defined_method(meth):
if is_classmethod(meth):
return meth.__self__
if hasattr(meth, 'im_class'):
return meth.im_class
elif hasattr(meth, '__qualname__'):
# Python 3
try:
cls_names = meth.__qualname__.split('.<locals>', 1)[0].rsplit('.', 1)[0].split('.')
cls = inspect.getmodule(meth)
for cls_name in cls_names:
cls = getattr(cls, cls_name)
if isinstance(cls, type):
return cls
except AttributeError:
# If this was called from a decorator and meth is not a method, this
# can result in AttributeError, because at decorator-time meth has not
# yet been added to module. If it's really a method, its class would be
# already in, so no problem in that case.
pass
raise ValueError(str(meth)+' is not a method.') | [
"Determines the class owning the given method.\n "
] |
Please provide a description of the function:def is_method(func):
func0 = _actualfunc(func)
argNames = getargnames(getargspecs(func0))
if len(argNames) > 0:
if argNames[0] == 'self':
if inspect.ismethod(func):
return True
elif sys.version_info.major >= 3:
# In Python3 there are no unbound methods, so we count as method,
# if first arg is called 'self'
return True
else:
_warn_argname('is_method encountered non-method declaring self',
func0, False, False, None)
else:
return inspect.ismethod(func)
return False | [
"Detects if the given callable is a method. In context of pytypes this\n function is more reliable than plain inspect.ismethod, e.g. it automatically\n bypasses wrappers from typechecked and override decorators.\n "
] |
Please provide a description of the function:def is_classmethod(meth):
if inspect.ismethoddescriptor(meth):
return isinstance(meth, classmethod)
if not inspect.ismethod(meth):
return False
if not inspect.isclass(meth.__self__):
return False
if not hasattr(meth.__self__, meth.__name__):
return False
return meth == getattr(meth.__self__, meth.__name__) | [
"Detects if the given callable is a classmethod.\n "
] |
Please provide a description of the function:def get_current_args(caller_level = 0, func = None, argNames = None):
if argNames is None:
argNames = getargnames(getargspecs(func))
if func is None:
func = get_current_function(caller_level+1)
if isinstance(func, property):
func = func.fget if func.fset is None else func.fset
stck = inspect.stack()
lcs = stck[1+caller_level][0].f_locals
return tuple([lcs[t] for t in argNames]) | [
"Determines the args of current function call.\n Use caller_level > 0 to get args of even earlier function calls in current stack.\n "
] |
Please provide a description of the function:def getmodule(code):
try:
md = inspect.getmodule(code, code.co_filename)
except AttributeError:
return inspect.getmodule(code)
if md is None:
# Jython-specific:
# This is currently just a crutch; todo: resolve __pyclasspath__ properly!
cfname = code.co_filename.replace('__pyclasspath__',
os.path.realpath('')+os.sep+'__pyclasspath__')
cfname = cfname.replace('$py.class', '.py')
md = inspect.getmodule(code, cfname)
if md is None:
md = inspect.getmodule(code)
return md | [
"More robust variant of inspect.getmodule.\n E.g. has less issues on Jython.\n "
] |
Please provide a description of the function:def get_callable_fq_for_code(code, locals_dict = None):
if code in _code_callable_dict:
res = _code_callable_dict[code]
if not res[0] is None or locals_dict is None:
return res
md = getmodule(code)
if not md is None:
nesting = []
res, slf = _get_callable_fq_for_code(code, md, md, False, nesting, set())
if res is None and not locals_dict is None:
nesting = []
res, slf = _get_callable_from_locals(code, locals_dict, md, False, nesting)
else:
_code_callable_dict[code] = (res, nesting, slf)
return res, nesting, slf
else:
return None, None, None | [
"Determines the function belonging to a given code object in a fully qualified fashion.\n Returns a tuple consisting of\n - the callable\n - a list of classes and inner classes, locating the callable (like a fully qualified name)\n - a boolean indicating whether the callable is a method\n "
] |
Please provide a description of the function:def _calc_traceback_limit(tb):
limit = 1
tb2 = tb
while not tb2.tb_next is None:
try:
maybe_pytypes = tb2.tb_next.tb_frame.f_code.co_filename.split(os.sep)[-2]
except IndexError:
maybe_pytypes = None
if maybe_pytypes == 'pytypes' and not \
tb2.tb_next.tb_frame.f_code == pytypes.typechecker._pytypes___import__.__code__:
break
else:
limit += 1
tb2 = tb2.tb_next
return limit | [
"Calculates limit-parameter to strip away pytypes' internals when used\n with API from traceback module.\n "
] |
Please provide a description of the function:def _pytypes_excepthook(exctype, value, tb):
if pytypes.clean_traceback and issubclass(exctype, TypeError):
traceback.print_exception(exctype, value, tb, _calc_traceback_limit(tb))
else:
if _sys_excepthook is None:
sys.__excepthook__(exctype, value, tb)
else:
_sys_excepthook(exctype, value, tb) | [
"\"An excepthook suitable for use as sys.excepthook, that strips away\n the part of the traceback belonging to pytypes' internals.\n Can be switched on and off via pytypes.clean_traceback\n or pytypes.set_clean_traceback.\n The latter automatically installs this hook in sys.excepthook.\n "
] |
Please provide a description of the function:def get_generator_type(genr):
if genr in _checked_generator_types:
return _checked_generator_types[genr]
if not genr.gi_frame is None and 'gen_type' in genr.gi_frame.f_locals:
return genr.gi_frame.f_locals['gen_type']
else:
cllble, nesting, slf = util.get_callable_fq_for_code(genr.gi_code)
if cllble is None:
return Generator
return _funcsigtypes(cllble, slf, nesting[-1] if slf else None,
genr.gi_frame.f_globals if not genr.gi_frame is None else None)[1] | [
"Obtains PEP 484 style type of a generator object, i.e. returns a\n typing.Generator object.\n "
] |
Please provide a description of the function:def get_iterable_itemtype(obj):
# support further specific iterables on demand
try:
if isinstance(obj, range):
tpl = tuple(deep_type(obj.start), deep_type(obj.stop), deep_type(obj.step))
return Union[tpl]
except TypeError:
# We're running Python 2
pass
if type(obj) is tuple:
tpl = tuple(deep_type(t) for t in obj)
return Union[tpl]
elif type(obj) is types.GeneratorType:
return get_generator_yield_type(obj)
else:
tp = deep_type(obj)
if is_Generic(tp):
if issubclass(tp.__origin__, typing.Iterable):
if len(tp.__args__) == 1:
return tp.__args__[0]
return _select_Generic_superclass_parameters(tp, typing.Iterable)[0]
if is_iterable(obj):
if type(obj) is str:
return str
if hasattr(obj, '__iter__'):
if has_type_hints(obj.__iter__):
itrator = _funcsigtypes(obj.__iter__, True, obj.__class__)[1]
if is_Generic(itrator) and itrator.__origin__ is typing.Iterator:
return itrator.__args__[0]
if hasattr(obj, '__getitem__'):
if has_type_hints(obj.__getitem__):
itrator = _funcsigtypes(obj.__getitem__, True, obj.__class__)[1]
if is_Generic(itrator) and itrator.__origin__ is typing.Iterator:
return itrator.__args__[0]
return None # means that type is unknown
else:
raise TypeError('Not an iterable: '+str(type(obj))) | [
"Attempts to get an iterable's itemtype without iterating over it,\n not even partly. Note that iterating over an iterable might modify\n its inner state, e.g. if it is an iterator.\n Note that obj is expected to be an iterable, not a typing.Iterable.\n This function leverages various alternative ways to obtain that\n info, e.g. by looking for type annotations of '__iter__' or '__getitem__'.\n It is intended for (unknown) iterables, where the type cannot be obtained\n via sampling without the risk of modifying inner state.\n "
] |
Please provide a description of the function:def get_Generic_itemtype(sq, simplify=True):
if is_Tuple(sq):
if simplify:
itm_tps = [x for x in get_Tuple_params(sq)]
simplify_for_Union(itm_tps)
return Union[tuple(itm_tps)]
else:
return Union[get_Tuple_params(sq)]
else:
try:
res = _select_Generic_superclass_parameters(sq, typing.Container)
except TypeError:
res = None
if res is None:
try:
res = _select_Generic_superclass_parameters(sq, typing.Iterable)
except TypeError:
pass
if res is None:
raise TypeError("Has no itemtype: "+type_str(sq))
else:
return res[0] | [
"Retrieves the item type from a PEP 484 generic or subclass of such.\n sq must be a typing.Tuple or (subclass of) typing.Iterable or typing.Container.\n Consequently this also works with typing.List, typing.Set and typing.Dict.\n Note that for typing.Dict and mapping types in general, the key type is regarded as item type.\n For typing.Tuple all contained types are returned as a typing.Union.\n If simplify == True some effort is taken to eliminate redundancies in such a union.\n "
] |
Please provide a description of the function:def get_Mapping_key_value(mp):
try:
res = _select_Generic_superclass_parameters(mp, typing.Mapping)
except TypeError:
res = None
if res is None:
raise TypeError("Has no key/value types: "+type_str(mp))
else:
return tuple(res) | [
"Retrieves the key and value types from a PEP 484 mapping or subclass of such.\n mp must be a (subclass of) typing.Mapping.\n "
] |
Please provide a description of the function:def get_Generic_parameters(tp, generic_supertype):
try:
res = _select_Generic_superclass_parameters(tp, generic_supertype)
except TypeError:
res = None
if res is None:
raise TypeError("%s has no proper parameters defined by %s."%
(type_str(tp), type_str(generic_supertype)))
else:
return tuple(res) | [
"tp must be a subclass of generic_supertype.\n Retrieves the type values from tp that correspond to parameters\n defined by generic_supertype.\n\n E.g. get_Generic_parameters(tp, typing.Mapping) is equivalent\n to get_Mapping_key_value(tp) except for the error message.\n\n Note that get_Generic_itemtype(tp) is not exactly equal to\n get_Generic_parameters(tp, typing.Container), as that method\n additionally contains treatment for typing.Tuple and typing.Iterable.\n "
] |
Please provide a description of the function:def get_Tuple_params(tpl):
try:
return tpl.__tuple_params__
except AttributeError:
try:
if tpl.__args__ is None:
return None
# Python 3.6
if tpl.__args__[0] == ():
return ()
else:
if tpl.__args__[-1] is Ellipsis:
return tpl.__args__[:-1] if len(tpl.__args__) > 1 else None
else:
return tpl.__args__
except AttributeError:
return None | [
"Python version independent function to obtain the parameters\n of a typing.Tuple object.\n Omits the ellipsis argument if present. Use is_Tuple_ellipsis for that.\n Tested with CPython 2.7, 3.5, 3.6 and Jython 2.7.1.\n "
] |
Please provide a description of the function:def is_Tuple_ellipsis(tpl):
try:
return tpl.__tuple_use_ellipsis__
except AttributeError:
try:
if tpl.__args__ is None:
return False
# Python 3.6
if tpl.__args__[-1] is Ellipsis:
return True
except AttributeError:
pass
return False | [
"Python version independent function to check if a typing.Tuple object\n contains an ellipsis."
] |
Please provide a description of the function:def get_Callable_args_res(clb):
try:
return clb.__args__, clb.__result__
except AttributeError:
# Python 3.6
return clb.__args__[:-1], clb.__args__[-1] | [
"Python version independent function to obtain the parameters\n of a typing.Callable object. Returns as tuple: args, result.\n Tested with CPython 2.7, 3.5, 3.6 and Jython 2.7.1.\n "
] |
Please provide a description of the function:def is_Type(tp):
if isinstance(tp, type):
return True
try:
typing._type_check(tp, '')
return True
except TypeError:
return False | [
"Python version independent check if an object is a type.\n For Python 3.7 onwards(?) this is not equivalent to\n ``isinstance(tp, type)`` any more, as that call would return\n ``False`` for PEP 484 types.\n Tested with CPython 2.7, 3.5, 3.6, 3.7 and Jython 2.7.1.\n "
] |
Please provide a description of the function:def is_Union(tp):
if tp is Union:
return True
try:
# Python 3.6
return tp.__origin__ is Union
except AttributeError:
try:
return isinstance(tp, typing.UnionMeta)
except AttributeError:
return False | [
"Python version independent check if a type is typing.Union.\n Tested with CPython 2.7, 3.5, 3.6 and Jython 2.7.1.\n "
] |
Please provide a description of the function:def deep_type(obj, depth = None, max_sample = None, get_type = None):
return _deep_type(obj, [], 0, depth, max_sample, get_type) | [
"Tries to construct a type for a given value. In contrast to type(...),\n deep_type does its best to fit structured types from typing as close as\n possible to the given value.\n E.g. deep_type((1, 2, 'a')) will return Tuple[int, int, str] rather than\n just tuple.\n Supports various types from typing, but not yet all.\n Also detects nesting up to given depth (uses pytypes.default_typecheck_depth\n if no value is given).\n If a value for max_sample is given, this number of elements is probed\n from lists, sets and dictionaries to determine the element type. By default,\n all elements are probed. If there are fewer elements than max_sample, all\n existing elements are probed.\n Optionally, a custom get_type function can be provided to further\n customize how types are resolved. By default it uses type function.\n "
] |
Please provide a description of the function:def _deep_type(obj, checked, checked_len, depth = None, max_sample = None, get_type = None):
if depth is None:
depth = pytypes.default_typecheck_depth
if max_sample is None:
max_sample = pytypes.deep_type_samplesize
if -1 != max_sample < 2:
max_sample = 2
if get_type is None:
get_type = type
try:
res = obj.__orig_class__
except AttributeError:
res = get_type(obj)
if depth == 0 or util._is_in(obj, checked[:checked_len]):
return res
elif not util._is_in(obj, checked[checked_len:]):
checked.append(obj)
# We must operate with a consistent checked list for one certain depth level
# to avoid issues with a list, tuple, dict, etc containing the same element
# multiple times. This could otherwise be misconcepted as a recursion.
# Using a fake len checked_len2 ensures this. Each depth level operates with
# a common fake length of checked list:
checked_len2 = len(checked)
if res == tuple:
res = Tuple[tuple(_deep_type(t, checked, checked_len2, depth-1, None, get_type) for t in obj)]
elif res == list:
if len(obj) == 0:
return Empty[List]
if max_sample == -1 or max_sample >= len(obj)-1 or len(obj) <= 2:
tpl = tuple(_deep_type(t, checked, checked_len2, depth-1, None, get_type) for t in obj)
else:
# In case of lists I somehow feel it's better to ensure that
# first and last element are part of the sample
sample = [0, len(obj)-1]
try:
rsmp = random.sample(xrange(1, len(obj)-1), max_sample-2)
except NameError:
rsmp = random.sample(range(1, len(obj)-1), max_sample-2)
sample.extend(rsmp)
tpl = tuple(_deep_type(obj[t], checked, checked_len2, depth-1, None, get_type) for t in sample)
res = List[Union[tpl]]
elif res == dict:
if len(obj) == 0:
return Empty[Dict]
if max_sample == -1 or max_sample >= len(obj)-1 or len(obj) <= 2:
try:
# We prefer a view (avoid copy)
tpl1 = tuple(_deep_type(t, checked, checked_len2, depth-1, None, get_type) \
for t in obj.viewkeys())
tpl2 = tuple(_deep_type(t, checked, checked_len2, depth-1, None, get_type) \
for t in obj.viewvalues())
except AttributeError:
# Python 3 gives views like this:
tpl1 = tuple(_deep_type(t, checked, checked_len2, depth-1, None, get_type) for t in obj.keys())
tpl2 = tuple(_deep_type(t, checked, checked_len2, depth-1, None, get_type) for t in obj.values())
else:
try:
kitr = iter(obj.viewkeys())
vitr = iter(obj.viewvalues())
except AttributeError:
kitr = iter(obj.keys())
vitr = iter(obj.values())
ksmpl = []
vsmpl = []
block = (len(obj) // max_sample)-1
# I know this method has some bias towards beginning of iteration
# sequence, but it's still more random than just taking the
# initial sample and better than O(n) random.sample.
while len(ksmpl) < max_sample:
if block > 0:
j = random.randint(0, block)
k = random.randint(0, block)
while j > 0:
next(vitr) # discard
j -= 1
while k > 0:
next(kitr) # discard
k -= 1
ksmpl.append(next(kitr))
vsmpl.append(next(vitr))
tpl1 = tuple(_deep_type(t, checked, checked_len2, depth-1, None, get_type) for t in ksmpl)
tpl2 = tuple(_deep_type(t, checked, checked_len2, depth-1, None, get_type) for t in vsmpl)
res = Dict[Union[tpl1], Union[tpl2]]
elif res == set or res == frozenset:
if res == set:
typ = Set
else:
typ = FrozenSet
if len(obj) == 0:
return Empty[typ]
if max_sample == -1 or max_sample >= len(obj)-1 or len(obj) <= 2:
tpl = tuple(_deep_type(t, checked, depth-1, None, None, get_type) for t in obj)
else:
itr = iter(obj)
smpl = []
block = (len(obj) // max_sample)-1
# I know this method has some bias towards beginning of iteration
# sequence, but it's still more random than just taking the
# initial sample and better than O(n) random.sample.
while len(smpl) < max_sample:
if block > 0:
j = random.randint(0, block)
while j > 0:
next(itr) # discard
j -= 1
smpl.append(next(itr))
tpl = tuple(_deep_type(t, checked, depth-1, None, None, get_type) for t in smpl)
res = typ[Union[tpl]]
elif res == types.GeneratorType:
res = get_generator_type(obj)
elif sys.version_info.major == 2 and isinstance(obj, types.InstanceType):
# For old-style instances return the actual class:
return obj.__class__
elif _has_base(res, Container) and len(obj) == 0:
return Empty[res]
elif hasattr(res, '__origin__') and _has_base(res.__origin__, Container) and len(obj) == 0:
return Empty[res.__origin__]
return res | [
"checked_len allows to operate with a fake length for checked.\n This is necessary to ensure that each depth level operates based\n on the same checked list subset. Otherwise our recursion detection\n mechanism can fall into false-positives.\n "
] |
Please provide a description of the function:def is_builtin_type(tp):
return hasattr(__builtins__, tp.__name__) and tp is getattr(__builtins__, tp.__name__) | [
"Checks if the given type is a builtin one.\n "
] |
Please provide a description of the function:def _tp_relfq_name(tp, tp_name=None, assumed_globals=None, update_assumed_globals=None,
implicit_globals=None):
# _type: (type, Optional[Union[Set[Union[type, types.ModuleType]], Mapping[Union[type, types.ModuleType], str]]], Optional[bool]) -> str
if tp_name is None:
tp_name = util.get_class_qualname(tp)
if implicit_globals is None:
implicit_globals = _implicit_globals
else:
implicit_globals = implicit_globals.copy()
implicit_globals.update(_implicit_globals)
if assumed_globals is None:
if update_assumed_globals is None:
return tp_name
md = sys.modules[tp.__module__]
if md in implicit_globals:
return tp_name
name = tp.__module__+'.'+tp_name
pck = None
if not (md.__package__ is None or md.__package__ == ''
or name.startswith(md.__package__)):
pck = md.__package__
return name if pck is None else pck+'.'+name
if tp in assumed_globals:
try:
return assumed_globals[tp]
except:
return tp_name
elif hasattr(tp, '__origin__') and tp.__origin__ in assumed_globals:
try:
return assumed_globals[tp.__origin__]
except:
return tp_name
# For some reason Callable does not have __origin__, so we special-case
# it here. Todo: Find a cleaner solution.
elif is_Callable(tp) and typing.Callable in assumed_globals:
try:
return assumed_globals[typing.Callable]
except:
return tp_name
elif update_assumed_globals == True:
if not assumed_globals is None:
if hasattr(tp, '__origin__') and not tp.__origin__ is None:
toadd = tp.__origin__
elif is_Callable(tp):
toadd = typing.Callable
else:
toadd = tp
if not sys.modules[toadd.__module__] in implicit_globals:
assumed_globals.add(toadd)
return tp_name
else:
md = sys.modules[tp.__module__]
if md in implicit_globals:
return tp_name
md_name = tp.__module__
if md in assumed_globals:
try:
md_name = assumed_globals[md]
except:
pass
else:
if not (md.__package__ is None or md.__package__ == ''
or md_name.startswith(md.__package__)):
md_name = md.__package__+'.'+tp.__module__
return md_name+'.'+tp_name | [
"Provides the fully qualified name of a type relative to a set of\n modules and types that is assumed as globally available.\n If assumed_globals is None this always returns the fully qualified name.\n If update_assumed_globals is True, this will return the plain type name,\n but will add the type to assumed_globals (expected to be a set).\n This way a caller can query how to generate an appropriate import section.\n If update_assumed_globals is False, assumed_globals can alternatively be\n a mapping rather than a set. In that case the mapping is expected to be\n an alias table, mapping modules or types to their alias names desired for\n displaying.\n update_assumed_globals can be None (default). In that case this will return the\n plain type name if assumed_globals is None as well (default).\n This mode is there to have a less involved default behavior.\n "
] |
Please provide a description of the function:def type_str(tp, assumed_globals=None, update_assumed_globals=None,
implicit_globals=None, bound_Generic=None, bound_typevars=None):
if assumed_globals is None and update_assumed_globals is None:
if implicit_globals is None:
implicit_globals = set()
else:
implicit_globals = implicit_globals.copy()
implicit_globals.add(sys.modules['typing'])
implicit_globals.add(sys.modules['__main__'])
if isinstance(tp, tuple):
return '('+', '.join([type_str(tp0, assumed_globals, update_assumed_globals,
implicit_globals, bound_Generic, bound_typevars) for tp0 in tp])+')'
try:
return type_str(tp.__orig_class__, assumed_globals, update_assumed_globals,
implicit_globals, bound_Generic, bound_typevars)
except AttributeError:
pass
tp = _match_stub_type(tp)
if isinstance(tp, TypeVar):
prm = None
if not bound_typevars is None:
try:
prm = bound_typevars[tp]
except:
pass
if prm is None and not bound_typevars is None and tp in bound_typevars:
prm = bound_typevars[tp]
if prm is None and not bound_Generic is None:
prm = get_arg_for_TypeVar(tp, bound_Generic)
if not prm is None:
return type_str(prm, assumed_globals, update_assumed_globals,
implicit_globals, bound_Generic, bound_typevars)
return tp.__name__
elif isinstance(tp, ForwardRef):
return "'%s'" % tp.__forward_arg__
elif isclass(tp) and not is_Generic(tp) \
and not hasattr(typing, tp.__name__):
tp_name = _tp_relfq_name(tp, None, assumed_globals, update_assumed_globals,
implicit_globals)
prm = ''
if hasattr(tp, '__args__') and not tp.__args__ is None:
params = [type_str(param, assumed_globals, update_assumed_globals,
implicit_globals, bound_Generic, bound_typevars) for param in tp.__args__]
prm = '[%s]'%', '.join(params)
return tp_name+prm
elif is_Union(tp):
prms = get_Union_params(tp)
params = [type_str(param, assumed_globals, update_assumed_globals,
implicit_globals, bound_Generic, bound_typevars) for param in prms]
# See: https://github.com/Stewori/pytypes/issues/44
if pytypes.canonical_type_str:
params = sorted(params)
return '%s[%s]'%(_tp_relfq_name(Union, 'Union', assumed_globals,
update_assumed_globals, implicit_globals), ', '.join(params))
elif is_Tuple(tp):
prms = get_Tuple_params(tp)
tpl_params = [type_str(param, assumed_globals, update_assumed_globals,
implicit_globals, bound_Generic, bound_typevars) for param in prms]
return '%s[%s]'%(_tp_relfq_name(Tuple, 'Tuple', assumed_globals,
update_assumed_globals, implicit_globals), ', '.join(tpl_params))
elif hasattr(tp, '__args__'):
tp_name = _tp_relfq_name(tp, None, assumed_globals, update_assumed_globals,
implicit_globals)
if tp.__args__ is None:
if hasattr(tp, '__parameters__') and \
hasattr(tp, '__origin__') and tp.__origin__ is Generic and \
not tp.__parameters__ is None and len(tp.__parameters__) > 0:
args = tp.__parameters__
else:
return tp_name
else:
args = tp.__args__
params = [type_str(param, assumed_globals, update_assumed_globals,
implicit_globals, bound_Generic, bound_typevars) for param in args]
if hasattr(tp, '__result__'):
return '%s[[%s], %s]'%(tp_name, ', '.join(params),
type_str(tp.__result__, assumed_globals, update_assumed_globals,
implicit_globals, bound_Generic, bound_typevars))
elif is_Callable(tp):
return '%s[[%s], %s]'%(tp_name, ', '.join(params[:-1]),
type_str(params[-1], assumed_globals, update_assumed_globals,
implicit_globals, bound_Generic, bound_typevars))
else:
return '%s[%s]'%(tp_name, ', '.join(params))
elif hasattr(tp, '__name__'):
result = _tp_relfq_name(tp, None, assumed_globals, update_assumed_globals,
implicit_globals)
elif tp is Any:
# In Python 3.6 Any does not have __name__.
result = _tp_relfq_name(tp, 'Any', assumed_globals, update_assumed_globals,
implicit_globals)
else:
# Todo: Care for other special types from typing where necessary.
result = str(tp)
if not implicit_globals is None:
for s in implicit_globals:
result = result.replace(s.__name__+'.', '')
return result | [
"Generates a nicely readable string representation of the given type.\n The returned representation is workable as a source code string and would\n reconstruct the given type if handed to eval, provided that globals/locals\n are configured appropriately (e.g. assumes that various types from typing\n have been imported).\n Used as type-formatting backend of ptypes' code generator abilities\n in modules typelogger and stubfile_2_converter.\n\n If tp contains unbound TypeVars and bound_Generic is provided, this\n function attempts to retrieve corresponding values for the unbound TypeVars\n from bound_Generic.\n\n For semantics of assumed_globals and update_assumed_globals see\n _tp_relfq_name. Its doc applies to every argument or result contained in\n tp (recursively) and to tp itself.\n "
] |
Please provide a description of the function:def get_types(func):
return _get_types(func, util.is_classmethod(func), util.is_method(func)) | [
"Works like get_type_hints, but returns types as a sequence rather than a\n dictionary. Types are returned in declaration order of the corresponding arguments.\n "
] |
Please provide a description of the function:def get_member_types(obj, member_name, prop_getter = False):
cls = obj.__class__
member = getattr(cls, member_name)
slf = not (isinstance(member, staticmethod) or isinstance(member, classmethod))
clsm = isinstance(member, classmethod)
return _get_types(member, clsm, slf, cls, prop_getter) | [
"Still experimental, incomplete and hardly tested.\n Works like get_types, but is also applicable to descriptors.\n "
] |
Please provide a description of the function:def _get_types(func, clsm, slf, clss = None, prop_getter = False,
unspecified_type = Any, infer_defaults = None):
func0 = util._actualfunc(func, prop_getter)
# check consistency regarding special case with 'self'-keyword
if not slf:
argNames = util.getargnames(util.getargspecs(func0))
if len(argNames) > 0:
if clsm:
if argNames[0] != 'cls':
util._warn_argname('classmethod using non-idiomatic cls argname',
func0, slf, clsm, clss)
if clss is None and (slf or clsm):
if slf:
assert util.is_method(func) or isinstance(func, property)
if clsm:
assert util.is_classmethod(func)
clss = util.get_class_that_defined_method(func)
assert hasattr(clss, func.__name__)
args, res = _funcsigtypes(func, slf or clsm, clss, None, prop_getter,
unspecified_type = unspecified_type, infer_defaults = infer_defaults)
return _match_stub_type(args), _match_stub_type(res) | [
"Helper for get_types and get_member_types.\n "
] |
Please provide a description of the function:def _get_type_hints(func, args = None, res = None, infer_defaults = None):
if args is None or res is None:
args2, res2 = _get_types(func, util.is_classmethod(func),
util.is_method(func), unspecified_type = type(NotImplemented),
infer_defaults = infer_defaults)
if args is None:
args = args2
if res is None:
res = res2
slf = 1 if util.is_method(func) else 0
argNames = util.getargnames(util.getargspecs(util._actualfunc(func)))
result = {}
if not args is Any:
prms = get_Tuple_params(args)
for i in range(slf, len(argNames)):
if not prms[i-slf] is type(NotImplemented):
result[argNames[i]] = prms[i-slf]
result['return'] = res
return result | [
"Helper for get_type_hints.\n "
] |
Please provide a description of the function:def resolve_fw_decl(in_type, module_name=None, globs=None, level=0,
search_stack_depth=2):
'''Resolves forward references in ``in_type``, see
https://www.python.org/dev/peps/pep-0484/#forward-references.
Note:
``globs`` should be a dictionary containing values for the names
that must be resolved in ``in_type``. If ``globs`` is not provided, it
will be created by ``__globals__`` from the module named ``module_name``,
plus ``__locals__`` from the last ``search_stack_depth`` stack frames (Default: 2),
beginning at the calling function. This is to resolve cases where ``in_type`` and/or
types it fw-references are defined inside a function.
To prevent walking the stack, set ``search_stack_depth=0``.
Ideally provide a proper ``globs`` for best efficiency.
See ``util.get_function_perspective_globals`` for obtaining a ``globs`` that can be
cached. ``util.get_function_perspective_globals`` works like described above.
'''
# Also see discussion at https://github.com/Stewori/pytypes/pull/43
if in_type in _fw_resolve_cache:
return _fw_resolve_cache[in_type], True
if globs is None:
#if not module_name is None:
globs = util.get_function_perspective_globals(module_name, level+1,
level+1+search_stack_depth)
if isinstance(in_type, _basestring):
# For the case that a pure forward ref is given as string
out_type = eval(in_type, globs)
_fw_resolve_cache[in_type] = out_type
return out_type, True
elif isinstance(in_type, ForwardRef):
# Todo: Mabe somehow get globs from in_type.__forward_code__
if not in_type.__forward_evaluated__:
in_type.__forward_value__ = eval(in_type.__forward_arg__, globs)
in_type.__forward_evaluated__ = True
return in_type, True
elif is_Tuple(in_type):
return in_type, any([resolve_fw_decl(in_tp, None, globs)[1] \
for in_tp in get_Tuple_params(in_type)])
elif is_Union(in_type):
return in_type, any([resolve_fw_decl(in_tp, None, globs)[1] \
for in_tp in get_Union_params(in_type)])
elif is_Callable(in_type):
args, res = get_Callable_args_res(in_type)
ret = any([resolve_fw_decl(in_tp, None, globs)[1] \
for in_tp in args])
ret = resolve_fw_decl(res, None, globs)[1] or ret
return in_type, ret
elif hasattr(in_type, '__args__') and in_type.__args__ is not None:
return in_type, any([resolve_fw_decl(in_tp, None, globs)[1] \
for in_tp in in_type.__args__])
return in_type, False | [] |
Please provide a description of the function:def _issubclass_Mapping_covariant(subclass, superclass, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
if is_Generic(subclass):
if subclass.__origin__ is None or not issubclass(subclass.__origin__, Mapping):
return _issubclass_Generic(subclass, superclass, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check)
if superclass.__args__ is None:
if not pytypes.check_unbound_types:
raise TypeError("Attempted to check unbound mapping type(superclass): "+
str(superclass))
if pytypes.strict_unknown_check:
# Nothing is subtype of unknown type
return False
super_args = (Any, Any)
else:
super_args = superclass.__args__
if subclass.__args__ is None:
if not pytypes.check_unbound_types:
raise TypeError("Attempted to check unbound mapping type(subclass): "+
str(subclass))
if pytypes.strict_unknown_check:
# Nothing can subclass unknown type
# For value type it would be okay if superclass had Any as value type,
# as unknown type is subtype of Any. However, since key type is invariant
# and also unknown, it cannot pass.
return False
sub_args = (Any, Any)
else:
sub_args = subclass.__args__
if not _issubclass(sub_args[0], super_args[0],
bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
return False
if not _issubclass(sub_args[1], super_args[1],
bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
return False
return True
return issubclass(subclass, superclass) | [
"Helper for _issubclass, a.k.a pytypes.issubtype.\n This subclass-check treats Mapping-values as covariant.\n "
] |
Please provide a description of the function:def _find_Generic_super_origin(subclass, superclass_origin):
stack = [subclass]
param_map = {}
while len(stack) > 0:
bs = stack.pop()
if is_Generic(bs):
if not bs.__origin__ is None and len(bs.__origin__.__parameters__) > 0:
for i in range(len(bs.__args__)):
ors = bs.__origin__.__parameters__[i]
if bs.__args__[i] != ors and isinstance(bs.__args__[i], TypeVar):
param_map[ors] = bs.__args__[i]
if (bs.__origin__ is superclass_origin or \
(bs.__origin__ is None and bs is superclass_origin)):
prms = []
try:
if len(bs.__origin__.__parameters__) > len(bs.__parameters__):
prms.extend(bs.__origin__.__parameters__)
else:
prms.extend(bs.__parameters__)
except:
prms.extend(bs.__parameters__)
for i in range(len(prms)):
while prms[i] in param_map:
prms[i] = param_map[prms[i]]
return prms
try:
stack.extend(bs.__orig_bases__)
except AttributeError:
stack.extend(bs.__bases__)
return None | [
"Helper for _issubclass_Generic.\n "
] |
Please provide a description of the function:def _select_Generic_superclass_parameters(subclass, superclass_origin):
subclass = _find_base_with_origin(subclass, superclass_origin)
if subclass is None:
return None
if subclass.__origin__ is superclass_origin:
return subclass.__args__
prms = _find_Generic_super_origin(subclass, superclass_origin)
res = []
for prm in prms:
sub_search = subclass
while not sub_search is None:
try:
res.append(sub_search.__args__[sub_search.__origin__.__parameters__.index(prm)])
break
except ValueError:
# We search the closest base that actually contains the parameter
sub_search = _find_base_with_origin(
sub_search.__origin__, superclass_origin)
else:
return None
return res | [
"Helper for _issubclass_Generic.\n "
] |
Please provide a description of the function:def _issubclass_Generic(subclass, superclass, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
# this function is partly based on code from typing module 3.5.2.2
if subclass is None:
return False
if subclass in _extra_dict:
subclass = _extra_dict[subclass]
if is_Tuple(subclass):
tpl_prms = get_Tuple_params(subclass)
if not tpl_prms is None and len(tpl_prms) == 0:
# (This section is required because Empty shall not be
# used on Tuples.)
# an empty Tuple is any Sequence, regardless of type
# note that we needn't consider superclass beeing a tuple,
# because that should have been checked in _issubclass_Tuple
return issubclass(typing.Sequence,
superclass if superclass.__origin__ is None else superclass.__origin__)
subclass = Sequence[Union[tpl_prms]]
if is_Generic(subclass):
# For a class C(Generic[T]) where T is co-variant,
# C[X] is a subclass of C[Y] iff X is a subclass of Y.
origin = _origin(superclass) #superclass.__origin__
if subclass.__origin__ is None:
try:
orig_bases = subclass.__orig_bases__
except AttributeError:
# Before typing 3.5.3.0 __bases__ used to contain all info that later
# became reserved for __orig_bases__. So we can use it as a fallback:
orig_bases = subclass.__bases__
for scls in orig_bases:
if is_Generic(scls):
if _issubclass_Generic(scls, superclass, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs,
_recursion_check):
return True
#Formerly: if origin is not None and origin is subclass.__origin__:
elif origin is not None and \
_issubclass(_origin(subclass), origin, bound_Generic, bound_typevars,
# In Python 3.7 this can currently cause infinite recursion.
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
# _issubclass(subclass.__origin__, origin, bound_Generic, bound_typevars,
# bound_typevars_readonly, follow_fwd_refs, _recursion_check):
assert len(superclass.__args__) == len(origin.__parameters__)
if len(subclass.__args__) == len(origin.__parameters__):
sub_args = subclass.__args__
else:
# We select the relevant subset of args by TypeVar-matching
sub_args = _select_Generic_superclass_parameters(subclass, superclass.__origin__)
assert len(sub_args) == len(origin.__parameters__)
for p_self, p_cls, p_origin in zip(superclass.__args__,
sub_args,
origin.__parameters__):
if isinstance(p_origin, TypeVar):
if p_origin.__covariant__:
# Covariant -- p_cls must be a subclass of p_self.
if not _issubclass(p_cls, p_self, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs,
_recursion_check):
break
elif p_origin.__contravariant__:
# Contravariant. I think it's the opposite. :-)
if not _issubclass(p_self, p_cls, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs,
_recursion_check):
break
else:
# Invariant -- p_cls and p_self must equal.
if p_self != p_cls:
if not _issubclass(p_cls, p_self, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs,
_recursion_check):
break
if not _issubclass(p_self, p_cls, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs,
_recursion_check):
break
else:
# If the origin's parameter is not a typevar,
# insist on invariance.
if p_self != p_cls:
if not _issubclass(p_cls, p_self, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs,
_recursion_check):
break
if not _issubclass(p_self, p_cls, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs,
_recursion_check):
break
else:
return True
# If we break out of the loop, the superclass gets a chance.
# I.e.: origin is None or not _issubclass(subclass.__origin__, origin)
# In this case we must consider origin or subclass.__origin__ to be None
# We treat param-values as unknown in the following sense:
# for covariant params: treat unknown more-or-equal specific than Any
# for contravariant param: Any more-or-equal specific than Unknown
# for invariant param: unknown never passes
# if both are unknown:
# return False (?) (or NotImplemented? Or let a flag decide behavior?)
if origin is None:
if not pytypes.check_unbound_types:
raise TypeError("Attempted to check unbound type(superclass): "+str(superclass))
if not subclass.__origin__ is None:
if not type.__subclasscheck__(superclass, subclass.__origin__):
return False
prms = _find_Generic_super_origin(subclass.__origin__, superclass)
args = _select_Generic_superclass_parameters(subclass, superclass)
for i in range(len(prms)):
if prms[i].__covariant__:
if pytypes.strict_unknown_check:
return False
elif prms[i].__contravariant__:
# Subclass-value must be wider than or equal to Any, i.e. must be Any:
if not args[i] is Any:
return False
else:
return False
return True
#else:
# nothing to do here... (?)
elif subclass.__origin__ is None:
if not pytypes.check_unbound_types:
raise TypeError("Attempted to check unbound type (subclass): "+str(subclass))
if not type.__subclasscheck__(superclass.__origin__, subclass):
return False
prms = superclass.__origin__.__parameters__
for i in range(len(prms)):
if prms[i].__covariant__:
# subclass-arg here is unknown, so in superclass only Any can pass:
if not superclass.__args__[i] is Any:
return False
elif prms[i].__contravariant__:
if pytypes.strict_unknown_check:
return False
else:
return False
return True
# Formerly: if super(GenericMeta, superclass).__subclasscheck__(subclass):
try:
if type.__subclasscheck__(superclass, subclass):
return True
except TypeError: pass
if _extra(superclass) is None or is_Generic(subclass):
return False
return _issubclass_2(subclass, _extra(superclass), bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check) | [
"Helper for _issubclass, a.k.a pytypes.issubtype.\n "
] |
Please provide a description of the function:def _issubclass_Tuple(subclass, superclass, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
# this function is partly based on code from typing module 3.5.2.2
if subclass in _extra_dict:
subclass = _extra_dict[subclass]
if not is_Type(subclass):
# To TypeError.
return False
if not is_Tuple(subclass):
if is_Generic(subclass):
try:
return _issubclass_Generic(subclass, superclass,
bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs,
_recursion_check)
except:
pass
elif is_Union(subclass):
return all(_issubclass_Tuple(t, superclass, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check)
for t in get_Union_params(subclass))
else:
return False
super_args = get_Tuple_params(superclass)
if super_args is None:
return True
sub_args = get_Tuple_params(subclass)
if sub_args is None:
return False # ???
# Covariance.
# For now we check ellipsis in most explicit manner.
# Todo: Compactify and Pythonify ellipsis branches (tests required before this).
if is_Tuple_ellipsis(subclass):
if is_Tuple_ellipsis(superclass):
# both are ellipsis, so no length check
common = min(len(super_args), len(sub_args))
for i in range(common):
if not _issubclass(sub_args[i], super_args[i], bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
return False
if len(super_args) < len(sub_args):
for i in range(len(super_args), len(sub_args)):
# Check remaining super args against the ellipsis type
if not _issubclass(sub_args[i], super_args[-1], bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
return False
elif len(super_args) > len(sub_args):
for i in range(len(sub_args), len(super_args)):
# Check remaining super args against the ellipsis type
if not _issubclass(sub_args[-1], super_args[i], bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
return False
return True
else:
# only subclass has ellipsis
if len(super_args) < len(sub_args)-1:
return False
for i in range(len(sub_args)-1):
if not _issubclass(sub_args[i], super_args[i], bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
return False
for i in range(len(sub_args), len(super_args)):
# Check remaining super args against the ellipsis type
if not _issubclass(sub_args[-1], super_args[i], bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
return False
return True
elif is_Tuple_ellipsis(superclass):
# only superclass has ellipsis
if len(super_args)-1 > len(sub_args):
return False
for i in range(len(super_args)-1):
if not _issubclass(sub_args[i], super_args[i], bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
return False
for i in range(len(super_args), len(sub_args)):
# Check remaining sub args against the ellipsis type
if not _issubclass(sub_args[i], super_args[-1], bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
return False
return True
else:
# none has ellipsis, so strict length check
return (len(super_args) == len(sub_args) and
all(_issubclass(x, p, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check)
for x, p in zip(sub_args, super_args))) | [
"Helper for _issubclass, a.k.a pytypes.issubtype.\n "
] |
Please provide a description of the function:def _issubclass_Union(subclass, superclass, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
if not follow_fwd_refs:
return _issubclass_Union_rec(subclass, superclass, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check)
try:
# try to succeed fast, before we go the expensive way involving recursion checks
return _issubclass_Union_rec(subclass, superclass, bound_Generic, bound_typevars,
bound_typevars_readonly, False, _recursion_check)
except pytypes.ForwardRefError:
return _issubclass_Union_rec(subclass, superclass, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check) | [
"Helper for _issubclass, a.k.a pytypes.issubtype.\n "
] |
Please provide a description of the function:def _issubclass_Union_rec(subclass, superclass, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
# this function is partly based on code from typing module 3.5.2.2
super_args = get_Union_params(superclass)
if super_args is None:
return is_Union(subclass)
elif is_Union(subclass):
sub_args = get_Union_params(subclass)
if sub_args is None:
return False
return all(_issubclass(c, superclass, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check) \
for c in (sub_args))
elif isinstance(subclass, TypeVar):
if subclass in super_args:
return True
if subclass.__constraints__:
return _issubclass(Union[subclass.__constraints__],
superclass, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check)
return False
else:
return any(_issubclass(subclass, t, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check) \
for t in super_args) | [
"Helper for _issubclass_Union.\n "
] |
Please provide a description of the function:def _has_base(cls, base):
if cls is base:
return True
elif cls is None:
return False
try:
for bs in cls.__bases__:
if _has_base(bs, base):
return True
except:
pass
return False | [
"Helper for _issubclass, a.k.a pytypes.issubtype.\n "
] |
Please provide a description of the function:def _issubclass(subclass, superclass, bound_Generic=None, bound_typevars=None,
bound_typevars_readonly=False, follow_fwd_refs=True, _recursion_check=None):
if bound_typevars is None:
bound_typevars = {}
if superclass is Any:
return True
if subclass == superclass:
return True
if subclass is Any:
return superclass is Any
if isinstance(subclass, ForwardRef) or isinstance(superclass, ForwardRef):
if not follow_fwd_refs:
raise pytypes.ForwardRefError(
"ForwardRef encountered, but follow_fwd_refs is False: '%s'\n%s"%
((subclass if isinstance(subclass, ForwardRef) else superclass)
.__forward_arg__,
"Retry with follow_fwd_refs=True."))
# Now that forward refs are in the game, we must continue in recursion-proof manner:
if _recursion_check is None:
_recursion_check = {superclass: {subclass}}
elif superclass in _recursion_check:
if subclass in _recursion_check[superclass]:
# recursion detected
return False
else:
_recursion_check[superclass].add(subclass)
else:
_recursion_check[superclass] = {subclass}
if isinstance(subclass, ForwardRef):
if not subclass.__forward_evaluated__:
raise pytypes.ForwardRefError("ForwardRef in subclass not evaluated: '%s'\n%s"%
(subclass.__forward_arg__, "Use pytypes.resolve_fw_decl"))
else:
return _issubclass(subclass.__forward_value__, superclass,
bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check)
else: # isinstance(superclass, ForwardRef)
if not superclass.__forward_evaluated__:
raise pytypes.ForwardRefError("ForwardRef in superclass not evaluated: '%s'\n%s"%
(superclass.__forward_arg__, "Use pytypes.resolve_fw_decl"))
else:
return _issubclass(subclass, superclass.__forward_value__,
bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check)
if pytypes.apply_numeric_tower:
if superclass is float and subclass is int:
return True
elif superclass is complex and \
(subclass is int or subclass is float):
return True
if superclass in _extra_dict:
superclass = _extra_dict[superclass]
try:
if _issubclass_2(subclass, Empty, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
for empty_target in [Container, Sized, Iterable]:
# We cannot simply use Union[Container, Sized, Iterable] as empty_target
# because of implementation detail behavior of _issubclass_2.
# It would e.g. cause false negative result of
# is_subtype(Empty[Dict], Empty[Container])
try:
if _issubclass_2(superclass.__origin__, empty_target,
bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
return _issubclass_2(subclass.__args__[0], superclass.__origin__,
bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check)
except: pass
if _issubclass_2(superclass, empty_target,
bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
return _issubclass_2(subclass.__args__[0], superclass,
bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check)
except: pass
try:
if _issubclass_2(superclass, Empty, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
for empty_target in [Container, Sized, Iterable]:
# We cannot simply use Union[Container, Sized, Iterable] as empty_target
# because of implementation detail behavior of _issubclass_2.
try:
if _issubclass_2(subclass.__origin__, empty_target,
bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
return _issubclass_2(subclass.__origin__, superclass.__args__[0],
bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check)
except: pass
if _issubclass_2(subclass, empty_target, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
return _issubclass_2(subclass, superclass.__args__[0],
bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check)
except: pass
if isinstance(superclass, TypeVar):
if not superclass.__bound__ is None:
if not _issubclass(subclass, superclass.__bound__, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
return False
if not bound_typevars is None:
try:
if superclass.__contravariant__:
return _issubclass(bound_typevars[superclass], subclass, bound_Generic,
bound_typevars, bound_typevars_readonly, follow_fwd_refs,
_recursion_check)
elif superclass.__covariant__:
return _issubclass(subclass, bound_typevars[superclass], bound_Generic,
bound_typevars, bound_typevars_readonly, follow_fwd_refs,
_recursion_check)
else:
return _issubclass(bound_typevars[superclass], subclass, bound_Generic,
bound_typevars, bound_typevars_readonly, follow_fwd_refs,
_recursion_check) and \
_issubclass(subclass, bound_typevars[superclass], bound_Generic,
bound_typevars, bound_typevars_readonly, follow_fwd_refs,
_recursion_check)
except:
pass
if not bound_Generic is None:
superclass = get_arg_for_TypeVar(superclass, bound_Generic)
if not superclass is None:
return _issubclass(subclass, superclass, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check)
if not bound_typevars is None:
if bound_typevars_readonly:
return False
else:
# bind it...
bound_typevars[superclass] = subclass
return True
return False
if isinstance(subclass, TypeVar):
if not bound_typevars is None:
try:
return _issubclass(bound_typevars[subclass], superclass, bound_Generic,
bound_typevars, bound_typevars_readonly, follow_fwd_refs,
_recursion_check)
except:
pass
if not bound_Generic is None:
subclass = get_arg_for_TypeVar(subclass, bound_Generic)
if not subclass is None:
return _issubclass(subclass, superclass, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check)
if not subclass.__bound__ is None:
return _issubclass(subclass.__bound__, superclass, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check)
return False
res = _issubclass_2(subclass, superclass, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check)
return res | [
"Access this via ``pytypes.is_subtype``.\n Works like ``issubclass``, but supports PEP 484 style types from ``typing`` module.\n\n subclass : type\n The type to check for being a subtype of ``superclass``.\n\n superclass : type\n The type to check for being a supertype of ``subclass``.\n\n bound_Generic : Optional[Generic]\n A type object holding values for unbound typevars occurring in ``subclass`` or ``superclass``.\n Default: None\n If subclass or superclass contains unbound ``TypeVar``s and ``bound_Generic`` is\n provided, this function attempts to retrieve corresponding values for the\n unbound ``TypeVar``s from ``bound_Generic``.\n In collision case with ``bound_typevars`` the value from ``bound_Generic`` if preferred.\n\n bound_typevars : Optional[Dict[typing.TypeVar, type]]\n A dictionary holding values for unbound typevars occurring in ``subclass`` or ``superclass``.\n Default: {}\n Depending on ``bound_typevars_readonly`` pytypes can also bind values to typevars as needed.\n This is done by inserting according mappings into this dictionary. This can e.g. be useful to\n infer values for ``TypeVar``s or to consistently check a set of ``TypeVar``s across multiple\n calls, e.g. when checking all arguments of a function call.\n In collision case with ``bound_Generic`` the value from ``bound_Generic`` if preferred.\n\n bound_typevars_readonly : bool\n Defines if pytypes is allowed to write into the ``bound_typevars`` dictionary.\n Default: True\n If set to False, pytypes cannot assign values to ``TypeVar``s, but only checks regarding\n values already present in ``bound_typevars`` or ``bound_Generic``.\n\n follow_fwd_refs : bool\n Defines if ``_ForwardRef``s should be explored.\n Default: True\n If this is set to ``False`` and a ``_ForwardRef`` is encountered, pytypes aborts the check\n raising a ForwardRefError.\n\n _recursion_check : Optional[Dict[type, Set[type]]]\n Internally used for recursion checks.\n Default: None\n If ``Union``s and ``_ForwardRef``s occur in the same type, recursions can occur. As soon as\n a ``_ForwardRef`` is encountered, pytypes automatically creates this dictionary and\n continues in recursion-proof manner.\n "
] |
Please provide a description of the function:def _issubclass_2(subclass, superclass, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
if is_Tuple(superclass):
return _issubclass_Tuple(subclass, superclass, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check)
if is_Union(superclass):
return _issubclass_Union(subclass, superclass, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check)
if is_Union(subclass):
return all(_issubclass(t, superclass, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check) \
for t in get_Union_params(subclass))
if is_Generic(superclass):
cls = superclass.__origin__ if not superclass.__origin__ is None else superclass
# We would rather use issubclass(superclass.__origin__, Mapping), but that's somehow erroneous
if pytypes.covariant_Mapping and (_has_base(cls, Mapping) or
# Python 3.7 maps everything to collections.abc:
(cls in _extra_dict and issubclass(cls, collections.abc.Mapping))):
return _issubclass_Mapping_covariant(subclass, superclass,
bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs,
_recursion_check)
else:
return _issubclass_Generic(subclass, superclass, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check)
if subclass in _extra_dict:
subclass = _extra_dict[subclass]
try:
return issubclass(subclass, superclass)
except TypeError:
if not is_Type(subclass):
# For Python 3.7, types from typing are not types.
# So issubclass emits TypeError: issubclass() arg 1 must be a class
raise TypeError("Invalid type declaration: %s, %s" %
(type_str(subclass), type_str(superclass)))
return False | [
"Helper for _issubclass, a.k.a pytypes.issubtype.\n "
] |
Please provide a description of the function:def _isinstance(obj, cls, bound_Generic=None, bound_typevars=None,
bound_typevars_readonly=False, follow_fwd_refs=True, _recursion_check=None):
if bound_typevars is None:
bound_typevars = {}
# Special treatment if cls is Iterable[...]
if is_Generic(cls) and cls.__origin__ is typing.Iterable:
if not is_iterable(obj):
return False
itp = get_iterable_itemtype(obj)
if itp is None:
return not pytypes.check_iterables
else:
return _issubclass(itp, cls.__args__[0], bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check)
if is_Callable(cls):
return _isinstance_Callable(obj, cls, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check)
return _issubclass(deep_type(obj), cls, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check) | [
"Access this via ``pytypes.is_of_type``.\n Works like ``isinstance``, but supports PEP 484 style types from ``typing`` module.\n\n obj : Any\n The object to check for being an instance of ``cls``.\n\n cls : type\n The type to check for ``obj`` being an instance of.\n\n bound_Generic : Optional[Generic]\n A type object holding values for unbound typevars occurring in ``cls``.\n Default: None\n If ``cls`` contains unbound ``TypeVar``s and ``bound_Generic`` is provided, this function\n attempts to retrieve corresponding values for the unbound ``TypeVar``s from ``bound_Generic``.\n In collision case with ``bound_typevars`` the value from ``bound_Generic`` if preferred.\n\n bound_typevars : Optional[Dict[typing.TypeVar, type]]\n A dictionary holding values for unbound typevars occurring in ``cls``.\n Default: {}\n Depending on ``bound_typevars_readonly`` pytypes can also bind values to typevars as needed.\n This is done by inserting according mappings into this dictionary. This can e.g. be useful to\n infer values for ``TypeVar``s or to consistently check a set of ``TypeVar``s across multiple\n calls, e.g. when checking all arguments of a function call.\n In collision case with ``bound_Generic`` the value from ``bound_Generic`` if preferred.\n\n bound_typevars_readonly : bool\n Defines if pytypes is allowed to write into the ``bound_typevars`` dictionary.\n Default: True\n If set to False, pytypes cannot assign values to ``TypeVar``s, but only checks regarding\n values already present in ``bound_typevars`` or ``bound_Generic``.\n\n follow_fwd_refs : bool\n Defines if ``ForwardRef``s should be explored.\n Default: True\n If this is set to ``False`` and a ``ForwardRef`` is encountered, pytypes aborts the check\n raising a ForwardRefError.\n\n _recursion_check : Optional[Dict[type, Set[type]]]\n Internally used for recursion checks.\n Default: None\n If ``Union``s and ``ForwardRef``s occur in the same type, recursions can occur. As soon as\n a ``ForwardRef`` is encountered, pytypes automatically creates this dictionary and\n continues in recursion-proof manner.\n "
] |
Please provide a description of the function:def generator_checker_py3(gen, gen_type, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
initialized = False
sn = None
try:
while True:
a = gen.send(sn)
if initialized or not a is None:
if not gen_type.__args__[0] is Any and \
not _isinstance(a, gen_type.__args__[0], bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs,
_recursion_check):
tpa = deep_type(a)
msg = _make_generator_error_message(deep_type(a), gen, gen_type.__args__[0],
'has incompatible yield type')
_raise_typecheck_error(msg, True, a, tpa, gen_type.__args__[0])
# raise pytypes.ReturnTypeError(_make_generator_error_message(deep_type(a), gen,
# gen_type.__args__[0], 'has incompatible yield type'))
initialized = True
sn = yield a
if not gen_type.__args__[1] is Any and \
not _isinstance(sn, gen_type.__args__[1], bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
tpsn = deep_type(sn)
msg = _make_generator_error_message(tpsn, gen, gen_type.__args__[1],
'has incompatible send type')
_raise_typecheck_error(msg, False, sn, tpsn, gen_type.__args__[1])
# raise pytypes.InputTypeError(_make_generator_error_message(deep_type(sn), gen,
# gen_type.__args__[1], 'has incompatible send type'))
except StopIteration as st:
# Python 3:
# todo: Check if st.value is always defined (i.e. as None if not present)
if not gen_type.__args__[2] is Any and \
not _isinstance(st.value, gen_type.__args__[2], bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
tpst = deep_type(st.value)
msg = _make_generator_error_message(tpst, gen, gen_type.__args__[2],
'has incompatible return type')
_raise_typecheck_error(msg, True, st.value, tpst, gen_type.__args__[2])
# raise pytypes.ReturnTypeError(_make_generator_error_message(sttp, gen,
# gen_type.__args__[2], 'has incompatible return type'))
raise st | [
"Builds a typechecking wrapper around a Python 3 style generator object.\n "
] |
Please provide a description of the function:def generator_checker_py2(gen, gen_type, bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
initialized = False
sn = None
while True:
a = gen.send(sn)
if initialized or not a is None:
if not gen_type.__args__[0] is Any and \
not _isinstance(a, gen_type.__args__[0], bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
tpa = deep_type(a)
msg = _make_generator_error_message(tpa, gen, gen_type.__args__[0],
'has incompatible yield type')
_raise_typecheck_error(msg, True, a, tpa, gen_type.__args__[0])
# raise pytypes.ReturnTypeError(_make_generator_error_message(tpa, gen,
# gen_type.__args__[0], 'has incompatible yield type'))
initialized = True
sn = yield a
if not gen_type.__args__[1] is Any and \
not _isinstance(sn, gen_type.__args__[1], bound_Generic, bound_typevars,
bound_typevars_readonly, follow_fwd_refs, _recursion_check):
tpsn = deep_type(sn)
msg = _make_generator_error_message(tpsn, gen, gen_type.__args__[1],
'has incompatible send type')
_raise_typecheck_error(msg, False, sn, tpsn, gen_type.__args__[1]) | [
"Builds a typechecking wrapper around a Python 2 style generator object.\n "
] |
Please provide a description of the function:def annotations_func(func):
if not has_type_hints(func):
# What about defaults?
func.__annotations__ = {}
func.__annotations__ = _get_type_hints(func,
infer_defaults = False)
return func | [
"Works like annotations, but is only applicable to functions,\n methods and properties.\n "
] |
Please provide a description of the function:def annotations_class(cls):
assert(isclass(cls))
# To play it safe we avoid to modify the dict while iterating over it,
# so we previously cache keys.
# For this we don't use keys() because of Python 3.
# Todo: Better use inspect.getmembers here
keys = [key for key in cls.__dict__]
for key in keys:
memb = cls.__dict__[key]
if _check_as_func(memb):
annotations_func(memb)
elif isclass(memb):
annotations_class(memb)
return cls | [
"Works like annotations, but is only applicable to classes.\n "
] |
Please provide a description of the function:def annotations_module(md):
if isinstance(md, str):
if md in sys.modules:
md = sys.modules[md]
if md is None:
return md
elif md in pytypes.typechecker._pending_modules:
# if import is pending, we just store this call for later
pytypes.typechecker._pending_modules[md].append(annotations_module)
return md
assert(ismodule(md))
if md.__name__ in pytypes.typechecker._pending_modules:
# if import is pending, we just store this call for later
pytypes.typechecker._pending_modules[md.__name__].append(annotations_module)
# we already process the module now as far as possible for its internal use
# todo: Issue warning here that not the whole module might be covered yet
if md.__name__ in _annotated_modules and \
_annotated_modules[md.__name__] == len(md.__dict__):
return md
# To play it safe we avoid to modify the dict while iterating over it,
# so we previously cache keys.
# For this we don't use keys() because of Python 3.
# Todo: Better use inspect.getmembers here
keys = [key for key in md.__dict__]
for key in keys:
memb = md.__dict__[key]
if _check_as_func(memb) and memb.__module__ == md.__name__:
annotations_func(memb)
elif isclass(memb) and memb.__module__ == md.__name__:
annotations_class(memb)
if not md.__name__ in pytypes.typechecker._pending_modules:
_annotated_modules[md.__name__] = len(md.__dict__)
return md | [
"Works like annotations, but is only applicable to modules (by explicit call).\n md must be a module or a module name contained in sys.modules.\n "
] |
Please provide a description of the function:def annotations(memb):
if _check_as_func(memb):
return annotations_func(memb)
if isclass(memb):
return annotations_class(memb)
if ismodule(memb):
return annotations_module(memb)
if memb in sys.modules or memb in pytypes.typechecker._pending_modules:
return annotations_module(memb)
return memb | [
"Decorator applicable to functions, methods, properties,\n classes or modules (by explicit call).\n If applied on a module, memb must be a module or a module name contained in sys.modules.\n See pytypes.set_global_annotations_decorator to apply this on all modules.\n Methods with type comment will have type hints parsed from that\n string and get them attached as __annotations__ attribute.\n Methods with either a type comment or ordinary type annotations in\n a stubfile will get that information attached as __annotations__\n attribute (also a relevant use case in Python 3).\n Behavior in case of collision with previously (manually)\n attached __annotations__ can be controlled using the flags\n pytypes.annotations_override_typestring and pytypes.annotations_from_typestring.\n "
] |
Please provide a description of the function:def simplify_for_Union(type_list):
i = 0
while i < len(type_list):
j = 0
while j < i:
if _issubclass(type_list[j], type_list[i]):
del type_list[j]
i -= 1
else:
j += 1
j = i+1
while j < len(type_list):
if _issubclass(type_list[j], type_list[i]):
del type_list[j]
else:
j += 1
i += 1 | [
"Removes types that are subtypes of other elements in the list.\n Does not return a copy, but instead modifies the given list.\n Intended for preprocessing of types to be combined into a typing.Union.\n Subtypecheck is backed by pytypes.is_subtype, so this differs from\n typing.Union's own simplification efforts.\n E.g. this also considers numeric tower like described in\n https://www.python.org/dev/peps/pep-0484/#the-numeric-tower\n (treats int as subtype of float as subtype of complex)\n Use pytypes.apply_numeric_tower flag to switch off numeric tower support.\n "
] |
Please provide a description of the function:def _preprocess_typecheck(argSig, argspecs, slf_or_clsm = False):
# todo: Maybe move also slf-logic here
vargs = argspecs.varargs
try:
kw = argspecs.keywords
except AttributeError:
kw = argspecs.varkw
try:
kwonly = argspecs.kwonlyargs
except AttributeError:
kwonly = None
if not vargs is None or not kw is None:
arg_type_lst = list(get_Tuple_params(argSig))
if not vargs is None:
vargs_pos = (len(argspecs.args)-1) \
if slf_or_clsm else len(argspecs.args)
# IndexErrors in this section indicate that a child-method was
# checked against a parent's type-info with the child featuring
# a more wider type on signature level (e.g. adding vargs)
try:
vargs_type = typing.Sequence[arg_type_lst[vargs_pos]]
except IndexError:
vargs_type = typing.Sequence[typing.Any]
try:
arg_type_lst[vargs_pos] = vargs_type
except IndexError:
arg_type_lst.append(vargs_type)
if not kw is None:
kw_pos = len(argspecs.args)
if slf_or_clsm:
kw_pos -= 1
if not vargs is None:
kw_pos += 1
if not kwonly is None:
kw_pos += len(kwonly)
try:
kw_type = typing.Dict[str, arg_type_lst[kw_pos]]
except IndexError:
kw_type = typing.Dict[str, typing.Any]
try:
arg_type_lst[kw_pos] = kw_type
except IndexError:
arg_type_lst.append(kw_type)
return typing.Tuple[tuple(arg_type_lst)]
else:
return argSig | [
"From a PEP 484 style type-tuple with types for *varargs and/or **kw\n this returns a type-tuple containing Tuple[tp, ...] and Dict[str, kw-tp]\n instead.\n "
] |
Please provide a description of the function:def restore_profiler():
idn = threading.current_thread().ident
if not sys.getprofile() is None:
warn("restore_profiler: Current profile is not None!")
if not idn in _saved_profilers:
warn("restore_profiler: No saved profiler for calling thread!")
else:
sys.setprofile(_saved_profilers[idn])
del _saved_profilers[idn] | [
"If a typechecking profiler is active, e.g. created by\n pytypes.set_global_typechecked_profiler(), such a profiler\n must be restored whenever a TypeCheckError is caught.\n The call must stem from the thread that raised the error.\n Otherwise the typechecking profiler is implicitly disabled.\n Alternatively one can turn pytypes into warning mode. In that\n mode no calls to this function are required (unless one uses\n filterwarnings(\"error\") or likewise).\n "
] |
Please provide a description of the function:def log_type(args_kw, ret, func, slf=False, prop_getter=False, clss=None, argspecs=None,
args_kw_type=None, ret_type = None):
if args_kw_type is None:
args_kw_type = deep_type(args_kw)
if ret_type is None:
ret_type = deep_type(ret)
if argspecs is None:
argspecs = getargspecs(func)
node = _register_logged_func(func, slf, prop_getter, clss, argspecs)
node.add_observation(args_kw_type, ret_type)
md = util.getmodule_for_member(func, prop_getter)
if not md.__name__ in _module_file_map:
_module_file_map[md.__name__] = md.__file__
if clss is None:
try:
clss = util.get_class_that_defined_method(func)
except ValueError:
pass
if not clss is None and not clss in _member_line_map:
_member_line_map[clss] = findsource(clss)[1] | [
"Stores information of a function or method call into a cache, so pytypes can\n create a PEP 484 stubfile from this information later on (see dump_cache).\n "
] |
Please provide a description of the function:def combine_argtype(observations):
assert len(observations) > 0
assert is_Tuple(observations[0])
if len(observations) > 1:
prms = [get_Tuple_params(observations[0])]
ln = len(prms[0])
for obs in observations[1:]:
assert is_Tuple(obs)
prms.append(get_Tuple_params(obs))
assert len(prms[-1]) == ln
if simplify:
prms = map(list, zip(*prms))
if not isinstance(prms, list):
# special care for Python 3
prms = list(prms)
for type_list in prms:
simplify_for_Union(type_list)
prms = map(tuple, prms)
else:
prms = map(tuple, zip(*prms))
prms = map(Union.__getitem__, prms)
return Tuple[tuple(prms)]
else:
return observations[0] | [
"Combines a list of Tuple types into one.\n Basically these are combined element wise into a Union with some\n additional unification effort (e.g. can apply PEP 484 style numeric tower).\n "
] |
Please provide a description of the function:def combine_type(observations):
assert len(observations) > 0
if len(observations) == 1:
return observations[0]
else:
if simplify:
simplify_for_Union(observations)
return Union[tuple(observations)] | [
"Combines a list of types into one.\n Basically these are combined into a Union with some\n additional unification effort (e.g. can apply PEP 484 style numeric tower).\n "
] |
Please provide a description of the function:def dump_cache(path=None, python2=False, suffix=None):
typelogging_enabled_tmp = pytypes.typelogging_enabled
pytypes.typelogging_enabled = False
if suffix is None:
suffix = 'pyi2' if python2 else 'pyi'
if path is None:
path = pytypes.default_typelogger_path
modules = {}
for key in _member_cache:
node = _member_cache[key]
mname = node.get_modulename()
if not mname in modules:
mnode = _module_node(mname)
modules[mname] = mnode
else:
mnode = modules[mname]
mnode.append(node)
for module in modules:
_dump_module(modules[module], path, python2, suffix)
pytypes.typelogging_enabled = typelogging_enabled_tmp | [
"Writes cached observations by @typelogged into stubfiles.\n Files will be created in the directory provided as 'path'; overwrites\n existing files without notice.\n Uses 'pyi2' suffix if 'python2' flag is given else 'pyi'. Resulting\n files will be Python 2.7 compilant accordingly.\n "
] |
Please provide a description of the function:def get_indentation(func):
src_lines = getsourcelines(func)[0]
for line in src_lines:
if not (line.startswith('@') or line.startswith('def') or line.lstrip().startswith('#')):
return line[:len(line) - len(line.lstrip())]
return pytypes.default_indent | [
"Extracts a function's indentation as a string,\n In contrast to an inspect.indentsize based implementation,\n this function preserves tabs if present.\n "
] |
Please provide a description of the function:def typelogged_func(func):
if not pytypes.typelogging_enabled:
return func
if hasattr(func, 'do_logging'):
func.do_logging = True
return func
elif hasattr(func, 'do_typecheck'):
# actually shouldn't happen
return _typeinspect_func(func, func.do_typecheck, True)
else:
return _typeinspect_func(func, False, True) | [
"Works like typelogged, but is only applicable to functions,\n methods and properties.\n "
] |
Please provide a description of the function:def typelogged_class(cls):
if not pytypes.typelogging_enabled:
return cls
assert(isclass(cls))
# To play it safe we avoid to modify the dict while iterating over it,
# so we previously cache keys.
# For this we don't use keys() because of Python 3.
# Todo: Better use inspect.getmembers here
keys = [key for key in cls.__dict__]
for key in keys:
memb = cls.__dict__[key]
if _check_as_func(memb):
setattr(cls, key, typelogged_func(memb))
elif isclass(memb):
typelogged_class(memb)
return cls | [
"Works like typelogged, but is only applicable to classes.\n "
] |
Please provide a description of the function:def typelogged_module(md):
if not pytypes.typelogging_enabled:
return md
if isinstance(md, str):
if md in sys.modules:
md = sys.modules[md]
if md is None:
return md
elif md in pytypes.typechecker._pending_modules:
# if import is pending, we just store this call for later
pytypes.typechecker._pending_modules[md].append(typelogged_module)
return md
assert(ismodule(md))
if md.__name__ in pytypes.typechecker._pending_modules:
# if import is pending, we just store this call for later
pytypes.typechecker._pending_modules[md.__name__].append(typelogged_module)
# we already process the module now as far as possible for its internal use
# todo: Issue warning here that not the whole module might be covered yet
assert(ismodule(md))
if md.__name__ in _fully_typelogged_modules and \
_fully_typelogged_modules[md.__name__] == len(md.__dict__):
return md
# To play it safe we avoid to modify the dict while iterating over it,
# so we previously cache keys.
# For this we don't use keys() because of Python 3.
# Todo: Better use inspect.getmembers here
keys = [key for key in md.__dict__]
for key in keys:
memb = md.__dict__[key]
if _check_as_func(memb) and memb.__module__ == md.__name__:
setattr(md, key, typelogged_func(memb))
elif isclass(memb) and memb.__module__ == md.__name__:
typelogged_class(memb)
if not md.__name__ in pytypes.typechecker._pending_modules:
_fully_typelogged_modules[md.__name__] = len(md.__dict__)
return md | [
"Works like typelogged, but is only applicable to modules by explicit call).\n md must be a module or a module name contained in sys.modules.\n "
] |
Please provide a description of the function:def typelogged(memb):
if not pytypes.typelogging_enabled:
return memb
if _check_as_func(memb):
return typelogged_func(memb)
if isclass(memb):
return typelogged_class(memb)
if ismodule(memb):
return typelogged_module(memb)
if memb in sys.modules or memb in pytypes.typechecker._pending_modules:
return typelogged_module(memb)
return memb | [
"Decorator applicable to functions, methods, properties,\n classes or modules (by explicit call).\n If applied on a module, memb must be a module or a module name contained in sys.modules.\n See pytypes.set_global_typelogged_decorator to apply this on all modules.\n Observes function and method calls at runtime and allows pytypes to generate stubfiles\n from the acquired type information.\n Use dump_cache to write a stubfile in this manner.\n "
] |
Please provide a description of the function:def enable_global_typechecked_decorator(flag = True, retrospective = True):
global global_typechecked_decorator
global_typechecked_decorator = flag
if import_hook_enabled:
_install_import_hook()
if global_typechecked_decorator and retrospective:
_catch_up_global_typechecked_decorator()
return global_typechecked_decorator | [
"Enables or disables global typechecking mode via decorators.\n See flag global_typechecked_decorator.\n In contrast to setting the flag directly, this function provides\n a retrospective option. If retrospective is true, this will also\n affect already imported modules, not only future imports.\n Does not work if checking_enabled is false.\n Does not work reliably if checking_enabled has ever been set to\n false during current run.\n "
] |
Please provide a description of the function:def enable_global_auto_override_decorator(flag = True, retrospective = True):
global global_auto_override_decorator
global_auto_override_decorator = flag
if import_hook_enabled:
_install_import_hook()
if global_auto_override_decorator and retrospective:
_catch_up_global_auto_override_decorator()
return global_auto_override_decorator | [
"Enables or disables global auto_override mode via decorators.\n See flag global_auto_override_decorator.\n In contrast to setting the flag directly, this function provides\n a retrospective option. If retrospective is true, this will also\n affect already imported modules, not only future imports.\n "
] |
Please provide a description of the function:def enable_global_annotations_decorator(flag = True, retrospective = True):
global global_annotations_decorator
global_annotations_decorator = flag
if import_hook_enabled:
_install_import_hook()
if global_annotations_decorator and retrospective:
_catch_up_global_annotations_decorator()
return global_annotations_decorator | [
"Enables or disables global annotation mode via decorators.\n See flag global_annotations_decorator.\n In contrast to setting the flag directly, this function provides\n a retrospective option. If retrospective is true, this will also\n affect already imported modules, not only future imports.\n "
] |
Please provide a description of the function:def enable_global_typelogged_decorator(flag = True, retrospective = True):
global global_typelogged_decorator
global_typelogged_decorator = flag
if import_hook_enabled:
_install_import_hook()
if global_typelogged_decorator and retrospective:
_catch_up_global_typelogged_decorator()
return global_typelogged_decorator | [
"Enables or disables global typelog mode via decorators.\n See flag global_typelogged_decorator.\n In contrast to setting the flag directly, this function provides\n a retrospective option. If retrospective is true, this will also\n affect already imported modules, not only future imports.\n "
] |
Please provide a description of the function:def enable_global_typechecked_profiler(flag = True):
global global_typechecked_profiler, _global_type_agent, global_typelogged_profiler
global_typechecked_profiler = flag
if flag and checking_enabled:
if _global_type_agent is None:
_global_type_agent = TypeAgent()
_global_type_agent.start()
elif not _global_type_agent.active:
_global_type_agent.start()
elif not flag and not global_typelogged_profiler and \
not _global_type_agent is None and _global_type_agent.active:
_global_type_agent.stop() | [
"Enables or disables global typechecking mode via a profiler.\n See flag global_typechecked_profiler.\n Does not work if checking_enabled is false.\n "
] |
Please provide a description of the function:def enable_global_typelogged_profiler(flag = True):
global global_typelogged_profiler, _global_type_agent, global_typechecked_profiler
global_typelogged_profiler = flag
if flag and typelogging_enabled:
if _global_type_agent is None:
_global_type_agent = TypeAgent()
_global_type_agent.start()
elif not _global_type_agent.active:
_global_type_agent.start()
elif not flag and not global_typechecked_profiler and \
not _global_type_agent is None and _global_type_agent.active:
_global_type_agent.stop() | [
"Enables or disables global typelogging mode via a profiler.\n See flag global_typelogged_profiler.\n Does not work if typelogging_enabled is false.\n "
] |
Please provide a description of the function:def _detect_issue351():
class Tuple(typing.Generic[typing.T]):
pass
res = Tuple[str] == typing.Tuple[str]
del Tuple
return res | [
"Detect if github.com/python/typing/issues/351 applies\n to the installed typing-version.\n "
] |
Please provide a description of the function:def _preprocess_override(meth_types, base_types, meth_argspec, base_argspec):
try:
base_kw = base_argspec.keywords
kw = meth_argspec.keywords
except AttributeError:
base_kw = base_argspec.varkw
kw = meth_argspec.varkw
try:
kwonly = meth_argspec.kwonlyargs
base_kwonly = base_argspec.kwonlyargs
except AttributeError:
kwonly = None
base_kwonly = None
if meth_argspec.varargs is None and base_argspec.varargs is None \
and kw is None and base_kw is None \
and kwonly is None and base_kwonly is None \
and (meth_argspec.defaults is None or \
len(meth_argspec.args) == len(base_argspec.args)):
return meth_types, base_types
arg_types = type_util.get_Tuple_params(meth_types[0])
base_arg_types = type_util.get_Tuple_params(base_types[0])
kw_off = len(meth_argspec.args)-1 # -1 for self
if not meth_argspec.defaults is None and base_argspec.varargs is None:
kw_off -= len(meth_argspec.defaults)
base_kw_off = len(base_argspec.args)
if base_argspec.varargs is None:
base_kw_off -= 1 # one decrement anyway for self
arg_types2 = list(arg_types[:kw_off])
base_arg_types2 = list(base_arg_types[:base_kw_off])
base_vargtype = None
vargtype = None
base_argnames = None
argnames = None
if not base_argspec.varargs is None or not base_kw is None:
base_argnames = util.getargnames(base_argspec)[1:]
if not base_argspec.varargs is None:
base_vargtype = base_arg_types[base_argnames.index(base_argspec.varargs)]
# Should have been already checked:
assert not meth_argspec.varargs is None
if not meth_argspec.varargs is None or not kw is None:
argnames = util.getargnames(meth_argspec)[1:]
if not meth_argspec.varargs is None:
vargtype = arg_types[argnames.index(meth_argspec.varargs)]
if not meth_argspec.defaults is None:
pos = 0
if not base_argspec.varargs is None:
# fill up parent's types with varg type to account for child's defaults
while len(arg_types2) > len(base_arg_types2):
base_arg_types2.append(base_vargtype)
base_arg_types2.append(base_vargtype) # restore one more for the actual vargtype
else:
# parent has no vargtype, so fill up child with default-types afap
while len(arg_types2) < len(base_arg_types2) and \
pos < len(meth_argspec.defaults):
arg_types2.append(arg_types[kw_off+pos])
pos += 1
while len(arg_types2) < len(base_arg_types2):
arg_types2.append(vargtype)
if not kw is None:
kw_type = arg_types[argnames.index(kw)]
if not base_kwonly is None:
for name in base_kwonly:
base_arg_types2.append(base_arg_types[base_argnames.index(name)])
if name in argnames:
arg_types2.append(arg_types[argnames.index(name)])
else:
arg_types2.append(kw_type)
if not base_kw is None:
base_arg_types2.append(base_arg_types[base_argnames.index(base_kw)])
arg_types2.append(kw_type)
return (typing.Tuple[tuple(arg_types2)], meth_types[1]), \
(typing.Tuple[tuple(base_arg_types2)], base_types[1]) | [
"This function linearizes type info of ordinary, vararg, kwonly and varkw\n arguments, such that override-feasibility can be conveniently checked. \n "
] |
Please provide a description of the function:def override(func, auto = False):
if not pytypes.checking_enabled:
return func
# notes:
# - don't use @override on __init__ (raise warning? Error for now!),
# because __init__ is not intended to be called after creation
# - @override applies typechecking to every match in mro, because class might be used as
# replacement for each class in its mro. So each must be compatible.
# - @override does not/cannot check signature of builtin ancestors (for now).
# - @override starts checking only at its declaration level. If in a subclass an @override
# annotated method is not s.t. @override any more.
# This is difficult to achieve in case of a call to super. Runtime-override checking
# would use the subclass-self and thus unintentionally would also check the submethod's
# signature. We actively avoid this here.
func.override_checked = True
_actualfunc(func).override_checked = True
if pytypes.check_override_at_class_definition_time:
# We need some trickery here, because details of the class are not yet available
# as it is just getting defined. Luckily we can get base-classes via inspect.stack():
stack = inspect.stack()
try:
base_classes = _re.search(r'class.+\((.+)\)\s*\:', stack[2][4][0]).group(1)
except IndexError:
raise _function_instead_of_method_error(func)
except AttributeError:
base_classes = 'object'
meth_cls_name = stack[1][3]
if func.__name__ == '__init__':
raise OverrideError(
'Invalid use of @override in %s:\n @override must not be applied to __init__.'
% util._fully_qualified_func_name(func, True, None, meth_cls_name))
# handle multiple inheritance
base_classes = [s.strip() for s in base_classes.split(',')]
if not base_classes:
raise ValueError('@override: unable to determine base class')
# stack[0]=overrides, stack[1]=inside class def'n, stack[2]=outside class def'n
derived_class_locals = stack[2][0].f_locals
derived_class_globals = stack[2][0].f_globals
# replace each class name in base_classes with the actual class type
for i, base_class in enumerate(base_classes):
if '.' not in base_class:
if base_class in derived_class_locals:
base_classes[i] = derived_class_locals[base_class]
elif base_class in derived_class_globals:
base_classes[i] = derived_class_globals[base_class]
elif base_class in types.__builtins__:
base_classes[i] = types.__builtins__[base_class]
else:
raise TypeError("Could not lookup type: "+base_class)
else:
components = base_class.split('.')
# obj is either a module or a class
if components[0] in derived_class_locals:
obj = derived_class_locals[components[0]]
elif components[0] in derived_class_globals:
obj = derived_class_globals[components[0]]
elif components[0] in types.__builtins__:
obj = types.__builtins__[components[0]]
elif components[0] in sys.modules:
obj = sys.modules[components[0]]
else:
raise TypeError("Could not lookup type or module: "+base_class)
for c in components[1:]:
assert(ismodule(obj) or isclass(obj))
obj = getattr(obj, c)
base_classes[i] = obj
mro_set = set() # contains everything in would-be-mro, however in unspecified order
mro_pool = [base_classes]
while len(mro_pool) > 0:
lst = mro_pool.pop()
for base_cls in lst:
if not is_builtin_type(base_cls):
mro_set.add(base_cls)
mro_pool.append(base_cls.__bases__)
base_method_exists = False
argSpecs = util.getargspecs(func)
for cls in mro_set:
if hasattr(cls, func.__name__):
base_method_exists = True
base_method = getattr(cls, func.__name__)
_check_override_argspecs(func, argSpecs, meth_cls_name, base_method, cls)
if has_type_hints(func):
try:
_check_override_types(func, _funcsigtypes(func, True, cls), meth_cls_name,
base_method, cls)
except NameError:
_delayed_checks.append(_DelayedCheck(func, func, meth_cls_name, base_method,
cls, sys.exc_info()))
if not base_method_exists:
if not auto:
raise _no_base_method_error(func)
if pytypes.check_override_at_runtime:
specs = util.getargspecs(func)
argNames = util.getargnames(specs)
def checker_ov(*args, **kw):
if hasattr(checker_ov, '__annotations__') and len(checker_ov.__annotations__) > 0:
checker_ov.ov_func.__annotations__ = checker_ov.__annotations__
args_kw = util.getargskw(args, kw, specs)
if len(argNames) > 0 and argNames[0] == 'self':
if hasattr(args_kw[0].__class__, func.__name__) and \
ismethod(getattr(args_kw[0], func.__name__)):
actual_class = args_kw[0].__class__
if _actualfunc(getattr(args_kw[0], func.__name__)) != func:
for acls in util.mro(args_kw[0].__class__):
if not is_builtin_type(acls):
if hasattr(acls, func.__name__) and func.__name__ in acls.__dict__ and \
_actualfunc(acls.__dict__[func.__name__]) == func:
actual_class = acls
if func.__name__ == '__init__':
raise OverrideError(
'Invalid use of @override in %s:\n @override must not be applied to __init__.'
% util._fully_qualified_func_name(func, True, actual_class))
ovmro = []
base_method_exists = False
for mc in util.mro(actual_class)[1:]:
if hasattr(mc, func.__name__):
ovf = getattr(mc, func.__name__)
base_method_exists = True
if not is_builtin_type(mc):
ovmro.append(mc)
if not base_method_exists:
if not auto:
raise _no_base_method_error(func)
else:
return func(*args, **kw)
# Not yet support overloading
# Check arg-count compatibility
for ovcls in ovmro:
ovf = getattr(ovcls, func.__name__)
_check_override_argspecs(func, specs, actual_class.__name__, ovf, ovcls)
# Check arg/res-type compatibility
meth_types = _funcsigtypes(func, True, args_kw[0].__class__)
if has_type_hints(func):
for ovcls in ovmro:
ovf = getattr(ovcls, func.__name__)
_check_override_types(func, meth_types, actual_class.__name__, ovf, ovcls)
else:
raise OverrideError('@override was applied to a non-method: %s.%s.\n'
% (func.__module__, func.__name__)
+ "that declares 'self' although not a method.")
else:
raise _function_instead_of_method_error(func)
return func(*args, **kw)
checker_ov.ov_func = func
if hasattr(func, '__func__'):
checker_ov.__func__ = func.__func__
checker_ov.__name__ = func.__name__
checker_ov.__module__ = func.__module__
checker_ov.__globals__.update(func.__globals__)
if hasattr(func, '__annotations__'):
checker_ov.__annotations__ = func.__annotations__
if hasattr(func, '__qualname__'):
checker_ov.__qualname__ = func.__qualname__
checker_ov.__doc__ = func.__doc__
# Todo: Check what other attributes might be needed (e.g. by debuggers).
checker_ov._check_parent_types = True
return checker_ov
else:
func._check_parent_types = True
return func | [
"Decorator applicable to methods only.\n For a version applicable also to classes or modules use auto_override.\n Asserts that for the decorated method a parent method exists in its mro.\n If both the decorated method and its parent method are type annotated,\n the decorator additionally asserts compatibility of the annotated types.\n Note that the return type is checked in contravariant manner.\n A successful check guarantees that the child method can always be used in\n places that support the parent method's signature.\n Use pytypes.check_override_at_runtime and pytypes.check_override_at_class_definition_time\n to control whether checks happen at class definition time or at \"actual runtime\".\n "
] |
Please provide a description of the function:def typechecked_func(func, force = False, argType = None, resType = None, prop_getter = False):
if not pytypes.checking_enabled and not pytypes.do_logging_in_typechecked:
return func
assert(_check_as_func(func))
if not force and is_no_type_check(func):
return func
if hasattr(func, 'do_typecheck'):
func.do_typecheck = True
return func
elif hasattr(func, 'do_logging'):
# actually shouldn't happen
return _typeinspect_func(func, True, func.do_logging, argType, resType, prop_getter)
else:
return _typeinspect_func(func, True, False, argType, resType, prop_getter) | [
"Works like typechecked, but is only applicable to functions, methods and properties.\n "
] |
Please provide a description of the function:def typechecked_class(cls, force = False, force_recursive = False):
return _typechecked_class(cls, set(), force, force_recursive) | [
"Works like typechecked, but is only applicable to classes.\n "
] |
Please provide a description of the function:def typechecked_module(md, force_recursive = False):
if not pytypes.checking_enabled:
return md
if isinstance(md, str):
if md in sys.modules:
md = sys.modules[md]
if md is None:
return md
elif md in _pending_modules:
# if import is pending, we just store this call for later
_pending_modules[md].append(lambda t: typechecked_module(t, True))
return md
assert(ismodule(md))
if md.__name__ in _pending_modules:
# if import is pending, we just store this call for later
_pending_modules[md.__name__].append(lambda t: typechecked_module(t, True))
# we already process the module now as far as possible for its internal use
# todo: Issue warning here that not the whole module might be covered yet
if md.__name__ in _fully_typechecked_modules and \
_fully_typechecked_modules[md.__name__] == len(md.__dict__):
return md
# To play it safe we avoid to modify the dict while iterating over it,
# so we previously cache keys.
# For this we don't use keys() because of Python 3.
# Todo: Better use inspect.getmembers here
keys = [key for key in md.__dict__]
for key in keys:
memb = md.__dict__[key]
if force_recursive or not is_no_type_check(memb) and hasattr(memb, '__module__'):
if _check_as_func(memb) and memb.__module__ == md.__name__ and \
has_type_hints(memb):
setattr(md, key, typechecked_func(memb, force_recursive))
elif isclass(memb) and memb.__module__ == md.__name__:
typechecked_class(memb, force_recursive, force_recursive)
if not md.__name__ in _pending_modules:
_fully_typechecked_modules[md.__name__] = len(md.__dict__)
return md | [
"Works like typechecked, but is only applicable to modules (by explicit call).\n md must be a module or a module name contained in sys.modules.\n "
] |
Please provide a description of the function:def typechecked(memb):
if not pytypes.checking_enabled:
return memb
if is_no_type_check(memb):
return memb
if type_util._check_as_func(memb):
return typechecked_func(memb)
if isclass(memb):
return typechecked_class(memb)
if ismodule(memb):
return typechecked_module(memb, True)
if memb in sys.modules or memb in _pending_modules:
return typechecked_module(memb, True)
return memb | [
"Decorator applicable to functions, methods, properties,\n classes or modules (by explicit call).\n If applied on a module, memb must be a module or a module name contained in sys.modules.\n See pytypes.set_global_typechecked_decorator to apply this on all modules.\n Asserts compatibility of runtime argument and return values of all targeted functions\n and methods w.r.t. PEP 484-style type annotations of these functions and methods.\n "
] |
Please provide a description of the function:def auto_override_class(cls, force = False, force_recursive = False):
if not pytypes.checking_enabled:
return cls
assert(isclass(cls))
if not force and is_no_type_check(cls):
return cls
# To play it safe we avoid to modify the dict while iterating over it,
# so we previously cache keys.
# For this we don't use keys() because of Python 3.
# Todo: Better use inspect.getmembers here
keys = [key for key in cls.__dict__]
for key in keys:
memb = cls.__dict__[key]
if force_recursive or not is_no_type_check(memb):
if isfunction(memb) or ismethod(memb) or ismethoddescriptor(memb):
if util._has_base_method(memb, cls):
setattr(cls, key, override(memb))
elif isclass(memb):
auto_override_class(memb, force_recursive, force_recursive)
return cls | [
"Works like auto_override, but is only applicable to classes.\n "
] |
Please provide a description of the function:def auto_override_module(md, force_recursive = False):
if not pytypes.checking_enabled:
return md
if isinstance(md, str):
if md in sys.modules:
md = sys.modules[md]
if md is None:
return md
elif md in _pending_modules:
# if import is pending, we just store this call for later
_pending_modules[md].append(lambda t: auto_override_module(t, True))
return md
assert(ismodule(md))
if md.__name__ in _pending_modules:
# if import is pending, we just store this call for later
_pending_modules[md.__name__].append(lambda t: auto_override_module(t, True))
# we already process the module now as far as possible for its internal use
# todo: Issue warning here that not the whole module might be covered yet
if md.__name__ in _auto_override_modules and \
_auto_override_modules[md.__name__] == len(md.__dict__):
return md
# To play it safe we avoid to modify the dict while iterating over it,
# so we previously cache keys.
# For this we don't use keys() because of Python 3.
# Todo: Better use inspect.getmembers here
keys = [key for key in md.__dict__]
for key in keys:
memb = md.__dict__[key]
if force_recursive or not is_no_type_check(memb):
if isclass(memb) and memb.__module__ == md.__name__:
auto_override_class(memb, force_recursive, force_recursive)
if not md.__name__ in _pending_modules:
_auto_override_modules[md.__name__] = len(md.__dict__)
return md | [
"Works like auto_override, but is only applicable to modules (by explicit call).\n md must be a module or a module name contained in sys.modules.\n "
] |
Please provide a description of the function:def auto_override(memb):
if type_util._check_as_func(memb):
return override(memb, True)
if isclass(memb):
return auto_override_class(memb)
if ismodule(memb):
return auto_override_module(memb, True)
if memb in sys.modules or memb in _pending_modules:
return auto_override_module(memb, True)
return memb | [
"Decorator applicable to methods, classes or modules (by explicit call).\n If applied on a module, memb must be a module or a module name contained in sys.modules.\n See pytypes.set_global_auto_override_decorator to apply this on all modules.\n Works like override decorator on type annotated methods that actually have a type\n annotated parent method. Has no effect on methods that do not override anything.\n In contrast to plain override decorator, auto_override can be applied easily on\n every method in a class or module.\n In contrast to explicit override decorator, auto_override is not suitable to detect\n typos in spelling of a child method's name. It is only useful to assert compatibility\n of type information (note that return type is contravariant).\n Use pytypes.check_override_at_runtime and pytypes.check_override_at_class_definition_time\n to control whether checks happen at class definition time or at \"actual runtime\".\n "
] |
Please provide a description of the function:def no_type_check(memb):
try:
return typing.no_type_check(memb)
except(AttributeError):
_not_type_checked.add(memb)
return memb | [
"Works like typing.no_type_check, but also supports cases where\n typing.no_type_check fails due to AttributeError. This can happen,\n because typing.no_type_check wants to access __no_type_check__, which\n might fail if e.g. a class is using slots or an object doesn't support\n custom attributes.\n "
] |
Please provide a description of the function:def is_no_type_check(memb):
try:
return hasattr(memb, '__no_type_check__') and memb.__no_type_check__ or \
memb in _not_type_checked
except TypeError:
return False | [
"Checks if an object was annotated with @no_type_check\n (from typing or pytypes.typechecker).\n "
] |
Please provide a description of the function:def check_argument_types(cllable = None, call_args = None, clss = None, caller_level = 0):
return _check_caller_type(False, cllable, call_args, clss, caller_level+1) | [
"Can be called from within a function or method to apply typechecking to\n the arguments that were passed in by the caller. Checking is applied w.r.t.\n type hints of the function or method hosting the call to check_argument_types.\n "
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.