body
stringlengths
26
98.2k
body_hash
int64
-9,222,864,604,528,158,000
9,221,803,474B
docstring
stringlengths
1
16.8k
path
stringlengths
5
230
name
stringlengths
1
96
repository_name
stringlengths
7
89
lang
stringclasses
1 value
body_without_docstring
stringlengths
20
98.2k
def __init__(self, event, device): 'Initialize the Axis event.' super().__init__(device) self.event = event self._attr_name = f'{device.name} {event.TYPE} {event.id}' self._attr_unique_id = f'{device.unique_id}-{event.topic}-{event.id}' self._attr_device_class = event.CLASS
6,711,343,000,170,600,000
Initialize the Axis event.
homeassistant/components/axis/axis_base.py
__init__
2004happy/core
python
def __init__(self, event, device): super().__init__(device) self.event = event self._attr_name = f'{device.name} {event.TYPE} {event.id}' self._attr_unique_id = f'{device.unique_id}-{event.topic}-{event.id}' self._attr_device_class = event.CLASS
async def async_added_to_hass(self) -> None: 'Subscribe sensors events.' self.event.register_callback(self.update_callback) (await super().async_added_to_hass())
5,140,606,662,706,622,000
Subscribe sensors events.
homeassistant/components/axis/axis_base.py
async_added_to_hass
2004happy/core
python
async def async_added_to_hass(self) -> None: self.event.register_callback(self.update_callback) (await super().async_added_to_hass())
async def async_will_remove_from_hass(self) -> None: 'Disconnect device object when removed.' self.event.remove_callback(self.update_callback)
-8,581,445,601,509,573,000
Disconnect device object when removed.
homeassistant/components/axis/axis_base.py
async_will_remove_from_hass
2004happy/core
python
async def async_will_remove_from_hass(self) -> None: self.event.remove_callback(self.update_callback)
def decode_prediction(prediction): '\n Decodes predictions and returns a result string.\n ' if (np.where((prediction == np.amax(prediction)))[1] == 2): prob_orange = round((prediction[0][2] * 100), 2) label = f'I am {prob_orange} % sure this is an orange 🍊!' if (np.where((prediction == np.amax(prediction)))[1] == 1): prob_banana = round((prediction[0][1] * 100), 2) label = f'I am {prob_banana} % sure this is a banana 🍌!' if (np.where((prediction == np.amax(prediction)))[1] == 0): prob_apple = round((prediction[0][0] * 100), 2) label = f'I am {prob_apple} % sure this is an apple 🍎!' return label
992,678,582,589,011,700
Decodes predictions and returns a result string.
src/predict.py
decode_prediction
DariusTorabian/image-classifier
python
def decode_prediction(prediction): '\n \n ' if (np.where((prediction == np.amax(prediction)))[1] == 2): prob_orange = round((prediction[0][2] * 100), 2) label = f'I am {prob_orange} % sure this is an orange 🍊!' if (np.where((prediction == np.amax(prediction)))[1] == 1): prob_banana = round((prediction[0][1] * 100), 2) label = f'I am {prob_banana} % sure this is a banana 🍌!' if (np.where((prediction == np.amax(prediction)))[1] == 0): prob_apple = round((prediction[0][0] * 100), 2) label = f'I am {prob_apple} % sure this is an apple 🍎!' return label
def predict(frame): '\n Takes a frame as input, makes a prediction, decoodes it\n and returns a result string.\n ' img = cv2.resize(frame, (224, 224)) img = cv2.cvtColor(np.float32(img), cv2.COLOR_BGR2RGB) img = img.reshape(1, 224, 224, 3) prediction = model.predict(img) label = decode_prediction(prediction) return label
-677,628,201,894,662,000
Takes a frame as input, makes a prediction, decoodes it and returns a result string.
src/predict.py
predict
DariusTorabian/image-classifier
python
def predict(frame): '\n Takes a frame as input, makes a prediction, decoodes it\n and returns a result string.\n ' img = cv2.resize(frame, (224, 224)) img = cv2.cvtColor(np.float32(img), cv2.COLOR_BGR2RGB) img = img.reshape(1, 224, 224, 3) prediction = model.predict(img) label = decode_prediction(prediction) return label
def declare_types(**types): "\n Decorator to declare argument and result types for a function\n\n Usage is similar to `check_units` except that types must be one of ``{VALID_ARG_TYPES}``\n and the result type must be one of ``{VALID_RETURN_TYPES}``. Unspecified argument\n types are assumed to be ``'all'`` (i.e. anything is permitted), and an unspecified\n result type is assumed to be ``'float'``. Note that the ``'highest'`` option for\n result type will give the highest type of its argument, e.g. if the arguments\n were boolean and integer then the result would be integer, if the arguments were\n integer and float it would be float.\n " def annotate_function_with_types(f): if hasattr(f, '_orig_arg_names'): arg_names = f._orig_arg_names else: arg_names = f.__code__.co_varnames[0:f.__code__.co_argcount] argtypes = [] for name in arg_names: arg_type = types.get(name, 'any') if (arg_type not in VALID_ARG_TYPES): raise ValueError(('Argument type %s is not valid, must be one of %s, for argument %s' % (arg_type, VALID_ARG_TYPES, name))) argtypes.append(arg_type) for n in types: if ((n not in arg_names) and (n != 'result')): raise ValueError(('Type specified for unknown argument ' + n)) return_type = types.get('result', 'float') if (return_type not in VALID_RETURN_TYPES): raise ValueError(('Result type %s is not valid, must be one of %s' % (return_type, VALID_RETURN_TYPES))) f._arg_types = argtypes f._return_type = return_type f._orig_arg_names = arg_names f._annotation_attributes = (getattr(f, '_annotation_attributes', []) + ['_arg_types', '_return_type']) return f return annotate_function_with_types
515,835,094,503,969,150
Decorator to declare argument and result types for a function Usage is similar to `check_units` except that types must be one of ``{VALID_ARG_TYPES}`` and the result type must be one of ``{VALID_RETURN_TYPES}``. Unspecified argument types are assumed to be ``'all'`` (i.e. anything is permitted), and an unspecified result type is assumed to be ``'float'``. Note that the ``'highest'`` option for result type will give the highest type of its argument, e.g. if the arguments were boolean and integer then the result would be integer, if the arguments were integer and float it would be float.
brian2/core/functions.py
declare_types
Ziaeemehr/brian2
python
def declare_types(**types): "\n Decorator to declare argument and result types for a function\n\n Usage is similar to `check_units` except that types must be one of ``{VALID_ARG_TYPES}``\n and the result type must be one of ``{VALID_RETURN_TYPES}``. Unspecified argument\n types are assumed to be ``'all'`` (i.e. anything is permitted), and an unspecified\n result type is assumed to be ``'float'``. Note that the ``'highest'`` option for\n result type will give the highest type of its argument, e.g. if the arguments\n were boolean and integer then the result would be integer, if the arguments were\n integer and float it would be float.\n " def annotate_function_with_types(f): if hasattr(f, '_orig_arg_names'): arg_names = f._orig_arg_names else: arg_names = f.__code__.co_varnames[0:f.__code__.co_argcount] argtypes = [] for name in arg_names: arg_type = types.get(name, 'any') if (arg_type not in VALID_ARG_TYPES): raise ValueError(('Argument type %s is not valid, must be one of %s, for argument %s' % (arg_type, VALID_ARG_TYPES, name))) argtypes.append(arg_type) for n in types: if ((n not in arg_names) and (n != 'result')): raise ValueError(('Type specified for unknown argument ' + n)) return_type = types.get('result', 'float') if (return_type not in VALID_RETURN_TYPES): raise ValueError(('Result type %s is not valid, must be one of %s' % (return_type, VALID_RETURN_TYPES))) f._arg_types = argtypes f._return_type = return_type f._orig_arg_names = arg_names f._annotation_attributes = (getattr(f, '_annotation_attributes', []) + ['_arg_types', '_return_type']) return f return annotate_function_with_types
def implementation(target, code=None, namespace=None, dependencies=None, discard_units=None, name=None, **compiler_kwds): '\n A simple decorator to extend user-written Python functions to work with code\n generation in other languages.\n\n Parameters\n ----------\n target : str\n Name of the code generation target (e.g. ``\'cython\'``) for which to add\n an implementation.\n code : str or dict-like, optional\n What kind of code the target language expects is language-specific,\n e.g. C++ code allows for a dictionary of code blocks instead of a\n single string.\n namespaces : dict-like, optional\n A namespace dictionary (i.e. a mapping of names to values) that\n should be added to a `CodeObject` namespace when using this function.\n dependencies : dict-like, optional\n A mapping of names to `Function` objects, for additional functions\n needed by this function.\n discard_units: bool, optional\n Numpy functions can internally make use of the unit system. However,\n during a simulation run, state variables are passed around as unitless\n values for efficiency. If `discard_units` is set to ``False``, input\n arguments will have units added to them so that the function can still\n use units internally (the units will be stripped away from the return\n value as well). Alternatively, if `discard_units` is set to ``True``,\n the function will receive unitless values as its input. The namespace\n of the function will be altered to make references to units (e.g.\n ``ms``) refer to the corresponding floating point values so that no\n unit mismatch errors are raised. Note that this system cannot work in\n all cases, e.g. it does not work with functions that internally imports\n values (e.g. does ``from brian2 import ms``) or access values with\n units indirectly (e.g. uses ``brian2.ms`` instead of ``ms``). If no\n value is given, defaults to the preference setting\n `codegen.runtime.numpy.discard_units`.\n name : str, optional\n The name of the function in the target language. 
Should only be\n specified if the function has to be renamed for the target language.\n compiler_kwds : dict, optional\n Additional keyword arguments will be transferred to the code generation\n stage, e.g. for C++-based targets, the code can make use of additional\n header files by providing a list of strings as the ``headers`` argument.\n\n Notes\n -----\n While it is in principle possible to provide a numpy implementation\n as an argument for this decorator, this is normally not necessary -- the\n numpy implementation should be provided in the decorated function.\n\n If this decorator is used with other decorators such as `check_units` or\n `declare_types`, it should be the uppermost decorator (that is, the\n last one to be applied).\n\n Examples\n --------\n Sample usage::\n\n @implementation(\'cpp\',"""\n #include<math.h>\n inline double usersin(double x)\n {\n return sin(x);\n }\n """)\n def usersin(x):\n return sin(x)\n ' def do_user_implementation(func): if isinstance(func, Function): function = func else: function = Function(func) if discard_units: if (not ((target == 'numpy') and (code is None))): raise TypeError("'discard_units' can only be set for code generation target 'numpy', without providing any code.") function.implementations.add_numpy_implementation(wrapped_func=func, dependencies=dependencies, discard_units=discard_units, compiler_kwds=compiler_kwds) else: function.implementations.add_implementation(target, code=code, dependencies=dependencies, namespace=namespace, name=name, compiler_kwds=compiler_kwds) return function return do_user_implementation
6,971,452,595,776,065,000
A simple decorator to extend user-written Python functions to work with code generation in other languages. Parameters ---------- target : str Name of the code generation target (e.g. ``'cython'``) for which to add an implementation. code : str or dict-like, optional What kind of code the target language expects is language-specific, e.g. C++ code allows for a dictionary of code blocks instead of a single string. namespaces : dict-like, optional A namespace dictionary (i.e. a mapping of names to values) that should be added to a `CodeObject` namespace when using this function. dependencies : dict-like, optional A mapping of names to `Function` objects, for additional functions needed by this function. discard_units: bool, optional Numpy functions can internally make use of the unit system. However, during a simulation run, state variables are passed around as unitless values for efficiency. If `discard_units` is set to ``False``, input arguments will have units added to them so that the function can still use units internally (the units will be stripped away from the return value as well). Alternatively, if `discard_units` is set to ``True``, the function will receive unitless values as its input. The namespace of the function will be altered to make references to units (e.g. ``ms``) refer to the corresponding floating point values so that no unit mismatch errors are raised. Note that this system cannot work in all cases, e.g. it does not work with functions that internally imports values (e.g. does ``from brian2 import ms``) or access values with units indirectly (e.g. uses ``brian2.ms`` instead of ``ms``). If no value is given, defaults to the preference setting `codegen.runtime.numpy.discard_units`. name : str, optional The name of the function in the target language. Should only be specified if the function has to be renamed for the target language. 
compiler_kwds : dict, optional Additional keyword arguments will be transferred to the code generation stage, e.g. for C++-based targets, the code can make use of additional header files by providing a list of strings as the ``headers`` argument. Notes ----- While it is in principle possible to provide a numpy implementation as an argument for this decorator, this is normally not necessary -- the numpy implementation should be provided in the decorated function. If this decorator is used with other decorators such as `check_units` or `declare_types`, it should be the uppermost decorator (that is, the last one to be applied). Examples -------- Sample usage:: @implementation('cpp',""" #include<math.h> inline double usersin(double x) { return sin(x); } """) def usersin(x): return sin(x)
brian2/core/functions.py
implementation
Ziaeemehr/brian2
python
def implementation(target, code=None, namespace=None, dependencies=None, discard_units=None, name=None, **compiler_kwds): '\n A simple decorator to extend user-written Python functions to work with code\n generation in other languages.\n\n Parameters\n ----------\n target : str\n Name of the code generation target (e.g. ``\'cython\'``) for which to add\n an implementation.\n code : str or dict-like, optional\n What kind of code the target language expects is language-specific,\n e.g. C++ code allows for a dictionary of code blocks instead of a\n single string.\n namespaces : dict-like, optional\n A namespace dictionary (i.e. a mapping of names to values) that\n should be added to a `CodeObject` namespace when using this function.\n dependencies : dict-like, optional\n A mapping of names to `Function` objects, for additional functions\n needed by this function.\n discard_units: bool, optional\n Numpy functions can internally make use of the unit system. However,\n during a simulation run, state variables are passed around as unitless\n values for efficiency. If `discard_units` is set to ``False``, input\n arguments will have units added to them so that the function can still\n use units internally (the units will be stripped away from the return\n value as well). Alternatively, if `discard_units` is set to ``True``,\n the function will receive unitless values as its input. The namespace\n of the function will be altered to make references to units (e.g.\n ``ms``) refer to the corresponding floating point values so that no\n unit mismatch errors are raised. Note that this system cannot work in\n all cases, e.g. it does not work with functions that internally imports\n values (e.g. does ``from brian2 import ms``) or access values with\n units indirectly (e.g. uses ``brian2.ms`` instead of ``ms``). If no\n value is given, defaults to the preference setting\n `codegen.runtime.numpy.discard_units`.\n name : str, optional\n The name of the function in the target language. 
Should only be\n specified if the function has to be renamed for the target language.\n compiler_kwds : dict, optional\n Additional keyword arguments will be transferred to the code generation\n stage, e.g. for C++-based targets, the code can make use of additional\n header files by providing a list of strings as the ``headers`` argument.\n\n Notes\n -----\n While it is in principle possible to provide a numpy implementation\n as an argument for this decorator, this is normally not necessary -- the\n numpy implementation should be provided in the decorated function.\n\n If this decorator is used with other decorators such as `check_units` or\n `declare_types`, it should be the uppermost decorator (that is, the\n last one to be applied).\n\n Examples\n --------\n Sample usage::\n\n @implementation(\'cpp\',"\n #include<math.h>\n inline double usersin(double x)\n {\n return sin(x);\n }\n ")\n def usersin(x):\n return sin(x)\n ' def do_user_implementation(func): if isinstance(func, Function): function = func else: function = Function(func) if discard_units: if (not ((target == 'numpy') and (code is None))): raise TypeError("'discard_units' can only be set for code generation target 'numpy', without providing any code.") function.implementations.add_numpy_implementation(wrapped_func=func, dependencies=dependencies, discard_units=discard_units, compiler_kwds=compiler_kwds) else: function.implementations.add_implementation(target, code=code, dependencies=dependencies, namespace=namespace, name=name, compiler_kwds=compiler_kwds) return function return do_user_implementation
def timestep(t, dt): '\n Converts a given time to an integer time step. This function slightly shifts\n the time before dividing it by ``dt`` to make sure that multiples of ``dt``\n do not end up in the preceding time step due to floating point issues. This\n function is used in the refractoriness calculation.\n\n .. versionadded:: 2.1.3\n\n Parameters\n ----------\n t : np.ndarray, float, Quantity\n The time to convert.\n dt : float or Quantity\n The length of a simulation time step.\n\n Returns\n -------\n ts : np.ndarray, np.int64\n The time step corresponding to the given time.\n\n Notes\n -----\n This function cannot handle infinity values, use big values instead (e.g.\n a `NeuronGroup` will use ``-1e4*second`` as the value of the ``lastspike``\n variable for neurons that never spiked).\n ' elapsed_steps = np.array(((t + (0.001 * dt)) / dt), dtype=np.int64) if (elapsed_steps.shape == ()): elapsed_steps = elapsed_steps.item() return elapsed_steps
-7,115,279,182,672,181,000
Converts a given time to an integer time step. This function slightly shifts the time before dividing it by ``dt`` to make sure that multiples of ``dt`` do not end up in the preceding time step due to floating point issues. This function is used in the refractoriness calculation. .. versionadded:: 2.1.3 Parameters ---------- t : np.ndarray, float, Quantity The time to convert. dt : float or Quantity The length of a simulation time step. Returns ------- ts : np.ndarray, np.int64 The time step corresponding to the given time. Notes ----- This function cannot handle infinity values, use big values instead (e.g. a `NeuronGroup` will use ``-1e4*second`` as the value of the ``lastspike`` variable for neurons that never spiked).
brian2/core/functions.py
timestep
Ziaeemehr/brian2
python
def timestep(t, dt): '\n Converts a given time to an integer time step. This function slightly shifts\n the time before dividing it by ``dt`` to make sure that multiples of ``dt``\n do not end up in the preceding time step due to floating point issues. This\n function is used in the refractoriness calculation.\n\n .. versionadded:: 2.1.3\n\n Parameters\n ----------\n t : np.ndarray, float, Quantity\n The time to convert.\n dt : float or Quantity\n The length of a simulation time step.\n\n Returns\n -------\n ts : np.ndarray, np.int64\n The time step corresponding to the given time.\n\n Notes\n -----\n This function cannot handle infinity values, use big values instead (e.g.\n a `NeuronGroup` will use ``-1e4*second`` as the value of the ``lastspike``\n variable for neurons that never spiked).\n ' elapsed_steps = np.array(((t + (0.001 * dt)) / dt), dtype=np.int64) if (elapsed_steps.shape == ()): elapsed_steps = elapsed_steps.item() return elapsed_steps
def is_locally_constant(self, dt): '\n Return whether this function (if interpreted as a function of time)\n should be considered constant over a timestep. This is most importantly\n used by `TimedArray` so that linear integration can be used. In its\n standard implementation, always returns ``False``.\n\n Parameters\n ----------\n dt : float\n The length of a timestep (without units).\n\n Returns\n -------\n constant : bool\n Whether the results of this function can be considered constant\n over one timestep of length `dt`.\n ' return False
-504,631,884,878,651,900
Return whether this function (if interpreted as a function of time) should be considered constant over a timestep. This is most importantly used by `TimedArray` so that linear integration can be used. In its standard implementation, always returns ``False``. Parameters ---------- dt : float The length of a timestep (without units). Returns ------- constant : bool Whether the results of this function can be considered constant over one timestep of length `dt`.
brian2/core/functions.py
is_locally_constant
Ziaeemehr/brian2
python
def is_locally_constant(self, dt): '\n Return whether this function (if interpreted as a function of time)\n should be considered constant over a timestep. This is most importantly\n used by `TimedArray` so that linear integration can be used. In its\n standard implementation, always returns ``False``.\n\n Parameters\n ----------\n dt : float\n The length of a timestep (without units).\n\n Returns\n -------\n constant : bool\n Whether the results of this function can be considered constant\n over one timestep of length `dt`.\n ' return False
def __getitem__(self, key): '\n Find an implementation for this function that can be used by the\n `CodeObject` given as `key`. Will find implementations registered\n for `key` itself (or one of its parents), or for the `CodeGenerator`\n class that `key` uses (or one of its parents). In all cases,\n implementations registered for the corresponding names qualify as well.\n\n Parameters\n ----------\n key : `CodeObject`\n The `CodeObject` that will use the `Function`\n\n Returns\n -------\n implementation : `FunctionImplementation`\n An implementation suitable for `key`.\n ' fallback = getattr(key, 'generator_class', None) fallback_parent = getattr(key, 'original_generator_class', None) for K in [key, fallback, fallback_parent]: name = getattr(K, 'class_name', 'no class name for key') for (impl_key, impl) in self._implementations.items(): impl_key_name = getattr(impl_key, 'class_name', 'no class name for implementation') if (((impl_key_name is not None) and (impl_key_name in [K, name])) or ((impl_key is not None) and (impl_key in [K, name]))): return impl if hasattr(K, '__bases__'): for cls in inspect.getmro(K): if (cls in self._implementations): return self._implementations[cls] name = getattr(cls, 'class_name', None) if (name in self._implementations): return self._implementations[name] if (getattr(key, 'class_name', None) is not None): key = key.class_name elif (getattr(fallback, 'class_name', None) is not None): key = fallback.class_name keys = ', '.join([getattr(k, 'class_name', str(k)) for k in self._implementations]) raise KeyError('No implementation available for target {key}. Available implementations: {keys}'.format(key=key, keys=keys))
-7,995,967,572,741,398,000
Find an implementation for this function that can be used by the `CodeObject` given as `key`. Will find implementations registered for `key` itself (or one of its parents), or for the `CodeGenerator` class that `key` uses (or one of its parents). In all cases, implementations registered for the corresponding names qualify as well. Parameters ---------- key : `CodeObject` The `CodeObject` that will use the `Function` Returns ------- implementation : `FunctionImplementation` An implementation suitable for `key`.
brian2/core/functions.py
__getitem__
Ziaeemehr/brian2
python
def __getitem__(self, key): '\n Find an implementation for this function that can be used by the\n `CodeObject` given as `key`. Will find implementations registered\n for `key` itself (or one of its parents), or for the `CodeGenerator`\n class that `key` uses (or one of its parents). In all cases,\n implementations registered for the corresponding names qualify as well.\n\n Parameters\n ----------\n key : `CodeObject`\n The `CodeObject` that will use the `Function`\n\n Returns\n -------\n implementation : `FunctionImplementation`\n An implementation suitable for `key`.\n ' fallback = getattr(key, 'generator_class', None) fallback_parent = getattr(key, 'original_generator_class', None) for K in [key, fallback, fallback_parent]: name = getattr(K, 'class_name', 'no class name for key') for (impl_key, impl) in self._implementations.items(): impl_key_name = getattr(impl_key, 'class_name', 'no class name for implementation') if (((impl_key_name is not None) and (impl_key_name in [K, name])) or ((impl_key is not None) and (impl_key in [K, name]))): return impl if hasattr(K, '__bases__'): for cls in inspect.getmro(K): if (cls in self._implementations): return self._implementations[cls] name = getattr(cls, 'class_name', None) if (name in self._implementations): return self._implementations[name] if (getattr(key, 'class_name', None) is not None): key = key.class_name elif (getattr(fallback, 'class_name', None) is not None): key = fallback.class_name keys = ', '.join([getattr(k, 'class_name', str(k)) for k in self._implementations]) raise KeyError('No implementation available for target {key}. Available implementations: {keys}'.format(key=key, keys=keys))
def add_numpy_implementation(self, wrapped_func, dependencies=None, discard_units=None, compiler_kwds=None): '\n Add a numpy implementation to a `Function`.\n\n Parameters\n ----------\n function : `Function`\n The function description for which an implementation should be added.\n wrapped_func : callable\n The original function (that will be used for the numpy implementation)\n dependencies : list of `Function`, optional\n A list of functions this function needs.\n discard_units : bool, optional\n See `implementation`.\n ' if (discard_units is None): discard_units = prefs['codegen.runtime.numpy.discard_units'] if hasattr(wrapped_func, '_orig_func'): orig_func = wrapped_func._orig_func else: orig_func = wrapped_func if discard_units: new_globals = dict(orig_func.__globals__) for (key, value) in new_globals.items(): if isinstance(value, Quantity): new_globals[key] = np.asarray(value) unitless_func = types.FunctionType(orig_func.__code__, new_globals, orig_func.__name__, orig_func.__defaults__, orig_func.__closure__) self._implementations['numpy'] = FunctionImplementation(name=None, code=unitless_func, dependencies=dependencies, compiler_kwds=None) else: def wrapper_function(*args): arg_units = list(self._function._arg_units) if self._function.auto_vectorise: arg_units += [DIMENSIONLESS] if (not (len(args) == len(arg_units))): raise ValueError(('Function %s got %d arguments, expected %d' % (self._function.pyfunc.__name__, len(args), len(arg_units)))) new_args = [] for (arg, arg_unit) in zip(args, arg_units): if ((arg_unit == bool) or (arg_unit is None) or isinstance(arg_unit, str)): new_args.append(arg) else: new_args.append(Quantity.with_dimensions(arg, get_dimensions(arg_unit))) result = orig_func(*new_args) if isinstance(self._function._return_unit, Callable): return_unit = self._function._return_unit(*[get_dimensions(a) for a in args]) else: return_unit = self._function._return_unit if (return_unit == bool): if (not (isinstance(result, bool) or 
(np.asarray(result).dtype == bool))): raise TypeError(('The function %s returned %s, but it was expected to return a boolean value ' % (orig_func.__name__, result))) elif ((isinstance(return_unit, int) and (return_unit == 1)) or (return_unit.dim is DIMENSIONLESS)): fail_for_dimension_mismatch(result, return_unit, ('The function %s returned {value}, but it was expected to return a dimensionless quantity' % orig_func.__name__), value=result) else: fail_for_dimension_mismatch(result, return_unit, ('The function %s returned {value}, but it was expected to return a quantity with units %r' % (orig_func.__name__, return_unit)), value=result) return np.asarray(result) self._implementations['numpy'] = FunctionImplementation(name=None, code=wrapper_function, dependencies=dependencies)
6,471,998,481,695,690,000
Add a numpy implementation to a `Function`. Parameters ---------- function : `Function` The function description for which an implementation should be added. wrapped_func : callable The original function (that will be used for the numpy implementation) dependencies : list of `Function`, optional A list of functions this function needs. discard_units : bool, optional See `implementation`.
brian2/core/functions.py
add_numpy_implementation
Ziaeemehr/brian2
python
def add_numpy_implementation(self, wrapped_func, dependencies=None, discard_units=None, compiler_kwds=None): '\n Add a numpy implementation to a `Function`.\n\n Parameters\n ----------\n function : `Function`\n The function description for which an implementation should be added.\n wrapped_func : callable\n The original function (that will be used for the numpy implementation)\n dependencies : list of `Function`, optional\n A list of functions this function needs.\n discard_units : bool, optional\n See `implementation`.\n ' if (discard_units is None): discard_units = prefs['codegen.runtime.numpy.discard_units'] if hasattr(wrapped_func, '_orig_func'): orig_func = wrapped_func._orig_func else: orig_func = wrapped_func if discard_units: new_globals = dict(orig_func.__globals__) for (key, value) in new_globals.items(): if isinstance(value, Quantity): new_globals[key] = np.asarray(value) unitless_func = types.FunctionType(orig_func.__code__, new_globals, orig_func.__name__, orig_func.__defaults__, orig_func.__closure__) self._implementations['numpy'] = FunctionImplementation(name=None, code=unitless_func, dependencies=dependencies, compiler_kwds=None) else: def wrapper_function(*args): arg_units = list(self._function._arg_units) if self._function.auto_vectorise: arg_units += [DIMENSIONLESS] if (not (len(args) == len(arg_units))): raise ValueError(('Function %s got %d arguments, expected %d' % (self._function.pyfunc.__name__, len(args), len(arg_units)))) new_args = [] for (arg, arg_unit) in zip(args, arg_units): if ((arg_unit == bool) or (arg_unit is None) or isinstance(arg_unit, str)): new_args.append(arg) else: new_args.append(Quantity.with_dimensions(arg, get_dimensions(arg_unit))) result = orig_func(*new_args) if isinstance(self._function._return_unit, Callable): return_unit = self._function._return_unit(*[get_dimensions(a) for a in args]) else: return_unit = self._function._return_unit if (return_unit == bool): if (not (isinstance(result, bool) or 
(np.asarray(result).dtype == bool))): raise TypeError(('The function %s returned %s, but it was expected to return a boolean value ' % (orig_func.__name__, result))) elif ((isinstance(return_unit, int) and (return_unit == 1)) or (return_unit.dim is DIMENSIONLESS)): fail_for_dimension_mismatch(result, return_unit, ('The function %s returned {value}, but it was expected to return a dimensionless quantity' % orig_func.__name__), value=result) else: fail_for_dimension_mismatch(result, return_unit, ('The function %s returned {value}, but it was expected to return a quantity with units %r' % (orig_func.__name__, return_unit)), value=result) return np.asarray(result) self._implementations['numpy'] = FunctionImplementation(name=None, code=wrapper_function, dependencies=dependencies)
def add_dynamic_implementation(self, target, code, namespace=None, dependencies=None, availability_check=None, name=None, compiler_kwds=None): '\n Adds an "dynamic implementation" for this function. `code` and `namespace`\n arguments are expected to be callables that will be called in\n `Network.before_run` with the owner of the `CodeObject` as an argument.\n This allows to generate code that depends on details of the context it\n is run in, e.g. the ``dt`` of a clock.\n ' if (not callable(code)): raise TypeError(('code argument has to be a callable, is type %s instead' % type(code))) if ((namespace is not None) and (not callable(namespace))): raise TypeError(('namespace argument has to be a callable, is type %s instead' % type(code))) self._implementations[target] = FunctionImplementation(name=name, code=code, namespace=namespace, dependencies=dependencies, availability_check=availability_check, dynamic=True, compiler_kwds=compiler_kwds)
502,401,531,114,870,500
Adds an "dynamic implementation" for this function. `code` and `namespace` arguments are expected to be callables that will be called in `Network.before_run` with the owner of the `CodeObject` as an argument. This allows to generate code that depends on details of the context it is run in, e.g. the ``dt`` of a clock.
brian2/core/functions.py
add_dynamic_implementation
Ziaeemehr/brian2
python
def add_dynamic_implementation(self, target, code, namespace=None, dependencies=None, availability_check=None, name=None, compiler_kwds=None): '\n Adds an "dynamic implementation" for this function. `code` and `namespace`\n arguments are expected to be callables that will be called in\n `Network.before_run` with the owner of the `CodeObject` as an argument.\n This allows to generate code that depends on details of the context it\n is run in, e.g. the ``dt`` of a clock.\n ' if (not callable(code)): raise TypeError(('code argument has to be a callable, is type %s instead' % type(code))) if ((namespace is not None) and (not callable(namespace))): raise TypeError(('namespace argument has to be a callable, is type %s instead' % type(code))) self._implementations[target] = FunctionImplementation(name=name, code=code, namespace=namespace, dependencies=dependencies, availability_check=availability_check, dynamic=True, compiler_kwds=compiler_kwds)
def fdiff(self, argindex=1): '\n Returns the first derivative of this function.\n ' if (argindex == 1): return (((sympy.exp(*self.args) * (self.args[0] - S.One)) + S.One) / (self.args[0] ** 2)) else: raise sympy.ArgumentIndexError(self, argindex)
1,287,340,907,368,516,400
Returns the first derivative of this function.
brian2/core/functions.py
fdiff
Ziaeemehr/brian2
python
def fdiff(self, argindex=1): '\n \n ' if (argindex == 1): return (((sympy.exp(*self.args) * (self.args[0] - S.One)) + S.One) / (self.args[0] ** 2)) else: raise sympy.ArgumentIndexError(self, argindex)
def get_sentiment(instances_content): 'Analyzing Sentiment in a String\n\n Args:\n text_content The text content to analyze\n ' scores = [] client = language_v1.LanguageServiceClient() encoding_type = enums.EncodingType.UTF8 language = 'en' type_ = enums.Document.Type.PLAIN_TEXT for content in instances_content: content = (content.encode('utf-8') if isinstance(content, unicode) else str(content)) document = {'content': content, 'type': type_, 'language': language} try: response = client.analyze_sentiment(document, encoding_type=encoding_type, timeout=30, retry=retry.Retry(deadline=60)) if response.document_sentiment.score: scores.append(response.document_sentiment.score) else: scores.append((- 1)) logging.error('Document sentiment score not found for {}'.format(content)) except exceptions.GoogleAPICallError as e: logging.exception(e) except exceptions.RetryError as e: logging.exception(e) except ValueError as e: logging.exception(e) return scores
6,303,273,581,906,286,000
Analyzing Sentiment in a String Args: text_content The text content to analyze
notebooks/samples/tensorflow/sentiment_analysis/dataflow/PubSubToBigQueryWithAPI.py
get_sentiment
dlminvestments/ai-platform-samples
python
def get_sentiment(instances_content): 'Analyzing Sentiment in a String\n\n Args:\n text_content The text content to analyze\n ' scores = [] client = language_v1.LanguageServiceClient() encoding_type = enums.EncodingType.UTF8 language = 'en' type_ = enums.Document.Type.PLAIN_TEXT for content in instances_content: content = (content.encode('utf-8') if isinstance(content, unicode) else str(content)) document = {'content': content, 'type': type_, 'language': language} try: response = client.analyze_sentiment(document, encoding_type=encoding_type, timeout=30, retry=retry.Retry(deadline=60)) if response.document_sentiment.score: scores.append(response.document_sentiment.score) else: scores.append((- 1)) logging.error('Document sentiment score not found for {}'.format(content)) except exceptions.GoogleAPICallError as e: logging.exception(e) except exceptions.RetryError as e: logging.exception(e) except ValueError as e: logging.exception(e) return scores
def prediction_helper(messages): 'Processes PubSub messages and calls AI Platform prediction.\n\n :param messages:\n :return:\n ' if (not isinstance(messages, list)): messages = [messages] instances = list(map((lambda message: json.loads(message)), messages)) scores = get_sentiment([instance['text'] for instance in instances if instance.get('text')]) if (len(scores) == len(instances)): for (i, instance) in enumerate(instances): logging.info('Processed {} instances.'.format(len(instances))) instance['sentiment'] = scores[i] return instances logging.error('Invalid scores {} instances {}'.format(len(scores), len(instances))) logging.error(instances) return
-5,345,835,277,603,021,000
Processes PubSub messages and calls AI Platform prediction. :param messages: :return:
notebooks/samples/tensorflow/sentiment_analysis/dataflow/PubSubToBigQueryWithAPI.py
prediction_helper
dlminvestments/ai-platform-samples
python
def prediction_helper(messages): 'Processes PubSub messages and calls AI Platform prediction.\n\n :param messages:\n :return:\n ' if (not isinstance(messages, list)): messages = [messages] instances = list(map((lambda message: json.loads(message)), messages)) scores = get_sentiment([instance['text'] for instance in instances if instance.get('text')]) if (len(scores) == len(instances)): for (i, instance) in enumerate(instances): logging.info('Processed {} instances.'.format(len(instances))) instance['sentiment'] = scores[i] return instances logging.error('Invalid scores {} instances {}'.format(len(scores), len(instances))) logging.error(instances) return
def run(args, pipeline_args=None): 'Executes Pipeline.\n\n :param args:\n :param pipeline_args:\n :return:\n ' 'Build and run the pipeline.' pipeline_options = PipelineOptions(pipeline_args, streaming=True, save_main_session=True) pipeline_options.view_as(StandardOptions).runner = args.runner google_cloud_options = pipeline_options.view_as(GoogleCloudOptions) google_cloud_options.project = PROJECT_ID google_cloud_options.job_name = 'pubsub-api-bigquery' google_cloud_options.staging_location = args.staging_location google_cloud_options.temp_location = args.temp_location google_cloud_options.region = args.region p = beam.Pipeline(options=pipeline_options) lines = (p | ('read in tweets' >> beam.io.ReadFromPubSub(topic=args.input_topic, with_attributes=False, id_label='tweet_id'))) output_tweets = (((lines | ('assign window key' >> beam.WindowInto(window.FixedWindows(args.window_size)))) | ('batch into n batches' >> BatchElements(min_batch_size=args.min_batch_size, max_batch_size=args.max_batch_size))) | ('predict sentiment' >> beam.FlatMap((lambda messages: prediction_helper(messages))))) bq_schema_json = {'fields': [{'name': 'id', 'type': 'STRING'}, {'name': 'text', 'type': 'STRING'}, {'name': 'user_id', 'type': 'STRING'}, {'name': 'sentiment', 'type': 'FLOAT'}, {'name': 'posted_at', 'type': 'TIMESTAMP'}, {'name': 'favorite_count', 'type': 'INTEGER'}, {'name': 'retweet_count', 'type': 'INTEGER'}, {'name': 'media', 'type': 'STRING'}]} bq_schema = parse_table_schema_from_json(json.dumps(bq_schema_json)) (output_tweets | ('store twitter posts' >> beam.io.WriteToBigQuery(table=args.bigquery_table, dataset=args.bigquery_dataset, schema=bq_schema, write_disposition=beam.io.BigQueryDisposition.WRITE_APPEND, create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED, project=PROJECT_ID))) result = p.run() result.wait_until_finish()
-7,893,245,789,689,544,000
Executes Pipeline. :param args: :param pipeline_args: :return:
notebooks/samples/tensorflow/sentiment_analysis/dataflow/PubSubToBigQueryWithAPI.py
run
dlminvestments/ai-platform-samples
python
def run(args, pipeline_args=None): 'Executes Pipeline.\n\n :param args:\n :param pipeline_args:\n :return:\n ' 'Build and run the pipeline.' pipeline_options = PipelineOptions(pipeline_args, streaming=True, save_main_session=True) pipeline_options.view_as(StandardOptions).runner = args.runner google_cloud_options = pipeline_options.view_as(GoogleCloudOptions) google_cloud_options.project = PROJECT_ID google_cloud_options.job_name = 'pubsub-api-bigquery' google_cloud_options.staging_location = args.staging_location google_cloud_options.temp_location = args.temp_location google_cloud_options.region = args.region p = beam.Pipeline(options=pipeline_options) lines = (p | ('read in tweets' >> beam.io.ReadFromPubSub(topic=args.input_topic, with_attributes=False, id_label='tweet_id'))) output_tweets = (((lines | ('assign window key' >> beam.WindowInto(window.FixedWindows(args.window_size)))) | ('batch into n batches' >> BatchElements(min_batch_size=args.min_batch_size, max_batch_size=args.max_batch_size))) | ('predict sentiment' >> beam.FlatMap((lambda messages: prediction_helper(messages))))) bq_schema_json = {'fields': [{'name': 'id', 'type': 'STRING'}, {'name': 'text', 'type': 'STRING'}, {'name': 'user_id', 'type': 'STRING'}, {'name': 'sentiment', 'type': 'FLOAT'}, {'name': 'posted_at', 'type': 'TIMESTAMP'}, {'name': 'favorite_count', 'type': 'INTEGER'}, {'name': 'retweet_count', 'type': 'INTEGER'}, {'name': 'media', 'type': 'STRING'}]} bq_schema = parse_table_schema_from_json(json.dumps(bq_schema_json)) (output_tweets | ('store twitter posts' >> beam.io.WriteToBigQuery(table=args.bigquery_table, dataset=args.bigquery_dataset, schema=bq_schema, write_disposition=beam.io.BigQueryDisposition.WRITE_APPEND, create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED, project=PROJECT_ID))) result = p.run() result.wait_until_finish()
@staticmethod def process(element, publish_time=beam.DoFn.TimestampParam): 'Processes each incoming windowed element by extracting the Pub/Sub\n message and its publish timestamp into a dictionary. `publish_time`\n defaults to the publish timestamp returned by the Pub/Sub server. It\n is bound to each element by Beam at runtime.\n ' (yield {'message_body': element.decode('utf-8'), 'publish_time': datetime.datetime.utcfromtimestamp(float(publish_time)).strftime('%Y-%m-%d %H:%M:%S.%f')})
-394,851,617,096,089,500
Processes each incoming windowed element by extracting the Pub/Sub message and its publish timestamp into a dictionary. `publish_time` defaults to the publish timestamp returned by the Pub/Sub server. It is bound to each element by Beam at runtime.
notebooks/samples/tensorflow/sentiment_analysis/dataflow/PubSubToBigQueryWithAPI.py
process
dlminvestments/ai-platform-samples
python
@staticmethod def process(element, publish_time=beam.DoFn.TimestampParam): 'Processes each incoming windowed element by extracting the Pub/Sub\n message and its publish timestamp into a dictionary. `publish_time`\n defaults to the publish timestamp returned by the Pub/Sub server. It\n is bound to each element by Beam at runtime.\n ' (yield {'message_body': element.decode('utf-8'), 'publish_time': datetime.datetime.utcfromtimestamp(float(publish_time)).strftime('%Y-%m-%d %H:%M:%S.%f')})
def logp_ab(value): ' prior density' return tt.log(tt.pow(tt.sum(value), ((- 5) / 2)))
-8,962,368,835,196,094,000
prior density
scripts/hbayes_binom_rats_pymc3.py
logp_ab
NamDinhRobotics/pyprobml
python
def logp_ab(value): ' ' return tt.log(tt.pow(tt.sum(value), ((- 5) / 2)))
def test_help(self): 'Does the help display without error?' run_nbgrader(['export', '--help-all'])
-8,138,394,947,637,915,000
Does the help display without error?
nbgrader/tests/apps/test_nbgrader_export.py
test_help
FrattisUC/nbgrader
python
def test_help(self): run_nbgrader(['export', '--help-all'])
def __init__(self, loc, scale, low, high, validate_args=False, allow_nan_stats=True, name='TruncatedCauchy'): "Construct a TruncatedCauchy.\n\n All parameters of the distribution will be broadcast to the same shape,\n so the resulting distribution will have a batch_shape of the broadcast\n shape of all parameters.\n\n Args:\n loc: Floating point tensor; the modes of the corresponding non-truncated\n Cauchy distribution(s).\n scale: Floating point tensor; the scales of the distribution(s).\n Must contain only positive values.\n low: `float` `Tensor` representing lower bound of the distribution's\n support. Must be such that `low < high`.\n high: `float` `Tensor` representing upper bound of the distribution's\n support. Must be such that `low < high`.\n validate_args: Python `bool`, default `False`. When `True` distribution\n parameters are checked at run-time.\n allow_nan_stats: Python `bool`, default `True`. When `True`,\n statistics (e.g., mean, mode, variance) use the value '`NaN`' to\n indicate the result is undefined. When `False`, an exception is raised\n if one or more of the statistic's batch members are undefined.\n name: Python `str` name prefixed to Ops created by this class.\n " parameters = dict(locals()) with tf.name_scope(name) as name: dtype = dtype_util.common_dtype([loc, scale, low, high], tf.float32) self._loc = tensor_util.convert_nonref_to_tensor(loc, name='loc', dtype=dtype) self._scale = tensor_util.convert_nonref_to_tensor(scale, name='scale', dtype=dtype) self._low = tensor_util.convert_nonref_to_tensor(low, name='low', dtype=dtype) self._high = tensor_util.convert_nonref_to_tensor(high, name='high', dtype=dtype) dtype_util.assert_same_float_dtype([self._loc, self._scale, self._low, self._high]) super(TruncatedCauchy, self).__init__(dtype=dtype, reparameterization_type=reparameterization.NOT_REPARAMETERIZED, validate_args=validate_args, allow_nan_stats=allow_nan_stats, parameters=parameters, name=name)
4,228,639,656,235,631,600
Construct a TruncatedCauchy. All parameters of the distribution will be broadcast to the same shape, so the resulting distribution will have a batch_shape of the broadcast shape of all parameters. Args: loc: Floating point tensor; the modes of the corresponding non-truncated Cauchy distribution(s). scale: Floating point tensor; the scales of the distribution(s). Must contain only positive values. low: `float` `Tensor` representing lower bound of the distribution's support. Must be such that `low < high`. high: `float` `Tensor` representing upper bound of the distribution's support. Must be such that `low < high`. validate_args: Python `bool`, default `False`. When `True` distribution parameters are checked at run-time. allow_nan_stats: Python `bool`, default `True`. When `True`, statistics (e.g., mean, mode, variance) use the value '`NaN`' to indicate the result is undefined. When `False`, an exception is raised if one or more of the statistic's batch members are undefined. name: Python `str` name prefixed to Ops created by this class.
tensorflow_probability/python/distributions/truncated_cauchy.py
__init__
jeffpollock9/probability
python
def __init__(self, loc, scale, low, high, validate_args=False, allow_nan_stats=True, name='TruncatedCauchy'): "Construct a TruncatedCauchy.\n\n All parameters of the distribution will be broadcast to the same shape,\n so the resulting distribution will have a batch_shape of the broadcast\n shape of all parameters.\n\n Args:\n loc: Floating point tensor; the modes of the corresponding non-truncated\n Cauchy distribution(s).\n scale: Floating point tensor; the scales of the distribution(s).\n Must contain only positive values.\n low: `float` `Tensor` representing lower bound of the distribution's\n support. Must be such that `low < high`.\n high: `float` `Tensor` representing upper bound of the distribution's\n support. Must be such that `low < high`.\n validate_args: Python `bool`, default `False`. When `True` distribution\n parameters are checked at run-time.\n allow_nan_stats: Python `bool`, default `True`. When `True`,\n statistics (e.g., mean, mode, variance) use the value '`NaN`' to\n indicate the result is undefined. When `False`, an exception is raised\n if one or more of the statistic's batch members are undefined.\n name: Python `str` name prefixed to Ops created by this class.\n " parameters = dict(locals()) with tf.name_scope(name) as name: dtype = dtype_util.common_dtype([loc, scale, low, high], tf.float32) self._loc = tensor_util.convert_nonref_to_tensor(loc, name='loc', dtype=dtype) self._scale = tensor_util.convert_nonref_to_tensor(scale, name='scale', dtype=dtype) self._low = tensor_util.convert_nonref_to_tensor(low, name='low', dtype=dtype) self._high = tensor_util.convert_nonref_to_tensor(high, name='high', dtype=dtype) dtype_util.assert_same_float_dtype([self._loc, self._scale, self._low, self._high]) super(TruncatedCauchy, self).__init__(dtype=dtype, reparameterization_type=reparameterization.NOT_REPARAMETERIZED, validate_args=validate_args, allow_nan_stats=allow_nan_stats, parameters=parameters, name=name)
def _redefines_import(node): 'Detect that the given node (AssignName) is inside an\n exception handler and redefines an import from the tryexcept body.\n Returns True if the node redefines an import, False otherwise.\n ' current = node while (current and (not isinstance(current.parent, astroid.ExceptHandler))): current = current.parent if ((not current) or (not utils.error_of_type(current.parent, ImportError))): return False try_block = current.parent.parent for import_node in try_block.nodes_of_class((astroid.ImportFrom, astroid.Import)): for (name, alias) in import_node.names: if alias: if (alias == node.name): return True elif (name == node.name): return True return False
-3,549,627,939,768,581,000
Detect that the given node (AssignName) is inside an exception handler and redefines an import from the tryexcept body. Returns True if the node redefines an import, False otherwise.
venv/lib/python3.8/site-packages/pylint/checkers/base.py
_redefines_import
DiegoSilvaHoffmann/Small-Ecommerce
python
def _redefines_import(node): 'Detect that the given node (AssignName) is inside an\n exception handler and redefines an import from the tryexcept body.\n Returns True if the node redefines an import, False otherwise.\n ' current = node while (current and (not isinstance(current.parent, astroid.ExceptHandler))): current = current.parent if ((not current) or (not utils.error_of_type(current.parent, ImportError))): return False try_block = current.parent.parent for import_node in try_block.nodes_of_class((astroid.ImportFrom, astroid.Import)): for (name, alias) in import_node.names: if alias: if (alias == node.name): return True elif (name == node.name): return True return False
def in_loop(node): 'return True if the node is inside a kind of for loop' parent = node.parent while (parent is not None): if isinstance(parent, (astroid.For, astroid.ListComp, astroid.SetComp, astroid.DictComp, astroid.GeneratorExp)): return True parent = parent.parent return False
-8,813,213,378,525,552,000
return True if the node is inside a kind of for loop
venv/lib/python3.8/site-packages/pylint/checkers/base.py
in_loop
DiegoSilvaHoffmann/Small-Ecommerce
python
def in_loop(node): parent = node.parent while (parent is not None): if isinstance(parent, (astroid.For, astroid.ListComp, astroid.SetComp, astroid.DictComp, astroid.GeneratorExp)): return True parent = parent.parent return False
def in_nested_list(nested_list, obj): 'return true if the object is an element of <nested_list> or of a nested\n list\n ' for elmt in nested_list: if isinstance(elmt, (list, tuple)): if in_nested_list(elmt, obj): return True elif (elmt == obj): return True return False
7,580,033,024,259,611,000
return true if the object is an element of <nested_list> or of a nested list
venv/lib/python3.8/site-packages/pylint/checkers/base.py
in_nested_list
DiegoSilvaHoffmann/Small-Ecommerce
python
def in_nested_list(nested_list, obj): 'return true if the object is an element of <nested_list> or of a nested\n list\n ' for elmt in nested_list: if isinstance(elmt, (list, tuple)): if in_nested_list(elmt, obj): return True elif (elmt == obj): return True return False
def _get_break_loop_node(break_node): '\n Returns the loop node that holds the break node in arguments.\n\n Args:\n break_node (astroid.Break): the break node of interest.\n\n Returns:\n astroid.For or astroid.While: the loop node holding the break node.\n ' loop_nodes = (astroid.For, astroid.While) parent = break_node.parent while ((not isinstance(parent, loop_nodes)) or (break_node in getattr(parent, 'orelse', []))): break_node = parent parent = parent.parent if (parent is None): break return parent
-3,874,634,519,098,343,400
Returns the loop node that holds the break node in arguments. Args: break_node (astroid.Break): the break node of interest. Returns: astroid.For or astroid.While: the loop node holding the break node.
venv/lib/python3.8/site-packages/pylint/checkers/base.py
_get_break_loop_node
DiegoSilvaHoffmann/Small-Ecommerce
python
def _get_break_loop_node(break_node): '\n Returns the loop node that holds the break node in arguments.\n\n Args:\n break_node (astroid.Break): the break node of interest.\n\n Returns:\n astroid.For or astroid.While: the loop node holding the break node.\n ' loop_nodes = (astroid.For, astroid.While) parent = break_node.parent while ((not isinstance(parent, loop_nodes)) or (break_node in getattr(parent, 'orelse', []))): break_node = parent parent = parent.parent if (parent is None): break return parent
def _loop_exits_early(loop): '\n Returns true if a loop may ends up in a break statement.\n\n Args:\n loop (astroid.For, astroid.While): the loop node inspected.\n\n Returns:\n bool: True if the loop may ends up in a break statement, False otherwise.\n ' loop_nodes = (astroid.For, astroid.While) definition_nodes = (astroid.FunctionDef, astroid.ClassDef) inner_loop_nodes = [_node for _node in loop.nodes_of_class(loop_nodes, skip_klass=definition_nodes) if (_node != loop)] return any((_node for _node in loop.nodes_of_class(astroid.Break, skip_klass=definition_nodes) if (_get_break_loop_node(_node) not in inner_loop_nodes)))
-294,256,959,147,377,340
Returns true if a loop may ends up in a break statement. Args: loop (astroid.For, astroid.While): the loop node inspected. Returns: bool: True if the loop may ends up in a break statement, False otherwise.
venv/lib/python3.8/site-packages/pylint/checkers/base.py
_loop_exits_early
DiegoSilvaHoffmann/Small-Ecommerce
python
def _loop_exits_early(loop): '\n Returns true if a loop may ends up in a break statement.\n\n Args:\n loop (astroid.For, astroid.While): the loop node inspected.\n\n Returns:\n bool: True if the loop may ends up in a break statement, False otherwise.\n ' loop_nodes = (astroid.For, astroid.While) definition_nodes = (astroid.FunctionDef, astroid.ClassDef) inner_loop_nodes = [_node for _node in loop.nodes_of_class(loop_nodes, skip_klass=definition_nodes) if (_node != loop)] return any((_node for _node in loop.nodes_of_class(astroid.Break, skip_klass=definition_nodes) if (_get_break_loop_node(_node) not in inner_loop_nodes)))
def _get_properties(config): "Returns a tuple of property classes and names.\n\n Property classes are fully qualified, such as 'abc.abstractproperty' and\n property names are the actual names, such as 'abstract_property'.\n " property_classes = {BUILTIN_PROPERTY} property_names = set() if (config is not None): property_classes.update(config.property_classes) property_names.update((prop.rsplit('.', 1)[(- 1)] for prop in config.property_classes)) return (property_classes, property_names)
-2,447,406,039,172,952,600
Returns a tuple of property classes and names. Property classes are fully qualified, such as 'abc.abstractproperty' and property names are the actual names, such as 'abstract_property'.
venv/lib/python3.8/site-packages/pylint/checkers/base.py
_get_properties
DiegoSilvaHoffmann/Small-Ecommerce
python
def _get_properties(config): "Returns a tuple of property classes and names.\n\n Property classes are fully qualified, such as 'abc.abstractproperty' and\n property names are the actual names, such as 'abstract_property'.\n " property_classes = {BUILTIN_PROPERTY} property_names = set() if (config is not None): property_classes.update(config.property_classes) property_names.update((prop.rsplit('.', 1)[(- 1)] for prop in config.property_classes)) return (property_classes, property_names)
def _determine_function_name_type(node: astroid.FunctionDef, config=None): "Determine the name type whose regex the a function's name should match.\n\n :param node: A function node.\n :param config: Configuration from which to pull additional property classes.\n :type config: :class:`optparse.Values`\n\n :returns: One of ('function', 'method', 'attr')\n :rtype: str\n " (property_classes, property_names) = _get_properties(config) if (not node.is_method()): return 'function' if (is_property_setter(node) or is_property_deleter(node)): return 'attr' if node.decorators: decorators = node.decorators.nodes else: decorators = [] for decorator in decorators: if (isinstance(decorator, astroid.Name) or (isinstance(decorator, astroid.Attribute) and (decorator.attrname in property_names))): inferred = utils.safe_infer(decorator) if (inferred and hasattr(inferred, 'qname') and (inferred.qname() in property_classes)): return 'attr' return 'method'
4,250,182,075,832,558,600
Determine the name type whose regex the a function's name should match. :param node: A function node. :param config: Configuration from which to pull additional property classes. :type config: :class:`optparse.Values` :returns: One of ('function', 'method', 'attr') :rtype: str
venv/lib/python3.8/site-packages/pylint/checkers/base.py
_determine_function_name_type
DiegoSilvaHoffmann/Small-Ecommerce
python
def _determine_function_name_type(node: astroid.FunctionDef, config=None): "Determine the name type whose regex the a function's name should match.\n\n :param node: A function node.\n :param config: Configuration from which to pull additional property classes.\n :type config: :class:`optparse.Values`\n\n :returns: One of ('function', 'method', 'attr')\n :rtype: str\n " (property_classes, property_names) = _get_properties(config) if (not node.is_method()): return 'function' if (is_property_setter(node) or is_property_deleter(node)): return 'attr' if node.decorators: decorators = node.decorators.nodes else: decorators = [] for decorator in decorators: if (isinstance(decorator, astroid.Name) or (isinstance(decorator, astroid.Attribute) and (decorator.attrname in property_names))): inferred = utils.safe_infer(decorator) if (inferred and hasattr(inferred, 'qname') and (inferred.qname() in property_classes)): return 'attr' return 'method'
def _has_abstract_methods(node): '\n Determine if the given `node` has abstract methods.\n\n The methods should be made abstract by decorating them\n with `abc` decorators.\n ' return (len(utils.unimplemented_abstract_methods(node)) > 0)
3,715,557,256,578,693,600
Determine if the given `node` has abstract methods. The methods should be made abstract by decorating them with `abc` decorators.
venv/lib/python3.8/site-packages/pylint/checkers/base.py
_has_abstract_methods
DiegoSilvaHoffmann/Small-Ecommerce
python
def _has_abstract_methods(node): '\n Determine if the given `node` has abstract methods.\n\n The methods should be made abstract by decorating them\n with `abc` decorators.\n ' return (len(utils.unimplemented_abstract_methods(node)) > 0)
def report_by_type_stats(sect, stats, old_stats): 'make a report of\n\n * percentage of different types documented\n * percentage of different types with a bad name\n ' nice_stats = {} for node_type in ('module', 'class', 'method', 'function'): try: total = stats[node_type] except KeyError as e: raise exceptions.EmptyReportError() from e nice_stats[node_type] = {} if (total != 0): try: documented = (total - stats[('undocumented_' + node_type)]) percent = ((documented * 100.0) / total) nice_stats[node_type]['percent_documented'] = ('%.2f' % percent) except KeyError: nice_stats[node_type]['percent_documented'] = 'NC' try: percent = ((stats[('badname_' + node_type)] * 100.0) / total) nice_stats[node_type]['percent_badname'] = ('%.2f' % percent) except KeyError: nice_stats[node_type]['percent_badname'] = 'NC' lines = ('type', 'number', 'old number', 'difference', '%documented', '%badname') for node_type in ('module', 'class', 'method', 'function'): new = stats[node_type] old = old_stats.get(node_type, None) if (old is not None): diff_str = lint_utils.diff_string(old, new) else: (old, diff_str) = ('NC', 'NC') lines += (node_type, str(new), str(old), diff_str, nice_stats[node_type].get('percent_documented', '0'), nice_stats[node_type].get('percent_badname', '0')) sect.append(reporter_nodes.Table(children=lines, cols=6, rheaders=1))
7,206,156,904,805,886,000
make a report of * percentage of different types documented * percentage of different types with a bad name
venv/lib/python3.8/site-packages/pylint/checkers/base.py
report_by_type_stats
DiegoSilvaHoffmann/Small-Ecommerce
python
def report_by_type_stats(sect, stats, old_stats): 'make a report of\n\n * percentage of different types documented\n * percentage of different types with a bad name\n ' nice_stats = {} for node_type in ('module', 'class', 'method', 'function'): try: total = stats[node_type] except KeyError as e: raise exceptions.EmptyReportError() from e nice_stats[node_type] = {} if (total != 0): try: documented = (total - stats[('undocumented_' + node_type)]) percent = ((documented * 100.0) / total) nice_stats[node_type]['percent_documented'] = ('%.2f' % percent) except KeyError: nice_stats[node_type]['percent_documented'] = 'NC' try: percent = ((stats[('badname_' + node_type)] * 100.0) / total) nice_stats[node_type]['percent_badname'] = ('%.2f' % percent) except KeyError: nice_stats[node_type]['percent_badname'] = 'NC' lines = ('type', 'number', 'old number', 'difference', '%documented', '%badname') for node_type in ('module', 'class', 'method', 'function'): new = stats[node_type] old = old_stats.get(node_type, None) if (old is not None): diff_str = lint_utils.diff_string(old, new) else: (old, diff_str) = ('NC', 'NC') lines += (node_type, str(new), str(old), diff_str, nice_stats[node_type].get('percent_documented', '0'), nice_stats[node_type].get('percent_badname', '0')) sect.append(reporter_nodes.Table(children=lines, cols=6, rheaders=1))
def redefined_by_decorator(node): 'return True if the object is a method redefined via decorator.\n\n For example:\n @property\n def x(self): return self._x\n @x.setter\n def x(self, value): self._x = value\n ' if node.decorators: for decorator in node.decorators.nodes: if (isinstance(decorator, astroid.Attribute) and (getattr(decorator.expr, 'name', None) == node.name)): return True return False
1,408,302,441,557,557,800
return True if the object is a method redefined via decorator. For example: @property def x(self): return self._x @x.setter def x(self, value): self._x = value
venv/lib/python3.8/site-packages/pylint/checkers/base.py
redefined_by_decorator
DiegoSilvaHoffmann/Small-Ecommerce
python
def redefined_by_decorator(node): 'return True if the object is a method redefined via decorator.\n\n For example:\n @property\n def x(self): return self._x\n @x.setter\n def x(self, value): self._x = value\n ' if node.decorators: for decorator in node.decorators.nodes: if (isinstance(decorator, astroid.Attribute) and (getattr(decorator.expr, 'name', None) == node.name)): return True return False
def _is_one_arg_pos_call(call): 'Is this a call with exactly 1 argument,\n where that argument is positional?\n ' return (isinstance(call, astroid.Call) and (len(call.args) == 1) and (not call.keywords))
2,640,663,257,080,729,600
Is this a call with exactly 1 argument, where that argument is positional?
venv/lib/python3.8/site-packages/pylint/checkers/base.py
_is_one_arg_pos_call
DiegoSilvaHoffmann/Small-Ecommerce
python
def _is_one_arg_pos_call(call): 'Is this a call with exactly 1 argument,\n where that argument is positional?\n ' return (isinstance(call, astroid.Call) and (len(call.args) == 1) and (not call.keywords))
def register(linter): 'required method to auto register this checker' linter.register_checker(BasicErrorChecker(linter)) linter.register_checker(BasicChecker(linter)) linter.register_checker(NameChecker(linter)) linter.register_checker(DocStringChecker(linter)) linter.register_checker(PassChecker(linter)) linter.register_checker(ComparisonChecker(linter))
4,990,916,917,822,871,000
required method to auto register this checker
venv/lib/python3.8/site-packages/pylint/checkers/base.py
register
DiegoSilvaHoffmann/Small-Ecommerce
python
def register(linter): linter.register_checker(BasicErrorChecker(linter)) linter.register_checker(BasicChecker(linter)) linter.register_checker(NameChecker(linter)) linter.register_checker(DocStringChecker(linter)) linter.register_checker(PassChecker(linter)) linter.register_checker(ComparisonChecker(linter))
@utils.check_messages('star-needs-assignment-target') def visit_starred(self, node): 'Check that a Starred expression is used in an assignment target.' if isinstance(node.parent, astroid.Call): return if isinstance(node.parent, (astroid.List, astroid.Tuple, astroid.Set, astroid.Dict)): return stmt = node.statement() if (not isinstance(stmt, astroid.Assign)): return if ((stmt.value is node) or stmt.value.parent_of(node)): self.add_message('star-needs-assignment-target', node=node)
-4,635,363,187,658,599,000
Check that a Starred expression is used in an assignment target.
venv/lib/python3.8/site-packages/pylint/checkers/base.py
visit_starred
DiegoSilvaHoffmann/Small-Ecommerce
python
@utils.check_messages('star-needs-assignment-target') def visit_starred(self, node): if isinstance(node.parent, astroid.Call): return if isinstance(node.parent, (astroid.List, astroid.Tuple, astroid.Set, astroid.Dict)): return stmt = node.statement() if (not isinstance(stmt, astroid.Assign)): return if ((stmt.value is node) or stmt.value.parent_of(node)): self.add_message('star-needs-assignment-target', node=node)
def _check_nonlocal_and_global(self, node): 'Check that a name is both nonlocal and global.' def same_scope(current): return (current.scope() is node) from_iter = itertools.chain.from_iterable nonlocals = set(from_iter((child.names for child in node.nodes_of_class(astroid.Nonlocal) if same_scope(child)))) if (not nonlocals): return global_vars = set(from_iter((child.names for child in node.nodes_of_class(astroid.Global) if same_scope(child)))) for name in nonlocals.intersection(global_vars): self.add_message('nonlocal-and-global', args=(name,), node=node)
6,190,297,754,518,350,000
Check that a name is both nonlocal and global.
venv/lib/python3.8/site-packages/pylint/checkers/base.py
_check_nonlocal_and_global
DiegoSilvaHoffmann/Small-Ecommerce
python
def _check_nonlocal_and_global(self, node): def same_scope(current): return (current.scope() is node) from_iter = itertools.chain.from_iterable nonlocals = set(from_iter((child.names for child in node.nodes_of_class(astroid.Nonlocal) if same_scope(child)))) if (not nonlocals): return global_vars = set(from_iter((child.names for child in node.nodes_of_class(astroid.Global) if same_scope(child)))) for name in nonlocals.intersection(global_vars): self.add_message('nonlocal-and-global', args=(name,), node=node)
@utils.check_messages('nonexistent-operator') def visit_unaryop(self, node): 'check use of the non-existent ++ and -- operator operator' if ((node.op in '+-') and isinstance(node.operand, astroid.UnaryOp) and (node.operand.op == node.op)): self.add_message('nonexistent-operator', node=node, args=(node.op * 2))
6,697,742,082,378,500,000
check use of the non-existent ++ and -- operator operator
venv/lib/python3.8/site-packages/pylint/checkers/base.py
visit_unaryop
DiegoSilvaHoffmann/Small-Ecommerce
python
@utils.check_messages('nonexistent-operator') def visit_unaryop(self, node): if ((node.op in '+-') and isinstance(node.operand, astroid.UnaryOp) and (node.operand.op == node.op)): self.add_message('nonexistent-operator', node=node, args=(node.op * 2))
@utils.check_messages('abstract-class-instantiated') def visit_call(self, node): 'Check instantiating abstract class with\n abc.ABCMeta as metaclass.\n ' try: for inferred in node.func.infer(): self._check_inferred_class_is_abstract(inferred, node) except astroid.InferenceError: return
2,693,506,591,139,922,400
Check instantiating abstract class with abc.ABCMeta as metaclass.
venv/lib/python3.8/site-packages/pylint/checkers/base.py
visit_call
DiegoSilvaHoffmann/Small-Ecommerce
python
@utils.check_messages('abstract-class-instantiated') def visit_call(self, node): 'Check instantiating abstract class with\n abc.ABCMeta as metaclass.\n ' try: for inferred in node.func.infer(): self._check_inferred_class_is_abstract(inferred, node) except astroid.InferenceError: return
def _check_else_on_loop(self, node): 'Check that any loop with an else clause has a break statement.' if (node.orelse and (not _loop_exits_early(node))): self.add_message('useless-else-on-loop', node=node, line=(node.orelse[0].lineno - 1))
7,660,603,101,913,394,000
Check that any loop with an else clause has a break statement.
venv/lib/python3.8/site-packages/pylint/checkers/base.py
_check_else_on_loop
DiegoSilvaHoffmann/Small-Ecommerce
python
def _check_else_on_loop(self, node): if (node.orelse and (not _loop_exits_early(node))): self.add_message('useless-else-on-loop', node=node, line=(node.orelse[0].lineno - 1))
def _check_in_loop(self, node, node_name): 'check that a node is inside a for or while loop' _node = node.parent while _node: if isinstance(_node, (astroid.For, astroid.While)): if (node not in _node.orelse): return if isinstance(_node, (astroid.ClassDef, astroid.FunctionDef)): break if (isinstance(_node, astroid.TryFinally) and (node in _node.finalbody) and isinstance(node, astroid.Continue)): self.add_message('continue-in-finally', node=node) _node = _node.parent self.add_message('not-in-loop', node=node, args=node_name)
-3,765,741,054,601,940,000
check that a node is inside a for or while loop
venv/lib/python3.8/site-packages/pylint/checkers/base.py
_check_in_loop
DiegoSilvaHoffmann/Small-Ecommerce
python
def _check_in_loop(self, node, node_name): _node = node.parent while _node: if isinstance(_node, (astroid.For, astroid.While)): if (node not in _node.orelse): return if isinstance(_node, (astroid.ClassDef, astroid.FunctionDef)): break if (isinstance(_node, astroid.TryFinally) and (node in _node.finalbody) and isinstance(node, astroid.Continue)): self.add_message('continue-in-finally', node=node) _node = _node.parent self.add_message('not-in-loop', node=node, args=node_name)
def _check_redefinition(self, redeftype, node): 'check for redefinition of a function / method / class name' parent_frame = node.parent.frame() redefinitions = parent_frame.locals[node.name] defined_self = next((local for local in redefinitions if (not utils.is_overload_stub(local))), node) if ((defined_self is not node) and (not astroid.are_exclusive(node, defined_self))): if (isinstance(parent_frame, astroid.ClassDef) and (node.name in REDEFINABLE_METHODS)): return if utils.is_overload_stub(node): return if isinstance(node.parent, astroid.If): if (isinstance(node.parent.test, astroid.UnaryOp) and (node.parent.test.op == 'not') and isinstance(node.parent.test.operand, astroid.Name) and (node.parent.test.operand.name == node.name)): return if (isinstance(node.parent.test, astroid.Compare) and isinstance(node.parent.test.left, astroid.Name) and (node.parent.test.left.name == node.name) and (node.parent.test.ops[0][0] == 'is') and isinstance(node.parent.test.ops[0][1], astroid.Const) and (node.parent.test.ops[0][1].value is None)): return try: redefinition_index = redefinitions.index(node) except ValueError: pass else: for redefinition in redefinitions[:redefinition_index]: inferred = utils.safe_infer(redefinition) if (inferred and isinstance(inferred, astroid.Instance) and (inferred.qname() == TYPING_FORWARD_REF_QNAME)): return dummy_variables_rgx = lint_utils.get_global_option(self, 'dummy-variables-rgx', default=None) if (dummy_variables_rgx and dummy_variables_rgx.match(node.name)): return self.add_message('function-redefined', node=node, args=(redeftype, defined_self.fromlineno))
-2,833,623,817,500,763,600
check for redefinition of a function / method / class name
venv/lib/python3.8/site-packages/pylint/checkers/base.py
_check_redefinition
DiegoSilvaHoffmann/Small-Ecommerce
python
def _check_redefinition(self, redeftype, node): parent_frame = node.parent.frame() redefinitions = parent_frame.locals[node.name] defined_self = next((local for local in redefinitions if (not utils.is_overload_stub(local))), node) if ((defined_self is not node) and (not astroid.are_exclusive(node, defined_self))): if (isinstance(parent_frame, astroid.ClassDef) and (node.name in REDEFINABLE_METHODS)): return if utils.is_overload_stub(node): return if isinstance(node.parent, astroid.If): if (isinstance(node.parent.test, astroid.UnaryOp) and (node.parent.test.op == 'not') and isinstance(node.parent.test.operand, astroid.Name) and (node.parent.test.operand.name == node.name)): return if (isinstance(node.parent.test, astroid.Compare) and isinstance(node.parent.test.left, astroid.Name) and (node.parent.test.left.name == node.name) and (node.parent.test.ops[0][0] == 'is') and isinstance(node.parent.test.ops[0][1], astroid.Const) and (node.parent.test.ops[0][1].value is None)): return try: redefinition_index = redefinitions.index(node) except ValueError: pass else: for redefinition in redefinitions[:redefinition_index]: inferred = utils.safe_infer(redefinition) if (inferred and isinstance(inferred, astroid.Instance) and (inferred.qname() == TYPING_FORWARD_REF_QNAME)): return dummy_variables_rgx = lint_utils.get_global_option(self, 'dummy-variables-rgx', default=None) if (dummy_variables_rgx and dummy_variables_rgx.match(node.name)): return self.add_message('function-redefined', node=node, args=(redeftype, defined_self.fromlineno))
def open(self): 'initialize visit variables and statistics' self._tryfinallys = [] self.stats = self.linter.add_stats(module=0, function=0, method=0, class_=0)
-7,120,917,450,056,130,000
initialize visit variables and statistics
venv/lib/python3.8/site-packages/pylint/checkers/base.py
open
DiegoSilvaHoffmann/Small-Ecommerce
python
def open(self): self._tryfinallys = [] self.stats = self.linter.add_stats(module=0, function=0, method=0, class_=0)
def visit_module(self, _): 'check module name, docstring and required arguments' self.stats['module'] += 1
-3,172,526,212,306,251,000
check module name, docstring and required arguments
venv/lib/python3.8/site-packages/pylint/checkers/base.py
visit_module
DiegoSilvaHoffmann/Small-Ecommerce
python
def visit_module(self, _): self.stats['module'] += 1
def visit_classdef(self, node): 'check module name, docstring and redefinition\n increment branch counter\n ' self.stats['class'] += 1
6,323,541,644,900,864,000
check module name, docstring and redefinition increment branch counter
venv/lib/python3.8/site-packages/pylint/checkers/base.py
visit_classdef
DiegoSilvaHoffmann/Small-Ecommerce
python
def visit_classdef(self, node): 'check module name, docstring and redefinition\n increment branch counter\n ' self.stats['class'] += 1
@utils.check_messages('pointless-statement', 'pointless-string-statement', 'expression-not-assigned') def visit_expr(self, node): 'Check for various kind of statements without effect' expr = node.value if (isinstance(expr, astroid.Const) and isinstance(expr.value, str)): scope = expr.scope() if isinstance(scope, (astroid.ClassDef, astroid.Module, astroid.FunctionDef)): if (isinstance(scope, astroid.FunctionDef) and (scope.name != '__init__')): pass else: sibling = expr.previous_sibling() if ((sibling is not None) and (sibling.scope() is scope) and isinstance(sibling, (astroid.Assign, astroid.AnnAssign))): return self.add_message('pointless-string-statement', node=node) return if (isinstance(expr, (astroid.Yield, astroid.Await, astroid.Ellipsis, astroid.Call)) or (isinstance(node.parent, astroid.TryExcept) and (node.parent.body == [node])) or (isinstance(expr, astroid.Const) and (expr.value is Ellipsis))): return if any(expr.nodes_of_class(astroid.Call)): self.add_message('expression-not-assigned', node=node, args=expr.as_string()) else: self.add_message('pointless-statement', node=node)
3,617,991,756,477,993,000
Check for various kind of statements without effect
venv/lib/python3.8/site-packages/pylint/checkers/base.py
visit_expr
DiegoSilvaHoffmann/Small-Ecommerce
python
@utils.check_messages('pointless-statement', 'pointless-string-statement', 'expression-not-assigned') def visit_expr(self, node): expr = node.value if (isinstance(expr, astroid.Const) and isinstance(expr.value, str)): scope = expr.scope() if isinstance(scope, (astroid.ClassDef, astroid.Module, astroid.FunctionDef)): if (isinstance(scope, astroid.FunctionDef) and (scope.name != '__init__')): pass else: sibling = expr.previous_sibling() if ((sibling is not None) and (sibling.scope() is scope) and isinstance(sibling, (astroid.Assign, astroid.AnnAssign))): return self.add_message('pointless-string-statement', node=node) return if (isinstance(expr, (astroid.Yield, astroid.Await, astroid.Ellipsis, astroid.Call)) or (isinstance(node.parent, astroid.TryExcept) and (node.parent.body == [node])) or (isinstance(expr, astroid.Const) and (expr.value is Ellipsis))): return if any(expr.nodes_of_class(astroid.Call)): self.add_message('expression-not-assigned', node=node, args=expr.as_string()) else: self.add_message('pointless-statement', node=node)
@utils.check_messages('unnecessary-lambda') def visit_lambda(self, node): 'check whether or not the lambda is suspicious' if node.args.defaults: return call = node.body if (not isinstance(call, astroid.Call)): return if (isinstance(node.body.func, astroid.Attribute) and isinstance(node.body.func.expr, astroid.Call)): return call_site = astroid.arguments.CallSite.from_call(call) ordinary_args = list(node.args.args) new_call_args = list(self._filter_vararg(node, call.args)) if node.args.kwarg: if self._has_variadic_argument(call.kwargs, node.args.kwarg): return if node.args.vararg: if self._has_variadic_argument(call.starargs, node.args.vararg): return elif call.starargs: return if call.keywords: lambda_kwargs = {keyword.name for keyword in node.args.defaults} if (len(lambda_kwargs) != len(call_site.keyword_arguments)): return if set(call_site.keyword_arguments).difference(lambda_kwargs): return if (len(ordinary_args) != len(new_call_args)): return for (arg, passed_arg) in zip(ordinary_args, new_call_args): if (not isinstance(passed_arg, astroid.Name)): return if (arg.name != passed_arg.name): return self.add_message('unnecessary-lambda', line=node.fromlineno, node=node)
-7,762,045,850,180,491,000
check whether or not the lambda is suspicious
venv/lib/python3.8/site-packages/pylint/checkers/base.py
visit_lambda
DiegoSilvaHoffmann/Small-Ecommerce
python
@utils.check_messages('unnecessary-lambda') def visit_lambda(self, node): if node.args.defaults: return call = node.body if (not isinstance(call, astroid.Call)): return if (isinstance(node.body.func, astroid.Attribute) and isinstance(node.body.func.expr, astroid.Call)): return call_site = astroid.arguments.CallSite.from_call(call) ordinary_args = list(node.args.args) new_call_args = list(self._filter_vararg(node, call.args)) if node.args.kwarg: if self._has_variadic_argument(call.kwargs, node.args.kwarg): return if node.args.vararg: if self._has_variadic_argument(call.starargs, node.args.vararg): return elif call.starargs: return if call.keywords: lambda_kwargs = {keyword.name for keyword in node.args.defaults} if (len(lambda_kwargs) != len(call_site.keyword_arguments)): return if set(call_site.keyword_arguments).difference(lambda_kwargs): return if (len(ordinary_args) != len(new_call_args)): return for (arg, passed_arg) in zip(ordinary_args, new_call_args): if (not isinstance(passed_arg, astroid.Name)): return if (arg.name != passed_arg.name): return self.add_message('unnecessary-lambda', line=node.fromlineno, node=node)
@utils.check_messages('dangerous-default-value') def visit_functiondef(self, node): 'check function name, docstring, arguments, redefinition,\n variable names, max locals\n ' self.stats[('method' if node.is_method() else 'function')] += 1 self._check_dangerous_default(node)
-5,048,651,208,268,462,000
check function name, docstring, arguments, redefinition, variable names, max locals
venv/lib/python3.8/site-packages/pylint/checkers/base.py
visit_functiondef
DiegoSilvaHoffmann/Small-Ecommerce
python
@utils.check_messages('dangerous-default-value') def visit_functiondef(self, node): 'check function name, docstring, arguments, redefinition,\n variable names, max locals\n ' self.stats[('method' if node.is_method() else 'function')] += 1 self._check_dangerous_default(node)
def _check_dangerous_default(self, node): 'Check for dangerous default values as arguments.' def is_iterable(internal_node): return isinstance(internal_node, (astroid.List, astroid.Set, astroid.Dict)) defaults = (node.args.defaults or ([] + node.args.kw_defaults) or []) for default in defaults: if (not default): continue try: value = next(default.infer()) except astroid.InferenceError: continue if (isinstance(value, astroid.Instance) and (value.qname() in DEFAULT_ARGUMENT_SYMBOLS)): if (value is default): msg = DEFAULT_ARGUMENT_SYMBOLS[value.qname()] elif (isinstance(value, astroid.Instance) or is_iterable(value)): if is_iterable(default): msg = value.pytype() elif isinstance(default, astroid.Call): msg = f'{value.name}() ({value.qname()})' else: msg = f'{default.as_string()} ({value.qname()})' else: msg = f'{default.as_string()} ({DEFAULT_ARGUMENT_SYMBOLS[value.qname()]})' self.add_message('dangerous-default-value', node=node, args=(msg,))
8,315,180,032,150,322,000
Check for dangerous default values as arguments.
venv/lib/python3.8/site-packages/pylint/checkers/base.py
_check_dangerous_default
DiegoSilvaHoffmann/Small-Ecommerce
python
def _check_dangerous_default(self, node): def is_iterable(internal_node): return isinstance(internal_node, (astroid.List, astroid.Set, astroid.Dict)) defaults = (node.args.defaults or ([] + node.args.kw_defaults) or []) for default in defaults: if (not default): continue try: value = next(default.infer()) except astroid.InferenceError: continue if (isinstance(value, astroid.Instance) and (value.qname() in DEFAULT_ARGUMENT_SYMBOLS)): if (value is default): msg = DEFAULT_ARGUMENT_SYMBOLS[value.qname()] elif (isinstance(value, astroid.Instance) or is_iterable(value)): if is_iterable(default): msg = value.pytype() elif isinstance(default, astroid.Call): msg = f'{value.name}() ({value.qname()})' else: msg = f'{default.as_string()} ({value.qname()})' else: msg = f'{default.as_string()} ({DEFAULT_ARGUMENT_SYMBOLS[value.qname()]})' self.add_message('dangerous-default-value', node=node, args=(msg,))
@utils.check_messages('unreachable', 'lost-exception') def visit_return(self, node): "1 - check is the node has a right sibling (if so, that's some\n unreachable code)\n 2 - check is the node is inside the finally clause of a try...finally\n block\n " self._check_unreachable(node) self._check_not_in_finally(node, 'return', (astroid.FunctionDef,))
-3,677,153,586,974,532,600
1 - check is the node has a right sibling (if so, that's some unreachable code) 2 - check is the node is inside the finally clause of a try...finally block
venv/lib/python3.8/site-packages/pylint/checkers/base.py
visit_return
DiegoSilvaHoffmann/Small-Ecommerce
python
@utils.check_messages('unreachable', 'lost-exception') def visit_return(self, node): "1 - check is the node has a right sibling (if so, that's some\n unreachable code)\n 2 - check is the node is inside the finally clause of a try...finally\n block\n " self._check_unreachable(node) self._check_not_in_finally(node, 'return', (astroid.FunctionDef,))
@utils.check_messages('unreachable') def visit_continue(self, node): "check is the node has a right sibling (if so, that's some unreachable\n code)\n " self._check_unreachable(node)
-4,721,979,628,514,516,000
check is the node has a right sibling (if so, that's some unreachable code)
venv/lib/python3.8/site-packages/pylint/checkers/base.py
visit_continue
DiegoSilvaHoffmann/Small-Ecommerce
python
@utils.check_messages('unreachable') def visit_continue(self, node): "check is the node has a right sibling (if so, that's some unreachable\n code)\n " self._check_unreachable(node)
@utils.check_messages('unreachable', 'lost-exception') def visit_break(self, node): "1 - check is the node has a right sibling (if so, that's some\n unreachable code)\n 2 - check is the node is inside the finally clause of a try...finally\n block\n " self._check_unreachable(node) self._check_not_in_finally(node, 'break', (astroid.For, astroid.While))
-2,772,419,799,165,416,400
1 - check is the node has a right sibling (if so, that's some unreachable code) 2 - check is the node is inside the finally clause of a try...finally block
venv/lib/python3.8/site-packages/pylint/checkers/base.py
visit_break
DiegoSilvaHoffmann/Small-Ecommerce
python
@utils.check_messages('unreachable', 'lost-exception') def visit_break(self, node): "1 - check is the node has a right sibling (if so, that's some\n unreachable code)\n 2 - check is the node is inside the finally clause of a try...finally\n block\n " self._check_unreachable(node) self._check_not_in_finally(node, 'break', (astroid.For, astroid.While))
@utils.check_messages('unreachable') def visit_raise(self, node): "check if the node has a right sibling (if so, that's some unreachable\n code)\n " self._check_unreachable(node)
-6,611,969,601,107,533,000
check if the node has a right sibling (if so, that's some unreachable code)
venv/lib/python3.8/site-packages/pylint/checkers/base.py
visit_raise
DiegoSilvaHoffmann/Small-Ecommerce
python
@utils.check_messages('unreachable') def visit_raise(self, node): "check if the node has a right sibling (if so, that's some unreachable\n code)\n " self._check_unreachable(node)
@utils.check_messages('exec-used') def visit_exec(self, node): 'just print a warning on exec statements' self.add_message('exec-used', node=node)
-7,449,857,530,318,227,000
just print a warning on exec statements
venv/lib/python3.8/site-packages/pylint/checkers/base.py
visit_exec
DiegoSilvaHoffmann/Small-Ecommerce
python
@utils.check_messages('exec-used') def visit_exec(self, node): self.add_message('exec-used', node=node)
@utils.check_messages('eval-used', 'exec-used', 'bad-reversed-sequence', 'misplaced-format-function') def visit_call(self, node): 'visit a Call node -> check if this is not a disallowed builtin\n call and check for * or ** use\n ' self._check_misplaced_format_function(node) if isinstance(node.func, astroid.Name): name = node.func.name if (not ((name in node.frame()) or (name in node.root()))): if (name == 'exec'): self.add_message('exec-used', node=node) elif (name == 'reversed'): self._check_reversed(node) elif (name == 'eval'): self.add_message('eval-used', node=node)
-3,051,920,367,229,449,000
visit a Call node -> check if this is not a disallowed builtin call and check for * or ** use
venv/lib/python3.8/site-packages/pylint/checkers/base.py
visit_call
DiegoSilvaHoffmann/Small-Ecommerce
python
@utils.check_messages('eval-used', 'exec-used', 'bad-reversed-sequence', 'misplaced-format-function') def visit_call(self, node): 'visit a Call node -> check if this is not a disallowed builtin\n call and check for * or ** use\n ' self._check_misplaced_format_function(node) if isinstance(node.func, astroid.Name): name = node.func.name if (not ((name in node.frame()) or (name in node.root()))): if (name == 'exec'): self.add_message('exec-used', node=node) elif (name == 'reversed'): self._check_reversed(node) elif (name == 'eval'): self.add_message('eval-used', node=node)
@utils.check_messages('assert-on-tuple', 'assert-on-string-literal') def visit_assert(self, node): 'check whether assert is used on a tuple or string literal.' if ((node.fail is None) and isinstance(node.test, astroid.Tuple) and (len(node.test.elts) == 2)): self.add_message('assert-on-tuple', node=node) if (isinstance(node.test, astroid.Const) and isinstance(node.test.value, str)): if node.test.value: when = 'never' else: when = 'always' self.add_message('assert-on-string-literal', node=node, args=(when,))
5,528,318,846,618,252,000
check whether assert is used on a tuple or string literal.
venv/lib/python3.8/site-packages/pylint/checkers/base.py
visit_assert
DiegoSilvaHoffmann/Small-Ecommerce
python
@utils.check_messages('assert-on-tuple', 'assert-on-string-literal') def visit_assert(self, node): if ((node.fail is None) and isinstance(node.test, astroid.Tuple) and (len(node.test.elts) == 2)): self.add_message('assert-on-tuple', node=node) if (isinstance(node.test, astroid.Const) and isinstance(node.test.value, str)): if node.test.value: when = 'never' else: when = 'always' self.add_message('assert-on-string-literal', node=node, args=(when,))
@utils.check_messages('duplicate-key') def visit_dict(self, node): 'check duplicate key in dictionary' keys = set() for (k, _) in node.items: if isinstance(k, astroid.Const): key = k.value if (key in keys): self.add_message('duplicate-key', node=node, args=key) keys.add(key)
7,305,081,576,754,378,000
check duplicate key in dictionary
venv/lib/python3.8/site-packages/pylint/checkers/base.py
visit_dict
DiegoSilvaHoffmann/Small-Ecommerce
python
@utils.check_messages('duplicate-key') def visit_dict(self, node): keys = set() for (k, _) in node.items: if isinstance(k, astroid.Const): key = k.value if (key in keys): self.add_message('duplicate-key', node=node, args=key) keys.add(key)
def visit_tryfinally(self, node): 'update try...finally flag' self._tryfinallys.append(node)
567,963,392,912,776,770
update try...finally flag
venv/lib/python3.8/site-packages/pylint/checkers/base.py
visit_tryfinally
DiegoSilvaHoffmann/Small-Ecommerce
python
def visit_tryfinally(self, node): self._tryfinallys.append(node)
def leave_tryfinally(self, node): 'update try...finally flag' self._tryfinallys.pop()
3,084,186,197,470,985,000
update try...finally flag
venv/lib/python3.8/site-packages/pylint/checkers/base.py
leave_tryfinally
DiegoSilvaHoffmann/Small-Ecommerce
python
def leave_tryfinally(self, node): self._tryfinallys.pop()
def _check_unreachable(self, node): 'check unreachable code' unreach_stmt = node.next_sibling() if (unreach_stmt is not None): self.add_message('unreachable', node=unreach_stmt)
-4,818,121,943,211,807,000
check unreachable code
venv/lib/python3.8/site-packages/pylint/checkers/base.py
_check_unreachable
DiegoSilvaHoffmann/Small-Ecommerce
python
def _check_unreachable(self, node): unreach_stmt = node.next_sibling() if (unreach_stmt is not None): self.add_message('unreachable', node=unreach_stmt)
def _check_not_in_finally(self, node, node_name, breaker_classes=()): 'check that a node is not inside a finally clause of a\n try...finally statement.\n If we found before a try...finally bloc a parent which its type is\n in breaker_classes, we skip the whole check.' if (not self._tryfinallys): return _parent = node.parent _node = node while (_parent and (not isinstance(_parent, breaker_classes))): if (hasattr(_parent, 'finalbody') and (_node in _parent.finalbody)): self.add_message('lost-exception', node=node, args=node_name) return _node = _parent _parent = _node.parent
6,710,553,124,971,190,000
check that a node is not inside a finally clause of a try...finally statement. If we found before a try...finally bloc a parent which its type is in breaker_classes, we skip the whole check.
venv/lib/python3.8/site-packages/pylint/checkers/base.py
_check_not_in_finally
DiegoSilvaHoffmann/Small-Ecommerce
python
def _check_not_in_finally(self, node, node_name, breaker_classes=()): 'check that a node is not inside a finally clause of a\n try...finally statement.\n If we found before a try...finally bloc a parent which its type is\n in breaker_classes, we skip the whole check.' if (not self._tryfinallys): return _parent = node.parent _node = node while (_parent and (not isinstance(_parent, breaker_classes))): if (hasattr(_parent, 'finalbody') and (_node in _parent.finalbody)): self.add_message('lost-exception', node=node, args=node_name) return _node = _parent _parent = _node.parent
def _check_reversed(self, node): 'check that the argument to `reversed` is a sequence' try: argument = utils.safe_infer(utils.get_argument_from_call(node, position=0)) except utils.NoSuchArgumentError: pass else: if (argument is astroid.Uninferable): return if (argument is None): if isinstance(node.args[0], astroid.Call): try: func = next(node.args[0].func.infer()) except astroid.InferenceError: return if ((getattr(func, 'name', None) == 'iter') and utils.is_builtin_object(func)): self.add_message('bad-reversed-sequence', node=node) return if isinstance(argument, (astroid.List, astroid.Tuple)): return if isinstance(argument, astroid.Instance): if any((((ancestor.name == 'dict') and utils.is_builtin_object(ancestor)) for ancestor in itertools.chain((argument._proxied,), argument._proxied.ancestors()))): try: argument.locals[REVERSED_PROTOCOL_METHOD] except KeyError: self.add_message('bad-reversed-sequence', node=node) return if hasattr(argument, 'getattr'): for methods in REVERSED_METHODS: for meth in methods: try: argument.getattr(meth) except astroid.NotFoundError: break else: break else: self.add_message('bad-reversed-sequence', node=node) else: self.add_message('bad-reversed-sequence', node=node)
-442,172,745,389,106,800
check that the argument to `reversed` is a sequence
venv/lib/python3.8/site-packages/pylint/checkers/base.py
_check_reversed
DiegoSilvaHoffmann/Small-Ecommerce
python
def _check_reversed(self, node): try: argument = utils.safe_infer(utils.get_argument_from_call(node, position=0)) except utils.NoSuchArgumentError: pass else: if (argument is astroid.Uninferable): return if (argument is None): if isinstance(node.args[0], astroid.Call): try: func = next(node.args[0].func.infer()) except astroid.InferenceError: return if ((getattr(func, 'name', None) == 'iter') and utils.is_builtin_object(func)): self.add_message('bad-reversed-sequence', node=node) return if isinstance(argument, (astroid.List, astroid.Tuple)): return if isinstance(argument, astroid.Instance): if any((((ancestor.name == 'dict') and utils.is_builtin_object(ancestor)) for ancestor in itertools.chain((argument._proxied,), argument._proxied.ancestors()))): try: argument.locals[REVERSED_PROTOCOL_METHOD] except KeyError: self.add_message('bad-reversed-sequence', node=node) return if hasattr(argument, 'getattr'): for methods in REVERSED_METHODS: for meth in methods: try: argument.getattr(meth) except astroid.NotFoundError: break else: break else: self.add_message('bad-reversed-sequence', node=node) else: self.add_message('bad-reversed-sequence', node=node)
@utils.check_messages('disallowed-name', 'invalid-name', 'assign-to-new-keyword', 'non-ascii-name') def visit_assignname(self, node): 'check module level assigned names' self._check_assign_to_new_keyword_violation(node.name, node) frame = node.frame() assign_type = node.assign_type() if isinstance(assign_type, astroid.Comprehension): self._check_name('inlinevar', node.name, node) elif isinstance(frame, astroid.Module): if isinstance(assign_type, astroid.Assign): if isinstance(utils.safe_infer(assign_type.value), astroid.ClassDef): self._check_name('class', node.name, node) elif ((not _redefines_import(node)) and isinstance(utils.safe_infer(assign_type.value), astroid.Const)): self._check_name('const', node.name, node) elif isinstance(assign_type, astroid.ExceptHandler): self._check_name('variable', node.name, node) elif (isinstance(assign_type, astroid.AnnAssign) and utils.is_assign_name_annotated_with(node, 'Final')): self._check_name('const', node.name, node) elif isinstance(frame, astroid.FunctionDef): if ((node.name in frame) and (node.name not in frame.argnames())): if (not _redefines_import(node)): self._check_name('variable', node.name, node) elif isinstance(frame, astroid.ClassDef): if (not list(frame.local_attr_ancestors(node.name))): for ancestor in frame.ancestors(): if (((ancestor.name == 'Enum') and (ancestor.root().name == 'enum')) or utils.is_assign_name_annotated_with(node, 'Final')): self._check_name('class_const', node.name, node) break else: self._check_name('class_attribute', node.name, node)
-4,169,838,064,300,232,000
check module level assigned names
venv/lib/python3.8/site-packages/pylint/checkers/base.py
visit_assignname
DiegoSilvaHoffmann/Small-Ecommerce
python
@utils.check_messages('disallowed-name', 'invalid-name', 'assign-to-new-keyword', 'non-ascii-name') def visit_assignname(self, node): self._check_assign_to_new_keyword_violation(node.name, node) frame = node.frame() assign_type = node.assign_type() if isinstance(assign_type, astroid.Comprehension): self._check_name('inlinevar', node.name, node) elif isinstance(frame, astroid.Module): if isinstance(assign_type, astroid.Assign): if isinstance(utils.safe_infer(assign_type.value), astroid.ClassDef): self._check_name('class', node.name, node) elif ((not _redefines_import(node)) and isinstance(utils.safe_infer(assign_type.value), astroid.Const)): self._check_name('const', node.name, node) elif isinstance(assign_type, astroid.ExceptHandler): self._check_name('variable', node.name, node) elif (isinstance(assign_type, astroid.AnnAssign) and utils.is_assign_name_annotated_with(node, 'Final')): self._check_name('const', node.name, node) elif isinstance(frame, astroid.FunctionDef): if ((node.name in frame) and (node.name not in frame.argnames())): if (not _redefines_import(node)): self._check_name('variable', node.name, node) elif isinstance(frame, astroid.ClassDef): if (not list(frame.local_attr_ancestors(node.name))): for ancestor in frame.ancestors(): if (((ancestor.name == 'Enum') and (ancestor.root().name == 'enum')) or utils.is_assign_name_annotated_with(node, 'Final')): self._check_name('class_const', node.name, node) break else: self._check_name('class_attribute', node.name, node)
def _recursive_check_names(self, args, node): 'check names in a possibly recursive list <arg>' for arg in args: if isinstance(arg, astroid.AssignName): self._check_name('argument', arg.name, node) else: self._recursive_check_names(arg.elts, node)
-4,889,120,403,578,327,000
check names in a possibly recursive list <arg>
venv/lib/python3.8/site-packages/pylint/checkers/base.py
_recursive_check_names
DiegoSilvaHoffmann/Small-Ecommerce
python
def _recursive_check_names(self, args, node): for arg in args: if isinstance(arg, astroid.AssignName): self._check_name('argument', arg.name, node) else: self._recursive_check_names(arg.elts, node)
def _check_name(self, node_type, name, node, confidence=interfaces.HIGH): "check for a name using the type's regexp" non_ascii_match = self._non_ascii_rgx_compiled.match(name) if (non_ascii_match is not None): self._raise_name_warning(node, node_type, name, confidence, warning='non-ascii-name') def _should_exempt_from_invalid_name(node): if (node_type == 'variable'): inferred = utils.safe_infer(node) if isinstance(inferred, astroid.ClassDef): return True return False if utils.is_inside_except(node): (clobbering, _) = utils.clobber_in_except(node) if clobbering: return if self._name_allowed_by_regex(name=name): return if self._name_disallowed_by_regex(name=name): self.stats[('badname_' + node_type)] += 1 self.add_message('disallowed-name', node=node, args=name) return regexp = self._name_regexps[node_type] match = regexp.match(name) if _is_multi_naming_match(match, node_type, confidence): name_group = self._find_name_group(node_type) bad_name_group = self._bad_names.setdefault(name_group, {}) warnings = bad_name_group.setdefault(match.lastgroup, []) warnings.append((node, node_type, name, confidence)) if ((match is None) and (not _should_exempt_from_invalid_name(node))): self._raise_name_warning(node, node_type, name, confidence)
-4,220,091,793,548,102,700
check for a name using the type's regexp
venv/lib/python3.8/site-packages/pylint/checkers/base.py
_check_name
DiegoSilvaHoffmann/Small-Ecommerce
python
def _check_name(self, node_type, name, node, confidence=interfaces.HIGH): non_ascii_match = self._non_ascii_rgx_compiled.match(name) if (non_ascii_match is not None): self._raise_name_warning(node, node_type, name, confidence, warning='non-ascii-name') def _should_exempt_from_invalid_name(node): if (node_type == 'variable'): inferred = utils.safe_infer(node) if isinstance(inferred, astroid.ClassDef): return True return False if utils.is_inside_except(node): (clobbering, _) = utils.clobber_in_except(node) if clobbering: return if self._name_allowed_by_regex(name=name): return if self._name_disallowed_by_regex(name=name): self.stats[('badname_' + node_type)] += 1 self.add_message('disallowed-name', node=node, args=name) return regexp = self._name_regexps[node_type] match = regexp.match(name) if _is_multi_naming_match(match, node_type, confidence): name_group = self._find_name_group(node_type) bad_name_group = self._bad_names.setdefault(name_group, {}) warnings = bad_name_group.setdefault(match.lastgroup, []) warnings.append((node, node_type, name, confidence)) if ((match is None) and (not _should_exempt_from_invalid_name(node))): self._raise_name_warning(node, node_type, name, confidence)
def _check_docstring(self, node_type, node, report_missing=True, confidence=interfaces.HIGH): 'check the node has a non empty docstring' docstring = node.doc if (docstring is None): docstring = _infer_dunder_doc_attribute(node) if (docstring is None): if (not report_missing): return lines = (utils.get_node_last_lineno(node) - node.lineno) if ((node_type == 'module') and (not lines)): return max_lines = self.config.docstring_min_length if ((node_type != 'module') and (max_lines > (- 1)) and (lines < max_lines)): return self.stats[('undocumented_' + node_type)] += 1 if (node.body and isinstance(node.body[0], astroid.Expr) and isinstance(node.body[0].value, astroid.Call)): func = utils.safe_infer(node.body[0].value.func) if (isinstance(func, astroid.BoundMethod) and isinstance(func.bound, astroid.Instance)): if (func.bound.name in ('str', 'unicode', 'bytes')): return if (node_type == 'module'): message = 'missing-module-docstring' elif (node_type == 'class'): message = 'missing-class-docstring' else: message = 'missing-function-docstring' self.add_message(message, node=node, confidence=confidence) elif (not docstring.strip()): self.stats[('undocumented_' + node_type)] += 1 self.add_message('empty-docstring', node=node, args=(node_type,), confidence=confidence)
691,359,148,964,642,600
check the node has a non empty docstring
venv/lib/python3.8/site-packages/pylint/checkers/base.py
_check_docstring
DiegoSilvaHoffmann/Small-Ecommerce
python
def _check_docstring(self, node_type, node, report_missing=True, confidence=interfaces.HIGH): docstring = node.doc if (docstring is None): docstring = _infer_dunder_doc_attribute(node) if (docstring is None): if (not report_missing): return lines = (utils.get_node_last_lineno(node) - node.lineno) if ((node_type == 'module') and (not lines)): return max_lines = self.config.docstring_min_length if ((node_type != 'module') and (max_lines > (- 1)) and (lines < max_lines)): return self.stats[('undocumented_' + node_type)] += 1 if (node.body and isinstance(node.body[0], astroid.Expr) and isinstance(node.body[0].value, astroid.Call)): func = utils.safe_infer(node.body[0].value.func) if (isinstance(func, astroid.BoundMethod) and isinstance(func.bound, astroid.Instance)): if (func.bound.name in ('str', 'unicode', 'bytes')): return if (node_type == 'module'): message = 'missing-module-docstring' elif (node_type == 'class'): message = 'missing-class-docstring' else: message = 'missing-function-docstring' self.add_message(message, node=node, confidence=confidence) elif (not docstring.strip()): self.stats[('undocumented_' + node_type)] += 1 self.add_message('empty-docstring', node=node, args=(node_type,), confidence=confidence)
def _check_singleton_comparison(self, left_value, right_value, root_node, checking_for_absence: bool=False): 'Check if == or != is being used to compare a singleton value' singleton_values = (True, False, None) def _is_singleton_const(node) -> bool: return (isinstance(node, astroid.Const) and any(((node.value is value) for value in singleton_values))) if _is_singleton_const(left_value): (singleton, other_value) = (left_value.value, right_value) elif _is_singleton_const(right_value): (singleton, other_value) = (right_value.value, left_value) else: return singleton_comparison_example = {False: "'{} is {}'", True: "'{} is not {}'"} if (singleton in (True, False)): suggestion_template = '{} if checking for the singleton value {}, or {} if testing for {}' truthiness_example = {False: 'not {}', True: '{}'} truthiness_phrase = {True: 'truthiness', False: 'falsiness'} checking_truthiness = (singleton is not checking_for_absence) suggestion = suggestion_template.format(singleton_comparison_example[checking_for_absence].format(left_value.as_string(), right_value.as_string()), singleton, ("'bool({})'" if ((not utils.is_test_condition(root_node)) and checking_truthiness) else "'{}'").format(truthiness_example[checking_truthiness].format(other_value.as_string())), truthiness_phrase[checking_truthiness]) else: suggestion = singleton_comparison_example[checking_for_absence].format(left_value.as_string(), right_value.as_string()) self.add_message('singleton-comparison', node=root_node, args=(f"'{root_node.as_string()}'", suggestion))
3,666,883,644,392,001,500
Check if == or != is being used to compare a singleton value
venv/lib/python3.8/site-packages/pylint/checkers/base.py
_check_singleton_comparison
DiegoSilvaHoffmann/Small-Ecommerce
python
def _check_singleton_comparison(self, left_value, right_value, root_node, checking_for_absence: bool=False): singleton_values = (True, False, None) def _is_singleton_const(node) -> bool: return (isinstance(node, astroid.Const) and any(((node.value is value) for value in singleton_values))) if _is_singleton_const(left_value): (singleton, other_value) = (left_value.value, right_value) elif _is_singleton_const(right_value): (singleton, other_value) = (right_value.value, left_value) else: return singleton_comparison_example = {False: "'{} is {}'", True: "'{} is not {}'"} if (singleton in (True, False)): suggestion_template = '{} if checking for the singleton value {}, or {} if testing for {}' truthiness_example = {False: 'not {}', True: '{}'} truthiness_phrase = {True: 'truthiness', False: 'falsiness'} checking_truthiness = (singleton is not checking_for_absence) suggestion = suggestion_template.format(singleton_comparison_example[checking_for_absence].format(left_value.as_string(), right_value.as_string()), singleton, ("'bool({})'" if ((not utils.is_test_condition(root_node)) and checking_truthiness) else "'{}'").format(truthiness_example[checking_truthiness].format(other_value.as_string())), truthiness_phrase[checking_truthiness]) else: suggestion = singleton_comparison_example[checking_for_absence].format(left_value.as_string(), right_value.as_string()) self.add_message('singleton-comparison', node=root_node, args=(f"'{root_node.as_string()}'", suggestion))
def _check_literal_comparison(self, literal, node): 'Check if we compare to a literal, which is usually what we do not want to do.' nodes = (astroid.List, astroid.Tuple, astroid.Dict, astroid.Set) is_other_literal = isinstance(literal, nodes) is_const = False if isinstance(literal, astroid.Const): if (isinstance(literal.value, bool) or (literal.value is None)): return is_const = isinstance(literal.value, (bytes, str, int, float)) if (is_const or is_other_literal): self.add_message('literal-comparison', node=node)
-3,776,770,463,105,765,000
Check if we compare to a literal, which is usually what we do not want to do.
venv/lib/python3.8/site-packages/pylint/checkers/base.py
_check_literal_comparison
DiegoSilvaHoffmann/Small-Ecommerce
python
def _check_literal_comparison(self, literal, node): nodes = (astroid.List, astroid.Tuple, astroid.Dict, astroid.Set) is_other_literal = isinstance(literal, nodes) is_const = False if isinstance(literal, astroid.Const): if (isinstance(literal.value, bool) or (literal.value is None)): return is_const = isinstance(literal.value, (bytes, str, int, float)) if (is_const or is_other_literal): self.add_message('literal-comparison', node=node)
def _check_logical_tautology(self, node): 'Check if identifier is compared against itself.\n :param node: Compare node\n :type node: astroid.node_classes.Compare\n :Example:\n val = 786\n if val == val: # [comparison-with-itself]\n pass\n ' left_operand = node.left right_operand = node.ops[0][1] operator = node.ops[0][0] if (isinstance(left_operand, astroid.Const) and isinstance(right_operand, astroid.Const)): left_operand = left_operand.value right_operand = right_operand.value elif (isinstance(left_operand, astroid.Name) and isinstance(right_operand, astroid.Name)): left_operand = left_operand.name right_operand = right_operand.name if (left_operand == right_operand): suggestion = f'{left_operand} {operator} {right_operand}' self.add_message('comparison-with-itself', node=node, args=(suggestion,))
6,435,169,647,564,362,000
Check if identifier is compared against itself. :param node: Compare node :type node: astroid.node_classes.Compare :Example: val = 786 if val == val: # [comparison-with-itself] pass
venv/lib/python3.8/site-packages/pylint/checkers/base.py
_check_logical_tautology
DiegoSilvaHoffmann/Small-Ecommerce
python
def _check_logical_tautology(self, node): 'Check if identifier is compared against itself.\n :param node: Compare node\n :type node: astroid.node_classes.Compare\n :Example:\n val = 786\n if val == val: # [comparison-with-itself]\n pass\n ' left_operand = node.left right_operand = node.ops[0][1] operator = node.ops[0][0] if (isinstance(left_operand, astroid.Const) and isinstance(right_operand, astroid.Const)): left_operand = left_operand.value right_operand = right_operand.value elif (isinstance(left_operand, astroid.Name) and isinstance(right_operand, astroid.Name)): left_operand = left_operand.name right_operand = right_operand.name if (left_operand == right_operand): suggestion = f'{left_operand} {operator} {right_operand}' self.add_message('comparison-with-itself', node=node, args=(suggestion,))
def _check_type_x_is_y(self, node, left, operator, right): 'Check for expressions like type(x) == Y.' left_func = utils.safe_infer(left.func) if (not (isinstance(left_func, astroid.ClassDef) and (left_func.qname() == TYPE_QNAME))): return if ((operator in ('is', 'is not')) and _is_one_arg_pos_call(right)): right_func = utils.safe_infer(right.func) if (isinstance(right_func, astroid.ClassDef) and (right_func.qname() == TYPE_QNAME)): right_arg = utils.safe_infer(right.args[0]) if (not isinstance(right_arg, LITERAL_NODE_TYPES)): return self.add_message('unidiomatic-typecheck', node=node)
274,070,324,776,431,300
Check for expressions like type(x) == Y.
venv/lib/python3.8/site-packages/pylint/checkers/base.py
_check_type_x_is_y
DiegoSilvaHoffmann/Small-Ecommerce
python
def _check_type_x_is_y(self, node, left, operator, right): left_func = utils.safe_infer(left.func) if (not (isinstance(left_func, astroid.ClassDef) and (left_func.qname() == TYPE_QNAME))): return if ((operator in ('is', 'is not')) and _is_one_arg_pos_call(right)): right_func = utils.safe_infer(right.func) if (isinstance(right_func, astroid.ClassDef) and (right_func.qname() == TYPE_QNAME)): right_arg = utils.safe_infer(right.args[0]) if (not isinstance(right_arg, LITERAL_NODE_TYPES)): return self.add_message('unidiomatic-typecheck', node=node)
def send(self, inbox, data, **kwargs): 'Send the provided data to an inbox.' if (isinstance(data, dict) or isinstance(data, list)): self.__post_message(inbox, json.dumps(data), self.JSON_LD, **kwargs) elif isinstance(data, str): self.__post_message(inbox, data, self.JSON_LD, **kwargs) elif isinstance(data, Graph): ct = (self.__accept_post_options(inbox, **kwargs) or self.JSON_LD) self.__post_message(inbox, data.serialize(format=ct), ct, **kwargs) else: raise TypeError('You cannot send data of type {}.'.format(type(data)))
3,203,947,373,159,349,000
Send the provided data to an inbox.
ldnlib/sender.py
send
trellis-ldp/py-ldn
python
def send(self, inbox, data, **kwargs): if (isinstance(data, dict) or isinstance(data, list)): self.__post_message(inbox, json.dumps(data), self.JSON_LD, **kwargs) elif isinstance(data, str): self.__post_message(inbox, data, self.JSON_LD, **kwargs) elif isinstance(data, Graph): ct = (self.__accept_post_options(inbox, **kwargs) or self.JSON_LD) self.__post_message(inbox, data.serialize(format=ct), ct, **kwargs) else: raise TypeError('You cannot send data of type {}.'.format(type(data)))
def __init__(self, username=None, password=None, sharedKey=None, certChain=None, privateKey=None, cryptoID=None, protocol=None, x509Fingerprint=None, x509TrustList=None, x509CommonName=None, settings=None): "\n For client authentication, use one of these argument\n combinations:\n - username, password (SRP)\n - username, sharedKey (shared-key)\n - certChain, privateKey (certificate)\n\n For server authentication, you can either rely on the\n implicit mutual authentication performed by SRP or\n shared-keys, or you can do certificate-based server\n authentication with one of these argument combinations:\n - cryptoID[, protocol] (requires cryptoIDlib)\n - x509Fingerprint\n - x509TrustList[, x509CommonName] (requires cryptlib_py)\n\n Certificate-based server authentication is compatible with\n SRP or certificate-based client authentication. It is\n not compatible with shared-keys.\n\n The constructor does not perform the TLS handshake itself, but\n simply stores these arguments for later. The handshake is\n performed only when this class needs to connect with the\n server. Then you should be prepared to handle TLS-specific\n exceptions. See the client handshake functions in\n L{tlslite.TLSConnection.TLSConnection} for details on which\n exceptions might be raised.\n\n @type username: str\n @param username: SRP or shared-key username. Requires the\n 'password' or 'sharedKey' argument.\n\n @type password: str\n @param password: SRP password for mutual authentication.\n Requires the 'username' argument.\n\n @type sharedKey: str\n @param sharedKey: Shared key for mutual authentication.\n Requires the 'username' argument.\n\n @type certChain: L{tlslite.X509CertChain.X509CertChain} or\n L{cryptoIDlib.CertChain.CertChain}\n @param certChain: Certificate chain for client authentication.\n Requires the 'privateKey' argument. 
Excludes the SRP or\n shared-key related arguments.\n\n @type privateKey: L{tlslite.utils.RSAKey.RSAKey}\n @param privateKey: Private key for client authentication.\n Requires the 'certChain' argument. Excludes the SRP or\n shared-key related arguments.\n\n @type cryptoID: str\n @param cryptoID: cryptoID for server authentication. Mutually\n exclusive with the 'x509...' arguments.\n\n @type protocol: str\n @param protocol: cryptoID protocol URI for server\n authentication. Requires the 'cryptoID' argument.\n\n @type x509Fingerprint: str\n @param x509Fingerprint: Hex-encoded X.509 fingerprint for\n server authentication. Mutually exclusive with the 'cryptoID'\n and 'x509TrustList' arguments.\n\n @type x509TrustList: list of L{tlslite.X509.X509}\n @param x509TrustList: A list of trusted root certificates. The\n other party must present a certificate chain which extends to\n one of these root certificates. The cryptlib_py module must be\n installed to use this parameter. Mutually exclusive with the\n 'cryptoID' and 'x509Fingerprint' arguments.\n\n @type x509CommonName: str\n @param x509CommonName: The end-entity certificate's 'CN' field\n must match this value. For a web server, this is typically a\n server name such as 'www.amazon.com'. Mutually exclusive with\n the 'cryptoID' and 'x509Fingerprint' arguments. 
Requires the\n 'x509TrustList' argument.\n\n @type settings: L{tlslite.HandshakeSettings.HandshakeSettings}\n @param settings: Various settings which can be used to control\n the ciphersuites, certificate types, and SSL/TLS versions\n offered by the client.\n " self.username = None self.password = None self.sharedKey = None self.certChain = None self.privateKey = None self.checker = None if (username and password and (not (sharedKey or certChain or privateKey))): self.username = username self.password = password elif (username and sharedKey and (not (password or certChain or privateKey))): self.username = username self.sharedKey = sharedKey elif (certChain and privateKey and (not (username or password or sharedKey))): self.certChain = certChain self.privateKey = privateKey elif ((not password) and (not username) and (not sharedKey) and (not certChain) and (not privateKey)): pass else: raise ValueError('Bad parameters') if (sharedKey and (cryptoID or protocol or x509Fingerprint)): raise ValueError("Can't use shared keys with other forms ofauthentication") self.checker = Checker(cryptoID, protocol, x509Fingerprint, x509TrustList, x509CommonName) self.settings = settings self.tlsSession = None
-42,145,672,005,571,300
For client authentication, use one of these argument combinations: - username, password (SRP) - username, sharedKey (shared-key) - certChain, privateKey (certificate) For server authentication, you can either rely on the implicit mutual authentication performed by SRP or shared-keys, or you can do certificate-based server authentication with one of these argument combinations: - cryptoID[, protocol] (requires cryptoIDlib) - x509Fingerprint - x509TrustList[, x509CommonName] (requires cryptlib_py) Certificate-based server authentication is compatible with SRP or certificate-based client authentication. It is not compatible with shared-keys. The constructor does not perform the TLS handshake itself, but simply stores these arguments for later. The handshake is performed only when this class needs to connect with the server. Then you should be prepared to handle TLS-specific exceptions. See the client handshake functions in L{tlslite.TLSConnection.TLSConnection} for details on which exceptions might be raised. @type username: str @param username: SRP or shared-key username. Requires the 'password' or 'sharedKey' argument. @type password: str @param password: SRP password for mutual authentication. Requires the 'username' argument. @type sharedKey: str @param sharedKey: Shared key for mutual authentication. Requires the 'username' argument. @type certChain: L{tlslite.X509CertChain.X509CertChain} or L{cryptoIDlib.CertChain.CertChain} @param certChain: Certificate chain for client authentication. Requires the 'privateKey' argument. Excludes the SRP or shared-key related arguments. @type privateKey: L{tlslite.utils.RSAKey.RSAKey} @param privateKey: Private key for client authentication. Requires the 'certChain' argument. Excludes the SRP or shared-key related arguments. @type cryptoID: str @param cryptoID: cryptoID for server authentication. Mutually exclusive with the 'x509...' arguments. 
@type protocol: str @param protocol: cryptoID protocol URI for server authentication. Requires the 'cryptoID' argument. @type x509Fingerprint: str @param x509Fingerprint: Hex-encoded X.509 fingerprint for server authentication. Mutually exclusive with the 'cryptoID' and 'x509TrustList' arguments. @type x509TrustList: list of L{tlslite.X509.X509} @param x509TrustList: A list of trusted root certificates. The other party must present a certificate chain which extends to one of these root certificates. The cryptlib_py module must be installed to use this parameter. Mutually exclusive with the 'cryptoID' and 'x509Fingerprint' arguments. @type x509CommonName: str @param x509CommonName: The end-entity certificate's 'CN' field must match this value. For a web server, this is typically a server name such as 'www.amazon.com'. Mutually exclusive with the 'cryptoID' and 'x509Fingerprint' arguments. Requires the 'x509TrustList' argument. @type settings: L{tlslite.HandshakeSettings.HandshakeSettings} @param settings: Various settings which can be used to control the ciphersuites, certificate types, and SSL/TLS versions offered by the client.
third_party/tlslite/tlslite/integration/ClientHelper.py
__init__
1065672644894730302/Chromium
python
def __init__(self, username=None, password=None, sharedKey=None, certChain=None, privateKey=None, cryptoID=None, protocol=None, x509Fingerprint=None, x509TrustList=None, x509CommonName=None, settings=None): "\n For client authentication, use one of these argument\n combinations:\n - username, password (SRP)\n - username, sharedKey (shared-key)\n - certChain, privateKey (certificate)\n\n For server authentication, you can either rely on the\n implicit mutual authentication performed by SRP or\n shared-keys, or you can do certificate-based server\n authentication with one of these argument combinations:\n - cryptoID[, protocol] (requires cryptoIDlib)\n - x509Fingerprint\n - x509TrustList[, x509CommonName] (requires cryptlib_py)\n\n Certificate-based server authentication is compatible with\n SRP or certificate-based client authentication. It is\n not compatible with shared-keys.\n\n The constructor does not perform the TLS handshake itself, but\n simply stores these arguments for later. The handshake is\n performed only when this class needs to connect with the\n server. Then you should be prepared to handle TLS-specific\n exceptions. See the client handshake functions in\n L{tlslite.TLSConnection.TLSConnection} for details on which\n exceptions might be raised.\n\n @type username: str\n @param username: SRP or shared-key username. Requires the\n 'password' or 'sharedKey' argument.\n\n @type password: str\n @param password: SRP password for mutual authentication.\n Requires the 'username' argument.\n\n @type sharedKey: str\n @param sharedKey: Shared key for mutual authentication.\n Requires the 'username' argument.\n\n @type certChain: L{tlslite.X509CertChain.X509CertChain} or\n L{cryptoIDlib.CertChain.CertChain}\n @param certChain: Certificate chain for client authentication.\n Requires the 'privateKey' argument. 
Excludes the SRP or\n shared-key related arguments.\n\n @type privateKey: L{tlslite.utils.RSAKey.RSAKey}\n @param privateKey: Private key for client authentication.\n Requires the 'certChain' argument. Excludes the SRP or\n shared-key related arguments.\n\n @type cryptoID: str\n @param cryptoID: cryptoID for server authentication. Mutually\n exclusive with the 'x509...' arguments.\n\n @type protocol: str\n @param protocol: cryptoID protocol URI for server\n authentication. Requires the 'cryptoID' argument.\n\n @type x509Fingerprint: str\n @param x509Fingerprint: Hex-encoded X.509 fingerprint for\n server authentication. Mutually exclusive with the 'cryptoID'\n and 'x509TrustList' arguments.\n\n @type x509TrustList: list of L{tlslite.X509.X509}\n @param x509TrustList: A list of trusted root certificates. The\n other party must present a certificate chain which extends to\n one of these root certificates. The cryptlib_py module must be\n installed to use this parameter. Mutually exclusive with the\n 'cryptoID' and 'x509Fingerprint' arguments.\n\n @type x509CommonName: str\n @param x509CommonName: The end-entity certificate's 'CN' field\n must match this value. For a web server, this is typically a\n server name such as 'www.amazon.com'. Mutually exclusive with\n the 'cryptoID' and 'x509Fingerprint' arguments. 
Requires the\n 'x509TrustList' argument.\n\n @type settings: L{tlslite.HandshakeSettings.HandshakeSettings}\n @param settings: Various settings which can be used to control\n the ciphersuites, certificate types, and SSL/TLS versions\n offered by the client.\n " self.username = None self.password = None self.sharedKey = None self.certChain = None self.privateKey = None self.checker = None if (username and password and (not (sharedKey or certChain or privateKey))): self.username = username self.password = password elif (username and sharedKey and (not (password or certChain or privateKey))): self.username = username self.sharedKey = sharedKey elif (certChain and privateKey and (not (username or password or sharedKey))): self.certChain = certChain self.privateKey = privateKey elif ((not password) and (not username) and (not sharedKey) and (not certChain) and (not privateKey)): pass else: raise ValueError('Bad parameters') if (sharedKey and (cryptoID or protocol or x509Fingerprint)): raise ValueError("Can't use shared keys with other forms ofauthentication") self.checker = Checker(cryptoID, protocol, x509Fingerprint, x509TrustList, x509CommonName) self.settings = settings self.tlsSession = None
def get_graph_embedding_args(graph_embedding_name): '\n It will build the template for ``GNNBase`` model.\n Parameters\n ----------\n graph_embedding_name: str\n The graph embedding name. Expected in ["gcn", "gat", "graphsage", "ggnn"].\n If it can\'t find the ``graph_embedding_name``, it will return ``{}``.\n Returns\n -------\n template_dict: dict\n The template dict.\n The structure is shown as follows:\n {\n graph_embedding_share: {num_layers: 1, input_size: 300, ...},\n graph_embedding_private: {heads: [1], attn_drop: 0.0}\n }\n The ``graph_embedding_share`` contains the parameters shared by all ``GNNBase`` models.\n The ``graph_embedding_private`` contains the parameters specifically in each graph_embedding methods.\n ' if (graph_embedding_name in str2yaml.keys()): yaml_name = str2yaml[graph_embedding_name] path = os.path.join(dir_path, yaml_name) config = get_yaml_config(path) return config else: return {}
-8,607,500,043,819,556,000
It will build the template for ``GNNBase`` model. Parameters ---------- graph_embedding_name: str The graph embedding name. Expected in ["gcn", "gat", "graphsage", "ggnn"]. If it can't find the ``graph_embedding_name``, it will return ``{}``. Returns ------- template_dict: dict The template dict. The structure is shown as follows: { graph_embedding_share: {num_layers: 1, input_size: 300, ...}, graph_embedding_private: {heads: [1], attn_drop: 0.0} } The ``graph_embedding_share`` contains the parameters shared by all ``GNNBase`` models. The ``graph_embedding_private`` contains the parameters specifically in each graph_embedding methods.
graph4nlp/pytorch/modules/config/graph_embedding/__init__.py
get_graph_embedding_args
RyanWangZf/graph4nlp
python
def get_graph_embedding_args(graph_embedding_name): '\n It will build the template for ``GNNBase`` model.\n Parameters\n ----------\n graph_embedding_name: str\n The graph embedding name. Expected in ["gcn", "gat", "graphsage", "ggnn"].\n If it can\'t find the ``graph_embedding_name``, it will return ``{}``.\n Returns\n -------\n template_dict: dict\n The template dict.\n The structure is shown as follows:\n {\n graph_embedding_share: {num_layers: 1, input_size: 300, ...},\n graph_embedding_private: {heads: [1], attn_drop: 0.0}\n }\n The ``graph_embedding_share`` contains the parameters shared by all ``GNNBase`` models.\n The ``graph_embedding_private`` contains the parameters specifically in each graph_embedding methods.\n ' if (graph_embedding_name in str2yaml.keys()): yaml_name = str2yaml[graph_embedding_name] path = os.path.join(dir_path, yaml_name) config = get_yaml_config(path) return config else: return {}
def select_lines(infile, contrast, lines, res_dict, fits_dict, wloc, outfil): '\n displays new window with image infile + start + \'fit\n a rectangle around the selected line can be selected with dragging the mouse\n :param infile: filebase of image\n :param contrast: brightness of image\n :param lines: list of calibration wavelengths\n :param res_dict: dictionary\n :param fits_dict: "\n :param wloc: location of displayed window for selection\n :param outfil: filename without extension (.txt) with results of line selection\n :return:\n x0, y0: center coordinates of selected rectangle (int)\n dx, dy: half width and height of selected rectangle (int)\n ' def fitgaussimage(image, xy0, dxy, lam): x0 = xy0[0] y0 = xy0[1] dx = dxy[0] dy = dxy[1] print(x0, y0, dx, dy) data = image[(y0 - dy):(y0 + dy), (x0 - dx):(x0 + dx)] (params, success) = m_fun.fit_gaussian_2d(data) if (success in [1, 2, 3, 4]): (height, x, y, width_x, width_y) = params width_x = ((2 * np.sqrt((2 * np.log(2)))) * np.abs(width_x)) width_y = ((2 * np.sqrt((2 * np.log(2)))) * np.abs(width_y)) x = ((x + y0) - dy) y = ((y + x0) - dx) xyw = (y, x, width_y, width_x, lam) return xyw else: return (0, 0, 0, 0, 0) xyl = [] dxy = [10, 10] i = i_plot = 0 (im, header) = m_fun.get_fits_image(infile) if (len(im.shape) == 3): imbw = np.sum(im, axis=2) else: imbw = im m_fun.get_fits_keys(header, fits_dict, res_dict, keyprint=False) (imy, imx) = im.shape[:2] image_file = 'tmp.png' imrescale = np.flipud(ios.imread(image_file)) (canvasy, canvasx) = imrescale.shape[:2] wlocw = (wloc[0], wloc[1]) image_elem_sel = [sg.Graph(canvas_size=(canvasx, canvasy), graph_bottom_left=(0, 0), graph_top_right=(imx, imy), key='-GRAPH-', change_submits=True, drag_submits=True)] layout_select = [[sg.Ok(), sg.Cancel(), sg.Button('Skip Line'), sg.Button('Finish'), sg.Button('I'), sg.Button('D'), sg.Text(infile, size=(30, 1)), sg.Text(key='info', size=(40, 1))], image_elem_sel] winselect = sg.Window(f'select rectangle for fit size, click 
lines', layout_select, finalize=True, location=wlocw, keep_on_top=True, no_titlebar=False, resizable=True, disable_close=False, disable_minimize=True, element_padding=(2, 2)) graph = winselect['-GRAPH-'] winselect_active = True img = graph.draw_image(image_file, location=(0, imy)) dragging = False start_point = end_point = prior_rect = None index = 0 icircle = itext = None color = 'yellow' while winselect_active: (event, values) = winselect.read() if (event == '-GRAPH-'): (x, y) = values['-GRAPH-'] if (not dragging): start_point = (x, y) dragging = True else: end_point = (x, y) if prior_rect: graph.delete_figure(prior_rect) if (None not in (start_point, end_point)): prior_rect = graph.draw_rectangle(start_point, end_point, line_color='red') elif ((event is not None) and event.endswith('+UP')): xy0 = [int((0.5 * (start_point[0] + end_point[0]))), int((0.5 * (start_point[1] + end_point[1])))] size = (abs((start_point[0] - end_point[0])), abs((start_point[1] - end_point[1]))) info = winselect['info'] info.update(value=f'grabbed rectangle at {xy0} with size {size}') (start_point, end_point) = (None, None) dragging = False if (min(size[0], size[1]) > 2): info.update(value=f'rectangle at {xy0} with size {size}') dxy = size elif (i < len(lines)): if prior_rect: graph.delete_figure(prior_rect) print(xy0, lines[i]) xyw = fitgaussimage(imbw, xy0, dxy, lines[i]) if xyw[0]: if ((0 < xyw[0] < imx) and (0 < xyw[1] < imy)): print(np.float16(xyw)) xyl.append(np.float32(xyw)) r = ((xyw[2] + xyw[3]) / 4) icircle = graph.DrawCircle((xyw[0], xyw[1]), r, line_color=color, line_width=3) itext = graph.DrawText((' ' + str(lines[i])), location=(xyw[0], xyw[1]), color=color, font=('Arial', 12), angle=45, text_location=sg.TEXT_LOCATION_BOTTOM_LEFT) info.update(value=f'line {lines[i]} at {np.float16(xyw)}') graph.update() i += 1 i_plot += 1 else: info.update(value='bad fit, try again') print('bad fit, try again') else: info.update(value='Fit not successful, try again') print('Fit not 
successful, try again') else: info.update(value='all lines measured, press OK or Cancel') elif (event == 'Ok'): if (np.array(xyl).shape[0] > 1): xyl = np.array(xyl, dtype=np.float32) with open(m_fun.change_extension(outfil, '.txt'), 'ab+') as f: np.savetxt(f, xyl, fmt='%8.2f', header=(((str(index) + ' ') + str(infile)) + '.fit')) np.savetxt(f, np.zeros((1, 5)), fmt='%8.2f') index += 1 color = ('red' if (color == 'yellow') else 'yellow') elif icircle: graph.delete_figure(icircle) graph.delete_figure(itext) graph.update() xyl = [] i = i_plot = 0 elif (event == 'Cancel'): for ind in range(i_plot): xyl = np.array(xyl, dtype=np.float32) rsq2 = ((xyl[(ind, 2)] + xyl[(ind, 3)]) / 5.6) drag_figures = graph.get_figures_at_location(((xyl[(ind, 0)] + rsq2), (xyl[(ind, 1)] + rsq2))) for figure in drag_figures: if (figure != img): graph.delete_figure(figure) graph.update() xyl = [] i = i_plot = 0 elif (event == 'Skip Line'): i += 1 elif (event in ('I', 'D')): if (event == 'I'): contrast *= 2 else: contrast /= 2 im_tmp = (((imrescale / np.max(imrescale)) * 255) * contrast) im_tmp = np.clip(im_tmp, 0.0, 255) with warnings.catch_warnings(): warnings.simplefilter('ignore') ios.imsave(image_file, np.flipud(im_tmp).astype(np.uint8)) graph.delete_figure(img) img = graph.draw_image(image_file, location=(0, imy)) graph.send_figure_to_back(img) elif (event in ('Finish', None)): if (event == 'Finish'): with open((outfil + '.txt'), 'ab+') as f: np.savetxt(f, np.zeros((1, 5)), fmt='%8.2f') (x, y) = winselect.current_location() wlocw = (x, y) winselect.close() return wlocw
-2,950,692,695,651,559,000
displays new window with image infile + start + 'fit a rectangle around the selected line can be selected with dragging the mouse :param infile: filebase of image :param contrast: brightness of image :param lines: list of calibration wavelengths :param res_dict: dictionary :param fits_dict: " :param wloc: location of displayed window for selection :param outfil: filename without extension (.txt) with results of line selection :return: x0, y0: center coordinates of selected rectangle (int) dx, dy: half width and height of selected rectangle (int)
myselect.py
select_lines
meteorspectroscopy/meteor-spectrum-calibration
python
def select_lines(infile, contrast, lines, res_dict, fits_dict, wloc, outfil): '\n displays new window with image infile + start + \'fit\n a rectangle around the selected line can be selected with dragging the mouse\n :param infile: filebase of image\n :param contrast: brightness of image\n :param lines: list of calibration wavelengths\n :param res_dict: dictionary\n :param fits_dict: "\n :param wloc: location of displayed window for selection\n :param outfil: filename without extension (.txt) with results of line selection\n :return:\n x0, y0: center coordinates of selected rectangle (int)\n dx, dy: half width and height of selected rectangle (int)\n ' def fitgaussimage(image, xy0, dxy, lam): x0 = xy0[0] y0 = xy0[1] dx = dxy[0] dy = dxy[1] print(x0, y0, dx, dy) data = image[(y0 - dy):(y0 + dy), (x0 - dx):(x0 + dx)] (params, success) = m_fun.fit_gaussian_2d(data) if (success in [1, 2, 3, 4]): (height, x, y, width_x, width_y) = params width_x = ((2 * np.sqrt((2 * np.log(2)))) * np.abs(width_x)) width_y = ((2 * np.sqrt((2 * np.log(2)))) * np.abs(width_y)) x = ((x + y0) - dy) y = ((y + x0) - dx) xyw = (y, x, width_y, width_x, lam) return xyw else: return (0, 0, 0, 0, 0) xyl = [] dxy = [10, 10] i = i_plot = 0 (im, header) = m_fun.get_fits_image(infile) if (len(im.shape) == 3): imbw = np.sum(im, axis=2) else: imbw = im m_fun.get_fits_keys(header, fits_dict, res_dict, keyprint=False) (imy, imx) = im.shape[:2] image_file = 'tmp.png' imrescale = np.flipud(ios.imread(image_file)) (canvasy, canvasx) = imrescale.shape[:2] wlocw = (wloc[0], wloc[1]) image_elem_sel = [sg.Graph(canvas_size=(canvasx, canvasy), graph_bottom_left=(0, 0), graph_top_right=(imx, imy), key='-GRAPH-', change_submits=True, drag_submits=True)] layout_select = [[sg.Ok(), sg.Cancel(), sg.Button('Skip Line'), sg.Button('Finish'), sg.Button('I'), sg.Button('D'), sg.Text(infile, size=(30, 1)), sg.Text(key='info', size=(40, 1))], image_elem_sel] winselect = sg.Window(f'select rectangle for fit size, click 
lines', layout_select, finalize=True, location=wlocw, keep_on_top=True, no_titlebar=False, resizable=True, disable_close=False, disable_minimize=True, element_padding=(2, 2)) graph = winselect['-GRAPH-'] winselect_active = True img = graph.draw_image(image_file, location=(0, imy)) dragging = False start_point = end_point = prior_rect = None index = 0 icircle = itext = None color = 'yellow' while winselect_active: (event, values) = winselect.read() if (event == '-GRAPH-'): (x, y) = values['-GRAPH-'] if (not dragging): start_point = (x, y) dragging = True else: end_point = (x, y) if prior_rect: graph.delete_figure(prior_rect) if (None not in (start_point, end_point)): prior_rect = graph.draw_rectangle(start_point, end_point, line_color='red') elif ((event is not None) and event.endswith('+UP')): xy0 = [int((0.5 * (start_point[0] + end_point[0]))), int((0.5 * (start_point[1] + end_point[1])))] size = (abs((start_point[0] - end_point[0])), abs((start_point[1] - end_point[1]))) info = winselect['info'] info.update(value=f'grabbed rectangle at {xy0} with size {size}') (start_point, end_point) = (None, None) dragging = False if (min(size[0], size[1]) > 2): info.update(value=f'rectangle at {xy0} with size {size}') dxy = size elif (i < len(lines)): if prior_rect: graph.delete_figure(prior_rect) print(xy0, lines[i]) xyw = fitgaussimage(imbw, xy0, dxy, lines[i]) if xyw[0]: if ((0 < xyw[0] < imx) and (0 < xyw[1] < imy)): print(np.float16(xyw)) xyl.append(np.float32(xyw)) r = ((xyw[2] + xyw[3]) / 4) icircle = graph.DrawCircle((xyw[0], xyw[1]), r, line_color=color, line_width=3) itext = graph.DrawText((' ' + str(lines[i])), location=(xyw[0], xyw[1]), color=color, font=('Arial', 12), angle=45, text_location=sg.TEXT_LOCATION_BOTTOM_LEFT) info.update(value=f'line {lines[i]} at {np.float16(xyw)}') graph.update() i += 1 i_plot += 1 else: info.update(value='bad fit, try again') print('bad fit, try again') else: info.update(value='Fit not successful, try again') print('Fit not 
successful, try again') else: info.update(value='all lines measured, press OK or Cancel') elif (event == 'Ok'): if (np.array(xyl).shape[0] > 1): xyl = np.array(xyl, dtype=np.float32) with open(m_fun.change_extension(outfil, '.txt'), 'ab+') as f: np.savetxt(f, xyl, fmt='%8.2f', header=(((str(index) + ' ') + str(infile)) + '.fit')) np.savetxt(f, np.zeros((1, 5)), fmt='%8.2f') index += 1 color = ('red' if (color == 'yellow') else 'yellow') elif icircle: graph.delete_figure(icircle) graph.delete_figure(itext) graph.update() xyl = [] i = i_plot = 0 elif (event == 'Cancel'): for ind in range(i_plot): xyl = np.array(xyl, dtype=np.float32) rsq2 = ((xyl[(ind, 2)] + xyl[(ind, 3)]) / 5.6) drag_figures = graph.get_figures_at_location(((xyl[(ind, 0)] + rsq2), (xyl[(ind, 1)] + rsq2))) for figure in drag_figures: if (figure != img): graph.delete_figure(figure) graph.update() xyl = [] i = i_plot = 0 elif (event == 'Skip Line'): i += 1 elif (event in ('I', 'D')): if (event == 'I'): contrast *= 2 else: contrast /= 2 im_tmp = (((imrescale / np.max(imrescale)) * 255) * contrast) im_tmp = np.clip(im_tmp, 0.0, 255) with warnings.catch_warnings(): warnings.simplefilter('ignore') ios.imsave(image_file, np.flipud(im_tmp).astype(np.uint8)) graph.delete_figure(img) img = graph.draw_image(image_file, location=(0, imy)) graph.send_figure_to_back(img) elif (event in ('Finish', None)): if (event == 'Finish'): with open((outfil + '.txt'), 'ab+') as f: np.savetxt(f, np.zeros((1, 5)), fmt='%8.2f') (x, y) = winselect.current_location() wlocw = (x, y) winselect.close() return wlocw
def x(self, q): 'Apply X to q.' if isinstance(q, QuantumRegister): gs = InstructionSet() for j in range(q.size): gs.add(self.x((q, j))) return gs else: self._check_qubit(q) return self._attach(XGate(q, self))
8,371,005,246,915,321,000
Apply X to q.
qiskit/extensions/standard/x.py
x
NickyBar/QIP
python
def x(self, q): if isinstance(q, QuantumRegister): gs = InstructionSet() for j in range(q.size): gs.add(self.x((q, j))) return gs else: self._check_qubit(q) return self._attach(XGate(q, self))
def __init__(self, qubit, circ=None): 'Create new X gate.' super(XGate, self).__init__('x', [], [qubit], circ)
5,263,243,722,909,014,000
Create new X gate.
qiskit/extensions/standard/x.py
__init__
NickyBar/QIP
python
def __init__(self, qubit, circ=None): super(XGate, self).__init__('x', [], [qubit], circ)
def qasm(self): 'Return OPENQASM string.' qubit = self.arg[0] return self._qasmif(('x %s[%d];' % (qubit[0].name, qubit[1])))
-1,905,868,184,292,392,400
Return OPENQASM string.
qiskit/extensions/standard/x.py
qasm
NickyBar/QIP
python
def qasm(self): qubit = self.arg[0] return self._qasmif(('x %s[%d];' % (qubit[0].name, qubit[1])))
def inverse(self): 'Invert this gate.' return self
6,634,723,205,442,282,000
Invert this gate.
qiskit/extensions/standard/x.py
inverse
NickyBar/QIP
python
def inverse(self): return self
def reapply(self, circ): 'Reapply this gate to corresponding qubits in circ.' self._modifiers(circ.x(self.arg[0]))
3,164,904,002,953,134,600
Reapply this gate to corresponding qubits in circ.
qiskit/extensions/standard/x.py
reapply
NickyBar/QIP
python
def reapply(self, circ): self._modifiers(circ.x(self.arg[0]))
def __init__(self, **kwargs): '\n Initializes a new ScheduleSecretDeletionDetails object with values from keyword arguments.\n The following keyword arguments are supported (corresponding to the getters/setters of this class):\n\n :param time_of_deletion:\n The value to assign to the time_of_deletion property of this ScheduleSecretDeletionDetails.\n :type time_of_deletion: datetime\n\n ' self.swagger_types = {'time_of_deletion': 'datetime'} self.attribute_map = {'time_of_deletion': 'timeOfDeletion'} self._time_of_deletion = None
-6,563,169,429,261,355,000
Initializes a new ScheduleSecretDeletionDetails object with values from keyword arguments. The following keyword arguments are supported (corresponding to the getters/setters of this class): :param time_of_deletion: The value to assign to the time_of_deletion property of this ScheduleSecretDeletionDetails. :type time_of_deletion: datetime
darling_ansible/python_venv/lib/python3.7/site-packages/oci/vault/models/schedule_secret_deletion_details.py
__init__
revnav/sandbox
python
def __init__(self, **kwargs): '\n Initializes a new ScheduleSecretDeletionDetails object with values from keyword arguments.\n The following keyword arguments are supported (corresponding to the getters/setters of this class):\n\n :param time_of_deletion:\n The value to assign to the time_of_deletion property of this ScheduleSecretDeletionDetails.\n :type time_of_deletion: datetime\n\n ' self.swagger_types = {'time_of_deletion': 'datetime'} self.attribute_map = {'time_of_deletion': 'timeOfDeletion'} self._time_of_deletion = None
@property def time_of_deletion(self): '\n Gets the time_of_deletion of this ScheduleSecretDeletionDetails.\n An optional property indicating when to delete the secret version, expressed in `RFC 3339`__ timestamp format.\n\n __ https://tools.ietf.org/html/rfc3339\n\n\n :return: The time_of_deletion of this ScheduleSecretDeletionDetails.\n :rtype: datetime\n ' return self._time_of_deletion
-1,340,899,249,621,529,900
Gets the time_of_deletion of this ScheduleSecretDeletionDetails. An optional property indicating when to delete the secret version, expressed in `RFC 3339`__ timestamp format. __ https://tools.ietf.org/html/rfc3339 :return: The time_of_deletion of this ScheduleSecretDeletionDetails. :rtype: datetime
darling_ansible/python_venv/lib/python3.7/site-packages/oci/vault/models/schedule_secret_deletion_details.py
time_of_deletion
revnav/sandbox
python
@property def time_of_deletion(self): '\n Gets the time_of_deletion of this ScheduleSecretDeletionDetails.\n An optional property indicating when to delete the secret version, expressed in `RFC 3339`__ timestamp format.\n\n __ https://tools.ietf.org/html/rfc3339\n\n\n :return: The time_of_deletion of this ScheduleSecretDeletionDetails.\n :rtype: datetime\n ' return self._time_of_deletion
@time_of_deletion.setter def time_of_deletion(self, time_of_deletion): '\n Sets the time_of_deletion of this ScheduleSecretDeletionDetails.\n An optional property indicating when to delete the secret version, expressed in `RFC 3339`__ timestamp format.\n\n __ https://tools.ietf.org/html/rfc3339\n\n\n :param time_of_deletion: The time_of_deletion of this ScheduleSecretDeletionDetails.\n :type: datetime\n ' self._time_of_deletion = time_of_deletion
5,927,719,153,223,761,000
Sets the time_of_deletion of this ScheduleSecretDeletionDetails. An optional property indicating when to delete the secret version, expressed in `RFC 3339`__ timestamp format. __ https://tools.ietf.org/html/rfc3339 :param time_of_deletion: The time_of_deletion of this ScheduleSecretDeletionDetails. :type: datetime
darling_ansible/python_venv/lib/python3.7/site-packages/oci/vault/models/schedule_secret_deletion_details.py
time_of_deletion
revnav/sandbox
python
@time_of_deletion.setter def time_of_deletion(self, time_of_deletion): '\n Sets the time_of_deletion of this ScheduleSecretDeletionDetails.\n An optional property indicating when to delete the secret version, expressed in `RFC 3339`__ timestamp format.\n\n __ https://tools.ietf.org/html/rfc3339\n\n\n :param time_of_deletion: The time_of_deletion of this ScheduleSecretDeletionDetails.\n :type: datetime\n ' self._time_of_deletion = time_of_deletion
def laplacian(csgraph, normed=False, return_diag=False): ' Return the Laplacian matrix of a directed graph.\n\n For non-symmetric graphs the out-degree is used in the computation.\n\n Parameters\n ----------\n csgraph : array_like or sparse matrix, 2 dimensions\n compressed-sparse graph, with shape (N, N).\n normed : bool, optional\n If True, then compute normalized Laplacian.\n return_diag : bool, optional\n If True, then return diagonal as well as laplacian.\n\n Returns\n -------\n lap : ndarray\n The N x N laplacian matrix of graph.\n diag : ndarray\n The length-N diagonal of the laplacian matrix.\n diag is returned only if return_diag is True.\n\n Notes\n -----\n The Laplacian matrix of a graph is sometimes referred to as the\n "Kirchoff matrix" or the "admittance matrix", and is useful in many\n parts of spectral graph theory. In particular, the eigen-decomposition\n of the laplacian matrix can give insight into many properties of the graph.\n\n For non-symmetric directed graphs, the laplacian is computed using the\n out-degree of each node.\n\n Examples\n --------\n >>> from scipy.sparse import csgraph\n >>> G = np.arange(5) * np.arange(5)[:, np.newaxis]\n >>> G\n array([[ 0, 0, 0, 0, 0],\n [ 0, 1, 2, 3, 4],\n [ 0, 2, 4, 6, 8],\n [ 0, 3, 6, 9, 12],\n [ 0, 4, 8, 12, 16]])\n >>> csgraph.laplacian(G, normed=False)\n array([[ 0, 0, 0, 0, 0],\n [ 0, 9, -2, -3, -4],\n [ 0, -2, 16, -6, -8],\n [ 0, -3, -6, 21, -12],\n [ 0, -4, -8, -12, 24]])\n ' if ((csgraph.ndim != 2) or (csgraph.shape[0] != csgraph.shape[1])): raise ValueError('csgraph must be a square matrix or array') if (normed and (np.issubdtype(csgraph.dtype, np.int) or np.issubdtype(csgraph.dtype, np.uint))): csgraph = csgraph.astype(np.float) if isspmatrix(csgraph): return _laplacian_sparse(csgraph, normed=normed, return_diag=return_diag) else: return _laplacian_dense(csgraph, normed=normed, return_diag=return_diag)
-3,602,810,525,153,279,000
Return the Laplacian matrix of a directed graph. For non-symmetric graphs the out-degree is used in the computation. Parameters ---------- csgraph : array_like or sparse matrix, 2 dimensions compressed-sparse graph, with shape (N, N). normed : bool, optional If True, then compute normalized Laplacian. return_diag : bool, optional If True, then return diagonal as well as laplacian. Returns ------- lap : ndarray The N x N laplacian matrix of graph. diag : ndarray The length-N diagonal of the laplacian matrix. diag is returned only if return_diag is True. Notes ----- The Laplacian matrix of a graph is sometimes referred to as the "Kirchoff matrix" or the "admittance matrix", and is useful in many parts of spectral graph theory. In particular, the eigen-decomposition of the laplacian matrix can give insight into many properties of the graph. For non-symmetric directed graphs, the laplacian is computed using the out-degree of each node. Examples -------- >>> from scipy.sparse import csgraph >>> G = np.arange(5) * np.arange(5)[:, np.newaxis] >>> G array([[ 0, 0, 0, 0, 0], [ 0, 1, 2, 3, 4], [ 0, 2, 4, 6, 8], [ 0, 3, 6, 9, 12], [ 0, 4, 8, 12, 16]]) >>> csgraph.laplacian(G, normed=False) array([[ 0, 0, 0, 0, 0], [ 0, 9, -2, -3, -4], [ 0, -2, 16, -6, -8], [ 0, -3, -6, 21, -12], [ 0, -4, -8, -12, 24]])
docker_version/resources/usr/local/lib/python2.7/dist-packages/scipy/sparse/csgraph/_laplacian.py
laplacian
animesh/parliament2
python
def laplacian(csgraph, normed=False, return_diag=False): ' Return the Laplacian matrix of a directed graph.\n\n For non-symmetric graphs the out-degree is used in the computation.\n\n Parameters\n ----------\n csgraph : array_like or sparse matrix, 2 dimensions\n compressed-sparse graph, with shape (N, N).\n normed : bool, optional\n If True, then compute normalized Laplacian.\n return_diag : bool, optional\n If True, then return diagonal as well as laplacian.\n\n Returns\n -------\n lap : ndarray\n The N x N laplacian matrix of graph.\n diag : ndarray\n The length-N diagonal of the laplacian matrix.\n diag is returned only if return_diag is True.\n\n Notes\n -----\n The Laplacian matrix of a graph is sometimes referred to as the\n "Kirchoff matrix" or the "admittance matrix", and is useful in many\n parts of spectral graph theory. In particular, the eigen-decomposition\n of the laplacian matrix can give insight into many properties of the graph.\n\n For non-symmetric directed graphs, the laplacian is computed using the\n out-degree of each node.\n\n Examples\n --------\n >>> from scipy.sparse import csgraph\n >>> G = np.arange(5) * np.arange(5)[:, np.newaxis]\n >>> G\n array([[ 0, 0, 0, 0, 0],\n [ 0, 1, 2, 3, 4],\n [ 0, 2, 4, 6, 8],\n [ 0, 3, 6, 9, 12],\n [ 0, 4, 8, 12, 16]])\n >>> csgraph.laplacian(G, normed=False)\n array([[ 0, 0, 0, 0, 0],\n [ 0, 9, -2, -3, -4],\n [ 0, -2, 16, -6, -8],\n [ 0, -3, -6, 21, -12],\n [ 0, -4, -8, -12, 24]])\n ' if ((csgraph.ndim != 2) or (csgraph.shape[0] != csgraph.shape[1])): raise ValueError('csgraph must be a square matrix or array') if (normed and (np.issubdtype(csgraph.dtype, np.int) or np.issubdtype(csgraph.dtype, np.uint))): csgraph = csgraph.astype(np.float) if isspmatrix(csgraph): return _laplacian_sparse(csgraph, normed=normed, return_diag=return_diag) else: return _laplacian_dense(csgraph, normed=normed, return_diag=return_diag)
@classmethod def _dict_to_obj(cls, json_dict): 'Override dict_to_obj implementation' obj = cls._map_values_to_kwargs(json_dict) for key in obj.kwargs: obj.kwargs[key] = BandwidthInterface._dict_to_obj(obj.kwargs[key]) if obj.private: obj.private = BandwidthInterface._dict_to_obj(obj.private) if obj.public: obj.public = BandwidthInterface._dict_to_obj(obj.public) return obj
3,527,210,701,742,197,000
Override dict_to_obj implementation
cloudcafe/events/models/compute/common.py
_dict_to_obj
kurhula/cloudcafe
python
@classmethod def _dict_to_obj(cls, json_dict): obj = cls._map_values_to_kwargs(json_dict) for key in obj.kwargs: obj.kwargs[key] = BandwidthInterface._dict_to_obj(obj.kwargs[key]) if obj.private: obj.private = BandwidthInterface._dict_to_obj(obj.private) if obj.public: obj.public = BandwidthInterface._dict_to_obj(obj.public) return obj
def testNumber(self): 'Use a number.' self.assertEqual((i18n.twntranslate('de', 'test-plural', 0) % {'num': 0}), u'Bot: Ändere 0 Seiten.') self.assertEqual((i18n.twntranslate('de', 'test-plural', 1) % {'num': 1}), u'Bot: Ändere 1 Seite.') self.assertEqual((i18n.twntranslate('de', 'test-plural', 2) % {'num': 2}), u'Bot: Ändere 2 Seiten.') self.assertEqual((i18n.twntranslate('de', 'test-plural', 3) % {'num': 3}), u'Bot: Ändere 3 Seiten.') self.assertEqual((i18n.twntranslate('en', 'test-plural', 0) % {'num': 'no'}), u'Bot: Changing no pages.') self.assertEqual((i18n.twntranslate('en', 'test-plural', 1) % {'num': 'one'}), u'Bot: Changing one page.') self.assertEqual((i18n.twntranslate('en', 'test-plural', 2) % {'num': 'two'}), u'Bot: Changing two pages.') self.assertEqual((i18n.twntranslate('en', 'test-plural', 3) % {'num': 'three'}), u'Bot: Changing three pages.')
-6,116,252,089,283,957,000
Use a number.
tests/i18n_tests.py
testNumber
xZise/pywikibot-core
python
def testNumber(self): self.assertEqual((i18n.twntranslate('de', 'test-plural', 0) % {'num': 0}), u'Bot: Ändere 0 Seiten.') self.assertEqual((i18n.twntranslate('de', 'test-plural', 1) % {'num': 1}), u'Bot: Ändere 1 Seite.') self.assertEqual((i18n.twntranslate('de', 'test-plural', 2) % {'num': 2}), u'Bot: Ändere 2 Seiten.') self.assertEqual((i18n.twntranslate('de', 'test-plural', 3) % {'num': 3}), u'Bot: Ändere 3 Seiten.') self.assertEqual((i18n.twntranslate('en', 'test-plural', 0) % {'num': 'no'}), u'Bot: Changing no pages.') self.assertEqual((i18n.twntranslate('en', 'test-plural', 1) % {'num': 'one'}), u'Bot: Changing one page.') self.assertEqual((i18n.twntranslate('en', 'test-plural', 2) % {'num': 'two'}), u'Bot: Changing two pages.') self.assertEqual((i18n.twntranslate('en', 'test-plural', 3) % {'num': 'three'}), u'Bot: Changing three pages.')
def testString(self): 'Use a string.' self.assertEqual((i18n.twntranslate('en', 'test-plural', '1') % {'num': 'one'}), u'Bot: Changing one page.')
-9,027,707,513,875,948,000
Use a string.
tests/i18n_tests.py
testString
xZise/pywikibot-core
python
def testString(self): self.assertEqual((i18n.twntranslate('en', 'test-plural', '1') % {'num': 'one'}), u'Bot: Changing one page.')
def testDict(self): 'Use a dictionary.' self.assertEqual(i18n.twntranslate('en', 'test-plural', {'num': 2}), u'Bot: Changing 2 pages.')
3,638,274,504,369,245,700
Use a dictionary.
tests/i18n_tests.py
testDict
xZise/pywikibot-core
python
def testDict(self): self.assertEqual(i18n.twntranslate('en', 'test-plural', {'num': 2}), u'Bot: Changing 2 pages.')
def testExtended(self): 'Use additional format strings.' self.assertEqual(i18n.twntranslate('fr', 'test-plural', {'num': 1, 'descr': 'seulement'}), u'Robot: Changer seulement une page.')
8,598,923,295,161,366,000
Use additional format strings.
tests/i18n_tests.py
testExtended
xZise/pywikibot-core
python
def testExtended(self): self.assertEqual(i18n.twntranslate('fr', 'test-plural', {'num': 1, 'descr': 'seulement'}), u'Robot: Changer seulement une page.')
def testExtendedOutside(self): 'Use additional format strings also outside.' self.assertEqual((i18n.twntranslate('fr', 'test-plural', 1) % {'descr': 'seulement'}), u'Robot: Changer seulement une page.')
182,762,766,881,581,250
Use additional format strings also outside.
tests/i18n_tests.py
testExtendedOutside
xZise/pywikibot-core
python
def testExtendedOutside(self): self.assertEqual((i18n.twntranslate('fr', 'test-plural', 1) % {'descr': 'seulement'}), u'Robot: Changer seulement une page.')
def testMultipleWrongParameterLength(self): 'Test wrong parameter length.' with self.assertRaisesRegex(ValueError, 'Length of parameter does not match PLURAL occurrences'): self.assertEqual((i18n.twntranslate('de', 'test-multiple-plurals', (1, 2)) % {'action': u'Ändere', 'line': u'drei'}), u'Bot: Ändere drei Zeilen von mehreren Seiten.') with self.assertRaisesRegex(ValueError, 'Length of parameter does not match PLURAL occurrences'): self.assertEqual((i18n.twntranslate('de', 'test-multiple-plurals', ['321']) % {'action': u'Ändere', 'line': u'dreihunderteinundzwanzig'}), u'Bot: Ändere dreihunderteinundzwanzig Zeilen von mehreren Seiten.')
3,387,738,654,253,292,000
Test wrong parameter length.
tests/i18n_tests.py
testMultipleWrongParameterLength
xZise/pywikibot-core
python
def testMultipleWrongParameterLength(self): with self.assertRaisesRegex(ValueError, 'Length of parameter does not match PLURAL occurrences'): self.assertEqual((i18n.twntranslate('de', 'test-multiple-plurals', (1, 2)) % {'action': u'Ändere', 'line': u'drei'}), u'Bot: Ändere drei Zeilen von mehreren Seiten.') with self.assertRaisesRegex(ValueError, 'Length of parameter does not match PLURAL occurrences'): self.assertEqual((i18n.twntranslate('de', 'test-multiple-plurals', ['321']) % {'action': u'Ändere', 'line': u'dreihunderteinundzwanzig'}), u'Bot: Ändere dreihunderteinundzwanzig Zeilen von mehreren Seiten.')
def testMultipleNonNumbers(self): 'Test error handling for multiple non-numbers.' with self.assertRaisesRegex(ValueError, "invalid literal for int\\(\\) with base 10: 'drei'"): self.assertEqual((i18n.twntranslate('de', 'test-multiple-plurals', ['drei', '1', 1]) % {'action': u'Ändere', 'line': u'drei'}), u'Bot: Ändere drei Zeilen von einer Seite.') with self.assertRaisesRegex(ValueError, "invalid literal for int\\(\\) with base 10: 'elf'"): self.assertEqual(i18n.twntranslate('de', 'test-multiple-plurals', {'action': u'Ändere', 'line': 'elf', 'page': 2}), u'Bot: Ändere elf Zeilen von mehreren Seiten.')
-1,921,360,065,697,760,500
Test error handling for multiple non-numbers.
tests/i18n_tests.py
testMultipleNonNumbers
xZise/pywikibot-core
python
def testMultipleNonNumbers(self): with self.assertRaisesRegex(ValueError, "invalid literal for int\\(\\) with base 10: 'drei'"): self.assertEqual((i18n.twntranslate('de', 'test-multiple-plurals', ['drei', '1', 1]) % {'action': u'Ändere', 'line': u'drei'}), u'Bot: Ändere drei Zeilen von einer Seite.') with self.assertRaisesRegex(ValueError, "invalid literal for int\\(\\) with base 10: 'elf'"): self.assertEqual(i18n.twntranslate('de', 'test-multiple-plurals', {'action': u'Ändere', 'line': 'elf', 'page': 2}), u'Bot: Ändere elf Zeilen von mehreren Seiten.')
def test_fallback_lang(self): "\n Test that twntranslate uses the translation's language.\n\n twntranslate calls _twtranslate which might return the translation for\n a different language and then the plural rules from that language need\n to be applied.\n " assert ('co' not in plural.plural_rules) assert (plural.plural_rules['fr']['plural'](0) is False) self.assertEqual(i18n.twntranslate('co', 'test-plural', {'num': 0, 'descr': 'seulement'}), u'Robot: Changer seulement une page.') self.assertEqual(i18n.twntranslate('co', 'test-plural', {'num': 1, 'descr': 'seulement'}), u'Robot: Changer seulement une page.')
767,523,222,211,064,200
Test that twntranslate uses the translation's language. twntranslate calls _twtranslate which might return the translation for a different language and then the plural rules from that language need to be applied.
tests/i18n_tests.py
test_fallback_lang
xZise/pywikibot-core
python
def test_fallback_lang(self): "\n Test that twntranslate uses the translation's language.\n\n twntranslate calls _twtranslate which might return the translation for\n a different language and then the plural rules from that language need\n to be applied.\n " assert ('co' not in plural.plural_rules) assert (plural.plural_rules['fr']['plural'](0) is False) self.assertEqual(i18n.twntranslate('co', 'test-plural', {'num': 0, 'descr': 'seulement'}), u'Robot: Changer seulement une page.') self.assertEqual(i18n.twntranslate('co', 'test-plural', {'num': 1, 'descr': 'seulement'}), u'Robot: Changer seulement une page.')
def test_basic(self): 'Verify that real messages are able to be loaded.' self.assertEqual(i18n.twntranslate('en', 'pywikibot-enter-new-text'), 'Please enter the new text:')
-1,621,876,870,489,097,700
Verify that real messages are able to be loaded.
tests/i18n_tests.py
test_basic
xZise/pywikibot-core
python
def test_basic(self): self.assertEqual(i18n.twntranslate('en', 'pywikibot-enter-new-text'), 'Please enter the new text:')
def test_missing(self):
    """Test a missing message from a real message bundle."""
    # The key does not exist in the real bundle, so a TranslationError
    # must be raised rather than a fallback string returned.
    with self.assertRaises(i18n.TranslationError):
        i18n.twntranslate('en', 'pywikibot-missing-key')
5,642,107,501,364,760,000
Test a missing message from a real message bundle.
tests/i18n_tests.py
test_missing
xZise/pywikibot-core
python
def test_missing(self): self.assertRaises(i18n.TranslationError, i18n.twntranslate, 'en', 'pywikibot-missing-key')
def test_pagegen_i18n_input(self):
    """Test i18n.input via the listpages script."""
    # Run listpages with -cat but no category name, so the script has to
    # prompt for it; the prompt is emitted on stderr.
    script_args = ['listpages', '-cat']
    result = self._execute(args=script_args,
                           data_in='non-existant-category\n',
                           timeout=5)
    self.assertIn('Please enter the category name:', result['stderr'])
2,966,842,142,429,970,000
Test i18n.input via the listpages script.
tests/i18n_tests.py
test_pagegen_i18n_input
xZise/pywikibot-core
python
def test_pagegen_i18n_input(self): result = self._execute(args=['listpages', '-cat'], data_in='non-existant-category\n', timeout=5) self.assertIn('Please enter the category name:', result['stderr'])
def test_pagegen_i18n_input(self):
    """Test i18n.input falls back with missing message package."""
    # Without the message package the fallback prompt must be shown
    # and the canned user reply returned unchanged.
    answer = i18n.input('pywikibot-enter-category-name',
                        fallback_prompt='dummy output')
    self.assertEqual(answer, 'dummy input')
    self.assertIn('dummy output: ', self.output_text)
1,770,211,232,122,889,700
Test i18n.input falls back with missing message package.
tests/i18n_tests.py
test_pagegen_i18n_input
xZise/pywikibot-core
python
def test_pagegen_i18n_input(self): rv = i18n.input('pywikibot-enter-category-name', fallback_prompt='dummy output') self.assertEqual(rv, 'dummy input') self.assertIn('dummy output: ', self.output_text)
def __init__(self, destination):
    """Remember the output location for subsequent writes.

    :param destination: path of the CSV file that rows will be
        appended to (presumably a filesystem path string — confirm
        against the caller)
    """
    self.__destination = destination
5,229,528,130,363,022,000
:param destination:
pv_simulator/CSVFileWriter.py
__init__
Shifuddin/PV-Simulator-Challenge
python
def __init__(self, destination): '\n \n ' self.__destination = destination
async def write(self, timestamp: datetime, meter_power_value: int, simulator_power_value: int, combined_power_value: int) -> None:
    """Append one power reading as a row to the CSV file.

    :param timestamp: time of the reading; written as the first column
    :param meter_power_value: power value reported by the meter
    :param simulator_power_value: power value produced by the PV simulator
    :param combined_power_value: combined meter + simulator power value
    :return: None
    """
    async with aiofiles.open(self.__destination, mode='a') as csv_file:
        csv_file_writer = AsyncWriter(csv_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
        # Bug fix: write the supplied timestamp instead of datetime.now(),
        # so the recorded time matches the reading it belongs to rather
        # than the (later) moment the row is flushed to disk.
        await csv_file_writer.writerow([timestamp, meter_power_value, simulator_power_value, combined_power_value])
        logging.debug('%s, %s, %s, %s are written to %s', timestamp, meter_power_value, simulator_power_value, combined_power_value, self.__destination)
5,507,098,978,224,734,000
Writes values into a csv file :param timestamp: :param meter_power_value: :param simulator_power_value: :param combined_power_value: :return:
pv_simulator/CSVFileWriter.py
write
Shifuddin/PV-Simulator-Challenge
python
async def write(self, timestamp: datetime, meter_power_value: int, simulator_power_value: int, combined_power_value: int) -> None: '\n Writes values into a csv file\n :param timestamp:\n :param meter_power_value:\n :param simulator_power_value:\n :param combined_power_value:\n :return:\n ' async with aiofiles.open(self.__destination, mode='a') as csv_file: csv_file_writer = AsyncWriter(csv_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL) (await csv_file_writer.writerow([datetime.now(), meter_power_value, simulator_power_value, combined_power_value])) logging.debug('%s, %s, %s, %s are writen to %s', datetime.now(), meter_power_value, simulator_power_value, combined_power_value, self.__destination)
def print_path(path):
    """Print *path*, marking directories with '/' and symlinks with their target."""
    suffix = '/' if os.path.isdir(path) else ''
    line = path + suffix
    if os.path.islink(path):
        line += ' -> ' + os.readlink(path)
    print(line)
2,459,191,363,358,521,300
Print path.
tests/integration/test_data_finder.py
print_path
Peter9192/ESMValCore
python
def print_path(path): txt = path if os.path.isdir(path): txt += '/' if os.path.islink(path): txt += (' -> ' + os.readlink(path)) print(txt)
def tree(path):
    """Print path, similar to the `tree` command."""
    print_path(path)
    # os.walk yields directories before files for each visited dirpath,
    # matching the original per-level ordering.
    for dirpath, dirnames, filenames in os.walk(path):
        for entry in dirnames + filenames:
            print_path(os.path.join(dirpath, entry))
-3,290,092,247,751,938,000
Print path, similar to the `tree` command.
tests/integration/test_data_finder.py
tree
Peter9192/ESMValCore
python
def tree(path): print_path(path) for (dirpath, dirnames, filenames) in os.walk(path): for dirname in dirnames: print_path(os.path.join(dirpath, dirname)) for filename in filenames: print_path(os.path.join(dirpath, filename))
def create_file(filename):
    """Create an empty file, creating parent directories as needed.

    The file is opened in append mode so an existing file is left
    untouched (only its mtime semantics follow `touch`-like behavior).

    :param filename: path of the file to create; may be a bare name
        with no directory component.
    """
    dirname = os.path.dirname(filename)
    # Bug fix: for a bare filename dirname is '' and os.makedirs('')
    # raises FileNotFoundError; also exist_ok avoids the racy
    # exists-then-makedirs check of the original.
    if dirname:
        os.makedirs(dirname, exist_ok=True)
    with open(filename, 'a'):
        pass
5,985,060,040,766,657,000
Create an empty file.
tests/integration/test_data_finder.py
create_file
Peter9192/ESMValCore
python
def create_file(filename): dirname = os.path.dirname(filename) if (not os.path.exists(dirname)): os.makedirs(dirname) with open(filename, 'a'): pass
def create_tree(path, filenames=None, symlinks=None):
    """Create directory structure, files and symlinks under *path*."""
    for name in filenames or []:
        create_file(os.path.join(path, name))
    for link in symlinks or []:
        link_path = os.path.join(path, link['link_name'])
        os.symlink(link['target'], link_path)
9,095,353,352,991,750,000
Create directory structure and files.
tests/integration/test_data_finder.py
create_tree
Peter9192/ESMValCore
python
def create_tree(path, filenames=None, symlinks=None): for filename in (filenames or []): create_file(os.path.join(path, filename)) for symlink in (symlinks or []): link_name = os.path.join(path, symlink['link_name']) os.symlink(symlink['target'], link_name)
@pytest.mark.parametrize('cfg', CONFIG['get_output_file'])
def test_get_output_file(cfg):
    """Test getting output name for preprocessed files."""
    expected = cfg['output_file']
    actual = get_output_file(cfg['variable'], cfg['preproc_dir'])
    assert actual == expected
-7,210,884,641,818,079,000
Test getting output name for preprocessed files.
tests/integration/test_data_finder.py
test_get_output_file
Peter9192/ESMValCore
python
@pytest.mark.parametrize('cfg', CONFIG['get_output_file']) def test_get_output_file(cfg): output_file = get_output_file(cfg['variable'], cfg['preproc_dir']) assert (output_file == cfg['output_file'])