'leave function: check function's locals are consumed'
def leave_functiondef(self, node):
    not_consumed = self._to_consume.pop()[0]
    if not (self.linter.is_message_enabled('unused-variable')
            or self.linter.is_message_enabled('unused-argument')):
        return
    if utils.is_error(node):
        return
    is_method = node.is_method()
    if is_method and node.is_abstract():
        return
    global_names = _flattened_scope_names(node.nodes_of_class(astroid.Global))
    nonlocal_names = _flattened_scope_names(node.nodes_of_class(astroid.Nonlocal))
    for name, stmts in six.iteritems(not_consumed):
        self._check_is_unused(name, node, stmts[0], global_names, nonlocal_names)
'check names imported exists in the global scope'
@utils.check_messages('global-variable-undefined', 'global-variable-not-assigned',
                      'global-statement', 'global-at-module-level',
                      'redefined-builtin')
def visit_global(self, node):
    frame = node.frame()
    if isinstance(frame, astroid.Module):
        self.add_message('global-at-module-level', node=node)
        return
    module = frame.root()
    default_message = True
    for name in node.names:
        try:
            assign_nodes = module.getattr(name)
        except astroid.NotFoundError:
            assign_nodes = []
        if not assign_nodes:
            self.add_message('global-variable-not-assigned', args=name, node=node)
            default_message = False
            continue
        for anode in assign_nodes:
            if (isinstance(anode, astroid.AssignName)
                    and anode.name in module.special_attributes):
                self.add_message('redefined-builtin', args=name, node=node)
                break
            if anode.frame() is module:
                break
        else:
            self.add_message('global-variable-undefined', args=name, node=node)
            default_message = False
    if default_message:
        self.add_message('global-statement', node=node)
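# Editor's sketch (not part of the source): a hypothetical input module showing the
# kind of code the visitor above reports, mapped to the message names it uses.

global SOME_NAME              # global-at-module-level: `global` outside a function

def set_counter():
    global COUNTER            # global-variable-undefined: COUNTER is only ever
    COUNTER = 0               # assigned inside this function, never at module level

def touch_missing():
    global MISSING            # global-variable-not-assigned: no assignment anywhere
    print(MISSING)

LIMIT = 10

def raise_limit():
    global LIMIT              # global-statement: LIMIT exists, so only the generic
    LIMIT += 1                # "using the global statement" message applies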
'check that a name is defined in the current scope and doesn't redefine a built-in'
@utils.check_messages(*MSGS.keys())
def visit_name(self, node):
    stmt = node.statement()
    if stmt.fromlineno is None:
        assert not stmt.root().file.endswith('.py')
        return
    name = node.name
    frame = stmt.scope()
    if (utils.is_func_default(node) or utils.is_func_decorator(node)
            or utils.is_ancestor_name(frame, node)):
        start_index = len(self._to_consume) - 2
    else:
        start_index = len(self._to_consume) - 1
    base_scope_type = self._to_consume[start_index][-1]
    for i in range(start_index, -1, -1):
        to_consume, consumed, scope_type = self._to_consume[i]
        if (scope_type == 'class' and i != start_index
                and not (base_scope_type == 'comprehension'
                         and i == start_index - 1)):
            if self._ignore_class_scope(node, name, frame):
                continue
        if name in consumed:
            defnode = utils.assign_parent(consumed[name][0])
            self._check_late_binding_closure(node, defnode)
            self._loopvar_name(node, name)
            break
        found_node = self._next_to_consume(node, name, to_consume)
        if found_node is None:
            continue
        defnode = utils.assign_parent(to_consume[name][0])
        if defnode is not None:
            self._check_late_binding_closure(node, defnode)
            defstmt = defnode.statement()
            defframe = defstmt.frame()
            recursive_klass = (frame is defframe
                               and defframe.parent_of(node)
                               and isinstance(defframe, astroid.ClassDef)
                               and node.name == defframe.name)
            maybee0601, annotation_return, use_outer_definition = \
                self._is_variable_violation(node, name, defnode, stmt, defstmt,
                                            frame, defframe, base_scope_type,
                                            recursive_klass)
            if use_outer_definition:
                continue
            if (maybee0601
                    and not utils.is_defined_before(node)
                    and not astroid.are_exclusive(stmt, defstmt, ('NameError',))):
                defined_by_stmt = (defstmt is stmt
                                   and isinstance(node, (astroid.DelName,
                                                         astroid.AssignName)))
                if (recursive_klass or defined_by_stmt or annotation_return
                        or isinstance(defstmt, astroid.Delete)):
                    if not utils.node_ignores_exception(node, NameError):
                        self.add_message('undefined-variable', args=name, node=node)
                elif base_scope_type != 'lambda':
                    self.add_message('used-before-assignment', args=name, node=node)
                elif base_scope_type == 'lambda':
                    if isinstance(frame, astroid.ClassDef) and name in frame.locals:
                        if isinstance(node.parent, astroid.Arguments):
                            if stmt.fromlineno <= defstmt.fromlineno:
                                self.add_message('used-before-assignment',
                                                 args=name, node=node)
                        else:
                            self.add_message('undefined-variable',
                                             args=name, node=node)
                    elif scope_type == 'lambda':
                        self.add_message('undefined-variable', node=node, args=name)
        consumed[name] = found_node
        del to_consume[name]
        self._loopvar_name(node, name)
        break
    else:
        if not (name in astroid.Module.scope_attrs
                or utils.is_builtin(name)
                or name in self.config.additional_builtins):
            if not utils.node_ignores_exception(node, NameError):
                self.add_message('undefined-variable', args=name, node=node)
'check modules attribute accesses'
@utils.check_messages('no-name-in-module')
def visit_import(self, node):
    if not self._analyse_fallback_blocks and utils.is_from_fallback_block(node):
        return
    for name, _ in node.names:
        parts = name.split('.')
        try:
            module = next(node.infer_name_module(parts[0]))
        except astroid.ResolveError:
            continue
        self._check_module_attrs(node, module, parts[1:])
'check modules attribute accesses'
@utils.check_messages('no-name-in-module')
def visit_importfrom(self, node):
    if not self._analyse_fallback_blocks and utils.is_from_fallback_block(node):
        return
    name_parts = node.modname.split('.')
    try:
        module = node.do_import_module(name_parts[0])
    except astroid.AstroidBuildingException:
        return
    module = self._check_module_attrs(node, module, name_parts[1:])
    if not module:
        return
    for name, _ in node.names:
        if name == '*':
            continue
        self._check_module_attrs(node, module, name.split('.'))
'Check unbalanced tuple unpacking for assignments and unpacking non-sequences.'
@utils.check_messages('unbalanced-tuple-unpacking', 'unpacking-non-sequence')
def visit_assign(self, node):
    if not isinstance(node.targets[0], (astroid.Tuple, astroid.List)):
        return
    targets = node.targets[0].itered()
    try:
        infered = utils.safe_infer(node.value)
        if infered is not None:
            self._check_unpacking(infered, node, targets)
    except astroid.InferenceError:
        return
'Check for unbalanced tuple unpacking and unpacking non sequences.'
def _check_unpacking(self, infered, node, targets):
    if utils.is_inside_abstract_class(node):
        return
    if utils.is_comprehension(node):
        return
    if infered is astroid.YES:
        return
    if (isinstance(infered.parent, astroid.Arguments)
            and isinstance(node.value, astroid.Name)
            and node.value.name == infered.parent.vararg):
        return
    if isinstance(infered, (astroid.Tuple, astroid.List)):
        values = infered.itered()
        if len(targets) != len(values):
            if any(isinstance(target, astroid.Starred) for target in targets):
                return
            self.add_message('unbalanced-tuple-unpacking', node=node,
                             args=(_get_unpacking_extra_info(node, infered),
                                   len(targets), len(values)))
    elif not utils.is_iterable(infered):
        self.add_message('unpacking-non-sequence', node=node,
                         args=(_get_unpacking_extra_info(node, infered),))
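# Editor's sketch (not part of the source): assignments illustrating what
# _check_unpacking reports, given the rules above.  Running this module would of
# course raise ValueError/TypeError; it only exists to be linted.

a, b = (1, 2, 3)            # unbalanced-tuple-unpacking: 2 targets, 3 values
x, y, z = [1, 2]            # unbalanced-tuple-unpacking: 3 targets, 2 values
first, *rest = (1, 2, 3)    # no message: a Starred target disables the length check

count = 42
left, right = count         # unpacking-non-sequence: an int is not iterable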
'check that module_names (a list of strings) are accessible through the given module; if the last accessed name corresponds to a module, return it'
def _check_module_attrs(self, node, module, module_names):
    assert isinstance(module, astroid.Module), module
    while module_names:
        name = module_names.pop(0)
        if name == '__dict__':
            module = None
            break
        try:
            module = next(module.getattr(name)[0].infer())
            if module is astroid.YES:
                return None
        except astroid.NotFoundError:
            if module.name in self._ignored_modules:
                return None
            self.add_message('no-name-in-module', args=(name, module.name), node=node)
            return None
        except astroid.InferenceError:
            return None
    if module_names:
        modname = module.name if module else '__dict__'
        self.add_message('no-name-in-module', node=node,
                         args=('.'.join(module_names), modname))
        return None
    if isinstance(module, astroid.Module):
        return module
    return None
'visit listcomp: update consumption analysis variable'
def visit_listcomp(self, node):
self._to_consume.append((copy.copy(node.locals), {}, 'comprehension'))
'leave listcomp: update consumption analysis variable'
def leave_listcomp(self, _):
self._to_consume.pop()
'Update consumption analysis for metaclasses.'
def _check_metaclasses(self, node):
    consumed = []
    for child_node in node.get_children():
        if isinstance(child_node, astroid.ClassDef):
            consumed.extend(self._check_classdef_metaclasses(child_node, node))
    for scope_locals, name in consumed:
        scope_locals.pop(name, None)
'Called for function and method definitions (def). :param node: Node for a function or method definition in the AST :type node: :class:`astroid.scoped_nodes.Function`'
def visit_functiondef(self, node):
    node_doc = utils.docstringify(node.doc)
    self.check_functiondef_params(node, node_doc)
    self.check_functiondef_returns(node, node_doc)
    self.check_functiondef_yields(node, node_doc)
'Check that all parameters in a function, method or class constructor on the one hand and the parameters mentioned in the parameter documentation (e.g. the Sphinx tags 'param' and 'type') on the other hand are consistent with each other. * Undocumented parameters except 'self' are noticed. * Undocumented parameter types except for 'self' and the ``*<args>`` and ``**<kwargs>`` parameters are noticed. * Parameters mentioned in the parameter documentation that don't or no longer exist in the function parameter list are noticed. * If the text "For the parameters, see" or "For the other parameters, see" (ignoring additional whitespace) is mentioned in the docstring, missing parameter documentation is tolerated. * If there's no Sphinx style, Google style or NumPy style parameter documentation at all, i.e. ``:param`` is never mentioned etc., the checker assumes that the parameters are documented in another format and the absence is tolerated. :param doc: Docstring for the function, method or class. :type doc: str :param arguments_node: Arguments node for the function, method or class constructor. :type arguments_node: :class:`astroid.scoped_nodes.Arguments` :param warning_node: The node to assign the warnings to :type warning_node: :class:`astroid.scoped_nodes.Node` :param accept_no_param_doc: Whether or not to allow no parameters to be documented. If None then this value is read from the configuration. :type accept_no_param_doc: bool or None'
def check_arguments_in_docstring(self, doc, arguments_node, warning_node, accept_no_param_doc=None):
    if not doc.doc:
        return
    if accept_no_param_doc is None:
        accept_no_param_doc = self.config.accept_no_param_doc
    tolerate_missing_params = doc.params_documented_elsewhere()

    expected_argument_names = set(arg.name for arg in arguments_node.args)
    expected_argument_names.update(arg.name for arg in arguments_node.kwonlyargs)
    not_needed_type_in_docstring = self.not_needed_param_in_docstring.copy()

    if arguments_node.vararg is not None:
        expected_argument_names.add(arguments_node.vararg)
        not_needed_type_in_docstring.add(arguments_node.vararg)
    if arguments_node.kwarg is not None:
        expected_argument_names.add(arguments_node.kwarg)
        not_needed_type_in_docstring.add(arguments_node.kwarg)

    params_with_doc, params_with_type = doc.match_param_docs()

    if not params_with_doc and not params_with_type and accept_no_param_doc:
        tolerate_missing_params = True

    def _compare_missing_args(found_argument_names, message_id, not_needed_names):
        """Compare the found argument names with the expected ones and
        generate a message if there are arguments missing.

        :param set found_argument_names: argument names found in the docstring
        :param str message_id: pylint message id
        :param not_needed_names: names that may be omitted
        :type not_needed_names: set of str
        """
        if not tolerate_missing_params:
            missing_argument_names = ((expected_argument_names - found_argument_names)
                                      - not_needed_names)
            if missing_argument_names:
                self.add_message(message_id,
                                 args=(', '.join(sorted(missing_argument_names)),),
                                 node=warning_node)

    def _compare_different_args(found_argument_names, message_id, not_needed_names):
        """Compare the found argument names with the expected ones and
        generate a message if there are extra arguments found.

        :param set found_argument_names: argument names found in the docstring
        :param str message_id: pylint message id
        :param not_needed_names: names that may be omitted
        :type not_needed_names: set of str
        """
        differing_argument_names = (((expected_argument_names ^ found_argument_names)
                                     - not_needed_names)
                                    - expected_argument_names)
        if differing_argument_names:
            self.add_message(message_id,
                             args=(', '.join(sorted(differing_argument_names)),),
                             node=warning_node)

    _compare_missing_args(params_with_doc, 'missing-param-doc',
                          self.not_needed_param_in_docstring)
    _compare_missing_args(params_with_type, 'missing-type-doc',
                          not_needed_type_in_docstring)
    _compare_different_args(params_with_doc, 'differing-param-doc',
                            self.not_needed_param_in_docstring)
    _compare_different_args(params_with_type, 'differing-type-doc',
                            not_needed_type_in_docstring)
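# Editor's sketch (not part of the source): a function whose Sphinx-style docstring
# would trigger missing-param-doc and missing-type-doc for `threshold`, while
# `values` is fully documented.

def keep_above(values, threshold):
    """Return the values above the threshold.

    :param values: the numbers to filter
    :type values: list
    """
    return [value for value in values if value > threshold]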
'Adds a message on :param:`node` for the missing exception type. :param missing_excs: A list of missing exception types. :type missing_excs: list :param node: The node to show the message on. :type node: astroid.node_classes.NodeNG'
def _add_raise_message(self, missing_excs, node):
    if not missing_excs:
        return
    self.add_message('missing-raises-doc',
                     args=(', '.join(sorted(missing_excs)),), node=node)
'check for empty except'
@utils.check_messages('overlapping-except')
def visit_tryexcept(self, node):
    for handler in node.handlers:
        if handler.type is None:
            continue
        if isinstance(handler.type, astroid.BoolOp):
            continue
        try:
            excs = list(_annotated_unpack_infer(handler.type))
        except astroid.InferenceError:
            continue
        handled_in_clause = []
        for part, exc in excs:
            if exc is astroid.YES:
                continue
            if isinstance(exc, astroid.Instance) and utils.inherit_from_std_ex(exc):
                exc = exc._proxied
            if not isinstance(exc, astroid.ClassDef):
                continue
            exc_ancestors = [anc for anc in exc.ancestors()
                             if isinstance(anc, astroid.ClassDef)]
            for prev_part, prev_exc in handled_in_clause:
                prev_exc_ancestors = [anc for anc in prev_exc.ancestors()
                                      if isinstance(anc, astroid.ClassDef)]
                if exc == prev_exc:
                    self.add_message('overlapping-except', node=handler.type,
                                     args='%s and %s are the same'
                                          % (prev_part.as_string(), part.as_string()))
                elif prev_exc in exc_ancestors or exc in prev_exc_ancestors:
                    ancestor = part if exc in prev_exc_ancestors else prev_part
                    descendant = part if prev_exc in exc_ancestors else prev_part
                    self.add_message('overlapping-except', node=handler.type,
                                     args='%s is an ancestor class of %s'
                                          % (ancestor.as_string(),
                                             descendant.as_string()))
            handled_in_clause += [(part, exc)]
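# Editor's sketch (not part of the source): handlers that the loop above would flag.
# `risky()` is a placeholder for any call that may raise.

def risky():
    """Placeholder for any call that may raise."""

try:
    risky()
except (IOError, OSError):                 # overlapping-except: on Python 3 these
    pass                                   # "are the same" (IOError is OSError)

try:
    risky()
except (ValueError, UnicodeDecodeError):   # overlapping-except: "ValueError is an
    pass                                   # ancestor class of UnicodeDecodeError"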
'create the subgraphs representing any `if` and `for` statements'
def _subgraph(self, node, name, extra_blocks=()):
    if self.graph is None:
        self.graph = PathGraph(node)
        self._subgraph_parse(node, node, extra_blocks)
        self.graphs['%s%s' % (self.classname, name)] = self.graph
        self.reset()
    else:
        self._append_node(node)
        self._subgraph_parse(node, node, extra_blocks)
'parse the body and any `else` block of `if` and `for` statements'
def _subgraph_parse(self, node, pathnode, extra_blocks):
    loose_ends = []
    self.tail = node
    self.dispatch_list(node.body)
    loose_ends.append(self.tail)
    for extra in extra_blocks:
        self.tail = node
        self.dispatch_list(extra.body)
        loose_ends.append(self.tail)
    if node.orelse:
        self.tail = node
        self.dispatch_list(node.orelse)
        loose_ends.append(self.tail)
    else:
        loose_ends.append(node)
    if node:
        bottom = '%s' % self._bottom_counter
        self._bottom_counter += 1
        for le in loose_ends:
            self.graph.connect(le, bottom)
        self.tail = bottom
'visit an astroid.Module node to check the too-complex rating and add a message if it is greater than the max_complexity stored in the options'
@check_messages('too-complex')
def visit_module(self, node):
    visitor = PathGraphingAstVisitor()
    for child in node.body:
        visitor.preorder(child, visitor)
    for graph in visitor.graphs.values():
        complexity = graph.complexity()
        node = graph.root
        if hasattr(node, 'name'):
            node_name = "'%s'" % node.name
        else:
            node_name = "This '%s'" % node.__class__.__name__.lower()
        if complexity <= self.config.max_complexity:
            continue
        self.add_message('too-complex', node=node, confidence=HIGH,
                         args=(node_name, complexity))
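# Editor's sketch (not part of the source): a function whose McCabe rating, as
# computed from the path graph above, is 4 (the single path through the function
# plus three decision points: the `for`, the `if` and the `elif`).  With the usual
# default max_complexity of 10 it would not be reported.

def classify(items):
    result = []
    for item in items:
        if item < 0:
            result.append('negative')
        elif item == 0:
            result.append('zero')
        else:
            result.append('positive')
    return result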
'take a list of module names which are pylint plugins and load and register them'
def load_plugin_modules(self, modnames):
    for modname in modnames:
        if modname in self._dynamic_plugins:
            continue
        self._dynamic_plugins.add(modname)
        module = modutils.load_module_from_name(modname)
        module.register(self)
'set the reporter used to display messages and reports'
def set_reporter(self, reporter):
    self.reporter = reporter
    reporter.linter = self
'overridden from config.OptionsProviderMixin to handle some special options'
def set_option(self, optname, value, action=None, optdict=None):
    if optname in self._options_methods or optname in self._bw_options_methods:
        if value:
            try:
                meth = self._options_methods[optname]
            except KeyError:
                meth = self._bw_options_methods[optname]
                warnings.warn('%s is deprecated, replace it by %s'
                              % (optname, optname.split('-')[0]),
                              DeprecationWarning)
            value = utils._check_csv(value)
            if isinstance(value, (list, tuple)):
                for _id in value:
                    meth(_id, ignore_unknown=True)
            else:
                meth(value)
            return
    elif optname == 'output-format':
        self._reporter_name = value
        if self._reporters:
            self._load_reporter()
    try:
        checkers.BaseTokenChecker.set_option(self, optname, value, action, optdict)
    except config.UnsupportedAction:
        print("option %s can't be read from config file" % optname, file=sys.stderr)
'register a new checker; checker is an object implementing IRawChecker or / and IAstroidChecker'
def register_checker(self, checker):
    assert checker.priority <= 0, "checker priority can't be >= 0"
    self._checkers[checker.name].append(checker)
    for r_id, r_title, r_cb in checker.reports:
        self.register_report(r_id, r_title, r_cb, checker)
    self.register_options_provider(checker)
    if hasattr(checker, 'msgs'):
        self.msgs_store.register_messages(checker)
    checker.load_defaults()
    if not getattr(checker, 'enabled', True):
        self.disable(checker.name)
'disable all reporters'
def disable_reporters(self):
    for _reporters in six.itervalues(self._reports):
        for report_id, _, _ in _reporters:
            self.disable_report(report_id)
'error mode: enable only errors; no reports, no persistent'
def error_mode(self):
    self._error_mode = True
    self.disable_noerror_messages()
    self.disable('miscellaneous')
    if self._python3_porting_mode:
        self.disable('all')
        for msg_id in self._checker_messages('python3'):
            if msg_id.startswith('E'):
                self.enable(msg_id)
    else:
        self.disable('python3')
    self.set_option('reports', False)
    self.set_option('persistent', False)
    self.set_option('score', False)
'Disable all other checkers and enable Python 3 warnings.'
def python3_porting_mode(self):
    self.disable('all')
    self.enable('python3')
    if self._error_mode:
        for msg_id in self._checker_messages('python3'):
            if msg_id.startswith('E'):
                self.enable(msg_id)
            else:
                self.disable(msg_id)
    self._python3_porting_mode = True
'process tokens from the current module to search for module/block level options'
def process_tokens(self, tokens):
    control_pragmas = {'disable', 'enable'}
    for tok_type, content, start, _, _ in tokens:
        if tok_type != tokenize.COMMENT:
            continue
        match = utils.OPTION_RGX.search(content)
        if match is None:
            continue

        if (match.group(1).strip() == 'disable-all'
                or match.group(1).strip() == 'skip-file'):
            if match.group(1).strip() == 'disable-all':
                self.add_message('deprecated-pragma', line=start[0],
                                 args=('disable-all', 'skip-file'))
            self.add_message('file-ignored', line=start[0])
            self._ignore_file = True
            return
        try:
            opt, value = match.group(1).split('=', 1)
        except ValueError:
            self.add_message('bad-inline-option', args=match.group(1).strip(),
                             line=start[0])
            continue
        opt = opt.strip()
        if opt in self._options_methods or opt in self._bw_options_methods:
            try:
                meth = self._options_methods[opt]
            except KeyError:
                meth = self._bw_options_methods[opt]
                self.add_message('deprecated-pragma', line=start[0],
                                 args=(opt, opt.replace('-msg', '')))
            for msgid in utils._splitstrip(value):
                if opt in control_pragmas:
                    self._pragma_lineno[msgid] = start[0]
                try:
                    if (opt, msgid) == ('disable', 'all'):
                        self.add_message('deprecated-pragma', line=start[0],
                                         args=('disable=all', 'skip-file'))
                        self.add_message('file-ignored', line=start[0])
                        self._ignore_file = True
                        return
                    meth(msgid, 'module', start[0])
                except exceptions.UnknownMessageError:
                    self.add_message('bad-option-value', args=msgid, line=start[0])
        else:
            self.add_message('unrecognized-inline-option', args=opt, line=start[0])
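# Editor's sketch (not part of the source): the inline pragma forms this method
# recognises in comments.

import os  # pylint: disable=unused-import

# pylint: disable=missing-docstring
def helper():
    pass
# pylint: enable=missing-docstring

# The next two forms make the whole file be skipped; `disable-all` additionally
# triggers the deprecated-pragma message seen above.
# pylint: skip-file
# pylint: disable-all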
'return all available checkers as a list'
def get_checkers(self):
return ([self] + [c for _checkers in six.itervalues(self._checkers) for c in _checkers if (c is not self)])
'return checkers needed for activated messages and reports'
def prepare_checkers(self):
    if not self.config.reports:
        self.disable_reporters()
    neededcheckers = [self]
    for checker in self.get_checkers()[1:]:
        messages = set(msg for msg in checker.msgs if self.is_message_enabled(msg))
        if messages or any(self.report_is_enabled(r[0]) for r in checker.reports):
            neededcheckers.append(checker)
    neededcheckers = sorted(neededcheckers, key=operator.attrgetter('priority'),
                            reverse=True)
    return neededcheckers
'Returns whether or not a module should be checked. This implementation returns True for all python source files, indicating that all files should be linted. Subclasses may override this method to indicate that modules satisfying certain conditions should not be linted. :param str modname: The name of the module to be checked. :param str path: The full path to the source code of the module. :param bool is_argument: Whether the file is an argument to pylint or not. Files which respect this property are always checked, since the user requested it explicitly. :returns: True if the module should be checked. :rtype: bool'
@staticmethod
def should_analyze_file(modname, path, is_argument=False):
    if is_argument:
        return True
    return path.endswith('.py')
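# Editor's sketch: overriding this hook in a subclass, assuming it lives on pylint's
# PyLinter class (the enclosing class is not shown above).  Here generated protobuf
# modules are skipped unless passed explicitly on the command line.

from pylint.lint import PyLinter

class ProjectLinter(PyLinter):

    @staticmethod
    def should_analyze_file(modname, path, is_argument=False):
        if is_argument:
            return True
        return path.endswith('.py') and not path.endswith('_pb2.py')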
'main checking entry: check a list of files or modules from their name.'
def check(self, files_or_modules):
    for msg in self.msgs_store.messages:
        if not msg.may_be_emitted():
            self._msgs_state[msg.msgid] = False
    if not isinstance(files_or_modules, (list, tuple)):
        files_or_modules = (files_or_modules,)
    if self.config.jobs == 1:
        self._do_check(files_or_modules)
    else:
        with _patch_sysmodules():
            self._parallel_check(files_or_modules)
'get modules and errors from a list of modules and handle errors'
def expand_files(self, modules):
    result, errors = utils.expand_modules(modules, self.config.black_list,
                                          self.config.black_list_re)
    for error in errors:
        message = modname = error['mod']
        key = error['key']
        self.set_current_module(modname)
        if key == 'fatal':
            message = str(error['ex']).replace(os.getcwd() + os.sep, '')
        self.add_message(key, args=message)
    return result
'set the name of the currently analyzed module and init statistics for it'
def set_current_module(self, modname, filepath=None):
    if not modname and filepath is None:
        return
    self.reporter.on_set_current_module(modname, filepath)
    self.current_name = modname
    self.current_file = filepath or modname
    self.stats['by_module'][modname] = {}
    self.stats['by_module'][modname]['statement'] = 0
    for msg_cat in six.itervalues(utils.MSG_TYPES):
        self.stats['by_module'][modname][msg_cat] = 0
'return an ast(roid) representation for a module'
def get_ast(self, filepath, modname):
    try:
        return MANAGER.ast_from_file(filepath, modname, source=True)
    except astroid.AstroidSyntaxError as ex:
        self.add_message('syntax-error', line=getattr(ex.error, 'lineno', 0),
                         args=str(ex.error))
    except astroid.AstroidBuildingException as ex:
        self.add_message('parse-error', args=ex)
    except Exception as ex:
        import traceback
        traceback.print_exc()
        self.add_message('astroid-error', args=(ex.__class__, ex))
'Check a module from its astroid representation.'
def check_astroid_module(self, ast_node, walker, rawcheckers, tokencheckers):
    try:
        tokens = utils.tokenize_module(ast_node)
    except tokenize.TokenError as ex:
        self.add_message('syntax-error', line=ex.args[1][0], args=ex.args[0])
        return

    if not ast_node.pure_python:
        self.add_message('raw-checker-failed', args=ast_node.name)
    else:
        self.process_tokens(tokens)
        if self._ignore_file:
            return False
        self.file_state.collect_block_lines(self.msgs_store, ast_node)
        for checker in rawcheckers:
            checker.process_module(ast_node)
        for checker in tokencheckers:
            checker.process_tokens(tokens)
    walker.walk(ast_node)
    return True
'initialize counters'
def open(self):
    self.stats = {'by_module': {}, 'by_msg': {}}
    MANAGER.always_load_extensions = self.config.unsafe_load_any_extension
    MANAGER.extension_package_whitelist.update(self.config.extension_pkg_whitelist)
    for msg_cat in six.itervalues(utils.MSG_TYPES):
        self.stats[msg_cat] = 0
'close the whole package/module, it's time to make reports! if persistent run, pickle results for later comparison'
def generate_reports(self):
    self.reporter.display_messages(report_nodes.Section())
    if self.file_state.base_name is not None:
        previous_stats = config.load_results(self.file_state.base_name)
        self.reporter.on_close(self.stats, previous_stats)
        if self.config.reports:
            sect = self.make_reports(self.stats, previous_stats)
        else:
            sect = report_nodes.Section()
        if self.config.reports:
            self.reporter.display_reports(sect)
        self._report_evaluation()
        if self.config.persistent:
            config.save_results(self.stats, self.file_state.base_name)
    else:
        self.reporter.on_close(self.stats, {})
'make the global evaluation report'
def _report_evaluation(self):
    previous_stats = config.load_results(self.file_state.base_name)
    if self.stats['statement'] == 0:
        return
    evaluation = self.config.evaluation
    try:
        note = eval(evaluation, {}, self.stats)
    except Exception as ex:
        msg = 'An exception occurred while rating: %s' % ex
    else:
        self.stats['global_note'] = note
        msg = 'Your code has been rated at %.2f/10' % note
        pnote = previous_stats.get('global_note')
        if pnote is not None:
            msg += ' (previous run: %.2f/10, %+.2f)' % (pnote, note - pnote)
    if self.config.score:
        sect = report_nodes.EvaluationSection(msg)
        self.reporter.display_reports(sect)
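# Editor's sketch: how the configured `evaluation` expression is applied.  It is
# simply eval'ed against self.stats; the formula below is the long-standing default
# from pylintrc (an assumption here, not taken from the code above).

stats = {'error': 1, 'warning': 3, 'refactor': 0, 'convention': 4, 'statement': 120}
evaluation = ('10.0 - ((float(5 * error + warning + refactor + convention) '
              '/ statement) * 10)')
note = eval(evaluation, {}, stats)
print('Your code has been rated at %.2f/10' % note)   # -> 9.00/10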
'callback for option preprocessing (i.e. before option parsing)'
def cb_set_rcfile(self, name, value):
self._rcfile = value
'callback for option preprocessing (i.e. before option parsing)'
def cb_add_plugins(self, name, value):
self._plugins.extend(utils._splitstrip(value))
'error mode: * disable all but error messages * disable the 'miscellaneous' checker which can be safely deactivated in debug * disable reports * do not save execution information'
def cb_error_mode(self, *args, **kwargs):
self.linter.error_mode()
'optik callback for sample config file generation'
def cb_generate_config(self, *args, **kwargs):
    self.linter.generate_config(skipsections=('COMMANDS',))
    sys.exit(0)
'optik callback for sample config file generation'
def cb_generate_manpage(self, *args, **kwargs):
    from pylint import __pkginfo__
    self.linter.generate_manpage(__pkginfo__)
    sys.exit(0)
'optik callback for printing some help about a particular message'
def cb_help_message(self, option, optname, value, parser):
    self.linter.msgs_store.help_message(utils._splitstrip(value))
    sys.exit(0)
'optik callback for printing full documentation'
def cb_full_documentation(self, option, optname, value, parser):
    self.linter.print_full_documentation()
    sys.exit(0)
'optik callback for printing available messages'
def cb_list_messages(self, option, optname, value, parser):
    self.linter.msgs_store.list_messages()
    sys.exit(0)
'Activate only the python3 porting checker.'
def cb_python3_porting_mode(self, *args, **kwargs):
self.linter.python3_porting_mode()
'returns self._source'
def get_source(self):
    if self._source is None:
        self.emit('}\n')
        self._source = '\n'.join(self.lines)
        del self.lines
    return self._source
'Generates a graph file. :param str outputfile: filename and path [defaults to graphname.png] :param str dotfile: filename and path [defaults to graphname.dot] :param str mapfile: filename and path :rtype: str :return: a path to the generated file'
def generate(self, outputfile=None, dotfile=None, mapfile=None):
    import subprocess

    name = self.graphname
    if not dotfile:
        if outputfile and outputfile.endswith('.dot'):
            dotfile = outputfile
        else:
            dotfile = '%s.dot' % name
    if outputfile is not None:
        storedir, _, target = target_info_from_filename(outputfile)
        if target != 'dot':
            pdot, dot_sourcepath = tempfile.mkstemp('.dot', name)
            os.close(pdot)
        else:
            dot_sourcepath = osp.join(storedir, dotfile)
    else:
        target = 'png'
        pdot, dot_sourcepath = tempfile.mkstemp('.dot', name)
        ppng, outputfile = tempfile.mkstemp('.png', name)
        os.close(pdot)
        os.close(ppng)
    pdot = codecs.open(dot_sourcepath, 'w', encoding='utf8')
    pdot.write(self.source)
    pdot.close()
    if target != 'dot':
        use_shell = sys.platform == 'win32'
        if mapfile:
            subprocess.call([self.renderer, '-Tcmapx', '-o', mapfile, '-T', target,
                             dot_sourcepath, '-o', outputfile],
                            shell=use_shell)
        else:
            subprocess.call([self.renderer, '-T', target,
                             dot_sourcepath, '-o', outputfile],
                            shell=use_shell)
        os.unlink(dot_sourcepath)
    return outputfile
'Adds <line> to final output.'
def emit(self, line):
self.lines.append(line)
'emit an edge from <name1> to <name2>. edge properties: see http://www.graphviz.org/doc/info/attrs.html'
def emit_edge(self, name1, name2, **props):
    attrs = ['%s="%s"' % (prop, value) for prop, value in props.items()]
    n_from, n_to = normalize_node_id(name1), normalize_node_id(name2)
    self.emit('%s -> %s [%s];' % (n_from, n_to, ', '.join(sorted(attrs))))
'emit a node with given properties. node properties: see http://www.graphviz.org/doc/info/attrs.html'
def emit_node(self, name, **props):
    attrs = ['%s="%s"' % (prop, value) for prop, value in props.items()]
    self.emit('%s [%s];' % (normalize_node_id(name), ', '.join(sorted(attrs))))
'Init context.'
def __init__(self, code, path):
    self.code = code
    self.path = path
    self._file = None
'Open a file and read it.'
def __enter__(self):
    if self.code is None:
        LOGGER.info('File is reading: %s', self.path)
        if sys.version_info >= (3,):
            mode = 'r'
        else:
            mode = 'rU'
        self._file = open(self.path, mode)
        self.code = self._file.read()
    return self
'Close the file which was opened.'
def __exit__(self, t, value, traceback):
    if self._file is not None:
        self._file.close()
    if t and LOGGER.level == logging.DEBUG:
        LOGGER.debug(traceback)
'MCCabe code checking. :return list: List of errors.'
@staticmethod
def run(path, code=None, params=None, **meta):
    tree = compile(code, path, 'exec', ast.PyCF_ONLY_AST)
    McCabeChecker.max_complexity = int(params.get('complexity', 10))
    return [
        {'lnum': lineno, 'offset': offset, 'text': text, 'type': McCabeChecker._code}
        for lineno, offset, text, _ in McCabeChecker(tree, path).run()
    ]
'Check path is relevant for linter. :return bool:'
@staticmethod
def allow(path):
return path.endswith('.py')
'Method 'run' should be defined.'
@staticmethod
def run(path, **meta):
raise NotImplementedError(__doc__)
'Check code with pyflakes. :return list: List of errors.'
@staticmethod
def run(path, code=None, params=None, **meta):
    import _ast

    builtins = params.get('builtins', '')
    if builtins:
        builtins = builtins.split(',')
    tree = compile(code, path, 'exec', _ast.PyCF_ONLY_AST)
    w = checker.Checker(tree, path, builtins=builtins)
    w.messages = sorted(w.messages, key=lambda m: m.lineno)
    return [
        {'lnum': m.lineno, 'text': m.message % m.message_args, 'type': m.message[0]}
        for m in w.messages
    ]
'Check code with Radon. :return list: List of errors.'
@staticmethod
def run(path, code=None, params=None, ignore=None, select=None, **meta):
    complexity = params.get('complexity', 10)
    no_assert = params.get('no_assert', False)
    show_closures = params.get('show_closures', False)
    visitor = ComplexityVisitor.from_code(code, no_assert=no_assert)
    blocks = visitor.blocks
    if show_closures:
        blocks = add_inner_blocks(blocks)
    return [
        {'lnum': block.lineno, 'col': block.col_offset, 'type': 'R',
         'number': 'R709',
         'text': 'R701: %s is too complex %d' % (block.name, block.complexity)}
        for block in visitor.blocks if block.complexity > complexity
    ]
'pydocstyle code checking. :return list: List of errors.'
@staticmethod
def run(path, code=None, **meta):
    check_source_args = (code, path, None) if THIRD_ARG else (code, path)
    return [
        {'lnum': e.line,
         'text': ((e.message[0:4] + e.message[5:])
                  if e.message[4] == ':' else e.message),
         'type': 'D', 'number': e.code}
        for e in PyDocChecker().check_source(*check_source_args)
    ]
'Check code with pycodestyle. :return list: List of errors.'
@staticmethod
def run(path, code=None, params=None, **meta):
    parser = get_parser()
    for option in parser.option_list:
        if option.dest and option.dest in params:
            value = params[option.dest]
            if not isinstance(value, str):
                continue
            params[option.dest] = option.convert_value(option, params[option.dest])
    P8Style = StyleGuide(reporter=_PycodestyleReport, **params)
    buf = StringIO(code)
    return P8Style.input_file(path, lines=buf.readlines())
'Prepare storage for errors.'
def init_file(self, filename, lines, expected, line_offset):
    super(_PycodestyleReport, self).init_file(filename, lines, expected, line_offset)
    self.errors = []
'Save errors.'
def error(self, line_number, offset, text, check):
    code = super(_PycodestyleReport, self).error(line_number, offset, text, check)
    if code:
        self.errors.append(dict(
            text=text,
            type=code.replace('E', 'C'),
            col=offset + 1,
            lnum=line_number,
        ))
'Get errors. :return list: List of errors.'
def get_file_results(self):
return self.errors
'Pylint code checking. :return list: List of errors.'
@staticmethod
def run(path, code, params=None, ignore=None, select=None, **meta):
    logger.debug('Start pylint')

    clear_cache = params.pop('clear_cache', False)
    if clear_cache:
        MANAGER.astroid_cache.clear()

    class Reporter(BaseReporter):

        def __init__(self):
            self.errors = []
            super(Reporter, self).__init__()

        def _display(self, layout):
            pass

        def handle_message(self, msg):
            self.errors.append(dict(
                lnum=msg.line,
                col=msg.column,
                text='%s %s' % (msg.msg_id, msg.msg),
                type=msg.msg_id[0],
            ))

    params = _Params(ignore=ignore, select=select, params=params)
    logger.debug(params)

    reporter = Reporter()
    Run([path] + params.to_attrs(), reporter=reporter, exit=False)
    return reporter.errors
'Prepare value to pylint.'
@staticmethod
def prepare_value(value):
    if isinstance(value, (list, tuple, set)):
        return ','.join(value)
    if isinstance(value, bool):
        return 'y' if value else 'n'
    return str(value)
'Convert to argument list.'
def to_attrs(self):
return [('--%s=%s' % item) for item in self.params.items()]
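# Editor's sketch (standalone, not the actual params class): the same value-to-flag
# conversion performed by prepare_value() and to_attrs() above.

def prepare_value(value):
    if isinstance(value, (list, tuple, set)):
        return ','.join(value)
    if isinstance(value, bool):
        return 'y' if value else 'n'
    return str(value)

params = {'disable': ['C0111', 'W0612'], 'reports': False, 'jobs': 2}
attrs = ['--%s=%s' % (name, prepare_value(value)) for name, value in params.items()]
# e.g. ['--disable=C0111,W0612', '--reports=n', '--jobs=2']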
'Init Scanner instance. :param patterns: List of token patterns [(token, regexp)] :param ignore: List of ignored tokens'
def __init__(self, source, ignore=None, patterns=None):
    self.reset(source)
    if patterns:
        self.patterns = []
        for k, r in patterns:
            self.patterns.append((k, re.compile(r)))
    if ignore:
        self.ignore = ignore
'Reset scanner's state. :param source: Source for parsing'
def reset(self, source):
    self.tokens = []
    self.source = source
    self.pos = 0
'Scan source and grab tokens.'
def scan(self):
    self.pre_scan()

    token = None
    end = len(self.source)

    while self.pos < end:
        best_pat = None
        best_pat_len = 0

        for p, regexp in self.patterns:
            m = regexp.match(self.source, self.pos)
            if m:
                best_pat = p
                best_pat_len = len(m.group(0))
                break

        if best_pat is None:
            raise SyntaxError(
                u'SyntaxError[@char {0}: {1}]'.format(self.pos, u'Bad token.'))

        if best_pat in self.ignore:
            self.pos += best_pat_len
            continue

        token = (best_pat, self.source[self.pos:self.pos + best_pat_len],
                 self.pos, self.pos + best_pat_len)
        self.pos = token[-1]
        self.tokens.append(token)
'Prepare source.'
def pre_scan(self):
pass
'Print the last 5 tokens that have been scanned in. :return str:'
def __repr__(self):
return ((u'<Scanner: ' + u','.join((u'{0}({2}:{3})'.format(*t) for t in self.tokens[(-5):]))) + u'>')
'Prepare string for scanning.'
def pre_scan(self):
    escape_re = re.compile(u'\\\\\\n[\\t ]+')
    self.source = escape_re.sub(u'', self.source)
'Get item by name. :return object: value, or None if the name does not exist'
def get(self, name, default=None):
    if name in self:
        return self[name]
    return default
'Iterate self items.'
def iteritems(self, raw=False):
    for key in self:
        yield key, self.__getitem__(key, raw=raw)
'Return default section or empty dict. :return :class:`inirama.Section`: section'
@property
def default(self):
return self.sections.get(self.default_section, dict())
'Read and parse INI files. :param *files: Files for reading :param **params: Params for parsing Set `update=False` for prevent values redefinition.'
def read(self, *files, **params):
    for f in files:
        try:
            with io.open(f, encoding=u'utf-8') as ff:
                NS_LOGGER.info(u'Read from `{0}`'.format(ff.name))
                self.parse(ff.read(), **params)
        except (IOError, TypeError, SyntaxError, io.UnsupportedOperation):
            if not self.silent_read:
                NS_LOGGER.error(u'Reading error `{0}`'.format(ff.name))
                raise
'Write namespace as INI file. :param f: File object or path to file.'
def write(self, f):
    if isinstance(f, str):
        f = io.open(f, u'w', encoding=u'utf-8')
    if not hasattr(f, u'read'):
        raise AttributeError(u'Wrong type of file: {0}'.format(type(f)))
    NS_LOGGER.info(u'Write to `{0}`'.format(f.name))
    for section in self.sections.keys():
        f.write(u'[{0}]\n'.format(section))
        for k, v in self[section].items():
            f.write(u'{0:15}= {1}\n'.format(k, v))
        f.write(u'\n')
    f.close()
'Parse INI source as string. :param source: Source of INI :param update: Replace already defined items'
def parse(self, source, update=True, **params):
    scanner = INIScanner(source)
    scanner.scan()

    section = self.default_section
    name = None

    for token in scanner.tokens:
        if token[0] == u'KEY_VALUE':
            name, value = re.split(u'[=:]', token[1], 1)
            name, value = name.strip(), value.strip()
            if not update and name in self[section]:
                continue
            self[section][name] = value

        elif token[0] == u'SECTION':
            section = token[1].strip(u'[]')

        elif token[0] == u'CONTINUATION':
            if not name:
                raise SyntaxError(
                    u'SyntaxError[@char {0}: {1}]'.format(
                        token[2], u'Bad continuation.'))
            self[section][name] += u'\n' + token[1].strip()
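# Editor's sketch: a small INI source and what parse() builds from it, assuming the
# enclosing class is inirama's Namespace (only Section is named above) and that
# INIScanner emits the SECTION / KEY_VALUE / CONTINUATION tokens used here.

ns = Namespace()
ns.parse("[pylama]\nlinters = pycodestyle,\n    pyflakes\nskip = */tests/*\n")
# ns['pylama']['skip']    -> '*/tests/*'
# ns['pylama']['linters'] -> 'pycodestyle,\npyflakes'  (continuation joined with '\n')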
'Look name in self sections. :return :class:`inirama.Section`: section'
def __getitem__(self, name):
    if name not in self.sections:
        self.sections[name] = self.section_type(self)
    return self.sections[name]
'Init error information with default values.'
def __init__(self, linter='', col=1, lnum=1, type='E', text='unknown error', filename='', number='', **kwargs):
    text = ' '.join(str(text).strip().split('\n'))
    if linter:
        text = '%s [%s]' % (text, linter)
    number = number or text.split(' ', 1)[0]
    if not PATTERN_NUMBER.match(number):
        number = ''
    self._info = dict(linter=linter, col=col, lnum=lnum, type=type[:1],
                      text=text, filename=filename, number=number)
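# Editor's sketch: how the constructor above normalises a record.  The class name
# `Error` is hypothetical (only __init__ is shown), and PATTERN_NUMBER is assumed
# to accept codes like 'E501'.

err = Error(linter='pycodestyle', lnum=12, col=80,
            text='E501 line too long (88 > 79 characters)')
# err._info == {'linter': 'pycodestyle', 'col': 80, 'lnum': 12, 'type': 'E',
#               'text': 'E501 line too long (88 > 79 characters) [pycodestyle]',
#               'filename': '', 'number': 'E501'}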
'Implement dictionary `get` method.'
def get(self, name, default=None):
return self._info.get(name, default)
'Init worker.'
def __init__(self, path_queue, result_queue):
    threading.Thread.__init__(self)
    self.path_queue = path_queue
    self.result_queue = result_queue
'Run tasks from queue.'
def run(self):
    while True:
        path, params = self.path_queue.get()
        errors = run(path, **params)
        self.result_queue.put(errors)
        self.path_queue.task_done()
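# Editor's sketch: how such workers are typically wired up with the two queues.
# The class name `Worker` and the params passed to run() are assumptions, not taken
# from the code above.

import queue

path_queue, result_queue = queue.Queue(), queue.Queue()
for _ in range(4):
    worker = Worker(path_queue, result_queue)
    worker.daemon = True
    worker.start()

for path in ('a.py', 'b.py'):
    path_queue.put((path, {}))     # run(path) with default params
path_queue.join()                  # blocks until every task_done() above

all_errors = []
while not result_queue.empty():
    all_errors.extend(result_queue.get())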
'Init VIM environment.'
def __init__(self):
    self.current = vim.current
    self.options = dict(encoding=vim.eval('&enc'))
    self.options['debug'] = self.var('g:pymode_debug', True)
'Return current working directory.'
@property
def curdir(self):
return self.var('getcwd()')
'Return current buffer.'
@property
def curbuf(self):
return self.current.buffer
'Return current window position. :return tuple: (row, col)'
@property
def cursor(self):
return self.current.window.cursor
'Return source of current buffer.'
@property
def source(self):
return '\n'.join(self.lines)
'Iterate by lines in current file. :return list:'
@property
def lines(self):
    if not PY2:
        return self.curbuf
    return [l.decode(self.options.get('encoding')) for l in self.curbuf]
'Get vim variable. :return vimobj:'
@staticmethod
def var(name, to_bool=False, silence=False, default=None):
    try:
        value = vim.eval(name)
    except vim.error:
        if silence:
            return default
        raise
    if to_bool:
        try:
            value = bool(int(value))
        except ValueError:
            value = value
    return value
'Show message to user. :return: :None'
@staticmethod
def message(msg, history=False):
    if history:
        return vim.command('echom "%s"' % str(msg))
    return vim.command('call pymode#wide_message("%s")' % str(msg))
'Return user input or default. :return str:'
def user_input(self, msg, default=''):
    msg = '%s %s ' % (self.prefix, msg)
    if default != '':
        msg += '[%s] ' % default
    try:
        vim.command('echohl Debug')
        input_str = vim.eval('input("%s> ")' % msg)
        vim.command('echohl none')
    except KeyboardInterrupt:
        input_str = ''
    return input_str or default
'Get user confirmation. :return bool:'
def user_confirm(self, msg, yes=False):
    default = 'yes' if yes else 'no'
    action = self.user_input(msg, default)
    return action and 'yes'.startswith(action)
'Get one of many options. :return str: A chosen option'
def user_input_choices(self, msg, *options):
    choices = ['%s %s' % (self.prefix, msg)]
    choices += ['%s. %s' % (num, opt) for num, opt in enumerate(options, 1)]
    try:
        input_str = int(vim.eval('inputlist(%s)' % self.prepare_value(choices)))
    except (KeyboardInterrupt, ValueError):
        input_str = 0
    if not input_str:
        self.message('Cancelled!')
        return False
    try:
        return options[input_str - 1]
    except (IndexError, ValueError):
        self.error('Invalid option: %s' % input_str)
        return self.user_input_choices(msg, *options)
'Show error to user.'
@staticmethod
def error(msg):
vim.command(('call pymode#error("%s")' % str(msg)))
'Print debug information.'
def debug(self, msg, *args):
if self.options.get('debug'): print(('%s %s [%s]' % (int(time.time()), msg, ', '.join([str(a) for a in args]))))
'Break Vim function.'
def stop(self, value=None):
    cmd = 'return'
    if value is not None:
        cmd += ' ' + self.prepare_value(value)
    vim.command(cmd)
'Decorator. Make execution more silent. :return func:'
def catch_exceptions(self, func):
    def _wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (Exception, vim.error) as e:
            if self.options.get('debug'):
                raise
            self.error(e)
            return None
    return _wrapper
'Run vim function.'
def run(self, name, *args):
vim.command(('call %s(%s)' % (name, ', '.join([self.prepare_value(a) for a in args]))))