Dataset columns: desc (string, 3 to 26.7k chars), decl (string, 11 to 7.89k chars), bodies (string, 8 to 553k chars).
'check number of public methods'
@check_messages('too-few-public-methods', 'too-many-public-methods') def leave_classdef(self, node):
my_methods = sum(1 for method in node.mymethods()
                 if not method.name.startswith('_'))
all_methods = sum(1 for method in node.methods()
                  if not method.name.startswith('_'))
if my_methods > self.config.max_public_methods:
    self.add_message('too-many-public-methods', node=node,
                     args=(my_methods, self.config.max_public_methods))
if node.type != 'class':
    return
if all_methods < self.config.min_public_methods:
    self.add_message('too-few-public-methods', node=node,
                     args=(all_methods, self.config.min_public_methods))
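For illustration only, a minimal class that this check would likely flag, assuming pylint's usual defaults of min-public-methods=2 and max-public-methods=20 (the real thresholds come from the checker's config); the Point class itself is hypothetical:

class Point:
    # likely too-few-public-methods (1/2): only one non-underscore method
    def __init__(self, x, y):
        self.x = x
        self.y = y

    def distance_to_origin(self):
        return (self.x ** 2 + self.y ** 2) ** 0.5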
'check function name, docstring, arguments, redefinition, variable names, max locals'
@check_messages('too-many-return-statements', 'too-many-branches', 'too-many-arguments', 'too-many-locals', 'too-many-statements') def visit_functiondef(self, node):
self._returns.append(0)
args = node.args.args
ignored_argument_names = self._ignored_argument_names
if args is not None:
    ignored_args_num = 0
    if ignored_argument_names:
        ignored_args_num = sum(1 for arg in args
                               if ignored_argument_names.match(arg.name))
    argnum = len(args) - ignored_args_num
    if argnum > self.config.max_args:
        self.add_message('too-many-arguments', node=node,
                         args=(len(args), self.config.max_args))
else:
    ignored_args_num = 0
locnum = len(node.locals) - ignored_args_num
if locnum > self.config.max_locals:
    self.add_message('too-many-locals', node=node,
                     args=(locnum, self.config.max_locals))
self._stmts = 1
'most of the work is done here on close: checks for max returns, max branches and max statements'
@check_messages('too-many-return-statements', 'too-many-branches', 'too-many-arguments', 'too-many-locals', 'too-many-statements') def leave_functiondef(self, node):
returns = self._returns.pop()
if returns > self.config.max_returns:
    self.add_message('too-many-return-statements', node=node,
                     args=(returns, self.config.max_returns))
branches = self._branches[node]
if branches > self.config.max_branches:
    self.add_message('too-many-branches', node=node,
                     args=(branches, self.config.max_branches))
if self._stmts > self.config.max_statements:
    self.add_message('too-many-statements', node=node,
                     args=(self._stmts, self.config.max_statements))
'count number of returns'
def visit_return(self, _):
if not self._returns:
    return
self._returns[-1] += 1
'default visit method -> increments the statements counter if necessary'
def visit_default(self, node):
if node.is_statement: self._stmts += 1
'increments the branches counter'
def visit_tryexcept(self, node):
branches = len(node.handlers)
if node.orelse:
    branches += 1
self._inc_branch(node, branches)
self._stmts += branches
'increments the branches counter'
def visit_tryfinally(self, node):
self._inc_branch(node, 2)
self._stmts += 2
'increments the branches counter and checks boolean expressions'
@check_messages('too-many-boolean-expressions') def visit_if(self, node):
self._check_boolean_expressions(node)
branches = 1
if node.orelse and (len(node.orelse) > 1
                    or not isinstance(node.orelse[0], If)):
    branches += 1
self._inc_branch(node, branches)
self._stmts += branches
'Go through the "if" node `node` and count its boolean expressions if the "if" node\'s test is a BoolOp node'
def _check_boolean_expressions(self, node):
condition = node.test
if not isinstance(condition, BoolOp):
    return
nb_bool_expr = _count_boolean_expressions(condition)
if nb_bool_expr > self.config.max_bool_expr:
    self.add_message('too-many-boolean-expressions', node=condition,
                     args=(nb_bool_expr, self.config.max_bool_expr))
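As a sketch, a test that would likely exceed the limit, assuming the usual default of max-bool-expr=5; the function and its argument names are made up for illustration:

def allowed(a, b, c, d, e, f):
    # one BoolOp with six leaf expressions: likely too-many-boolean-expressions (6/5)
    if a and b and c and d and e and f:
        return True
    return False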
'increments the branches counter'
def visit_while(self, node):
branches = 1
if node.orelse:
    branches += 1
self._inc_branch(node, branches)
'increments the branches counter'
def _inc_branch(self, node, branchesnum=1):
self._branches[node.scope()] += branchesnum
'check the node\'s docstring for any spelling errors'
def _check_docstring(self, node):
docstring = node.doc
if not docstring:
    return
start_line = node.lineno + 1
if six.PY2:
    encoding = node.root().file_encoding
    docstring = docstring.decode(encoding or sys.getdefaultencoding(), 'replace')
for idx, line in enumerate(docstring.splitlines()):
    self._check_spelling('wrong-spelling-in-docstring', line, start_line + idx)
'Record the first non-junk token at the start of a line.'
def handle_line_start(self, pos):
if self._line_start > -1:
    return
check_token_position = pos
if self._tokens.token(pos) == _ASYNC_TOKEN:
    check_token_position += 1
self._is_block_opener = (self._tokens.token(check_token_position)
                         in _CONTINUATION_BLOCK_OPENERS)
self._line_start = pos
'Prepares the tracker for a new physical line (NL).'
def next_physical_line(self):
self._line_start = -1
self._is_block_opener = False
'Prepares the tracker for a new logical line (NEWLINE). A new logical line only starts with block indentation.'
def next_logical_line(self):
self.next_physical_line()
self.retained_warnings = []
self._cont_stack = []
'Returns the valid offsets for the token at the given position.'
def get_valid_offsets(self, idx):
stack_top = (-1) if ((self._tokens.token(idx) in ('}', 'for')) and (self._cont_stack[(-1)].token == ':')): stack_top = (-2) indent = self._cont_stack[stack_top] if (self._tokens.token(idx) in _CLOSING_BRACKETS): valid_offsets = indent.valid_outdent_offsets else: valid_offsets = indent.valid_continuation_offsets return (indent, valid_offsets.copy())
'Extracts indentation information for a hanging indent.'
def _hanging_indent_after_bracket(self, bracket, position):
indentation = _get_indent_length(self._tokens.line(position)) if (self._is_block_opener and (self._continuation_size == self._block_indent_size)): return _ContinuedIndent(HANGING_BLOCK, bracket, position, _Offsets((indentation + self._continuation_size), indentation), _BeforeBlockOffsets((indentation + self._continuation_size), (indentation + (self._continuation_size * 2)))) if (bracket == ':'): paren_align = self._cont_stack[(-1)].valid_outdent_offsets next_align = self._cont_stack[(-1)].valid_continuation_offsets.copy() next_align_keys = list(next_align.keys()) next_align[(next_align_keys[0] + self._continuation_size)] = True return _ContinuedIndent(HANGING_DICT_VALUE, bracket, position, paren_align, next_align) return _ContinuedIndent(HANGING, bracket, position, _Offsets(indentation, (indentation + self._continuation_size)), _Offsets((indentation + self._continuation_size)))
'Extracts indentation information for a continued indent.'
def _continuation_inside_bracket(self, bracket, pos):
indentation = _get_indent_length(self._tokens.line(pos)) token_start = self._tokens.start_col(pos) next_token_start = self._tokens.start_col((pos + 1)) if (self._is_block_opener and ((next_token_start - indentation) == self._block_indent_size)): return _ContinuedIndent(CONTINUED_BLOCK, bracket, pos, _Offsets(token_start), _BeforeBlockOffsets(next_token_start, (next_token_start + self._continuation_size))) return _ContinuedIndent(CONTINUED, bracket, pos, _Offsets(token_start), _Offsets(next_token_start))
'Pushes a new token for continued indentation on the stack. Tokens that can modify continued indentation offsets are: * opening brackets * \'lambda\' * : inside dictionaries push_token relies on the caller to filter out those interesting tokens. :param int token: The concrete token :param int position: The position of the token in the stream.'
def push_token(self, token, position):
if _token_followed_by_eol(self._tokens, position):
    self._cont_stack.append(self._hanging_indent_after_bracket(token, position))
else:
    self._cont_stack.append(self._continuation_inside_bracket(token, position))
'a new line has been encountered, process it if necessary'
def new_line(self, tokens, line_end, line_start):
if _last_token_on_line_is(tokens, line_end, ';'):
    self.add_message('unnecessary-semicolon', line=tokens.start_line(line_end))
line_num = tokens.start_line(line_start)
line = tokens.line(line_start)
if tokens.type(line_start) not in _JUNK_TOKENS:
    self._lines[line_num] = line.split('\n')[0]
self.check_lines(line, line_num)
'Check that there are not unnecessary parens after a keyword. Parens are unnecessary if there is exactly one balanced outer pair on a line, and it is followed by a colon, and contains no commas (i.e. is not a tuple). Args: tokens: list of Tokens; the entire list of Tokens. start: int; the position of the keyword in the token list.'
def _check_keyword_parentheses(self, tokens, start):
if (self._inside_brackets(':') and (tokens[start][1] == 'for')): self._pop_token() if (tokens[(start + 1)][1] != '('): return found_and_or = False depth = 0 keyword_token = tokens[start][1] line_num = tokens[start][2][0] for i in range(start, (len(tokens) - 1)): token = tokens[i] if (token[0] == tokenize.NL): return if (token[1] == '('): depth += 1 elif (token[1] == ')'): depth -= 1 if depth: continue if ((tokens[(i + 1)][1] in (':', ')', ']', '}', 'in')) or (tokens[(i + 1)][0] in (tokenize.NEWLINE, tokenize.ENDMARKER, tokenize.COMMENT))): if (i == (start + 2)): return if (keyword_token == 'not'): if (not found_and_or): self.add_message('superfluous-parens', line=line_num, args=keyword_token) elif (keyword_token in ('return', 'yield')): self.add_message('superfluous-parens', line=line_num, args=keyword_token) elif (keyword_token not in self._keywords_with_parens): if (not ((tokens[(i + 1)][1] == 'in') and found_and_or)): self.add_message('superfluous-parens', line=line_num, args=keyword_token) return elif (depth == 1): if (token[1] == ','): return elif (token[1] in ('and', 'or')): found_and_or = True elif (token[1] == 'yield'): return elif (token[1] == 'for'): return
'Extended check of PEP-484 type hint presence'
def _has_valid_type_annotation(self, tokens, i):
if (not self._inside_brackets('(')): return False bracket_level = 0 for token in tokens[(i - 1)::(-1)]: if (token[1] == ':'): return True if (token[1] == '('): return False if (token[1] == ']'): bracket_level += 1 elif (token[1] == '['): bracket_level -= 1 elif (token[1] == ','): if (not bracket_level): return False elif (token[0] not in (tokenize.NAME, tokenize.STRING)): return False return False
'Check the spacing of a single equals sign.'
def _check_equals_spacing(self, tokens, i):
if self._has_valid_type_annotation(tokens, i):
    self._check_space(tokens, i, (_MUST, _MUST))
elif self._inside_brackets('(') or self._inside_brackets('lambda'):
    self._check_space(tokens, i, (_MUST_NOT, _MUST_NOT))
else:
    self._check_space(tokens, i, (_MUST, _MUST))
'Check that a binary operator is surrounded by exactly one space.'
def _check_surrounded_by_space(self, tokens, i):
self._check_space(tokens, i, (_MUST, _MUST))
'process tokens and search for: non-strict indentation (i.e. not always using the <indent> parameter as the indent unit); too long lines (i.e. longer than <max_chars>); optionally, bad constructs (if given, bad_construct must be a compiled regular expression).'
def process_tokens(self, tokens):
self._bracket_stack = [None] indents = [0] check_equal = False line_num = 0 self._lines = {} self._visited_lines = {} token_handlers = self._prepare_token_dispatcher() self._last_line_ending = None last_blank_line_num = 0 self._current_line = ContinuedLineState(tokens, self.config) for (idx, (tok_type, token, start, _, line)) in enumerate(tokens): if (start[0] != line_num): line_num = start[0] if (tok_type == tokenize.INDENT): self.new_line(TokenWrapper(tokens), (idx - 1), (idx + 1)) else: self.new_line(TokenWrapper(tokens), (idx - 1), idx) if (tok_type == tokenize.NEWLINE): check_equal = True self._process_retained_warnings(TokenWrapper(tokens), idx) self._current_line.next_logical_line() self._check_line_ending(token, line_num) elif (tok_type == tokenize.INDENT): check_equal = False self.check_indent_level(token, (indents[(-1)] + 1), line_num) indents.append((indents[(-1)] + 1)) elif (tok_type == tokenize.DEDENT): check_equal = True if (len(indents) > 1): del indents[(-1)] elif (tok_type == tokenize.NL): if (not line.strip('\r\n')): last_blank_line_num = line_num self._check_continued_indentation(TokenWrapper(tokens), (idx + 1)) self._current_line.next_physical_line() elif (tok_type != tokenize.COMMENT): self._current_line.handle_line_start(idx) if check_equal: check_equal = False self.check_indent_level(line, indents[(-1)], line_num) if ((tok_type == tokenize.NUMBER) and token.endswith('l')): self.add_message('lowercase-l-suffix', line=line_num) try: handler = token_handlers[token] except KeyError: pass else: handler(tokens, idx) line_num -= 1 if (line_num > self.config.max_module_lines): symbol = self.linter.msgs_store.check_message_id('too-many-lines') names = (symbol.msgid, 'too-many-lines') line = next(filter(None, map(self.linter._pragma_lineno.get, names)), 1) self.add_message('too-many-lines', args=(line_num, self.config.max_module_lines), line=line) if ((line_num == last_blank_line_num) and (line_num > 0)): self.add_message('trailing-newlines', line=line_num)
'check the node line number and check it if not yet done'
@check_messages('multiple-statements') def visit_default(self, node):
if (not node.is_statement): return if (not node.root().pure_python): return prev_sibl = node.previous_sibling() if (prev_sibl is not None): prev_line = prev_sibl.fromlineno elif (isinstance(node.parent, nodes.TryFinally) and (node in node.parent.finalbody)): prev_line = (node.parent.body[0].tolineno + 1) else: prev_line = node.parent.statement().fromlineno line = node.fromlineno assert line, node if ((prev_line == line) and (self._visited_lines.get(line) != 2)): self._check_multi_statement_line(node, line) return if (line in self._visited_lines): return try: tolineno = node.blockstart_tolineno except AttributeError: tolineno = node.tolineno assert tolineno, node lines = [] for line in range(line, (tolineno + 1)): self._visited_lines[line] = 1 try: lines.append(self._lines[line].rstrip()) except KeyError: lines.append('')
'Check for lines containing multiple statements.'
def _check_multi_statement_line(self, node, line):
if isinstance(node, nodes.With):
    return
if isinstance(node, nodes.TryExcept) and isinstance(node.parent, nodes.TryFinally):
    return
if (isinstance(node.parent, nodes.If) and not node.parent.orelse
        and self.config.single_line_if_stmt):
    return
if (isinstance(node.parent, nodes.ClassDef) and len(node.parent.body) == 1
        and self.config.single_line_class_stmt):
    return
self.add_message('multiple-statements', node=node)
self._visited_lines[line] = 2
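For illustration, a few lines that would likely be reported under this logic, assuming single-line-if-stmt and single-line-class-stmt are left at their default of off:

x = 1; y = 2        # likely multiple-statements: the second statement shares the line

if x:               # fine: the block body sits on its own line
    print(x)

if y: print(y)      # likely multiple-statements with the default configuration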
'check lines have less than a maximum number of characters'
def check_lines(self, lines, i):
max_chars = self.config.max_line_length
ignore_long_line = self.config.ignore_long_lines

def check_line(line, i):
    if not line.endswith('\n'):
        self.add_message('missing-final-newline', line=i)
    else:
        stripped_line = line.rstrip('\t\n\r\x0b ')
        if not stripped_line and _EMPTY_LINE in self.config.no_space_check:
            pass
        elif line[len(stripped_line):] not in ('\n', '\r\n'):
            self.add_message('trailing-whitespace', line=i)
        line = stripped_line
    mobj = OPTION_RGX.search(line)
    if mobj and mobj.group(1).split('=', 1)[0].strip() == 'disable':
        line = line.split('#')[0].rstrip()
    if len(line) > max_chars and not ignore_long_line.search(line):
        self.add_message('line-too-long', line=i, args=(len(line), max_chars))
    return i + 1

unsplit_ends = {u'\x0b', u'\x0c', u'\x1c', u'\x1d', u'\x1e',
                u'\x85', u'\u2028', u'\u2029'}
unsplit = []
for line in lines.splitlines(True):
    if line[-1] in unsplit_ends:
        unsplit.append(line)
        continue
    if unsplit:
        unsplit.append(line)
        line = ''.join(unsplit)
        unsplit = []
    i = check_line(line, i)
if unsplit:
    check_line(''.join(unsplit), i)
'return the indent level of the string'
def check_indent_level(self, string, expected, line_num):
indent = self.config.indent_string
if indent == '\\t':  # \t is not interpreted in the configuration file
    indent = '\t'
level = 0
unit_size = len(indent)
while string[:unit_size] == indent:
    string = string[unit_size:]
    level += 1
suppl = ''
while string and string[0] in ' \t':
    if string[0] != indent[0]:
        if string[0] == '\t':
            args = ('tab', 'space')
        else:
            args = ('space', 'tab')
        self.add_message('mixed-indentation', args=args, line=line_num)
        return level
    suppl += string[0]
    string = string[1:]
if level != expected or suppl:
    i_type = 'spaces'
    if indent[0] == '\t':
        i_type = 'tabs'
    self.add_message('bad-indentation', line=line_num,
                     args=(level * unit_size + len(suppl), i_type, expected * unit_size))
'inspect the source file to find encoding problems or fixme-like notes'
def process_module(self, module):
if self.config.notes:
    notes = re.compile('.*?#\\s*(%s)(:*\\s*.*)' % '|'.join(self.config.notes))
else:
    notes = None
if module.file_encoding:
    encoding = module.file_encoding
else:
    encoding = 'ascii'
with module.stream() as stream:
    for lineno, line in enumerate(stream):
        line = self._check_encoding(lineno + 1, line, encoding)
        if line is not None and notes:
            self._check_note(notes, lineno + 1, line)
'init statistics'
def open(self):
self.stats = self.linter.add_stats(total_lines=0, code_lines=0, empty_lines=0, docstring_lines=0, comment_lines=0)
'update stats'
def process_tokens(self, tokens):
i = 0
tokens = list(tokens)
while i < len(tokens):
    i, lines_number, line_type = get_type(tokens, i)
    self.stats['total_lines'] += lines_number
    self.stats[line_type] += lines_number
'Check if the given node is an actual elif. This is a problem we\'re having with the builtin ast module, which splits `elif` branches into a separate if statement. Unfortunately we need to know the exact type in certain cases.'
def _is_actual_elif(self, node):
if isinstance(node.parent, astroid.If):
    orelse = node.parent.orelse
    if orelse and orelse == [node]:
        if self._elifs[self._if_counter]:
            return True
return False
'Check if the given if node can be simplified. The if statement can be reduced to a boolean expression in some cases. For instance, if there are two branches and both of them return a boolean value that depends on the result of the statement\'s test, then this can be reduced to `bool(test)` without losing any functionality.'
def _check_simplifiable_if(self, node):
if self._is_actual_elif(node):
    return
if len(node.orelse) != 1 or len(node.body) != 1:
    return
first_branch = node.body[0]
else_branch = node.orelse[0]
if isinstance(first_branch, astroid.Return):
    if not isinstance(else_branch, astroid.Return):
        return
    first_branch_is_bool = self._is_bool_const(first_branch)
    else_branch_is_bool = self._is_bool_const(else_branch)
    reduced_to = "'return bool(test)'"
elif isinstance(first_branch, astroid.Assign):
    if not isinstance(else_branch, astroid.Assign):
        return
    first_branch_is_bool = self._is_bool_const(first_branch)
    else_branch_is_bool = self._is_bool_const(else_branch)
    reduced_to = "'var = bool(test)'"
else:
    return
if not first_branch_is_bool or not else_branch_is_bool:
    return
if not first_branch.value.value:
    return
self.add_message('simplifiable-if-statement', node=node, args=(reduced_to,))
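A sketch of the pattern this flags and the reduction it suggests; the function names are invented for the example:

def is_positive(number):
    # likely simplifiable-if-statement: both branches return a boolean constant
    if number > 0:
        return True
    else:
        return False

def is_positive_simplified(number):
    return bool(number > 0)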
'Update and check the number of nested blocks'
def _check_nested_blocks(self, node):
if (not isinstance(node.scope(), astroid.FunctionDef)): return nested_blocks = self._nested_blocks[:] if (node.parent == node.scope()): self._nested_blocks = [node] else: for ancestor_node in reversed(self._nested_blocks): if (ancestor_node == node.parent): break self._nested_blocks.pop() if (isinstance(node, astroid.If) and self._elifs[self._if_counter]): if self._nested_blocks: self._nested_blocks.pop() self._nested_blocks.append(node) if (len(nested_blocks) > len(self._nested_blocks)): self._emit_nested_blocks_message_if_needed(nested_blocks)
'Get the duplicated types from the underlying isinstance calls. :param astroid.BoolOp node: Node which should contain a bunch of isinstance calls. :returns: Dictionary of the comparison objects from the isinstance calls, to duplicate values from consecutive calls. :rtype: dict'
@staticmethod def _duplicated_isinstance_types(node):
duplicated_objects = set()
all_types = collections.defaultdict(set)
for call in node.values:
    if not isinstance(call, astroid.Call) or len(call.args) != 2:
        continue
    inferred = utils.safe_infer(call.func)
    if not inferred or not utils.is_builtin_object(inferred):
        continue
    if inferred.name != 'isinstance':
        continue
    isinstance_object = call.args[0].as_string()
    isinstance_types = call.args[1]
    if isinstance_object in all_types:
        duplicated_objects.add(isinstance_object)
    if isinstance(isinstance_types, astroid.Tuple):
        elems = [class_type.as_string() for class_type in isinstance_types.itered()]
    else:
        elems = [isinstance_types.as_string()]
    all_types[isinstance_object].update(elems)
return {key: value for key, value in all_types.items()
        if key in duplicated_objects}
'Check isinstance calls which can be merged together.'
@utils.check_messages('consider-merging-isinstance') def visit_boolop(self, node):
if node.op != 'or':
    return
first_args = self._duplicated_isinstance_types(node)
for duplicated_name, class_names in first_args.items():
    names = sorted(name for name in class_names)
    self.add_message('consider-merging-isinstance', node=node,
                     args=(duplicated_name, ', '.join(names)))
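A minimal sketch of code this visit would likely flag, next to the merged form the message points toward; the names are illustrative:

def is_number(value):
    # likely consider-merging-isinstance: the same object is tested in both calls
    return isinstance(value, int) or isinstance(value, float)

def is_number_merged(value):
    return isinstance(value, (float, int))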
'Returns true if node is of the \'condition and true_value or false_value\' form. None of condition, true_value and false_value should be a complex boolean expression'
@staticmethod def _is_and_or_ternary(node):
return (isinstance(node, astroid.BoolOp) and (node.op == 'or') and (len(node.values) == 2) and isinstance(node.values[0], astroid.BoolOp) and (not isinstance(node.values[1], astroid.BoolOp)) and (node.values[0].op == 'and') and (not isinstance(node.values[0].values[1], astroid.BoolOp)) and (len(node.values[0].values) == 2))
'Returns true if node is \'[false_value,true_value][condition]\' form'
@staticmethod def _is_seq_based_ternary(node):
return (isinstance(node, astroid.Subscript) and isinstance(node.value, (astroid.Tuple, astroid.List)) and (len(node.value.elts) == 2) and isinstance(node.slice, astroid.Index))
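For illustration, the two shapes these helpers recognize; in the surrounding checker (not shown in this excerpt) they feed a consider-using-ternary style suggestion. The variables are made up:

flag = True

label = flag and 'yes' or 'no'      # and/or form matched by _is_and_or_ternary
label = ['no', 'yes'][flag]         # sequence-index form matched by _is_seq_based_ternary
label = 'yes' if flag else 'no'     # the conditional expression both forms can be rewritten as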
'Emit a convention whenever range and len are used for indexing.'
@utils.check_messages('consider-using-enumerate') def visit_for(self, node):
if (not isinstance(node.iter, astroid.Call)): return if (not self._is_builtin(node.iter.func, 'range')): return if (len(node.iter.args) != 1): return if (not isinstance(node.iter.args[0], astroid.Call)): return second_func = node.iter.args[0].func if (not self._is_builtin(second_func, 'len')): return len_args = node.iter.args[0].args if ((not len_args) or (len(len_args) != 1)): return iterating_object = len_args[0] if (not isinstance(iterating_object, astroid.Name)): return for child in node.body: for subscript in child.nodes_of_class(astroid.Subscript): if (not isinstance(subscript.value, astroid.Name)): continue if (not isinstance(subscript.slice, astroid.Index)): continue if (not isinstance(subscript.slice.value, astroid.Name)): continue if (subscript.slice.value.name != node.target.name): continue if (iterating_object.name != subscript.value.name): continue if (subscript.value.scope() != node.scope()): continue self.add_message('consider-using-enumerate', node=node) return
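A sketch of the loop shape this visit looks for and the enumerate-based rewrite it suggests; the functions are hypothetical:

def print_items(items):
    # likely consider-using-enumerate: range(len(...)) is only used to index items
    for i in range(len(items)):
        print(i, items[i])

def print_items_enumerated(items):
    for i, item in enumerate(items):
        print(i, item)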
'`not len(S)` must become `not S` regardless of whether the parent block is a test condition or something else (a boolean expression), e.g. `if not len(S):`'
@utils.check_messages('len-as-condition') def visit_unaryop(self, node):
if (isinstance(node, astroid.UnaryOp) and (node.op == 'not') and _is_len_call(node.operand)): self.add_message('len-as-condition', node=node)
'Visit a CallFunc node.'
@utils.check_messages('bad-open-mode', 'redundant-unittest-assert', 'deprecated-method') def visit_call(self, node):
try:
    for inferred in node.func.infer():
        if inferred.root().name == OPEN_MODULE:
            if getattr(node.func, 'name', None) in OPEN_FILES:
                self._check_open_mode(node)
        if inferred.root().name == UNITTEST_CASE:
            self._check_redundant_assert(node, inferred)
        self._check_deprecated_method(node, inferred)
except astroid.InferenceError:
    return
'Check that a datetime was inferred; if so, emit a boolean-datetime warning.'
def _check_datetime(self, node):
try:
    infered = next(node.infer())
except astroid.InferenceError:
    return
if isinstance(infered, Instance) and infered.qname() == 'datetime.time':
    self.add_message('boolean-datetime', node=node)
'Check that the mode argument of an open or file call is valid.'
def _check_open_mode(self, node):
try:
    mode_arg = utils.get_argument_from_call(node, position=1, keyword='mode')
except utils.NoSuchArgumentError:
    return
if mode_arg:
    mode_arg = utils.safe_infer(mode_arg)
    if isinstance(mode_arg, astroid.Const) and not _check_mode_str(mode_arg.value):
        self.add_message('bad-open-mode', node=node, args=mode_arg.value)
'Set the given node as accessed.'
def set_accessed(self, node):
frame = node_frame_class(node)
if frame is None:
    return
self._scopes[frame][node.attrname].append(node)
'Get the accessed variables for the given scope.'
def accessed(self, scope):
return self._scopes.get(scope, {})
'init visit variable _accessed'
def visit_classdef(self, node):
self._check_bases_classes(node)
if node.type == 'class' and has_known_bases(node):
    try:
        node.local_attr('__init__')
    except astroid.NotFoundError:
        self.add_message('no-init', args=node, node=node)
self._check_slots(node)
self._check_proper_bases(node)
self._check_consistent_mro(node)
'Detect that a class has a consistent mro or duplicate bases.'
def _check_consistent_mro(self, node):
try:
    node.mro()
except InconsistentMroError:
    self.add_message('inconsistent-mro', args=node.name, node=node)
except DuplicateBasesError:
    self.add_message('duplicate-bases', args=node.name, node=node)
except NotImplementedError:
    pass
'Detect that a class inherits something which is not a class or a type.'
def _check_proper_bases(self, node):
for base in node.bases:
    ancestor = safe_infer(base)
    if ancestor in (astroid.YES, None):
        continue
    if (isinstance(ancestor, astroid.Instance)
            and ancestor.is_subtype_of('%s.type' % (BUILTINS,))):
        continue
    if not isinstance(ancestor, astroid.ClassDef) or _is_invalid_base_class(ancestor):
        self.add_message('inherit-non-class', args=base.as_string(), node=node)
'close a class node: check that instance attributes are defined in __init__ and check access to existing members'
def leave_classdef(self, cnode):
if (self._ignore_mixin and (cnode.name[(-5):].lower() == 'mixin')): return accessed = self._accessed.accessed(cnode) if (cnode.type != 'metaclass'): self._check_accessed_members(cnode, accessed) if (not self.linter.is_message_enabled('attribute-defined-outside-init')): return defining_methods = self.config.defining_attr_methods current_module = cnode.root() for (attr, nodes) in six.iteritems(cnode.instance_attrs): nodes = [n for n in nodes if ((not isinstance(n.statement(), (astroid.Delete, astroid.AugAssign))) and (n.root() is current_module))] if (not nodes): continue if any(((node.frame().name in defining_methods) for node in nodes)): continue for parent in cnode.instance_attr_ancestors(attr): attr_defined = False for node in parent.instance_attrs[attr]: if (node.frame().name in defining_methods): attr_defined = True if attr_defined: break else: try: cnode.local_attr(attr) except astroid.NotFoundError: for node in nodes: if (node.frame().name not in defining_methods): if _called_in_methods(node.frame(), cnode, defining_methods): continue self.add_message('attribute-defined-outside-init', args=attr, node=node)
'check method arguments, overriding'
def visit_functiondef(self, node):
if (not node.is_method()): return self._check_useless_super_delegation(node) klass = node.parent.frame() self._meth_could_be_func = True self._check_first_arg_for_type(node, (klass.type == 'metaclass')) if (node.name == '__init__'): self._check_init(node) return for overridden in klass.local_attr_ancestors(node.name): try: meth_node = overridden[node.name] except KeyError: continue if (not isinstance(meth_node, astroid.FunctionDef)): continue self._check_signature(node, meth_node, 'overridden', klass) break if node.decorators: for decorator in node.decorators.nodes: if (isinstance(decorator, astroid.Attribute) and (decorator.attrname in ('getter', 'setter', 'deleter'))): return if (isinstance(decorator, astroid.Name) and (decorator.name == 'property')): return try: overridden = klass.instance_attr(node.name)[0] overridden_frame = overridden.frame() if (isinstance(overridden_frame, astroid.FunctionDef) and (overridden_frame.type == 'method')): overridden_frame = overridden_frame.parent.frame() if (isinstance(overridden_frame, astroid.ClassDef) and klass.is_subtype_of(overridden_frame.qname())): args = (overridden.root().name, overridden.fromlineno) self.add_message('method-hidden', args=args, node=node) except astroid.NotFoundError: pass
'Check if the given function node is a useless method override. We consider it useless if it uses the super() builtin but adds nothing beyond what not implementing the method at all would do. If the method uses super() to delegate an operation to the rest of the MRO, the method called is the same as the current one, and the arguments passed to super() are the same as the parameters that were passed to this method, then the method could be removed altogether by letting the other implementation take precedence.'
def _check_useless_super_delegation(self, function):
if (not function.is_method()): return if function.decorators: return body = function.body if (len(body) != 1): return statement = body[0] if (not isinstance(statement, (astroid.Expr, astroid.Return))): return call = statement.value if (not isinstance(call, astroid.Call)): return if (not isinstance(call.func, astroid.Attribute)): return try: super_call = next(call.func.expr.infer()) except astroid.InferenceError: return else: if (not isinstance(super_call, objects.Super)): return if (call.func.attrname != function.name): return current_scope = function.parent.scope() if (super_call.mro_pointer != current_scope): return if (not isinstance(super_call.type, astroid.Instance)): return if (super_call.type.name != current_scope.name): return params = _signature_from_arguments(function.args) args = _signature_from_call(call) if _definition_equivalent_to_call(params, args): self.add_message('useless-super-delegation', node=function, args=(function.name,))
'on method node, check if this method couldn\'t be a function; ignore class, static and abstract methods, the initializer, and methods overridden from a parent class.'
def leave_functiondef(self, node):
if node.is_method():
    if node.args.args is not None:
        self._first_attrs.pop()
    if not self.linter.is_message_enabled('no-self-use'):
        return
    class_node = node.parent.frame()
    if (self._meth_could_be_func and node.type == 'method'
            and node.name not in PYMETHODS
            and not (node.is_abstract()
                     or overrides_a_method(class_node, node.name)
                     or decorated_with_property(node)
                     or (six.PY3 and _has_bare_super_call(node)))):
        self.add_message('no-self-use', node=node)
'check if the getattr is an access to a class member; if so, register it. Also check for access to protected class members from outside their class (but ignore __special__ methods)'
def visit_attribute(self, node):
if self._uses_mandatory_method_param(node):
    self._accessed.set_accessed(node)
    return
if not self.linter.is_message_enabled('protected-access'):
    return
self._check_protected_attribute_access(node)
'Check that the given assattr node is defined in the class slots.'
def _check_in_slots(self, node):
infered = safe_infer(node.expr) if (infered and isinstance(infered, astroid.Instance)): klass = infered._proxied if (('__slots__' not in klass.locals) or (not klass.newstyle)): return slots = klass.slots() if (slots is None): return if any(((('__slots__' not in ancestor.locals) and (ancestor.name != 'object')) for ancestor in klass.ancestors())): return if (not any(((slot.value == node.attrname) for slot in slots))): if (not any(((slot.value == '__dict__') for slot in slots))): if _is_attribute_property(node.attrname, klass): return if ((node.attrname in klass.locals) and _has_data_descriptor(klass, node.attrname)): return self.add_message('assigning-non-slot', args=(node.attrname,), node=node)
'Checks for uses of classmethod() or staticmethod() When a @classmethod or @staticmethod decorator should be used instead. A message will be emitted only if the assignment is at a class scope and only if the classmethod\'s argument belongs to the class where it is defined. `node` is an assign node.'
def _check_classmethod_declaration(self, node):
if not isinstance(node.value, astroid.Call):
    return
func = node.value.func
if (not isinstance(func, astroid.Name)
        or func.name not in ('classmethod', 'staticmethod')):
    return
msg = ('no-classmethod-decorator' if func.name == 'classmethod'
       else 'no-staticmethod-decorator')
parent_class = node.scope()
if not isinstance(parent_class, astroid.ClassDef):
    return
classmeth_arg = node.value.args[0]
if not isinstance(classmeth_arg, astroid.Name):
    return
method_name = classmeth_arg.name
if any(method_name == member.name for member in parent_class.mymethods()):
    self.add_message(msg, node=node.targets[0])
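A sketch of the assignment pattern this targets and the decorator form it recommends; the Config classes are invented for the example:

class Config:
    def load(cls, path):
        return cls()
    load = classmethod(load)    # likely no-classmethod-decorator

class ConfigDecorated:
    @classmethod
    def load(cls, path):
        return cls()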
'Given an attribute access node (set or get), check if attribute access is legitimate. Call _check_first_attr with node before calling this method. Valid cases are: * self._attr in a method or cls._attr in a classmethod. Checked by _check_first_attr. * Klass._attr inside "Klass" class. * Klass2._attr inside "Klass" class when Klass2 is a base class of Klass.'
def _check_protected_attribute_access(self, node):
attrname = node.attrname if (is_attr_protected(attrname) and (attrname not in self.config.exclude_protected)): klass = node_frame_class(node) callee = node.expr.as_string() if (klass is None): self.add_message('protected-access', node=node, args=attrname) return if (isinstance(node.expr, astroid.Call) and isinstance(node.expr.func, astroid.Name) and (node.expr.func.name == 'super')): return if self._is_type_self_call(node.expr): return if (not ((callee == klass.name) or (callee in klass.basenames))): stmt = node.parent.statement() if (isinstance(stmt, astroid.Assign) and (len(stmt.targets) == 1) and isinstance(stmt.targets[0], astroid.AssignName)): name = stmt.targets[0].name if _is_attribute_property(name, klass): return self.add_message('protected-access', node=node, args=attrname)
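For illustration, an access that would likely be reported versus one that is allowed; the Account class is hypothetical:

class Account:
    def __init__(self):
        self._balance = 0

    def balance(self):
        return self._balance    # fine: accessed through the mandatory first parameter

def peek(account):
    return account._balance     # likely protected-access: _balance read outside the class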
'check if the name handles an access to a class member; if so, register it'
def visit_name(self, node):
if (self._first_attrs and ((node.name == self._first_attrs[(-1)]) or (not self._first_attrs[(-1)]))): self._meth_could_be_func = False
'check that accessed members are defined'
def _check_accessed_members(self, node, accessed):
excs = ('AttributeError', 'Exception', 'BaseException') for (attr, nodes) in six.iteritems(accessed): try: node.local_attr(attr) continue except astroid.NotFoundError: pass try: next(node.instance_attr_ancestors(attr)) continue except StopIteration: pass try: defstmts = node.instance_attr(attr) except astroid.NotFoundError: pass else: defstmts = [stmt for stmt in defstmts if (stmt not in nodes)] if (not defstmts): continue scope = defstmts[0].scope() defstmts = [stmt for (i, stmt) in enumerate(defstmts) if ((i == 0) or (stmt.scope() is not scope))] if (len(defstmts) == 1): defstmt = defstmts[0] frame = defstmt.frame() lno = defstmt.fromlineno for _node in nodes: if ((_node.frame() is frame) and (_node.fromlineno < lno) and (not astroid.are_exclusive(_node.statement(), defstmt, excs))): self.add_message('access-member-before-definition', node=_node, args=(attr, lno))
'check the name of first argument, expect: * \'self\' for a regular method * \'cls\' for a class method or a metaclass regular method (actually valid-classmethod-first-arg value) * \'mcs\' for a metaclass class method (actually valid-metaclass-classmethod-first-arg) * not one of the above for a static method'
def _check_first_arg_for_type(self, node, metaclass=0):
if (node.args.args is None): return first_arg = (node.args.args and node.argnames()[0]) self._first_attrs.append(first_arg) first = self._first_attrs[(-1)] if (node.type == 'staticmethod'): if ((first_arg == 'self') or (first_arg in self.config.valid_classmethod_first_arg) or (first_arg in self.config.valid_metaclass_classmethod_first_arg)): self.add_message('bad-staticmethod-argument', args=first, node=node) return self._first_attrs[(-1)] = None elif (not node.args.args): self.add_message('no-method-argument', node=node) elif metaclass: if (node.type == 'classmethod'): self._check_first_arg_config(first, self.config.valid_metaclass_classmethod_first_arg, node, 'bad-mcs-classmethod-argument', node.name) else: self._check_first_arg_config(first, self.config.valid_classmethod_first_arg, node, 'bad-mcs-method-argument', node.name) elif (node.type == 'classmethod'): self._check_first_arg_config(first, self.config.valid_classmethod_first_arg, node, 'bad-classmethod-argument', node.name) elif (first != 'self'): self.add_message('no-self-argument', node=node)
'check that the given class node implements abstract methods from base classes'
def _check_bases_classes(self, node):
def is_abstract(method):
    return method.is_abstract(pass_is_abstract=False)

if class_is_abstract(node):
    return
methods = sorted(unimplemented_abstract_methods(node, is_abstract).items(),
                 key=lambda item: item[0])
for name, method in methods:
    owner = method.parent.frame()
    if owner is node:
        continue
    if name in node.locals:
        continue
    self.add_message('abstract-method', node=node, args=(name, owner.name))
'check that the __init__ method calls super or the ancestors\' __init__ method'
def _check_init(self, node):
if ((not self.linter.is_message_enabled('super-init-not-called')) and (not self.linter.is_message_enabled('non-parent-init-called'))): return klass_node = node.parent.frame() to_call = _ancestors_to_call(klass_node) not_called_yet = dict(to_call) for stmt in node.nodes_of_class(astroid.Call): expr = stmt.func if ((not isinstance(expr, astroid.Attribute)) or (expr.attrname != '__init__')): continue if (isinstance(expr.expr, astroid.Call) and isinstance(expr.expr.func, astroid.Name) and (expr.expr.func.name == 'super')): return try: for klass in expr.expr.infer(): if (klass is astroid.YES): continue if (isinstance(klass, astroid.Instance) and isinstance(klass._proxied, astroid.ClassDef) and is_builtin_object(klass._proxied) and (klass._proxied.name == 'super')): return elif isinstance(klass, objects.Super): return try: del not_called_yet[klass] except KeyError: if (klass not in to_call): self.add_message('non-parent-init-called', node=expr, args=klass.name) except astroid.InferenceError: continue for (klass, method) in six.iteritems(not_called_yet): cls = node_frame_class(method) if ((klass.name == 'object') or (cls and (cls.name == 'object'))): continue self.add_message('super-init-not-called', args=klass.name, node=node)
'check that the signature of the two given methods match'
def _check_signature(self, method1, refmethod, class_type, cls):
if (not (isinstance(method1, astroid.FunctionDef) and isinstance(refmethod, astroid.FunctionDef))): self.add_message('method-check-failed', args=(method1, refmethod), node=method1) return instance = cls.instantiate_class() method1 = function_to_method(method1, instance) refmethod = function_to_method(refmethod, instance) if ((method1.args.args is None) or (refmethod.args.args is None)): return if is_attr_private(method1.name): return if method1.decorators: for decorator in method1.decorators.nodes: if (isinstance(decorator, astroid.Attribute) and (decorator.attrname == 'setter')): return if _different_parameters(refmethod, method1, dummy_parameter_regex=self._dummy_rgx): self.add_message('arguments-differ', args=(class_type, method1.name), node=method1) elif (len(method1.args.defaults) < len(refmethod.args.defaults)): self.add_message('signature-differs', args=(class_type, method1.name), node=method1)
'Check that the attribute lookup name uses the first attribute variable name. The name is `self` for a method, `cls` for a classmethod and `mcs` for a metaclass.'
def _uses_mandatory_method_param(self, node):
return self._is_mandatory_method_param(node.expr)
'Check if astroid.Name corresponds to the first attribute variable name. The name is `self` for a method, `cls` for a classmethod and `mcs` for a metaclass.'
def _is_mandatory_method_param(self, node):
return (self._first_attrs and isinstance(node, astroid.Name) and (node.name == self._first_attrs[(-1)]))
'Clear checker state after previous module.'
def visit_module(self, node):
self._future_division = False
self._future_absolute_import = False
'Detect when a "bad" built-in is referenced.'
def visit_name(self, node):
found_node = node.lookup(node.name)[0]
if _is_builtin(found_node):
    if node.name in self._bad_builtins:
        message = node.name.lower() + '-builtin'
        self.add_message(message, node=node)
'Look for indexing exceptions.'
@utils.check_messages('indexing-exception') def visit_subscript(self, node):
try:
    for inferred in node.value.infer():
        if not isinstance(inferred, astroid.Instance):
            continue
        if utils.inherit_from_std_ex(inferred):
            self.add_message('indexing-exception', node=node)
except astroid.InferenceError:
    return
'Look for accessing message on exceptions.'
@utils.check_messages('exception-message-attribute') def visit_attribute(self, node):
try:
    for inferred in node.expr.infer():
        if (isinstance(inferred, astroid.Instance)
                and utils.inherit_from_std_ex(inferred)):
            if node.attrname == 'message':
                self.add_message('exception-message-attribute', node=node)
        if isinstance(inferred, astroid.Module):
            self._warn_if_deprecated(node, inferred.name, {node.attrname},
                                     report_on_modules=False)
except astroid.InferenceError:
    return
'Visit an except handler block and check for exception unpacking.'
@utils.check_messages('unpacking-in-except') def visit_excepthandler(self, node):
if isinstance(node.name, (astroid.Tuple, astroid.List)): self.add_message('unpacking-in-except', node=node)
'Visit a raise statement and check for raising strings or old-raise-syntax.'
@utils.check_messages('raising-string', 'old-raise-syntax') def visit_raise(self, node):
if node.exc is not None and node.inst is not None and node.tback is None:
    self.add_message('old-raise-syntax', node=node)
if node.exc is None:
    return
expr = node.exc
if self._check_raise_value(node, expr):
    return
else:
    try:
        value = next(astroid.unpack_infer(expr))
    except astroid.InferenceError:
        return
    self._check_raise_value(node, value)
'Check the new string formatting.'
def _check_new_format(self, node, func):
if (isinstance(node.func, astroid.Attribute) and (not isinstance(node.func.expr, astroid.Const))): return try: strnode = next(func.bound.infer()) except astroid.InferenceError: return if (not isinstance(strnode, astroid.Const)): return if (not isinstance(strnode.value, six.string_types)): return if (node.starargs or node.kwargs): return try: (positional, named) = get_args(node) except astroid.InferenceError: return try: (fields, num_args, manual_pos) = parse_format_method_string(strnode.value) except utils.IncompleteFormatString: self.add_message('bad-format-string', node=node) return named_fields = set((field[0] for field in fields if isinstance(field[0], six.string_types))) if (num_args and manual_pos): self.add_message('format-combined-specification', node=node) return check_args = False num_args += sum((1 for field in named_fields if (field == ''))) if named_fields: for field in named_fields: if ((field not in named) and field): self.add_message('missing-format-argument-key', node=node, args=(field,)) for field in named: if (field not in named_fields): self.add_message('unused-format-string-argument', node=node, args=(field,)) num_args = (num_args or manual_pos) if (positional or num_args): empty = any((True for field in named_fields if (field == ''))) if (named or empty): check_args = True else: check_args = True if check_args: num_args = (num_args or manual_pos) if (positional > num_args): self.add_message('too-many-format-args', node=node) elif (positional < num_args): self.add_message('too-few-format-args', node=node) self._check_new_format_specifiers(node, fields, named)
'Check attribute and index access in the format string ("{0.a}" and "{0[a]}").'
def _check_new_format_specifiers(self, node, fields, named):
for (key, specifiers) in fields: if (key == ''): key = 0 if isinstance(key, numbers.Number): try: argname = utils.get_argument_from_call(node, key) except utils.NoSuchArgumentError: continue else: if (key not in named): continue argname = named[key] if (argname in (astroid.YES, None)): continue try: argument = next(argname.infer()) except astroid.InferenceError: continue if ((not specifiers) or (argument is astroid.YES)): continue if (argument.parent and isinstance(argument.parent, astroid.Arguments)): continue previous = argument parsed = [] for (is_attribute, specifier) in specifiers: if (previous is astroid.YES): break parsed.append((is_attribute, specifier)) if is_attribute: try: previous = previous.getattr(specifier)[0] except astroid.NotFoundError: if (hasattr(previous, 'has_dynamic_getattr') and previous.has_dynamic_getattr()): break path = get_access_path(key, parsed) self.add_message('missing-format-attribute', args=(specifier, path), node=node) break else: warn_error = False if hasattr(previous, 'getitem'): try: previous = previous.getitem(astroid.Const(specifier)) except (astroid.AstroidIndexError, astroid.AstroidTypeError, astroid.AttributeInferenceError): warn_error = True except astroid.InferenceError: break if (previous is astroid.Uninferable): break else: try: previous.getattr('__getitem__') break except astroid.NotFoundError: warn_error = True if warn_error: path = get_access_path(key, parsed) self.add_message('invalid-format-index', args=(specifier, path), node=node) break try: previous = next(previous.infer()) except astroid.InferenceError: break
'check for bad escapes in a non-raw string. prefix: lowercase string of eg \'ur\' string prefix markers. string_body: the un-parsed body of the string, not including the quote marks. start_row: integer line number in the source.'
def process_non_raw_string_token(self, prefix, string_body, start_row):
i = 0
while True:
    i = string_body.find('\\', i)
    if i == -1:
        break
    next_char = string_body[i + 1]
    match = string_body[i:i + 2]
    if next_char in self.UNICODE_ESCAPE_CHARACTERS:
        if 'u' in prefix:
            pass
        elif (_PY3K or self._unicode_literals) and 'b' not in prefix:
            pass
        else:
            self.add_message('anomalous-unicode-escape-in-string',
                             line=start_row, args=(match,))
    elif next_char not in self.ESCAPE_CHARACTERS:
        self.add_message('anomalous-backslash-in-string',
                         line=start_row, args=(match,))
    i += 2
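A sketch of literals that would and would not be reported by this logic (the strings are deliberately questionable lint fodder); raw literals never reach this method, per its name:

PATTERN = '\d+'       # likely anomalous-backslash-in-string: \d is not a recognized escape
PATH = 'C:\temp'      # not reported here: \t is a valid escape (though probably unintended)
RAW_PATTERN = r'\d+'  # fine: written as a raw string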
'append a file to search for similarities'
def append_stream(self, streamid, stream, encoding=None):
if encoding is None:
    readlines = stream.readlines
else:
    readlines = lambda: [line.decode(encoding) for line in stream]
try:
    self.linesets.append(LineSet(streamid, readlines(),
                                 self.ignore_comments,
                                 self.ignore_docstrings,
                                 self.ignore_imports))
except UnicodeDecodeError:
    pass
'start looking for similarities and display results on stdout'
def run(self):
self._display_sims(self._compute_sims())
'compute similarities in appended files'
def _compute_sims(self):
no_duplicates = defaultdict(list)
for num, lineset1, idx1, lineset2, idx2 in self._iter_sims():
    duplicate = no_duplicates[num]
    for couples in duplicate:
        if (lineset1, idx1) in couples or (lineset2, idx2) in couples:
            couples.add((lineset1, idx1))
            couples.add((lineset2, idx2))
            break
    else:
        duplicate.append(set([(lineset1, idx1), (lineset2, idx2)]))
sims = []
for num, ensembles in six.iteritems(no_duplicates):
    for couples in ensembles:
        sims.append((num, couples))
sims.sort()
sims.reverse()
return sims
'display computed similarities on stdout'
def _display_sims(self, sims):
nb_lignes_dupliquees = 0
for num, couples in sims:
    print()
    print(num, 'similar lines in', len(couples), 'files')
    couples = sorted(couples)
    for lineset, idx in couples:
        print('==%s:%s' % (lineset.name, idx))
    for line in lineset._real_lines[idx:idx + num]:
        print(' ', line.rstrip())
    nb_lignes_dupliquees += num * (len(couples) - 1)
nb_total_lignes = sum([len(lineset) for lineset in self.linesets])
print('TOTAL lines=%s duplicates=%s percent=%.2f'
      % (nb_total_lignes, nb_lignes_dupliquees,
         nb_lignes_dupliquees * 100.0 / nb_total_lignes))
'find similarities in the two given linesets'
def _find_common(self, lineset1, lineset2):
lines1 = lineset1.enumerate_stripped lines2 = lineset2.enumerate_stripped find = lineset2.find index1 = 0 min_lines = self.min_lines while (index1 < len(lineset1)): skip = 1 num = 0 for index2 in find(lineset1[index1]): non_blank = 0 for (num, ((_, line1), (_, line2))) in enumerate(zip(lines1(index1), lines2(index2))): if (line1 != line2): if (non_blank > min_lines): (yield (num, lineset1, index1, lineset2, index2)) skip = max(skip, num) break if line1: non_blank += 1 else: num += 1 if (non_blank > min_lines): (yield (num, lineset1, index1, lineset2, index2)) skip = max(skip, num) index1 += skip
'iterate on similarities among all files, by making a cartesian product'
def _iter_sims(self):
for idx, lineset in enumerate(self.linesets[:-1]):
    for lineset2 in self.linesets[idx + 1:]:
        for sim in self._find_common(lineset, lineset2):
            yield sim
'return an iterator on stripped lines, starting from a given index if specified, else 0'
def enumerate_stripped(self, start_at=0):
idx = start_at
if start_at:
    lines = self._stripped_lines[start_at:]
else:
    lines = self._stripped_lines
for line in lines:
    yield (idx, line)
    idx += 1
'return positions of the given stripped line in this set'
def find(self, stripped_line):
return self._index.get(stripped_line, ())
'create the index for this set'
def _mk_index(self):
index = defaultdict(list)
for line_no, line in enumerate(self._stripped_lines):
    if line:
        index[line].append(line_no)
return index
'method called to set an option (registered in the options list) overridden to report options setting to Similar'
def set_option(self, optname, value, action=None, optdict=None):
BaseChecker.set_option(self, optname, value, action, optdict)
if optname == 'min-similarity-lines':
    self.min_lines = self.config.min_similarity_lines
elif optname == 'ignore-comments':
    self.ignore_comments = self.config.ignore_comments
elif optname == 'ignore-docstrings':
    self.ignore_docstrings = self.config.ignore_docstrings
elif optname == 'ignore-imports':
    self.ignore_imports = self.config.ignore_imports
'init the checkers: reset linesets and statistics information'
def open(self):
self.linesets = []
self.stats = self.linter.add_stats(nb_duplicated_lines=0,
                                   percent_duplicated_lines=0)
'process a module; the module\'s content is accessible via the stream object, which must implement the readlines method'
def process_module(self, node):
with node.stream() as stream: self.append_stream(self.linter.current_name, stream, node.file_encoding)
'compute and display similarities on closing (i.e. end of parsing)'
def close(self):
total = sum(len(lineset) for lineset in self.linesets)
duplicated = 0
stats = self.stats
for num, couples in self._compute_sims():
    msg = []
    for lineset, idx in couples:
        msg.append('==%s:%s' % (lineset.name, idx))
    msg.sort()
    for line in lineset._real_lines[idx:idx + num]:
        msg.append(line.rstrip())
    self.add_message('R0801', args=(len(couples), '\n'.join(msg)))
    duplicated += num * (len(couples) - 1)
stats['nb_duplicated_lines'] = duplicated
stats['percent_duplicated_lines'] = total and duplicated * 100.0 / total
'visit module: update the consumption analysis variable; check that globals don\'t override builtins'
def visit_module(self, node):
self._to_consume = [(copy.copy(node.locals), {}, 'module')]
for name, stmts in six.iteritems(node.locals):
    if utils.is_builtin(name) and not utils.is_inside_except(stmts[0]):
        if self._should_ignore_redefined_builtin(stmts[0]):
            continue
        self.add_message('redefined-builtin', args=name, node=stmts[0])
'leave module: check globals'
@utils.check_messages('unused-import', 'unused-wildcard-import', 'redefined-builtin', 'undefined-all-variable', 'invalid-all-object', 'unused-variable') def leave_module(self, node):
assert len(self._to_consume) == 1
not_consumed = self._to_consume.pop()[0]
if '__all__' in node.locals:
    self._check_all(node, not_consumed)
self._check_globals(not_consumed)
if not self.config.init_import and node.package:
    return
self._check_imports(not_consumed)
'visit class: update consumption analysis variable'
def visit_classdef(self, node):
self._to_consume.append((copy.copy(node.locals), {}, 'class'))
'leave class: update consumption analysis variable'
def leave_classdef(self, _):
self._to_consume.pop()
'visit lambda: update consumption analysis variable'
def visit_lambda(self, node):
self._to_consume.append((copy.copy(node.locals), {}, 'lambda'))
'leave lambda: update consumption analysis variable'
def leave_lambda(self, _):
self._to_consume.pop()
'visit genexpr: update consumption analysis variable'
def visit_generatorexp(self, node):
self._to_consume.append((copy.copy(node.locals), {}, 'comprehension'))
'leave genexpr: update consumption analysis variable'
def leave_generatorexp(self, _):
self._to_consume.pop()
'visit dictcomp: update consumption analysis variable'
def visit_dictcomp(self, node):
self._to_consume.append((copy.copy(node.locals), {}, 'comprehension'))
'leave dictcomp: update consumption analysis variable'
def leave_dictcomp(self, _):
self._to_consume.pop()
'visit setcomp: update consumption analysis variable'
def visit_setcomp(self, node):
self._to_consume.append((copy.copy(node.locals), {}, 'comprehension'))
'leave setcomp: update consumption analysis variable'
def leave_setcomp(self, _):
self._to_consume.pop()
'visit function: update consumption analysis variable and check locals'
def visit_functiondef(self, node):
self._to_consume.append((copy.copy(node.locals), {}, 'function'))
if not (self.linter.is_message_enabled('redefined-outer-name')
        or self.linter.is_message_enabled('redefined-builtin')):
    return
globs = node.root().globals
for name, stmt in node.items():
    if utils.is_inside_except(stmt):
        continue
    if name in globs and not isinstance(stmt, astroid.Global):
        definition = globs[name][0]
        if (isinstance(definition, astroid.ImportFrom)
                and definition.modname == FUTURE):
            continue
        line = definition.fromlineno
        dummy_rgx = self.config.dummy_variables_rgx
        if not dummy_rgx.match(name):
            self.add_message('redefined-outer-name', args=(name, line), node=stmt)
    elif utils.is_builtin(name) and not self._should_ignore_redefined_builtin(stmt):
        self.add_message('redefined-builtin', args=name, node=stmt)